WIP - add extractor, generate snippet_data
This commit is contained in:
31
node_modules/eslint/lib/util/ajv.js
generated
vendored
Normal file
31
node_modules/eslint/lib/util/ajv.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
/**
|
||||
* @fileoverview The instance of Ajv validator.
|
||||
* @author Evgeny Poberezkin
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const Ajv = require("ajv"),
|
||||
metaSchema = require("ajv/lib/refs/json-schema-draft-04.json");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const ajv = new Ajv({
|
||||
meta: false,
|
||||
useDefaults: true,
|
||||
validateSchema: false,
|
||||
missingRefs: "ignore",
|
||||
verbose: true,
|
||||
schemaId: "auto"
|
||||
});
|
||||
|
||||
ajv.addMetaSchema(metaSchema);
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
ajv._opts.defaultMeta = metaSchema.id;
|
||||
|
||||
module.exports = ajv;
|
||||
159
node_modules/eslint/lib/util/apply-disable-directives.js
generated
vendored
Normal file
159
node_modules/eslint/lib/util/apply-disable-directives.js
generated
vendored
Normal file
@ -0,0 +1,159 @@
|
||||
/**
|
||||
* @fileoverview A module that filters reported problems based on `eslint-disable` and `eslint-enable` comments
|
||||
* @author Teddy Katz
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const lodash = require("lodash");
|
||||
|
||||
/**
|
||||
* Compares the locations of two objects in a source file
|
||||
* @param {{line: number, column: number}} itemA The first object
|
||||
* @param {{line: number, column: number}} itemB The second object
|
||||
* @returns {number} A value less than 1 if itemA appears before itemB in the source file, greater than 1 if
|
||||
* itemA appears after itemB in the source file, or 0 if itemA and itemB have the same location.
|
||||
*/
|
||||
function compareLocations(itemA, itemB) {
|
||||
return itemA.line - itemB.line || itemA.column - itemB.column;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is the same as the exported function, except that it
|
||||
* doesn't handle disable-line and disable-next-line directives, and it always reports unused
|
||||
* disable directives.
|
||||
* @param {Object} options options for applying directives. This is the same as the options
|
||||
* for the exported function, except that `reportUnusedDisableDirectives` is not supported
|
||||
* (this function always reports unused disable directives).
|
||||
* @returns {{problems: Problem[], unusedDisableDirectives: Problem[]}} An object with a list
|
||||
* of filtered problems and unused eslint-disable directives
|
||||
*/
|
||||
function applyDirectives(options) {
|
||||
const problems = [];
|
||||
let nextDirectiveIndex = 0;
|
||||
let currentGlobalDisableDirective = null;
|
||||
const disabledRuleMap = new Map();
|
||||
|
||||
// enabledRules is only used when there is a current global disable directive.
|
||||
const enabledRules = new Set();
|
||||
const usedDisableDirectives = new Set();
|
||||
|
||||
for (const problem of options.problems) {
|
||||
while (
|
||||
nextDirectiveIndex < options.directives.length &&
|
||||
compareLocations(options.directives[nextDirectiveIndex], problem) <= 0
|
||||
) {
|
||||
const directive = options.directives[nextDirectiveIndex++];
|
||||
|
||||
switch (directive.type) {
|
||||
case "disable":
|
||||
if (directive.ruleId === null) {
|
||||
currentGlobalDisableDirective = directive;
|
||||
disabledRuleMap.clear();
|
||||
enabledRules.clear();
|
||||
} else if (currentGlobalDisableDirective) {
|
||||
enabledRules.delete(directive.ruleId);
|
||||
disabledRuleMap.set(directive.ruleId, directive);
|
||||
} else {
|
||||
disabledRuleMap.set(directive.ruleId, directive);
|
||||
}
|
||||
break;
|
||||
|
||||
case "enable":
|
||||
if (directive.ruleId === null) {
|
||||
currentGlobalDisableDirective = null;
|
||||
disabledRuleMap.clear();
|
||||
} else if (currentGlobalDisableDirective) {
|
||||
enabledRules.add(directive.ruleId);
|
||||
disabledRuleMap.delete(directive.ruleId);
|
||||
} else {
|
||||
disabledRuleMap.delete(directive.ruleId);
|
||||
}
|
||||
break;
|
||||
|
||||
// no default
|
||||
}
|
||||
}
|
||||
|
||||
if (disabledRuleMap.has(problem.ruleId)) {
|
||||
usedDisableDirectives.add(disabledRuleMap.get(problem.ruleId));
|
||||
} else if (currentGlobalDisableDirective && !enabledRules.has(problem.ruleId)) {
|
||||
usedDisableDirectives.add(currentGlobalDisableDirective);
|
||||
} else {
|
||||
problems.push(problem);
|
||||
}
|
||||
}
|
||||
|
||||
const unusedDisableDirectives = options.directives
|
||||
.filter(directive => directive.type === "disable" && !usedDisableDirectives.has(directive))
|
||||
.map(directive => ({
|
||||
ruleId: null,
|
||||
message: directive.ruleId
|
||||
? `Unused eslint-disable directive (no problems were reported from '${directive.ruleId}').`
|
||||
: "Unused eslint-disable directive (no problems were reported).",
|
||||
line: directive.unprocessedDirective.line,
|
||||
column: directive.unprocessedDirective.column,
|
||||
severity: 2,
|
||||
nodeType: null
|
||||
}));
|
||||
|
||||
return { problems, unusedDisableDirectives };
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a list of directive comments (i.e. metadata about eslint-disable and eslint-enable comments) and a list
|
||||
* of reported problems, determines which problems should be reported.
|
||||
* @param {Object} options Information about directives and problems
|
||||
* @param {{
|
||||
* type: ("disable"|"enable"|"disable-line"|"disable-next-line"),
|
||||
* ruleId: (string|null),
|
||||
* line: number,
|
||||
* column: number
|
||||
* }} options.directives Directive comments found in the file, with one-based columns.
|
||||
* Two directive comments can only have the same location if they also have the same type (e.g. a single eslint-disable
|
||||
* comment for two different rules is represented as two directives).
|
||||
* @param {{ruleId: (string|null), line: number, column: number}[]} options.problems
|
||||
* A list of problems reported by rules, sorted by increasing location in the file, with one-based columns.
|
||||
* @param {boolean} options.reportUnusedDisableDirectives If `true`, adds additional problems for unused directives
|
||||
* @returns {{ruleId: (string|null), line: number, column: number}[]}
|
||||
* A list of reported problems that were not disabled by the directive comments.
|
||||
*/
|
||||
module.exports = options => {
|
||||
const blockDirectives = options.directives
|
||||
.filter(directive => directive.type === "disable" || directive.type === "enable")
|
||||
.map(directive => Object.assign({}, directive, { unprocessedDirective: directive }))
|
||||
.sort(compareLocations);
|
||||
|
||||
const lineDirectives = lodash.flatMap(options.directives, directive => {
|
||||
switch (directive.type) {
|
||||
case "disable":
|
||||
case "enable":
|
||||
return [];
|
||||
|
||||
case "disable-line":
|
||||
return [
|
||||
{ type: "disable", line: directive.line, column: 1, ruleId: directive.ruleId, unprocessedDirective: directive },
|
||||
{ type: "enable", line: directive.line + 1, column: 0, ruleId: directive.ruleId, unprocessedDirective: directive }
|
||||
];
|
||||
|
||||
case "disable-next-line":
|
||||
return [
|
||||
{ type: "disable", line: directive.line + 1, column: 1, ruleId: directive.ruleId, unprocessedDirective: directive },
|
||||
{ type: "enable", line: directive.line + 2, column: 0, ruleId: directive.ruleId, unprocessedDirective: directive }
|
||||
];
|
||||
|
||||
default:
|
||||
throw new TypeError(`Unrecognized directive type '${directive.type}'`);
|
||||
}
|
||||
}).sort(compareLocations);
|
||||
|
||||
const blockDirectivesResult = applyDirectives({ problems: options.problems, directives: blockDirectives });
|
||||
const lineDirectivesResult = applyDirectives({ problems: blockDirectivesResult.problems, directives: lineDirectives });
|
||||
|
||||
return options.reportUnusedDisableDirectives
|
||||
? lineDirectivesResult.problems
|
||||
.concat(blockDirectivesResult.unusedDisableDirectives)
|
||||
.concat(lineDirectivesResult.unusedDisableDirectives)
|
||||
.sort(compareLocations)
|
||||
: lineDirectivesResult.problems;
|
||||
};
|
||||
1346
node_modules/eslint/lib/util/ast-utils.js
generated
vendored
Normal file
1346
node_modules/eslint/lib/util/ast-utils.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
141
node_modules/eslint/lib/util/config-comment-parser.js
generated
vendored
Normal file
141
node_modules/eslint/lib/util/config-comment-parser.js
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
/**
|
||||
* @fileoverview Config Comment Parser
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
|
||||
/* eslint-disable class-methods-use-this*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const levn = require("levn"),
|
||||
ConfigOps = require("../config/config-ops");
|
||||
|
||||
const debug = require("debug")("eslint:config-comment-parser");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Object to parse ESLint configuration comments inside JavaScript files.
|
||||
* @name ConfigCommentParser
|
||||
*/
|
||||
module.exports = class ConfigCommentParser {
|
||||
|
||||
/**
|
||||
* Parses a list of "name:string_value" or/and "name" options divided by comma or
|
||||
* whitespace. Used for "global" and "exported" comments.
|
||||
* @param {string} string The string to parse.
|
||||
* @param {Comment} comment The comment node which has the string.
|
||||
* @returns {Object} Result map object of names and string values, or null values if no value was provided
|
||||
*/
|
||||
parseStringConfig(string, comment) {
|
||||
debug("Parsing String config");
|
||||
|
||||
const items = {};
|
||||
|
||||
// Collapse whitespace around `:` and `,` to make parsing easier
|
||||
const trimmedString = string.replace(/\s*([:,])\s*/gu, "$1");
|
||||
|
||||
trimmedString.split(/\s|,+/u).forEach(name => {
|
||||
if (!name) {
|
||||
return;
|
||||
}
|
||||
|
||||
// value defaults to null (if not provided), e.g: "foo" => ["foo", null]
|
||||
const [key, value = null] = name.split(":");
|
||||
|
||||
items[key] = { value, comment };
|
||||
});
|
||||
return items;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a JSON-like config.
|
||||
* @param {string} string The string to parse.
|
||||
* @param {Object} location Start line and column of comments for potential error message.
|
||||
* @returns {({success: true, config: Object}|{success: false, error: Problem})} Result map object
|
||||
*/
|
||||
parseJsonConfig(string, location) {
|
||||
debug("Parsing JSON config");
|
||||
|
||||
let items = {};
|
||||
|
||||
// Parses a JSON-like comment by the same way as parsing CLI option.
|
||||
try {
|
||||
items = levn.parse("Object", string) || {};
|
||||
|
||||
// Some tests say that it should ignore invalid comments such as `/*eslint no-alert:abc*/`.
|
||||
// Also, commaless notations have invalid severity:
|
||||
// "no-alert: 2 no-console: 2" --> {"no-alert": "2 no-console: 2"}
|
||||
// Should ignore that case as well.
|
||||
if (ConfigOps.isEverySeverityValid(items)) {
|
||||
return {
|
||||
success: true,
|
||||
config: items
|
||||
};
|
||||
}
|
||||
} catch (ex) {
|
||||
|
||||
debug("Levn parsing failed; falling back to manual parsing.");
|
||||
|
||||
// ignore to parse the string by a fallback.
|
||||
}
|
||||
|
||||
/*
|
||||
* Optionator cannot parse commaless notations.
|
||||
* But we are supporting that. So this is a fallback for that.
|
||||
*/
|
||||
items = {};
|
||||
const normalizedString = string.replace(/([a-zA-Z0-9\-/]+):/gu, "\"$1\":").replace(/(\]|[0-9])\s+(?=")/u, "$1,");
|
||||
|
||||
try {
|
||||
items = JSON.parse(`{${normalizedString}}`);
|
||||
} catch (ex) {
|
||||
debug("Manual parsing failed.");
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
ruleId: null,
|
||||
fatal: true,
|
||||
severity: 2,
|
||||
message: `Failed to parse JSON from '${normalizedString}': ${ex.message}`,
|
||||
line: location.start.line,
|
||||
column: location.start.column + 1
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
config: items
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a config of values separated by comma.
|
||||
* @param {string} string The string to parse.
|
||||
* @returns {Object} Result map of values and true values
|
||||
*/
|
||||
parseListConfig(string) {
|
||||
debug("Parsing list config");
|
||||
|
||||
const items = {};
|
||||
|
||||
// Collapse whitespace around commas
|
||||
string.replace(/\s*,\s*/gu, ",").split(/,+/u).forEach(name => {
|
||||
const trimmedName = name.trim();
|
||||
|
||||
if (trimmedName) {
|
||||
items[trimmedName] = true;
|
||||
}
|
||||
});
|
||||
return items;
|
||||
}
|
||||
|
||||
};
|
||||
144
node_modules/eslint/lib/util/file-finder.js
generated
vendored
Normal file
144
node_modules/eslint/lib/util/file-finder.js
generated
vendored
Normal file
@ -0,0 +1,144 @@
|
||||
/**
|
||||
* @fileoverview Util class to find config files.
|
||||
* @author Aliaksei Shytkin
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const fs = require("fs"),
|
||||
path = require("path");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Get the entries for a directory. Including a try-catch may be detrimental to
|
||||
* function performance, so move it out here a separate function.
|
||||
* @param {string} directory The directory to search in.
|
||||
* @returns {string[]} The entries in the directory or an empty array on error.
|
||||
* @private
|
||||
*/
|
||||
function getDirectoryEntries(directory) {
|
||||
try {
|
||||
|
||||
return fs.readdirSync(directory);
|
||||
} catch (ex) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a hash of filenames from a directory listing
|
||||
* @param {string[]} entries Array of directory entries.
|
||||
* @param {string} directory Path to a current directory.
|
||||
* @param {string[]} supportedConfigs List of support filenames.
|
||||
* @returns {Object} Hashmap of filenames
|
||||
*/
|
||||
function normalizeDirectoryEntries(entries, directory, supportedConfigs) {
|
||||
const fileHash = {};
|
||||
|
||||
entries.forEach(entry => {
|
||||
if (supportedConfigs.indexOf(entry) >= 0) {
|
||||
const resolvedEntry = path.resolve(directory, entry);
|
||||
|
||||
if (fs.statSync(resolvedEntry).isFile()) {
|
||||
fileHash[entry] = resolvedEntry;
|
||||
}
|
||||
}
|
||||
});
|
||||
return fileHash;
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// API
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* FileFinder class
|
||||
*/
|
||||
class FileFinder {
|
||||
|
||||
/**
|
||||
* @param {string[]} files The basename(s) of the file(s) to find.
|
||||
* @param {stirng} cwd Current working directory
|
||||
*/
|
||||
constructor(files, cwd) {
|
||||
this.fileNames = Array.isArray(files) ? files : [files];
|
||||
this.cwd = cwd || process.cwd();
|
||||
this.cache = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all instances of files with the specified file names, in directory and
|
||||
* parent directories. Cache the results.
|
||||
* Does not check if a matching directory entry is a file.
|
||||
* Searches for all the file names in this.fileNames.
|
||||
* Is currently used by lib/config.js to find .eslintrc and package.json files.
|
||||
* @param {string} relativeDirectory The directory to start the search from.
|
||||
* @returns {GeneratorFunction} to iterate the file paths found
|
||||
*/
|
||||
*findAllInDirectoryAndParents(relativeDirectory) {
|
||||
const cache = this.cache;
|
||||
|
||||
const initialDirectory = relativeDirectory
|
||||
? path.resolve(this.cwd, relativeDirectory)
|
||||
: this.cwd;
|
||||
|
||||
if (Object.prototype.hasOwnProperty.call(cache, initialDirectory)) {
|
||||
yield* cache[initialDirectory];
|
||||
return; // to avoid doing the normal loop afterwards
|
||||
}
|
||||
|
||||
const dirs = [];
|
||||
const fileNames = this.fileNames;
|
||||
let searched = 0;
|
||||
let directory = initialDirectory;
|
||||
|
||||
do {
|
||||
dirs[searched++] = directory;
|
||||
cache[directory] = [];
|
||||
|
||||
const filesMap = normalizeDirectoryEntries(getDirectoryEntries(directory), directory, fileNames);
|
||||
|
||||
if (Object.keys(filesMap).length) {
|
||||
for (let k = 0; k < fileNames.length; k++) {
|
||||
|
||||
if (filesMap[fileNames[k]]) {
|
||||
const filePath = filesMap[fileNames[k]];
|
||||
|
||||
// Add the file path to the cache of each directory searched.
|
||||
for (let j = 0; j < searched; j++) {
|
||||
cache[dirs[j]].push(filePath);
|
||||
}
|
||||
yield filePath;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const child = directory;
|
||||
|
||||
// Assign parent directory to directory.
|
||||
directory = path.dirname(directory);
|
||||
|
||||
if (directory === child) {
|
||||
return;
|
||||
}
|
||||
|
||||
} while (!Object.prototype.hasOwnProperty.call(cache, directory));
|
||||
|
||||
// Add what has been cached previously to the cache of each directory searched.
|
||||
for (let i = 0; i < searched; i++) {
|
||||
cache[dirs[i]].push(...cache[directory]);
|
||||
}
|
||||
|
||||
yield* cache[dirs[0]];
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FileFinder;
|
||||
120
node_modules/eslint/lib/util/fix-tracker.js
generated
vendored
Normal file
120
node_modules/eslint/lib/util/fix-tracker.js
generated
vendored
Normal file
@ -0,0 +1,120 @@
|
||||
/**
|
||||
* @fileoverview Helper class to aid in constructing fix commands.
|
||||
* @author Alan Pierce
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const astUtils = require("../util/ast-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A helper class to combine fix options into a fix command. Currently, it
|
||||
* exposes some "retain" methods that extend the range of the text being
|
||||
* replaced so that other fixes won't touch that region in the same pass.
|
||||
*/
|
||||
class FixTracker {
|
||||
|
||||
/**
|
||||
* Create a new FixTracker.
|
||||
*
|
||||
* @param {ruleFixer} fixer A ruleFixer instance.
|
||||
* @param {SourceCode} sourceCode A SourceCode object for the current code.
|
||||
*/
|
||||
constructor(fixer, sourceCode) {
|
||||
this.fixer = fixer;
|
||||
this.sourceCode = sourceCode;
|
||||
this.retainedRange = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark the given range as "retained", meaning that other fixes may not
|
||||
* may not modify this region in the same pass.
|
||||
*
|
||||
* @param {int[]} range The range to retain.
|
||||
* @returns {FixTracker} The same RuleFixer, for chained calls.
|
||||
*/
|
||||
retainRange(range) {
|
||||
this.retainedRange = range;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a node, find the function containing it (or the entire program) and
|
||||
* mark it as retained, meaning that other fixes may not modify it in this
|
||||
* pass. This is useful for avoiding conflicts in fixes that modify control
|
||||
* flow.
|
||||
*
|
||||
* @param {ASTNode} node The node to use as a starting point.
|
||||
* @returns {FixTracker} The same RuleFixer, for chained calls.
|
||||
*/
|
||||
retainEnclosingFunction(node) {
|
||||
const functionNode = astUtils.getUpperFunction(node);
|
||||
|
||||
return this.retainRange(functionNode ? functionNode.range : this.sourceCode.ast.range);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a node or token, find the token before and afterward, and mark that
|
||||
* range as retained, meaning that other fixes may not modify it in this
|
||||
* pass. This is useful for avoiding conflicts in fixes that make a small
|
||||
* change to the code where the AST should not be changed.
|
||||
*
|
||||
* @param {ASTNode|Token} nodeOrToken The node or token to use as a starting
|
||||
* point. The token to the left and right are use in the range.
|
||||
* @returns {FixTracker} The same RuleFixer, for chained calls.
|
||||
*/
|
||||
retainSurroundingTokens(nodeOrToken) {
|
||||
const tokenBefore = this.sourceCode.getTokenBefore(nodeOrToken) || nodeOrToken;
|
||||
const tokenAfter = this.sourceCode.getTokenAfter(nodeOrToken) || nodeOrToken;
|
||||
|
||||
return this.retainRange([tokenBefore.range[0], tokenAfter.range[1]]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a fix command that replaces the given range with the given text,
|
||||
* accounting for any retained ranges.
|
||||
*
|
||||
* @param {int[]} range The range to remove in the fix.
|
||||
* @param {string} text The text to insert in place of the range.
|
||||
* @returns {Object} The fix command.
|
||||
*/
|
||||
replaceTextRange(range, text) {
|
||||
let actualRange;
|
||||
|
||||
if (this.retainedRange) {
|
||||
actualRange = [
|
||||
Math.min(this.retainedRange[0], range[0]),
|
||||
Math.max(this.retainedRange[1], range[1])
|
||||
];
|
||||
} else {
|
||||
actualRange = range;
|
||||
}
|
||||
|
||||
return this.fixer.replaceTextRange(
|
||||
actualRange,
|
||||
this.sourceCode.text.slice(actualRange[0], range[0]) +
|
||||
text +
|
||||
this.sourceCode.text.slice(range[1], actualRange[1])
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a fix command that removes the given node or token, accounting for
|
||||
* any retained ranges.
|
||||
*
|
||||
* @param {ASTNode|Token} nodeOrToken The node or token to remove.
|
||||
* @returns {Object} The fix command.
|
||||
*/
|
||||
remove(nodeOrToken) {
|
||||
return this.replaceTextRange(nodeOrToken.range, "");
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FixTracker;
|
||||
285
node_modules/eslint/lib/util/glob-utils.js
generated
vendored
Normal file
285
node_modules/eslint/lib/util/glob-utils.js
generated
vendored
Normal file
@ -0,0 +1,285 @@
|
||||
/**
|
||||
* @fileoverview Utilities for working with globs and the filesystem.
|
||||
* @author Ian VanSchooten
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const lodash = require("lodash"),
|
||||
fs = require("fs"),
|
||||
path = require("path"),
|
||||
GlobSync = require("./glob"),
|
||||
|
||||
pathUtils = require("./path-utils"),
|
||||
IgnoredPaths = require("./ignored-paths");
|
||||
|
||||
const debug = require("debug")("eslint:glob-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Checks whether a directory exists at the given location
|
||||
* @param {string} resolvedPath A path from the CWD
|
||||
* @returns {boolean} `true` if a directory exists
|
||||
*/
|
||||
function directoryExists(resolvedPath) {
|
||||
return fs.existsSync(resolvedPath) && fs.statSync(resolvedPath).isDirectory();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a provided path is a directory and returns a glob string matching
|
||||
* all files under that directory if so, the path itself otherwise.
|
||||
*
|
||||
* Reason for this is that `glob` needs `/**` to collect all the files under a
|
||||
* directory where as our previous implementation without `glob` simply walked
|
||||
* a directory that is passed. So this is to maintain backwards compatibility.
|
||||
*
|
||||
* Also makes sure all path separators are POSIX style for `glob` compatibility.
|
||||
*
|
||||
* @param {Object} [options] An options object
|
||||
* @param {string[]} [options.extensions=[".js"]] An array of accepted extensions
|
||||
* @param {string} [options.cwd=process.cwd()] The cwd to use to resolve relative pathnames
|
||||
* @returns {Function} A function that takes a pathname and returns a glob that
|
||||
* matches all files with the provided extensions if
|
||||
* pathname is a directory.
|
||||
*/
|
||||
function processPath(options) {
|
||||
const cwd = (options && options.cwd) || process.cwd();
|
||||
let extensions = (options && options.extensions) || [".js"];
|
||||
|
||||
extensions = extensions.map(ext => ext.replace(/^\./u, ""));
|
||||
|
||||
let suffix = "/**";
|
||||
|
||||
if (extensions.length === 1) {
|
||||
suffix += `/*.${extensions[0]}`;
|
||||
} else {
|
||||
suffix += `/*.{${extensions.join(",")}}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* A function that converts a directory name to a glob pattern
|
||||
*
|
||||
* @param {string} pathname The directory path to be modified
|
||||
* @returns {string} The glob path or the file path itself
|
||||
* @private
|
||||
*/
|
||||
return function(pathname) {
|
||||
if (pathname === "") {
|
||||
return "";
|
||||
}
|
||||
|
||||
let newPath = pathname;
|
||||
const resolvedPath = path.resolve(cwd, pathname);
|
||||
|
||||
if (directoryExists(resolvedPath)) {
|
||||
newPath = pathname.replace(/[/\\]$/u, "") + suffix;
|
||||
}
|
||||
|
||||
return pathUtils.convertPathToPosix(newPath);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The error type when no files match a glob.
|
||||
*/
|
||||
class NoFilesFoundError extends Error {
|
||||
|
||||
/**
|
||||
* @param {string} pattern - The glob pattern which was not found.
|
||||
*/
|
||||
constructor(pattern) {
|
||||
super(`No files matching '${pattern}' were found.`);
|
||||
|
||||
this.messageTemplate = "file-not-found";
|
||||
this.messageData = { pattern };
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* The error type when there are files matched by a glob, but all of them have been ignored.
|
||||
*/
|
||||
class AllFilesIgnoredError extends Error {
|
||||
|
||||
/**
|
||||
* @param {string} pattern - The glob pattern which was not found.
|
||||
*/
|
||||
constructor(pattern) {
|
||||
super(`All files matched by '${pattern}' are ignored.`);
|
||||
this.messageTemplate = "all-files-ignored";
|
||||
this.messageData = { pattern };
|
||||
}
|
||||
}
|
||||
|
||||
const NORMAL_LINT = {};
|
||||
const SILENTLY_IGNORE = {};
|
||||
const IGNORE_AND_WARN = {};
|
||||
|
||||
/**
|
||||
* Tests whether a file should be linted or ignored
|
||||
* @param {string} filename The file to be processed
|
||||
* @param {{ignore: (boolean|null)}} options If `ignore` is false, updates the behavior to
|
||||
* not process custom ignore paths, and lint files specified by direct path even if they
|
||||
* match the default ignore path
|
||||
* @param {boolean} isDirectPath True if the file was provided as a direct path
|
||||
* (as opposed to being resolved from a glob)
|
||||
* @param {IgnoredPaths} ignoredPaths An instance of IgnoredPaths to check whether a given
|
||||
* file is ignored.
|
||||
* @returns {(NORMAL_LINT|SILENTLY_IGNORE|IGNORE_AND_WARN)} A directive for how the
|
||||
* file should be processed (either linted normally, or silently ignored, or ignored
|
||||
* with a warning that it is being ignored)
|
||||
*/
|
||||
function testFileAgainstIgnorePatterns(filename, options, isDirectPath, ignoredPaths) {
|
||||
const shouldProcessCustomIgnores = options.ignore !== false;
|
||||
const shouldLintIgnoredDirectPaths = options.ignore === false;
|
||||
const fileMatchesIgnorePatterns = ignoredPaths.contains(filename, "default") ||
|
||||
(shouldProcessCustomIgnores && ignoredPaths.contains(filename, "custom"));
|
||||
|
||||
if (fileMatchesIgnorePatterns && isDirectPath && !shouldLintIgnoredDirectPaths) {
|
||||
return IGNORE_AND_WARN;
|
||||
}
|
||||
|
||||
if (!fileMatchesIgnorePatterns || (isDirectPath && shouldLintIgnoredDirectPaths)) {
|
||||
return NORMAL_LINT;
|
||||
}
|
||||
|
||||
return SILENTLY_IGNORE;
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolves any directory patterns into glob-based patterns for easier handling.
|
||||
* @param {string[]} patterns File patterns (such as passed on the command line).
|
||||
* @param {Object} options An options object.
|
||||
* @param {string} [options.globInputPaths] False disables glob resolution.
|
||||
* @returns {string[]} The equivalent glob patterns and filepath strings.
|
||||
*/
|
||||
function resolveFileGlobPatterns(patterns, options) {
|
||||
if (options.globInputPaths === false) {
|
||||
return patterns;
|
||||
}
|
||||
|
||||
const processPathExtensions = processPath(options);
|
||||
|
||||
return patterns.map(processPathExtensions);
|
||||
}
|
||||
|
||||
const dotfilesPattern = /(?:(?:^\.)|(?:[/\\]\.))[^/\\.].*/u;
|
||||
|
||||
/**
 * Build a list of absolute filenames on which ESLint will act.
 * Ignored files are excluded from the results, as are duplicates.
 *
 * @param {string[]} globPatterns Glob patterns.
 * @param {Object} [providedOptions] An options object.
 * @param {string} [providedOptions.cwd] CWD (considered for relative filenames)
 * @param {boolean} [providedOptions.ignore] False disables use of .eslintignore.
 * @param {string} [providedOptions.ignorePath] The ignore file to use instead of .eslintignore.
 * @param {string} [providedOptions.ignorePattern] A pattern of files to ignore.
 * @param {string} [providedOptions.globInputPaths] False disables glob resolution.
 * @returns {Object[]} Resolved path descriptors of the form
 *      `{ filename: string, ignored: boolean }`, de-duplicated by filename.
 * @throws {NoFilesFoundError} If a pattern resolves to no files at all.
 * @throws {AllFilesIgnoredError} If every file a pattern matched is silently ignored.
 */
function listFilesToProcess(globPatterns, providedOptions) {
    const options = providedOptions || { ignore: true };
    const cwd = options.cwd || process.cwd();

    // Memoized so at most one IgnoredPaths instance is built per distinct
    // options object (the same `options`/`newOptions` objects recur across patterns).
    const getIgnorePaths = lodash.memoize(
        optionsObj =>
            new IgnoredPaths(optionsObj)
    );

    /*
     * The test "should use default options if none are provided" (source-code-utils.js) checks that 'module.exports.resolveFileGlobPatterns' was called.
     * So it cannot use the local function "resolveFileGlobPatterns".
     */
    const resolvedGlobPatterns = module.exports.resolveFileGlobPatterns(globPatterns, options);

    debug("Creating list of files to process.");
    const resolvedPathsByGlobPattern = resolvedGlobPatterns.map(pattern => {

        // Empty patterns produce a placeholder descriptor so the index-based
        // error reporting below still lines up with `globPatterns`.
        if (pattern === "") {
            return [{
                filename: "",
                behavior: SILENTLY_IGNORE
            }];
        }

        const file = path.resolve(cwd, pattern);

        // Fast path: the pattern is a literal file (or glob resolution is
        // disabled entirely) — no globbing needed.
        if (options.globInputPaths === false || (fs.existsSync(file) && fs.statSync(file).isFile())) {
            const ignoredPaths = getIgnorePaths(options);
            const fullPath = options.globInputPaths === false ? file : fs.realpathSync(file);

            return [{
                filename: fullPath,
                behavior: testFileAgainstIgnorePatterns(fullPath, options, true, ignoredPaths)
            }];
        }

        // regex to find .hidden or /.hidden patterns, but not ./relative or ../relative
        const globIncludesDotfiles = dotfilesPattern.test(pattern);
        let newOptions = options;

        // An explicit dotfile glob implicitly opts the user into dotfiles for
        // this pattern, even when `options.dotfiles` was not set.
        if (!options.dotfiles) {
            newOptions = Object.assign({}, options, { dotfiles: globIncludesDotfiles });
        }

        const ignoredPaths = getIgnorePaths(newOptions);
        const shouldIgnore = ignoredPaths.getIgnoredFoldersGlobChecker();
        const globOptions = {
            nodir: true,
            dot: true,
            cwd
        };

        // GlobSync prunes whole ignored directories during the walk (see ./glob.js).
        return new GlobSync(pattern, globOptions, shouldIgnore).found.map(globMatch => {
            const relativePath = path.resolve(cwd, globMatch);

            return {
                filename: relativePath,
                behavior: testFileAgainstIgnorePatterns(relativePath, options, false, ignoredPaths)
            };
        });
    });

    const allPathDescriptors = resolvedPathsByGlobPattern.reduce((pathsForAllGlobs, pathsForCurrentGlob, index) => {

        // An empty descriptor list means the pattern matched nothing;
        // an all-SILENTLY_IGNORE list (with real filenames) means everything
        // it matched was ignored. Both are reported as errors per pattern.
        if (pathsForCurrentGlob.every(pathDescriptor => pathDescriptor.behavior === SILENTLY_IGNORE && pathDescriptor.filename !== "")) {
            throw new (pathsForCurrentGlob.length ? AllFilesIgnoredError : NoFilesFoundError)(globPatterns[index]);
        }

        pathsForCurrentGlob.forEach(pathDescriptor => {
            switch (pathDescriptor.behavior) {
                case NORMAL_LINT:
                    pathsForAllGlobs.push({ filename: pathDescriptor.filename, ignored: false });
                    break;
                case IGNORE_AND_WARN:
                    pathsForAllGlobs.push({ filename: pathDescriptor.filename, ignored: true });
                    break;
                case SILENTLY_IGNORE:

                    // do nothing
                    break;

                default:
                    throw new Error(`Unexpected file behavior for ${pathDescriptor.filename}`);
            }
        });

        return pathsForAllGlobs;
    }, []);

    // A file matched by several patterns keeps its first descriptor only.
    return lodash.uniqBy(allPathDescriptors, pathDescriptor => pathDescriptor.filename);
}
|
||||
|
||||
// Public API. Note: `listFilesToProcess` deliberately calls
// `module.exports.resolveFileGlobPatterns` (not the local binding) so that
// tests can spy on / replace the exported function.
module.exports = {
    resolveFileGlobPatterns,
    listFilesToProcess
};
|
||||
63
node_modules/eslint/lib/util/glob.js
generated
vendored
Normal file
63
node_modules/eslint/lib/util/glob.js
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
/**
|
||||
* @fileoverview An inherited `glob.GlobSync` to support .gitignore patterns.
|
||||
* @author Kael Zhang
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const Sync = require("glob").GlobSync,
|
||||
util = require("util");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Private key for the ignore predicate; a Symbol so it can never collide with
// glob's own (string-keyed) instance properties.
const IGNORE = Symbol("ignore");

/**
 * Subclass of `glob.GlobSync`
 * @param {string} pattern Pattern to be matched.
 * @param {Object} options `options` for `glob`
 * @param {function()} shouldIgnore Method to check whether a directory should be ignored.
 * @constructor
 */
function GlobSync(pattern, options, shouldIgnore) {

    /**
     * We don't put this thing to argument `options` to avoid
     * further problems, such as `options` validation.
     *
     * Use `Symbol` as much as possible to avoid confliction.
     */
    this[IGNORE] = shouldIgnore;

    // NOTE(review): keep this assignment BEFORE Sync.call — the base
    // constructor presumably begins walking the filesystem, which would invoke
    // the overridden _readdir below while it reads this[IGNORE]. Confirm
    // against glob's GlobSync implementation before reordering.
    Sync.call(this, pattern, options);
}
|
||||
|
||||
util.inherits(GlobSync, Sync);

/* eslint no-underscore-dangle: ["error", { "allow": ["_readdir", "_mark"] }] */

// Override of glob's internal directory reader: consults the ignore predicate
// before descending, so entire ignored subtrees are pruned from the walk.
GlobSync.prototype._readdir = function(abs, inGlobStar) {

    /**
     * `options.nodir` makes `options.mark` as `true`.
     * Mark `abs` first
     * to make sure `"node_modules"` will be ignored immediately with ignore pattern `"node_modules/"`.
     *
     * There is a built-in cache about marked `File.Stat` in `glob`, so that we could not worry about the extra invocation of `this._mark()`
     */
    const marked = this._mark(abs);

    // Returning null tells glob there is nothing under this path, so the
    // subtree is skipped without ever being read from disk.
    if (this[IGNORE](marked)) {
        return null;
    }

    return Sync.prototype._readdir.call(this, abs, inGlobStar);
};


module.exports = GlobSync;
|
||||
35
node_modules/eslint/lib/util/hash.js
generated
vendored
Normal file
35
node_modules/eslint/lib/util/hash.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
/**
|
||||
* @fileoverview Defining the hashing function in one place.
|
||||
* @author Michael Ficarra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const murmur = require("imurmurhash");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Hashes the given string with MurmurHash and renders the result in base 36.
 * @param {string} str the string to hash
 * @returns {string} the hash
 */
function hash(str) {
    const hashState = murmur(str);

    return hashState.result().toString(36);
}

//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------

module.exports = hash;
|
||||
381
node_modules/eslint/lib/util/ignored-paths.js
generated
vendored
Normal file
381
node_modules/eslint/lib/util/ignored-paths.js
generated
vendored
Normal file
@ -0,0 +1,381 @@
|
||||
/**
|
||||
* @fileoverview Responsible for loading ignore config files and managing ignore patterns
|
||||
* @author Jonathan Rajavuori
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const fs = require("fs"),
|
||||
path = require("path"),
|
||||
ignore = require("ignore"),
|
||||
pathUtils = require("./path-utils");
|
||||
|
||||
const debug = require("debug")("eslint:ignored-paths");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Constants
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Name of the per-project ignore file ESLint looks for by default.
const ESLINT_IGNORE_FILENAME = ".eslintignore";

/**
 * Adds `"*"` at the end of `"node_modules/"`,
 * so that subtle directories could be re-included by .gitignore patterns
 * such as `"!node_modules/should_not_ignored"`
 */
const DEFAULT_IGNORE_DIRS = [
    "/node_modules/*",
    "/bower_components/*"
];

// Defaults merged underneath caller-provided options (see mergeDefaultOptions):
// dotfiles are excluded unless opted into, and paths resolve against the
// process working directory.
const DEFAULT_OPTIONS = {
    dotfiles: false,
    cwd: process.cwd()
};
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Look up a file by name inside a directory.
 * @param {string} cwd Current working directory
 * @param {string} name File name
 * @returns {string} Absolute path of the file, or an empty string when it does
 *      not exist or is not a regular file.
 */
function findFile(cwd, name) {
    const candidatePath = path.resolve(cwd, name);
    const isRegularFile = fs.existsSync(candidatePath) && fs.statSync(candidatePath).isFile();

    return isRegularFile ? candidatePath : "";
}

/**
 * Look up the `.eslintignore` file in a directory.
 * @param {string} cwd Current working directory
 * @returns {string} Path of ignore file or an empty string.
 */
function findIgnoreFile(cwd) {
    return findFile(cwd, ESLINT_IGNORE_FILENAME);
}

/**
 * Look up the `package.json` file in a directory.
 * @param {string} cwd Current working directory
 * @returns {string} Path of package.json file or an empty string.
 */
function findPackageJSONFile(cwd) {
    return findFile(cwd, "package.json");
}
|
||||
|
||||
/**
 * Merge options with defaults
 * @param {Object} options Options to merge with DEFAULT_OPTIONS constant
 * @returns {Object} A fresh object: defaults overlaid with `options`
 */
function mergeDefaultOptions(options) {
    const merged = Object.assign({}, DEFAULT_OPTIONS);

    return Object.assign(merged, options);
}

/* eslint-disable valid-jsdoc */
/**
 * Normalize the path separators in a given string.
 * On Windows this replaces every `\` by `/`; elsewhere it is the identity.
 * @param {string} str The path string to normalize.
 * @returns {string} The normalized path.
 */
const normalizePathSeps = path.sep === "/"
    ? (str => str)
    : (str => str.split(path.sep).join("/"));
/* eslint-enable valid-jsdoc */

/**
 * Converts a glob pattern to a new glob pattern relative to a different directory
 * @param {string} globPattern The glob pattern, relative to the old base directory
 * @param {string} relativePathToOldBaseDir A relative path from the new base directory to the old one
 * @returns {string} A glob pattern relative to the new base directory
 */
function relativize(globPattern, relativePathToOldBaseDir) {
    if (relativePathToOldBaseDir === "") {
        return globPattern;
    }

    const isNegated = globPattern.startsWith("!");
    const prefix = isNegated ? "!" : "";
    const patternBody = isNegated ? globPattern.slice(1) : globPattern;

    // Only anchored patterns (leading "/") need re-rooting under the old base
    // directory; unanchored patterns apply at every depth and are left as-is.
    if (patternBody.startsWith("/")) {
        return `${prefix}/${normalizePathSeps(relativePathToOldBaseDir)}${patternBody}`;
    }

    return globPattern;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * IgnoredPaths class.
 * Loads ignore configuration (.eslintignore, `eslintIgnore` in package.json,
 * explicit `ignorePattern`s) into two node-ignore instances:
 * - `ig.custom`: only user-supplied ignore rules
 * - `ig.default`: user rules plus built-in defaults (node_modules, dotfiles, ...)
 */
class IgnoredPaths {

    /**
     * @param {Object} providedOptions object containing 'ignore', 'ignorePath' and 'patterns' properties
     */
    constructor(providedOptions) {
        const options = mergeDefaultOptions(providedOptions);

        // Per-instance cache of raw ignore-file lines, keyed by file path.
        this.cache = {};

        this.defaultPatterns = [].concat(DEFAULT_IGNORE_DIRS, options.patterns || []);

        // Directory the ignore file's rules are relative to; falls back to cwd
        // when ignoring is disabled or no explicit ignorePath was given.
        this.ignoreFileDir = options.ignore !== false && options.ignorePath
            ? path.dirname(path.resolve(options.cwd, options.ignorePath))
            : options.cwd;
        this.options = options;
        this._baseDir = null;

        this.ig = {
            custom: ignore(),
            default: ignore()
        };

        this.defaultPatterns.forEach(pattern => this.addPatternRelativeToCwd(this.ig.default, pattern));
        if (options.dotfiles !== true) {

            /*
             * ignore files beginning with a dot, but not files in a parent or
             * ancestor directory (which in relative format will begin with `../`).
             */
            this.addPatternRelativeToCwd(this.ig.default, ".*");
            this.addPatternRelativeToCwd(this.ig.default, "!../");
        }

        /*
         * Add a way to keep track of ignored files. This was present in node-ignore
         * 2.x, but dropped for now as of 3.0.10.
         */
        this.ig.custom.ignoreFiles = [];
        this.ig.default.ignoreFiles = [];

        if (options.ignore !== false) {
            let ignorePath;

            if (options.ignorePath) {
                debug("Using specific ignore file");

                try {
                    fs.statSync(options.ignorePath);
                    ignorePath = options.ignorePath;
                } catch (e) {
                    e.message = `Cannot read ignore file: ${options.ignorePath}\nError: ${e.message}`;
                    throw e;
                }
            } else {
                debug(`Looking for ignore file in ${options.cwd}`);
                ignorePath = findIgnoreFile(options.cwd);

                // Best-effort: a missing .eslintignore in cwd is not an error.
                try {
                    fs.statSync(ignorePath);
                    debug(`Loaded ignore file ${ignorePath}`);
                } catch (e) {
                    debug("Could not find ignore file in cwd");
                }
            }

            if (ignorePath) {
                debug(`Adding ${ignorePath}`);
                this.addIgnoreFile(this.ig.custom, ignorePath);
                this.addIgnoreFile(this.ig.default, ignorePath);
            } else {
                try {

                    // if the ignoreFile does not exist, check package.json for eslintIgnore
                    const packageJSONPath = findPackageJSONFile(options.cwd);

                    if (packageJSONPath) {
                        let packageJSONOptions;

                        try {
                            packageJSONOptions = JSON.parse(fs.readFileSync(packageJSONPath, "utf8"));
                        } catch (e) {
                            debug("Could not read package.json file to check eslintIgnore property");
                            e.messageTemplate = "failed-to-read-json";
                            e.messageData = {
                                path: packageJSONPath,
                                message: e.message
                            };
                            throw e;
                        }

                        if (packageJSONOptions.eslintIgnore) {
                            if (Array.isArray(packageJSONOptions.eslintIgnore)) {
                                packageJSONOptions.eslintIgnore.forEach(pattern => {
                                    this.addPatternRelativeToIgnoreFile(this.ig.custom, pattern);
                                    this.addPatternRelativeToIgnoreFile(this.ig.default, pattern);
                                });
                            } else {
                                throw new TypeError("Package.json eslintIgnore property requires an array of paths");
                            }
                        }
                    }
                } catch (e) {
                    debug("Could not find package.json to check eslintIgnore property");
                    throw e;
                }
            }

            if (options.ignorePattern) {
                this.addPatternRelativeToCwd(this.ig.custom, options.ignorePattern);
                this.addPatternRelativeToCwd(this.ig.default, options.ignorePattern);
            }
        }
    }

    /*
     * If `ignoreFileDir` is a subdirectory of `cwd`, all paths will be normalized to be relative to `cwd`.
     * Otherwise, all paths will be normalized to be relative to `ignoreFileDir`.
     * This ensures that the final normalized ignore rule will not contain `..`, which is forbidden in
     * ignore rules.
     */

    /**
     * Adds a pattern (interpreted relative to cwd) to a node-ignore instance,
     * rebasing it onto the common base directory first when needed.
     * @param {Object} ig instance of node-ignore
     * @param {string} pattern an ignore pattern relative to cwd
     * @returns {void}
     */
    addPatternRelativeToCwd(ig, pattern) {
        const baseDir = this.getBaseDir();
        const cookedPattern = baseDir === this.options.cwd
            ? pattern
            : relativize(pattern, path.relative(baseDir, this.options.cwd));

        ig.addPattern(cookedPattern);
        debug("addPatternRelativeToCwd:\n original = %j\n cooked = %j", pattern, cookedPattern);
    }

    /**
     * Adds a pattern (interpreted relative to the ignore file's directory) to a
     * node-ignore instance, rebasing it onto the common base directory first.
     * @param {Object} ig instance of node-ignore
     * @param {string} pattern an ignore pattern relative to the ignore file
     * @returns {void}
     */
    addPatternRelativeToIgnoreFile(ig, pattern) {
        const baseDir = this.getBaseDir();
        const cookedPattern = baseDir === this.ignoreFileDir
            ? pattern
            : relativize(pattern, path.relative(baseDir, this.ignoreFileDir));

        ig.addPattern(cookedPattern);
        debug("addPatternRelativeToIgnoreFile:\n original = %j\n cooked = %j", pattern, cookedPattern);
    }

    // Detect the common ancestor of cwd and ignoreFileDir (computed lazily, cached).
    getBaseDir() {
        if (!this._baseDir) {
            const a = path.resolve(this.options.cwd);
            const b = path.resolve(this.ignoreFileDir);
            let lastSepPos = 0;

            // Set the shorter one (it's the common ancestor if one includes the other).
            this._baseDir = a.length < b.length ? a : b;

            // Set the common ancestor.
            for (let i = 0; i < a.length && i < b.length; ++i) {
                if (a[i] !== b[i]) {
                    this._baseDir = a.slice(0, lastSepPos);
                    break;
                }
                if (a[i] === path.sep) {
                    lastSepPos = i;
                }
            }

            // If it's only Windows drive letter, it needs \
            if (/^[A-Z]:$/u.test(this._baseDir)) {
                this._baseDir += "\\";
            }

            debug("baseDir = %j", this._baseDir);
        }
        return this._baseDir;
    }

    /**
     * read ignore filepath
     * @param {string} filePath file to read rules from
     * @returns {Array} raw ignore rules (non-empty lines), cached per path
     */
    readIgnoreFile(filePath) {
        if (typeof this.cache[filePath] === "undefined") {
            this.cache[filePath] = fs.readFileSync(filePath, "utf8").split(/\r?\n/gu).filter(Boolean);
        }
        return this.cache[filePath];
    }

    /**
     * add ignore file to node-ignore instance
     * @param {Object} ig instance of node-ignore
     * @param {string} filePath file to add to ig
     * @returns {void}
     */
    addIgnoreFile(ig, filePath) {
        ig.ignoreFiles.push(filePath);
        this
            .readIgnoreFile(filePath)
            .forEach(ignoreRule => this.addPatternRelativeToIgnoreFile(ig, ignoreRule));
    }

    /**
     * Determine whether a file path is included in the default or custom ignore patterns
     * @param {string} filepath Path to check
     * @param {string} [category=undefined] check 'default', 'custom' or both (undefined)
     * @returns {boolean} true if the file path matches one or more patterns, false otherwise
     */
    contains(filepath, category) {

        let result = false;
        const absolutePath = path.resolve(this.options.cwd, filepath);
        const relativePath = pathUtils.getRelativePath(absolutePath, this.getBaseDir());

        // node-ignore's filter() drops ignored paths, so an empty result
        // means "this path is ignored".
        if (typeof category === "undefined") {
            result = (this.ig.default.filter([relativePath]).length === 0) ||
                (this.ig.custom.filter([relativePath]).length === 0);
        } else {
            result = (this.ig[category].filter([relativePath]).length === 0);
        }
        debug("contains:");
        debug(" target = %j", filepath);
        debug(" result = %j", result);

        return result;

    }

    /**
     * Returns a list of dir patterns for glob to ignore
     * @returns {function()} method to check whether a folder should be ignored by glob.
     */
    getIgnoredFoldersGlobChecker() {
        const baseDir = this.getBaseDir();
        const ig = ignore();

        DEFAULT_IGNORE_DIRS.forEach(ignoreDir => this.addPatternRelativeToCwd(ig, ignoreDir));

        if (this.options.dotfiles !== true) {

            // Ignore hidden folders. (This cannot be ".*", or else it's not possible to unignore hidden files)
            ig.add([".*/*", "!../*"]);
        }

        // NOTE(review): truthiness check here, unlike the `!== false` checks in
        // the constructor — an absent `ignore` option skips custom patterns in
        // the glob walk; confirm this asymmetry is intended.
        if (this.options.ignore) {
            ig.add(this.ig.custom);
        }

        const filter = ig.createFilter();

        return function(absolutePath) {
            const relative = pathUtils.getRelativePath(absolutePath, baseDir);

            if (!relative) {
                return false;
            }

            return !filter(relative);
        };
    }
}

module.exports = IgnoredPaths;
|
||||
28
node_modules/eslint/lib/util/interpolate.js
generated
vendored
Normal file
28
node_modules/eslint/lib/util/interpolate.js
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
/**
|
||||
* @fileoverview Interpolate keys from an object into a string with {{ }} markers.
|
||||
* @author Jed Fox
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
module.exports = (text, data) => {
|
||||
if (!data) {
|
||||
return text;
|
||||
}
|
||||
|
||||
// Substitution content for any {{ }} markers.
|
||||
return text.replace(/\{\{([^{}]+?)\}\}/gu, (fullMatch, termWithWhitespace) => {
|
||||
const term = termWithWhitespace.trim();
|
||||
|
||||
if (term in data) {
|
||||
return data[term];
|
||||
}
|
||||
|
||||
// Preserve old behavior: If parameter name not provided, don't replace it.
|
||||
return fullMatch;
|
||||
});
|
||||
};
|
||||
67
node_modules/eslint/lib/util/keywords.js
generated
vendored
Normal file
67
node_modules/eslint/lib/util/keywords.js
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
/**
|
||||
* @fileoverview A shared list of ES3 keywords.
|
||||
* @author Josh Perez
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
// The ES3 Keyword and FutureReservedWord lists (hence entries like "abstract",
// "goto", "int" that were never actual JS keywords), plus the literals
// "true", "false", and "null".
module.exports = [
    "abstract",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "long",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with"
];
|
||||
146
node_modules/eslint/lib/util/lint-result-cache.js
generated
vendored
Normal file
146
node_modules/eslint/lib/util/lint-result-cache.js
generated
vendored
Normal file
@ -0,0 +1,146 @@
|
||||
/**
|
||||
* @fileoverview Utility for caching lint results.
|
||||
* @author Kevin Partington
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
const assert = require("assert"),
|
||||
fs = require("fs"),
|
||||
fileEntryCache = require("file-entry-cache"),
|
||||
hash = require("./hash"),
|
||||
pkg = require("../../package.json"),
|
||||
stringify = require("json-stable-stringify-without-jsonify");
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
// Memoizes config hashes per config object; WeakMap so entries die with their configs.
const configHashCache = new WeakMap();

/**
 * Calculates the hash of the config file used to validate a given file
 * @param {Object} configHelper The config helper for retrieving configuration information
 * @param {string} filename The path of the file to retrieve a config object for to calculate the hash
 * @returns {string} The hash of the config
 */
function hashOfConfigFor(configHelper, filename) {
    const config = configHelper.getConfig(filename);
    let cachedHash = configHashCache.get(config);

    if (cachedHash === undefined) {

        // The ESLint version is part of the hash so upgrades invalidate the cache.
        cachedHash = hash(`${pkg.version}_${stringify(config)}`);
        configHashCache.set(config, cachedHash);
    }

    return cachedHash;
}
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
/**
 * Lint result cache. This wraps around the file-entry-cache module,
 * transparently removing properties that are difficult or expensive to
 * serialize and adding them back in on retrieval.
 */
class LintResultCache {

    /**
     * Creates a new LintResultCache instance.
     * @constructor
     * @param {string} cacheFileLocation The cache file location.
     * @param {Object} configHelper The configuration helper (used for
     * configuration lookup by file path).
     */
    constructor(cacheFileLocation, configHelper) {
        assert(cacheFileLocation, "Cache file location is required");
        assert(configHelper, "Config helper is required");

        this.fileEntryCache = fileEntryCache.create(cacheFileLocation);
        this.configHelper = configHelper;
    }

    /**
     * Retrieve cached lint results for a given file path, if present in the
     * cache. If the file is present and has not been changed, rebuild any
     * missing result information.
     * @param {string} filePath The file for which to retrieve lint results.
     * @returns {Object|null} The rebuilt lint results, or null if the file is
     * changed or not in the filesystem.
     */
    getCachedLintResults(filePath) {

        /*
         * Cached lint results are valid if and only if:
         * 1. The file is present in the filesystem
         * 2. The file has not changed since the time it was previously linted
         * 3. The ESLint configuration has not changed since the time the file
         *    was previously linted
         * If any of these are not true, we will not reuse the lint results.
         */

        const fileDescriptor = this.fileEntryCache.getFileDescriptor(filePath);
        const hashOfConfig = hashOfConfigFor(this.configHelper, filePath);

        // "changed" covers both file content changes (per file-entry-cache)
        // and configuration changes (per the stored config hash).
        const changed = fileDescriptor.changed || fileDescriptor.meta.hashOfConfig !== hashOfConfig;

        if (fileDescriptor.notFound || changed) {
            return null;
        }

        // If source is present but null, need to reread the file from the filesystem.
        if (fileDescriptor.meta.results && fileDescriptor.meta.results.source === null) {
            fileDescriptor.meta.results.source = fs.readFileSync(filePath, "utf-8");
        }

        return fileDescriptor.meta.results;
    }

    /**
     * Set the cached lint results for a given file path, after removing any
     * information that will be both unnecessary and difficult to serialize.
     * Avoids caching results with an "output" property (meaning fixes were
     * applied), to prevent potentially incorrect results if fixes are not
     * written to disk.
     * @param {string} filePath The file for which to set lint results.
     * @param {Object} result The lint result to be set for the file.
     * @returns {void}
     */
    setCachedLintResults(filePath, result) {
        if (result && Object.prototype.hasOwnProperty.call(result, "output")) {
            return;
        }

        const fileDescriptor = this.fileEntryCache.getFileDescriptor(filePath);

        if (fileDescriptor && !fileDescriptor.notFound) {

            // Serialize the result, except that we want to remove the file source if present.
            const resultToSerialize = Object.assign({}, result);

            /*
             * Set result.source to null.
             * In `getCachedLintResults`, if source is explicitly null, we will
             * read the file from the filesystem to set the value again.
             */
            if (Object.prototype.hasOwnProperty.call(resultToSerialize, "source")) {
                resultToSerialize.source = null;
            }

            fileDescriptor.meta.results = resultToSerialize;
            fileDescriptor.meta.hashOfConfig = hashOfConfigFor(this.configHelper, result.filePath);
        }
    }

    /**
     * Persists the in-memory cache to disk.
     * @returns {void}
     */
    reconcile() {
        this.fileEntryCache.reconcile();
    }
}

module.exports = LintResultCache;
|
||||
28
node_modules/eslint/lib/util/logging.js
generated
vendored
Normal file
28
node_modules/eslint/lib/util/logging.js
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
/**
|
||||
* @fileoverview Handle logging for ESLint
|
||||
* @author Gyandeep Singh
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
/* eslint no-console: "off" */

// Thin indirection over console so output can be stubbed in tests and
// redirected in integrations.
/* istanbul ignore next */
module.exports = {

    /**
     * Cover for console.log
     * @param {...*} args arguments forwarded to console.log
     * @returns {void}
     */
    info(...args) {
        console.log(...args);
    },

    /**
     * Cover for console.error
     * @param {...*} args arguments forwarded to console.error
     * @returns {void}
     */
    error(...args) {
        console.error(...args);
    }
};
|
||||
83
node_modules/eslint/lib/util/module-resolver.js
generated
vendored
Normal file
83
node_modules/eslint/lib/util/module-resolver.js
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
/**
|
||||
* @fileoverview Implements the Node.js require.resolve algorithm
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const Module = require("module");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const DEFAULT_OPTIONS = {

    /*
     * module.paths is an array of paths to search for resolving things relative
     * to this file. Module.globalPaths contains all of the special Node.js
     * directories that can also be searched for modules.
     *
     * Need to check for existence of module.paths because Jest seems not to
     * include it. See https://github.com/eslint/eslint/issues/5791.
     */
    // `.concat()` always produces a fresh array, so callers can't mutate the
    // shared module.paths / Module.globalPaths arrays through this default.
    lookupPaths: module.paths ? module.paths.concat(Module.globalPaths) : Module.globalPaths.concat()
};
|
||||
|
||||
/**
 * Resolves modules based on a set of options.
 */
class ModuleResolver {

    /**
     * Resolves modules based on a set of options.
     * @param {Object} options The options for resolving modules.
     * @param {string[]} options.lookupPaths An array of paths to include in the
     *      lookup with the highest priority paths coming first.
     */
    constructor(options) {
        this.options = Object.assign({}, DEFAULT_OPTIONS, options || {});
    }

    /**
     * Resolves the file location of a given module relative to the configured
     * lookup paths.
     * @param {string} name The module name to resolve.
     * @param {string} extraLookupPath An extra path to look into for the module.
     *      This path is used with the highest priority.
     * @returns {string} The resolved file path for the module.
     * @throws {Error} If the module cannot be resolved.
     */
    resolve(name, extraLookupPath) {

        /*
         * Build a fresh lookup list (so repeated calls don't interfere) with
         * extraLookupPath searched first.
         */
        const lookupPaths = [extraLookupPath].concat(this.options.lookupPaths);

        /*
         * Module._findPath is the internal routine Node.js itself uses to map a
         * require() request to a file, so we hook into exactly that logic.
         */
        const result = Module._findPath(name, lookupPaths); // eslint-disable-line no-underscore-dangle

        if (!result) {
            throw new Error(`Cannot find module '${name}'`);
        }

        return result;
    }
}
|
||||
|
||||
//------------------------------------------------------------------------------
// Public API
//------------------------------------------------------------------------------

// The resolver class is this module's sole export.
module.exports = ModuleResolver;
|
||||
107
node_modules/eslint/lib/util/naming.js
generated
vendored
Normal file
107
node_modules/eslint/lib/util/naming.js
generated
vendored
Normal file
@ -0,0 +1,107 @@
|
||||
/**
|
||||
* @fileoverview Common helpers for naming of plugins, formatters and configs
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const pathUtils = require("../util/path-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const NAMESPACE_REGEX = /^@.*\//iu;
|
||||
|
||||
/**
 * Brings package name to correct format based on prefix
 * @param {string} name The name of the package.
 * @param {string} prefix Can be either "eslint-plugin", "eslint-config" or "eslint-formatter"
 * @returns {string} Normalized name of the package
 * @private
 */
function normalizePackageName(name, prefix) {

    /*
     * On Windows the name can arrive with backslashes instead of forward
     * slashes; convert to posix form first to avoid errors later on.
     * https://github.com/eslint/eslint/issues/5644
     */
    const posixName = name.indexOf("\\") > -1
        ? pathUtils.convertPathToPosix(name)
        : name;

    if (posixName.charAt(0) !== "@") {

        // Unscoped package: prepend the prefix unless it is already there.
        return posixName.indexOf(`${prefix}-`) === 0
            ? posixName
            : `${prefix}-${posixName}`;
    }

    /*
     * Scoped package. "@scope" and "@scope/<prefix>" are shortcuts for
     * "@scope/<prefix>" itself.
     */
    const shortcutRegex = new RegExp(`^(@[^/]+)(?:/(?:${prefix})?)?$`, "u");

    if (shortcutRegex.test(posixName)) {
        return posixName.replace(shortcutRegex, `$1/${prefix}`);
    }

    // Already "@scope/<prefix>" or "@scope/<prefix>-xxx": leave untouched.
    const prefixedNameRegex = new RegExp(`^${prefix}(-|$)`, "u");

    if (prefixedNameRegex.test(posixName.split("/")[1])) {
        return posixName;
    }

    // Otherwise insert the prefix right after the scope separator.
    return posixName.replace(/^@([^/]+)\/(.*)$/u, `@$1/${prefix}-$2`);
}
|
||||
|
||||
/**
 * Removes the prefix from a fullname.
 * @param {string} fullname The term which may have the prefix.
 * @param {string} prefix The prefix to remove.
 * @returns {string} The term without prefix.
 */
function getShorthandName(fullname, prefix) {
    if (fullname[0] !== "@") {

        // Unscoped: just strip a leading "<prefix>-" if present.
        return fullname.startsWith(`${prefix}-`)
            ? fullname.slice(prefix.length + 1)
            : fullname;
    }

    // "@scope/<prefix>" collapses to "@scope".
    const exactMatch = new RegExp(`^(@[^/]+)/${prefix}$`, "u").exec(fullname);

    if (exactMatch) {
        return exactMatch[1];
    }

    // "@scope/<prefix>-name" collapses to "@scope/name".
    const prefixedMatch = new RegExp(`^(@[^/]+)/${prefix}-(.+)$`, "u").exec(fullname);

    if (prefixedMatch) {
        return `${prefixedMatch[1]}/${prefixedMatch[2]}`;
    }

    return fullname;
}
|
||||
|
||||
/**
 * Gets the scope (namespace) of a term.
 * @param {string} term The term which may have the namespace.
 * @returns {string} The namespace of the term (including the trailing "/")
 * if it has one, otherwise an empty string.
 */
function getNamespaceFromTerm(term) {

    // A namespace is a leading "@scope/.../" portion, e.g. "@eslint/" in "@eslint/foo".
    const matched = /^@.*\//iu.exec(term);

    return matched === null ? "" : matched[0];
}
|
||||
|
||||
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------

// Naming helpers shared by plugin, config, and formatter resolution.
module.exports = {
    normalizePackageName,
    getShorthandName,
    getNamespaceFromTerm
};
|
||||
308
node_modules/eslint/lib/util/node-event-generator.js
generated
vendored
Normal file
308
node_modules/eslint/lib/util/node-event-generator.js
generated
vendored
Normal file
@ -0,0 +1,308 @@
|
||||
/**
|
||||
* @fileoverview The event generator for AST nodes.
|
||||
* @author Toru Nagashima
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const esquery = require("esquery");
|
||||
const lodash = require("lodash");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Typedefs
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* An object describing an AST selector
|
||||
* @typedef {Object} ASTSelector
|
||||
* @property {string} rawSelector The string that was parsed into this selector
|
||||
* @property {boolean} isExit `true` if this should be emitted when exiting the node rather than when entering
|
||||
* @property {Object} parsedSelector An object (from esquery) describing the matching behavior of the selector
|
||||
* @property {string[]|null} listenerTypes A list of node types that could possibly cause the selector to match,
|
||||
* or `null` if all node types could cause a match
|
||||
* @property {number} attributeCount The total number of classes, pseudo-classes, and attribute queries in this selector
|
||||
* @property {number} identifierCount The total number of identifier queries in this selector
|
||||
*/
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Gets the possible types of a selector
 * @param {Object} parsedSelector An object (from esquery) describing the matching behavior of the selector
 * @returns {string[]|null} The node types that could possibly trigger this selector, or `null` if all node types could trigger it
 */
function getPossibleTypes(parsedSelector) {
    switch (parsedSelector.type) {
        case "identifier":
            return [parsedSelector.value];

        case "matches": {
            const componentTypes = parsedSelector.selectors.map(getPossibleTypes);

            // A "matches" union is only bounded when every alternative is bounded.
            return componentTypes.every(Boolean)
                ? lodash.union(...componentTypes)
                : null;
        }

        case "compound": {
            const boundedTypes = parsedSelector.selectors
                .map(getPossibleTypes)
                .filter(Boolean);

            /*
             * Every component unbounded -> the compound is unbounded too.
             * Otherwise only the intersection of the bounded components
             * can possibly match.
             */
            return boundedTypes.length
                ? lodash.intersection(...boundedTypes)
                : null;
        }

        // Combinators are constrained only by their right-hand side.
        case "child":
        case "descendant":
        case "sibling":
        case "adjacent":
            return getPossibleTypes(parsedSelector.right);

        default:
            return null;
    }
}
|
||||
|
||||
/**
 * Counts the number of class, pseudo-class, and attribute queries in this selector
 * @param {Object} parsedSelector An object (from esquery) describing the selector's matching behavior
 * @returns {number} The number of class, pseudo-class, and attribute queries in this selector
 */
function countClassAttributes(parsedSelector) {
    const { type } = parsedSelector;

    // Binary combinators: count both sides.
    if (type === "child" || type === "descendant" || type === "sibling" || type === "adjacent") {
        return countClassAttributes(parsedSelector.left) + countClassAttributes(parsedSelector.right);
    }

    // Grouping selectors: sum over every component.
    if (type === "compound" || type === "not" || type === "matches") {
        let total = 0;

        for (const childSelector of parsedSelector.selectors) {
            total += countClassAttributes(childSelector);
        }
        return total;
    }

    // Leaf queries that count toward attribute specificity.
    if (type === "attribute" || type === "field" || type === "nth-child" || type === "nth-last-child") {
        return 1;
    }

    return 0;
}
|
||||
|
||||
/**
 * Counts the number of identifier queries in this selector
 * @param {Object} parsedSelector An object (from esquery) describing the selector's matching behavior
 * @returns {number} The number of identifier queries
 */
function countIdentifiers(parsedSelector) {
    const { type } = parsedSelector;

    // Binary combinators: count both sides.
    if (type === "child" || type === "descendant" || type === "sibling" || type === "adjacent") {
        return countIdentifiers(parsedSelector.left) + countIdentifiers(parsedSelector.right);
    }

    // Grouping selectors: sum over every component.
    if (type === "compound" || type === "not" || type === "matches") {
        let total = 0;

        for (const childSelector of parsedSelector.selectors) {
            total += countIdentifiers(childSelector);
        }
        return total;
    }

    return type === "identifier" ? 1 : 0;
}
|
||||
|
||||
/**
 * Compares the specificity of two selector objects, with CSS-like rules.
 * @param {ASTSelector} selectorA An AST selector descriptor
 * @param {ASTSelector} selectorB Another AST selector descriptor
 * @returns {number}
 * a value less than 0 if selectorA is less specific than selectorB
 * a value greater than 0 if selectorA is more specific than selectorB
 * a value less than 0 if they are equally specific and selectorA <= selectorB alphabetically
 * a value greater than 0 if they are equally specific and selectorA > selectorB alphabetically
 */
function compareSpecificity(selectorA, selectorB) {
    const byAttributes = selectorA.attributeCount - selectorB.attributeCount;

    if (byAttributes !== 0) {
        return byAttributes;
    }

    const byIdentifiers = selectorA.identifierCount - selectorB.identifierCount;

    if (byIdentifiers !== 0) {
        return byIdentifiers;
    }

    // Equal specificity: fall back to alphabetical order (never returns 0).
    return selectorA.rawSelector <= selectorB.rawSelector ? -1 : 1;
}
|
||||
|
||||
/**
 * Parses a raw selector string, and throws a useful error if parsing fails.
 * @param {string} rawSelector A raw AST selector
 * @returns {Object} An object (from esquery) describing the matching behavior of this selector
 * @throws {SyntaxError} If the selector's syntax is invalid (includes position info from esquery)
 * @throws {Error} Any non-syntax error from esquery, rethrown unchanged
 */
function tryParseSelector(rawSelector) {
    try {

        // ":exit" is an ESLint-specific suffix, not esquery syntax, so strip it before parsing.
        return esquery.parse(rawSelector.replace(/:exit$/u, ""));
    } catch (err) {

        // esquery reports syntax errors with a numeric `offset`; wrap those with a clearer message.
        if (typeof err.offset === "number") {
            throw new SyntaxError(`Syntax error in selector "${rawSelector}" at position ${err.offset}: ${err.message}`);
        }
        throw err;
    }
}
|
||||
|
||||
/**
 * Parses a raw selector string, and returns the parsed selector along with specificity and type information.
 * Memoized: each distinct selector string is parsed at most once per process.
 * @param {string} rawSelector A raw AST selector
 * @returns {ASTSelector} A selector descriptor
 */
const parseSelector = lodash.memoize(rawSelector => {
    const parsedSelector = tryParseSelector(rawSelector);

    return {
        rawSelector,

        // ":exit" selectors fire when leaving a node rather than entering it.
        isExit: rawSelector.endsWith(":exit"),
        parsedSelector,

        // null means "could match any node type".
        listenerTypes: getPossibleTypes(parsedSelector),

        // Pre-computed specificity components (CSS-like ordering).
        attributeCount: countClassAttributes(parsedSelector),
        identifierCount: countIdentifiers(parsedSelector)
    };
});
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * The event generator for AST nodes.
 * This implements below interface.
 *
 * ```ts
 * interface EventGenerator {
 *     emitter: SafeEmitter;
 *     enterNode(node: ASTNode): void;
 *     leaveNode(node: ASTNode): void;
 * }
 * ```
 */
class NodeEventGenerator {

    /**
     * @param {SafeEmitter} emitter
     * A SafeEmitter which is the destination of events. This emitter must already
     * have registered listeners for all of the events that it needs to listen for.
     * (See lib/util/safe-emitter.js for more details on `SafeEmitter`.)
     * @returns {NodeEventGenerator} new instance
     */
    constructor(emitter) {
        this.emitter = emitter;

        // Ancestor chain of the node currently being visited (nearest first).
        this.currentAncestry = [];

        // Selectors bounded to specific node types, indexed by type for fast lookup.
        this.enterSelectorsByNodeType = new Map();
        this.exitSelectorsByNodeType = new Map();

        // Selectors that could match any node type (e.g. ":not(Literal)").
        this.anyTypeEnterSelectors = [];
        this.anyTypeExitSelectors = [];

        emitter.eventNames().forEach(rawSelector => {
            const selector = parseSelector(rawSelector);

            // A non-null listenerTypes list means the selector is type-bounded.
            if (selector.listenerTypes) {
                selector.listenerTypes.forEach(nodeType => {
                    const typeMap = selector.isExit ? this.exitSelectorsByNodeType : this.enterSelectorsByNodeType;

                    if (!typeMap.has(nodeType)) {
                        typeMap.set(nodeType, []);
                    }
                    typeMap.get(nodeType).push(selector);
                });
            } else {
                (selector.isExit ? this.anyTypeExitSelectors : this.anyTypeEnterSelectors).push(selector);
            }
        });

        /*
         * Pre-sort every selector list by specificity so applySelectors() can
         * do a simple two-way merge instead of sorting per node.
         */
        this.anyTypeEnterSelectors.sort(compareSpecificity);
        this.anyTypeExitSelectors.sort(compareSpecificity);
        this.enterSelectorsByNodeType.forEach(selectorList => selectorList.sort(compareSpecificity));
        this.exitSelectorsByNodeType.forEach(selectorList => selectorList.sort(compareSpecificity));
    }

    /**
     * Checks a selector against a node, and emits it if it matches
     * @param {ASTNode} node The node to check
     * @param {ASTSelector} selector An AST selector descriptor
     * @returns {void}
     */
    applySelector(node, selector) {
        if (esquery.matches(node, selector.parsedSelector, this.currentAncestry)) {
            this.emitter.emit(selector.rawSelector, node);
        }
    }

    /**
     * Applies all appropriate selectors to a node, in specificity order
     * @param {ASTNode} node The node to check
     * @param {boolean} isExit `false` if the node is currently being entered, `true` if it's currently being exited
     * @returns {void}
     */
    applySelectors(node, isExit) {
        const selectorsByNodeType = (isExit ? this.exitSelectorsByNodeType : this.enterSelectorsByNodeType).get(node.type) || [];
        const anyTypeSelectors = isExit ? this.anyTypeExitSelectors : this.anyTypeEnterSelectors;

        /*
         * selectorsByNodeType and anyTypeSelectors were already sorted by specificity in the constructor.
         * This is a two-way merge of the two sorted lists: at each step, apply whichever
         * pending selector is less specific, so overall emission order is by specificity.
         */
        let selectorsByTypeIndex = 0;
        let anyTypeSelectorsIndex = 0;

        while (selectorsByTypeIndex < selectorsByNodeType.length || anyTypeSelectorsIndex < anyTypeSelectors.length) {
            if (
                selectorsByTypeIndex >= selectorsByNodeType.length ||
                anyTypeSelectorsIndex < anyTypeSelectors.length &&
                compareSpecificity(anyTypeSelectors[anyTypeSelectorsIndex], selectorsByNodeType[selectorsByTypeIndex]) < 0
            ) {
                this.applySelector(node, anyTypeSelectors[anyTypeSelectorsIndex++]);
            } else {
                this.applySelector(node, selectorsByNodeType[selectorsByTypeIndex++]);
            }
        }
    }

    /**
     * Emits an event of entering AST node.
     * @param {ASTNode} node - A node which was entered.
     * @returns {void}
     */
    enterNode(node) {

        // Push the parent before matching so ancestry-based selectors see it.
        if (node.parent) {
            this.currentAncestry.unshift(node.parent);
        }
        this.applySelectors(node, false);
    }

    /**
     * Emits an event of leaving AST node.
     * @param {ASTNode} node - A node which was left.
     * @returns {void}
     */
    leaveNode(node) {
        this.applySelectors(node, true);

        // Pops the entry pushed by enterNode() for this node's parent.
        this.currentAncestry.shift();
    }
}
|
||||
|
||||
// Single export: the generator class consumed by the linter's traversal.
module.exports = NodeEventGenerator;
|
||||
183
node_modules/eslint/lib/util/npm-utils.js
generated
vendored
Normal file
183
node_modules/eslint/lib/util/npm-utils.js
generated
vendored
Normal file
@ -0,0 +1,183 @@
|
||||
/**
|
||||
* @fileoverview Utility for executing npm commands.
|
||||
* @author Ian VanSchooten
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const fs = require("fs"),
|
||||
spawn = require("cross-spawn"),
|
||||
path = require("path"),
|
||||
log = require("./logging");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Find the closest package.json file, starting at process.cwd (by default),
 * and working up to root.
 *
 * @param {string} [startDir=process.cwd()] Starting directory
 * @returns {string} Absolute path to closest package.json file, or null if none found
 */
function findPackageJson(startDir) {
    let currentDir = path.resolve(startDir || process.cwd());

    do {
        const candidate = path.join(currentDir, "package.json");

        if (fs.existsSync(candidate) && fs.statSync(candidate).isFile()) {
            return candidate;
        }

        // Not here: step one directory up and try again.
        currentDir = path.join(currentDir, "..");
    } while (currentDir !== path.resolve(currentDir, ".."));

    return null;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Install node modules synchronously and save to devDependencies in package.json
 * @param {string|string[]} packages Node module or modules to install
 * @returns {void}
 */
function installSyncSaveDev(packages) {

    // Accept either a single name or an array of names.
    const packageList = [].concat(packages);
    const result = spawn.sync(
        "npm",
        ["i", "--save-dev", ...packageList],
        { stdio: "inherit" }
    );

    // ENOENT means npm itself is not available on the PATH.
    if (result.error && result.error.code === "ENOENT") {
        const pluralS = packageList.length > 1 ? "s" : "";

        log.error(`Could not execute npm. Please install the following package${pluralS} with a package manager of your choice: ${packageList.join(", ")}`);
    }
}
|
||||
|
||||
/**
 * Fetch `peerDependencies` of the given package by `npm show` command.
 * @param {string} packageName The package name to fetch peerDependencies.
 * @returns {Object} Gotten peerDependencies. Returns null if npm was not found.
 */
function fetchPeerDependencies(packageName) {
    const result = spawn.sync(
        "npm",
        ["show", "--json", packageName, "peerDependencies"],
        { encoding: "utf8" }
    );

    // npm missing from the PATH: signal with null rather than throwing.
    if (result.error && result.error.code === "ENOENT") {
        return null;
    }

    const output = result.stdout.trim();

    // npm prints nothing when the package has no peerDependencies.
    return JSON.parse(output === "" ? "{}" : output);
}
|
||||
|
||||
/**
 * Check whether node modules are included in a project's package.json.
 *
 * @param {string[]} packages Array of node module names
 * @param {Object} [opt] Options Object
 * @param {boolean} [opt.dependencies] Set to true to check for direct dependencies
 * @param {boolean} [opt.devDependencies] Set to true to check for development dependencies
 * @param {string} [opt.startDir] Directory to begin searching from
 * @returns {Object} An object whose keys are the module names
 * and values are booleans indicating installation.
 * @throws {Error} If no package.json can be found, or if it cannot be parsed.
 */
function check(packages, opt) {

    /*
     * Fix: the original only guarded the findPackageJson() call against a
     * missing options object, then unconditionally read opt.devDependencies /
     * opt.dependencies below, throwing a TypeError when opt was falsy.
     */
    const options = opt || {};
    const pkgJson = findPackageJson(options.startDir);
    let fileJson;

    if (!pkgJson) {
        throw new Error("Could not find a package.json file. Run 'npm init' to create one.");
    }

    try {
        fileJson = JSON.parse(fs.readFileSync(pkgJson, "utf8"));
    } catch (e) {

        // Re-wrap so the CLI can render a friendly "failed to read" message.
        const error = new Error(e);

        error.messageTemplate = "failed-to-read-json";
        error.messageData = {
            path: pkgJson,
            message: e.message
        };
        throw error;
    }

    // Collect declared dependency names into a Set for O(1) membership checks.
    const deps = new Set();

    if (options.devDependencies && typeof fileJson.devDependencies === "object") {
        Object.keys(fileJson.devDependencies).forEach(dep => deps.add(dep));
    }
    if (options.dependencies && typeof fileJson.dependencies === "object") {
        Object.keys(fileJson.dependencies).forEach(dep => deps.add(dep));
    }
    return packages.reduce((status, pkg) => {
        status[pkg] = deps.has(pkg);
        return status;
    }, {});
}
|
||||
|
||||
/**
 * Check whether node modules are included in the dependencies of a project's
 * package.json.
 *
 * Convenience wrapper around check().
 *
 * @param {string[]} packages Array of node modules to check.
 * @param {string} rootDir The directory containing a package.json
 * @returns {Object} An object whose keys are the module names
 * and values are booleans indicating installation.
 */
function checkDeps(packages, rootDir) {
    return check(packages, { dependencies: true, startDir: rootDir });
}
|
||||
|
||||
/**
 * Check whether node modules are included in the devDependencies of a project's
 * package.json.
 *
 * Convenience wrapper around check(). Searches upward from process.cwd().
 *
 * @param {string[]} packages Array of node modules to check.
 * @returns {Object} An object whose keys are the module names
 * and values are booleans indicating installation.
 */
function checkDevDeps(packages) {
    return check(packages, { devDependencies: true });
}
|
||||
|
||||
/**
 * Check whether a package.json file is found in the current path (searching
 * upward from the given directory).
 *
 * @param {string=} startDir Starting directory (defaults to process.cwd()).
 * @returns {boolean} Whether a package.json is found in current path.
 */
function checkPackageJson(startDir) {
    return Boolean(findPackageJson(startDir));
}
|
||||
|
||||
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------

// npm helpers used by ESLint's init/config tooling.
module.exports = {
    installSyncSaveDev,
    fetchPeerDependencies,
    checkDeps,
    checkDevDeps,
    checkPackageJson
};
|
||||
72
node_modules/eslint/lib/util/path-utils.js
generated
vendored
Normal file
72
node_modules/eslint/lib/util/path-utils.js
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
/**
|
||||
* @fileoverview Common helpers for operations on filenames and paths
|
||||
* @author Ian VanSchooten
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const path = require("path");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Replace Windows with posix style paths
 *
 * @param {string} filepath Path to convert
 * @returns {string} Converted filepath
 */
function convertPathToPosix(filepath) {

    // Normalize first (collapses "." and ".." segments), then flip backslashes.
    return path.normalize(filepath).replace(/\\/gu, "/");
}
|
||||
|
||||
/**
 * Converts an absolute filepath to a relative path from a given base path
 *
 * For example, if the filepath is `/my/awesome/project/foo.bar`,
 * and the base directory is `/my/awesome/project/`,
 * then this function should return `foo.bar`.
 *
 * path.relative() does something similar, but it requires a baseDir (`from` argument).
 * This function makes it optional and just removes a leading slash if the baseDir is not given.
 *
 * It does not take into account symlinks (for now).
 *
 * @param {string} filepath Path to convert to relative path. If already relative,
 *                          it will be assumed to be relative to process.cwd(),
 *                          converted to absolute, and then processed.
 * @param {string} [baseDir] Absolute base directory to resolve the filepath from.
 *                           If not provided, all this function will do is remove
 *                           a leading slash.
 * @returns {string} Relative filepath
 * @throws {Error} If baseDir is given but is not an absolute path.
 */
function getRelativePath(filepath, baseDir) {
    const absolutePath = path.isAbsolute(filepath)
        ? filepath
        : path.resolve(filepath);

    if (!baseDir) {

        // No base directory: just strip a single leading slash.
        return absolutePath.replace(/^\//u, "");
    }

    if (!path.isAbsolute(baseDir)) {
        throw new Error(`baseDir should be an absolute path: ${baseDir}`);
    }

    return path.relative(baseDir, absolutePath);
}
|
||||
|
||||
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------

// Path helpers shared across the codebase (see naming.js, CLI utilities).
module.exports = {
    convertPathToPosix,
    getRelativePath
};
|
||||
36
node_modules/eslint/lib/util/patterns/letters.js
generated
vendored
Normal file
36
node_modules/eslint/lib/util/patterns/letters.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
281
node_modules/eslint/lib/util/report-translator.js
generated
vendored
Normal file
281
node_modules/eslint/lib/util/report-translator.js
generated
vendored
Normal file
@ -0,0 +1,281 @@
|
||||
/**
|
||||
* @fileoverview A helper that translates context.report() calls from the rule API into generic problem objects
|
||||
* @author Teddy Katz
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const assert = require("assert");
|
||||
const ruleFixer = require("./rule-fixer");
|
||||
const interpolate = require("./interpolate");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Typedefs
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* An error message description
|
||||
* @typedef {Object} MessageDescriptor
|
||||
* @property {ASTNode} [node] The reported node
|
||||
* @property {Location} loc The location of the problem.
|
||||
* @property {string} message The problem message.
|
||||
* @property {Object} [data] Optional data to use to fill in placeholders in the
|
||||
* message.
|
||||
* @property {Function} [fix] The function to call that creates a fix command.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Information about the report
|
||||
* @typedef {Object} ReportInfo
|
||||
* @property {string} ruleId
|
||||
* @property {(0|1|2)} severity
|
||||
* @property {(string|undefined)} message
|
||||
* @property {(string|undefined)} messageId
|
||||
* @property {number} line
|
||||
* @property {number} column
|
||||
* @property {(number|undefined)} endLine
|
||||
* @property {(number|undefined)} endColumn
|
||||
* @property {(string|null)} nodeType
|
||||
* @property {string} source
|
||||
* @property {({text: string, range: (number[]|null)}|null)} fix
|
||||
*/
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Module Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
|
||||
/**
 * Translates a multi-argument context.report() call into a single object argument call
 * @param {...*} args A list of arguments passed to `context.report`
 * @returns {MessageDescriptor} A normalized object containing report information
 */
function normalizeMultiArgReportCall(...args) {

    // New-style API: a single descriptor object.
    if (args.length === 1) {

        // Shallow clone so callers can safely reuse their descriptor.
        return Object.assign({}, args[0]);
    }

    const [node, second, third, fourth, fifth] = args;

    // Legacy (node, message, data, fix) form: second argument is the message.
    if (typeof second === "string") {
        return {
            node,
            message: second,
            data: third,
            fix: fourth
        };
    }

    // Legacy (node, loc, message, data, fix) form.
    return {
        node,
        loc: second,
        message: third,
        data: fourth,
        fix: fifth
    };
}
|
||||
|
||||
/**
 * Asserts that either a loc or a node was provided, and the node is valid if it was provided.
 * @param {MessageDescriptor} descriptor A descriptor to validate
 * @returns {void}
 * @throws AssertionError if neither a node nor a loc was provided, or if the node is not an object
 */
function assertValidNodeInfo(descriptor) {
    if (!descriptor.node) {

        // With no node, an explicit location is mandatory.
        assert(descriptor.loc, "Node must be provided when reporting error if location is not provided");
        return;
    }
    assert(typeof descriptor.node === "object", "Node must be an object");
}
|
||||
|
||||
/**
 * Normalizes a MessageDescriptor to always have a `loc` with `start` and `end` properties
 * @param {MessageDescriptor} descriptor A descriptor for the report from a rule.
 * @returns {{start: Location, end: (Location|null)}} An updated location that infers the `start` and `end` properties
 * from the `node` of the original descriptor, or infers the `start` from the `loc` of the original descriptor.
 */
function normalizeReportLoc(descriptor) {
    const { loc, node } = descriptor;

    // No explicit loc: fall back to the reported node's own location.
    if (!loc) {
        return node.loc;
    }

    // Already in {start, end} form: pass it through unchanged.
    if (loc.start) {
        return loc;
    }

    // A bare position: treat it as the start, with no known end.
    return { start: loc, end: null };
}
|
||||
|
||||
/**
 * Compares items in a fixes array by range.
 * @param {Fix} a The first fix.
 * @param {Fix} b The second fix.
 * @returns {int} -1 if a comes before b, 1 if a comes after b, 0 if equal.
 * @private
 */
function compareFixesByRange(a, b) {
    const startDiff = a.range[0] - b.range[0];

    // Ties on start position are broken by end position.
    return startDiff !== 0 ? startDiff : a.range[1] - b.range[1];
}
|
||||
|
||||
/**
 * Merges the given fixes array into one.
 * @param {Fix[]} fixes The fixes to merge. NOTE: sorted in place by range.
 * @param {SourceCode} sourceCode The source code object to get the text between fixes.
 * @returns {{text: string, range: number[]}|null} The merged fix, the single fix
 * if only one was given, or null if there were none.
 */
function mergeFixes(fixes, sourceCode) {
    if (fixes.length === 0) {
        return null;
    }
    if (fixes.length === 1) {
        return fixes[0];
    }

    // Process fixes in source order so the text can be stitched left to right.
    fixes.sort(compareFixesByRange);

    const originalText = sourceCode.text;
    const start = fixes[0].range[0];
    const end = fixes[fixes.length - 1].range[1];
    let text = "";
    let lastPos = Number.MIN_SAFE_INTEGER;

    for (const fix of fixes) {
        assert(fix.range[0] >= lastPos, "Fix objects must not be overlapped in a report.");

        // Copy the untouched source between the previous fix and this one.
        if (fix.range[0] >= 0) {
            text += originalText.slice(Math.max(0, start, lastPos), fix.range[0]);
        }
        text += fix.text;
        lastPos = fix.range[1];
    }

    // Append any untouched source after the last fix, up to the merged end.
    text += originalText.slice(Math.max(0, start, lastPos), end);

    return { range: [start, end], text };
}
|
||||
|
||||
/**
 * Gets one fix object from the given descriptor.
 * If the descriptor retrieves multiple fixes, this merges those to one.
 * @param {MessageDescriptor} descriptor The report descriptor.
 * @param {SourceCode} sourceCode The source code object to get text between fixes.
 * @returns {({text: string, range: number[]}|null)} The fix for the descriptor
 */
function normalizeFixes(descriptor, sourceCode) {
    // A rule opts into autofixing by putting a `fix` function on the descriptor.
    if (typeof descriptor.fix !== "function") {
        return null;
    }

    // @type {null | Fix | Fix[] | IterableIterator<Fix>}
    const fix = descriptor.fix(ruleFixer);

    // Collapse an iterable of fixes (array or generator) into a single merged fix.
    return fix && Symbol.iterator in fix
        ? mergeFixes(Array.from(fix), sourceCode)
        : fix;
}
|
||||
|
||||
/**
 * Creates information about the report from a descriptor
 * @param {Object} options Information about the problem
 * @param {string} options.ruleId Rule ID
 * @param {(0|1|2)} options.severity Rule severity
 * @param {(ASTNode|null)} options.node Node
 * @param {string} options.message Error message
 * @param {string} [options.messageId] The error message ID.
 * @param {{start: SourceLocation, end: (SourceLocation|null)}} options.loc Start and end location
 * @param {{text: string, range: (number[]|null)}} options.fix The fix object
 * @returns {Object} The normalized problem object for the Node.js API
 */
function createProblem(options) {
    const { ruleId, severity, node, message, messageId, loc, fix } = options;

    const problem = {
        ruleId,
        severity,
        message,
        line: loc.start.line,
        column: loc.start.column + 1, // reported columns are 1-based
        nodeType: node && node.type || null
    };

    /*
     * If this isn’t in the conditional, some of the tests fail
     * because `messageId` is present in the problem object
     */
    if (messageId) {
        problem.messageId = messageId;
    }

    // End position is optional; omit the keys entirely when it is absent.
    if (loc.end) {
        problem.endLine = loc.end.line;
        problem.endColumn = loc.end.column + 1;
    }

    if (fix) {
        problem.fix = fix;
    }

    return problem;
}
|
||||
|
||||
/**
 * Returns a function that converts the arguments of a `context.report` call from a rule into a reported
 * problem for the Node.js API.
 * @param {{ruleId: string, severity: number, sourceCode: SourceCode, messageIds: Object}} metadata Metadata
 * for the reported problem; `metadata.sourceCode` is the `SourceCode` instance for the text being linted.
 * @returns {function(...args): ReportInfo} Function that returns information about the report
 */

module.exports = function createReportTranslator(metadata) {

    /*
     * `createReportTranslator` gets called once per enabled rule per file. It needs to be very performant.
     * The report translator itself (i.e. the function that `createReportTranslator` returns) gets
     * called every time a rule reports a problem, which happens much less frequently (usually, the vast
     * majority of rules don't report any problems for a given file).
     */
    return (...args) => {
        // Collapse the supported call forms of context.report() into one descriptor object.
        const descriptor = normalizeMultiArgReportCall(...args);

        // A report must carry either a valid node or an explicit location.
        assertValidNodeInfo(descriptor);

        let computedMessage;

        /*
         * Resolve the message text. The TypeError checks below are in a deliberate
         * order so the caller gets the most specific diagnostic first.
         */
        if (descriptor.messageId) {
            if (!metadata.messageIds) {
                throw new TypeError("context.report() called with a messageId, but no messages were present in the rule metadata.");
            }
            const id = descriptor.messageId;
            const messages = metadata.messageIds;

            // Passing both `message` and `messageId` is ambiguous, so it is rejected.
            if (descriptor.message) {
                throw new TypeError("context.report() called with a message and a messageId. Please only pass one.");
            }
            if (!messages || !Object.prototype.hasOwnProperty.call(messages, id)) {
                throw new TypeError(`context.report() called with a messageId of '${id}' which is not present in the 'messages' config: ${JSON.stringify(messages, null, 2)}`);
            }
            computedMessage = messages[id];
        } else if (descriptor.message) {
            computedMessage = descriptor.message;
        } else {
            throw new TypeError("Missing `message` property in report() call; add a message that describes the linting problem.");
        }


        return createProblem({
            ruleId: metadata.ruleId,
            severity: metadata.severity,
            node: descriptor.node,
            // `descriptor.data` presumably supplies placeholder values for interpolation — confirm in `interpolate`.
            message: interpolate(computedMessage, descriptor.data),
            messageId: descriptor.messageId,
            loc: normalizeReportLoc(descriptor),
            fix: normalizeFixes(descriptor, metadata.sourceCode)
        });
    };
};
|
||||
140
node_modules/eslint/lib/util/rule-fixer.js
generated
vendored
Normal file
140
node_modules/eslint/lib/util/rule-fixer.js
generated
vendored
Normal file
@ -0,0 +1,140 @@
|
||||
/**
|
||||
* @fileoverview An object that creates fix commands for rules.
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// none!
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Creates a fix command that inserts text at the specified index in the source text.
 * @param {int} index The 0-based index at which to insert the new text.
 * @param {string} text The text to insert.
 * @returns {Object} The fix command.
 * @private
 */
function insertTextAt(index, text) {
    // An insertion is simply a replacement over a zero-width range.
    return { range: [index, index], text };
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Creates code fixing commands for rules.
 * Each method returns a plain fix command ({ range, text }); nothing is applied
 * until applyFixes() is called.
 */

const ruleFixer = Object.freeze({

    /**
     * Creates a fix command that inserts text after the given node or token.
     * The fix is not applied until applyFixes() is called.
     * @param {ASTNode|Token} nodeOrToken The node or token to insert after.
     * @param {string} text The text to insert.
     * @returns {Object} The fix command.
     */
    insertTextAfter(nodeOrToken, text) {
        return this.insertTextAfterRange(nodeOrToken.range, text);
    },

    /**
     * Creates a fix command that inserts text after the specified range in the source text.
     * The fix is not applied until applyFixes() is called.
     * @param {int[]} range The [start, end] range; the insertion happens at `end`.
     * @param {string} text The text to insert.
     * @returns {Object} The fix command.
     */
    insertTextAfterRange(range, text) {
        return insertTextAt(range[1], text);
    },

    /**
     * Creates a fix command that inserts text before the given node or token.
     * The fix is not applied until applyFixes() is called.
     * @param {ASTNode|Token} nodeOrToken The node or token to insert before.
     * @param {string} text The text to insert.
     * @returns {Object} The fix command.
     */
    insertTextBefore(nodeOrToken, text) {
        return this.insertTextBeforeRange(nodeOrToken.range, text);
    },

    /**
     * Creates a fix command that inserts text before the specified range in the source text.
     * The fix is not applied until applyFixes() is called.
     * @param {int[]} range The [start, end] range; the insertion happens at `start`.
     * @param {string} text The text to insert.
     * @returns {Object} The fix command.
     */
    insertTextBeforeRange(range, text) {
        return insertTextAt(range[0], text);
    },

    /**
     * Creates a fix command that replaces text at the node or token.
     * The fix is not applied until applyFixes() is called.
     * @param {ASTNode|Token} nodeOrToken The node or token whose text is replaced.
     * @param {string} text The replacement text.
     * @returns {Object} The fix command.
     */
    replaceText(nodeOrToken, text) {
        return this.replaceTextRange(nodeOrToken.range, text);
    },

    /**
     * Creates a fix command that replaces text at the specified range in the source text.
     * This is the primitive that replacement and removal bottom out in.
     * @param {int[]} range The [start, end] range to replace.
     * @param {string} text The replacement text.
     * @returns {Object} The fix command.
     */
    replaceTextRange(range, text) {
        return {
            range,
            text
        };
    },

    /**
     * Creates a fix command that removes the node or token from the source.
     * The fix is not applied until applyFixes() is called.
     * @param {ASTNode|Token} nodeOrToken The node or token to remove.
     * @returns {Object} The fix command.
     */
    remove(nodeOrToken) {
        return this.removeRange(nodeOrToken.range);
    },

    /**
     * Creates a fix command that removes the specified range of text from the source.
     * The fix is not applied until applyFixes() is called.
     * @param {int[]} range The [start, end] range to remove.
     * @returns {Object} The fix command.
     */
    removeRange(range) {
        // Removal is just replacement with the empty string.
        return this.replaceTextRange(range, "");
    }

});
|
||||
|
||||
|
||||
// Expose the single shared fixer instance; it is frozen, so sharing it is safe.
module.exports = ruleFixer;
|
||||
52
node_modules/eslint/lib/util/safe-emitter.js
generated
vendored
Normal file
52
node_modules/eslint/lib/util/safe-emitter.js
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
/**
|
||||
* @fileoverview A variant of EventEmitter which does not give listeners information about each other
|
||||
* @author Teddy Katz
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Typedefs
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* An event emitter
|
||||
* @typedef {Object} SafeEmitter
|
||||
* @property {function(eventName: string, listenerFunc: Function): void} on Adds a listener for a given event name
|
||||
* @property {function(eventName: string, arg1?: any, arg2?: any, arg3?: any)} emit Emits an event with a given name.
|
||||
* This calls all the listeners that were listening for that name, with `arg1`, `arg2`, and `arg3` as arguments.
|
||||
* @property {function(): string[]} eventNames Gets the list of event names that have registered listeners.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Creates an object which can listen for and emit events.
|
||||
* This is similar to the EventEmitter API in Node's standard library, but it has a few differences.
|
||||
* The goal is to allow multiple modules to attach arbitrary listeners to the same emitter, without
|
||||
* letting the modules know about each other at all.
|
||||
* 1. It has no special keys like `error` and `newListener`, which would allow modules to detect when
|
||||
* another module throws an error or registers a listener.
|
||||
* 2. It calls listener functions without any `this` value. (`EventEmitter` calls listeners with a
|
||||
* `this` value of the emitter instance, which would give listeners access to other listeners.)
|
||||
* @returns {SafeEmitter} An emitter
|
||||
*/
|
||||
module.exports = () => {
|
||||
const listeners = Object.create(null);
|
||||
|
||||
return Object.freeze({
|
||||
on(eventName, listener) {
|
||||
if (eventName in listeners) {
|
||||
listeners[eventName].push(listener);
|
||||
} else {
|
||||
listeners[eventName] = [listener];
|
||||
}
|
||||
},
|
||||
emit(eventName, ...args) {
|
||||
if (eventName in listeners) {
|
||||
listeners[eventName].forEach(listener => listener(...args));
|
||||
}
|
||||
},
|
||||
eventNames() {
|
||||
return Object.keys(listeners);
|
||||
}
|
||||
});
|
||||
};
|
||||
152
node_modules/eslint/lib/util/source-code-fixer.js
generated
vendored
Normal file
152
node_modules/eslint/lib/util/source-code-fixer.js
generated
vendored
Normal file
@ -0,0 +1,152 @@
|
||||
/**
|
||||
* @fileoverview An object that caches and applies source code fixes.
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const debug = require("debug")("eslint:source-code-fixer");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Unicode byte order mark; detected at the start of the source text and re-attached to the output.
const BOM = "\uFEFF";
|
||||
|
||||
/**
 * Compares items in a messages array by range.
 * @param {Message} a The first message.
 * @param {Message} b The second message.
 * @returns {int} Negative if `a`'s fix starts (or, on a tie, ends) before `b`'s, positive if after, 0 if equal.
 * @private
 */
function compareMessagesByFixRange(a, b) {
    const byStart = a.fix.range[0] - b.fix.range[0];

    // Equal starts fall back to comparing end positions.
    return byStart || a.fix.range[1] - b.fix.range[1];
}
|
||||
|
||||
/**
 * Compares items in a messages array by line and column.
 * @param {Message} a The first message.
 * @param {Message} b The second message.
 * @returns {int} Negative if `a` is located before `b`, positive if after, 0 if equal.
 * @private
 */
function compareMessagesByLocation(a, b) {
    const lineDiff = a.line - b.line;

    // Same line: order by column.
    return lineDiff !== 0 ? lineDiff : a.column - b.column;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Utility for apply fixes to source code.
 * Instances carry no state; all functionality lives in static members.
 * @constructor
 */
function SourceCodeFixer() {
    // Freeze so an instance can never accumulate accidental state.
    Object.freeze(this);
}
|
||||
|
||||
/**
 * Applies the fixes specified by the messages to the given text. Tries to be
 * smart about the fixes and won't apply fixes over the same area in the text.
 * @param {string} sourceText The text to apply the changes to.
 * @param {Message[]} messages The array of messages reported by ESLint.
 * @param {boolean|Function} [shouldFix=true] Determines whether each message should be fixed;
 * when a function, it is called per problem and can veto individual fixes.
 * @returns {Object} An object containing the fixed text (`output`), whether any fix was
 * applied (`fixed`), and the messages that were not fixed (`messages`).
 */
SourceCodeFixer.applyFixes = function(sourceText, messages, shouldFix) {
    debug("Applying fixes");

    if (shouldFix === false) {
        debug("shouldFix parameter was false, not attempting fixes");
        return {
            fixed: false,
            messages,
            output: sourceText
        };
    }

    // clone the array
    const remainingMessages = [],
        fixes = [],
        bom = sourceText.startsWith(BOM) ? BOM : "",
        text = bom ? sourceText.slice(1) : sourceText;

    // NEGATIVE_INFINITY (not 0) so a fix with a negative start index is still accepted below.
    let lastPos = Number.NEGATIVE_INFINITY,
        output = bom;

    /**
     * Try to use the 'fix' from a problem.
     * @param {Message} problem The message object to apply fixes from
     * @returns {boolean} Whether fix was successfully applied
     */
    function attemptFix(problem) {
        const fix = problem.fix;
        const start = fix.range[0];
        const end = fix.range[1];

        // Remain it as a problem if it's overlapped or it's a negative range
        if (lastPos >= start || start > end) {
            remainingMessages.push(problem);
            return false;
        }

        // Remove BOM: a fix starting before index 0, or one that re-inserts the BOM itself
        // at index 0, takes over BOM handling, so drop the BOM that seeded `output`.
        if ((start < 0 && end >= 0) || (start === 0 && fix.text.startsWith(BOM))) {
            output = "";
        }

        // Make output to this fix.
        output += text.slice(Math.max(0, lastPos), Math.max(0, start));
        output += fix.text;
        lastPos = end;
        return true;
    }

    // Partition problems: those carrying a `fix` property vs. those that must stay reported.
    messages.forEach(problem => {
        if (Object.prototype.hasOwnProperty.call(problem, "fix")) {
            fixes.push(problem);
        } else {
            remainingMessages.push(problem);
        }
    });

    if (fixes.length) {
        debug("Found fixes to apply");
        let fixesWereApplied = false;

        // Apply in range order so the untouched text between fixes is emitted in sequence.
        for (const problem of fixes.sort(compareMessagesByFixRange)) {
            if (typeof shouldFix !== "function" || shouldFix(problem)) {
                attemptFix(problem);

                /*
                 * The only time attemptFix will fail is if a previous fix was
                 * applied which conflicts with it. So we can mark this as true.
                 */
                fixesWereApplied = true;
            } else {
                remainingMessages.push(problem);
            }
        }

        // Append whatever source text remains after the last applied fix.
        output += text.slice(Math.max(0, lastPos));

        return {
            fixed: fixesWereApplied,
            messages: remainingMessages.sort(compareMessagesByLocation),
            output
        };
    }

    debug("No fixes to apply");
    return {
        fixed: false,
        messages,
        output: bom + text
    };

};
|
||||
|
||||
// Expose the constructor, which carries the static applyFixes() entry point.
module.exports = SourceCodeFixer;
|
||||
105
node_modules/eslint/lib/util/source-code-utils.js
generated
vendored
Normal file
105
node_modules/eslint/lib/util/source-code-utils.js
generated
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
/**
|
||||
* @fileoverview Tools for obtaining SourceCode objects.
|
||||
* @author Ian VanSchooten
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const CLIEngine = require("../cli-engine"),
|
||||
globUtils = require("./glob-utils"),
|
||||
baseDefaultOptions = require("../../conf/default-cli-options");
|
||||
|
||||
const debug = require("debug")("eslint:source-code-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Get the SourceCode object for a single file
 * @param {string} filename The fully resolved filename to get SourceCode from.
 * @param {Object} options A CLIEngine options object.
 * @returns {SourceCode|null} The SourceCode object representing the file, or whatever the
 * linter last retained when the file produced no fatal error.
 * @throws {Error} If linting the file produced a fatal (e.g. parse) error; the message
 * includes the filename and position.
 */
function getSourceCodeOfFile(filename, options) {
    debug("getting sourceCode of", filename);

    // Disable all rules: only the parse result is needed, not lint messages.
    const opts = Object.assign({}, options, { rules: {} });
    const cli = new CLIEngine(opts);
    const results = cli.executeOnFiles([filename]);

    if (results && results.results[0] && results.results[0].messages[0] && results.results[0].messages[0].fatal) {
        const msg = results.results[0].messages[0];

        // Bug fix: the template previously emitted the literal text "$(unknown)" instead of
        // interpolating the filename, making parse errors impossible to locate.
        throw new Error(`(${filename}:${msg.line}:${msg.column}) ${msg.message}`);
    }

    // The linter retains the SourceCode of the file it processed last.
    const sourceCode = cli.linter.getSourceCode();

    return sourceCode;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
|
||||
/**
 * This callback is used to measure execution status in a progress bar
 * @callback progressCallback
 * @param {number} The total number of times the callback will be called.
 */

/**
 * Gets the SourceCode of a single file, or set of files.
 * @param {string[]|string} patterns A filename, directory name, or glob, or an array of them
 * @param {Object} [providedOptions] A CLIEngine options object. If not provided, the default cli options will be used.
 * @param {progressCallback} [providedCallback] Callback for reporting execution status
 * @returns {Object} The SourceCode of all processed files, keyed by filename.
 */
function getSourceCodeOfFiles(patterns, providedOptions, providedCallback) {
    const sourceCodes = {};
    const globPatternsList = typeof patterns === "string" ? [patterns] : patterns;
    const defaultOptions = Object.assign({}, baseDefaultOptions, { cwd: process.cwd() });
    let options, callback;

    // The second argument may be omitted entirely, be the callback, or be an options object.
    switch (typeof providedOptions) {
        case "undefined":
            options = defaultOptions;
            callback = null;
            break;
        case "function":
            callback = providedOptions;
            options = defaultOptions;
            break;
        case "object":
            options = Object.assign({}, defaultOptions, providedOptions);
            callback = providedCallback;
            break;

        // no default: any other type leaves options/callback undefined, as before
    }
    debug("constructed options:", options);

    const filenames = globUtils.listFilesToProcess(globPatternsList, options)
        .filter(fileInfo => !fileInfo.ignored)
        .map(fileInfo => fileInfo.filename);

    if (filenames.length === 0) {
        debug(`Did not find any files matching pattern(s): ${globPatternsList}`);
    }

    for (const filename of filenames) {
        const sourceCode = getSourceCodeOfFile(filename, options);

        if (sourceCode) {
            debug("got sourceCode of", filename);
            sourceCodes[filename] = sourceCode;
        }
        if (callback) {
            callback(filenames.length); // eslint-disable-line callback-return
        }
    }

    return sourceCodes;
}
|
||||
|
||||
// Public API: only the multi-file helper is exported; getSourceCodeOfFile stays private.
module.exports = {
    getSourceCodeOfFiles
};
|
||||
507
node_modules/eslint/lib/util/source-code.js
generated
vendored
Normal file
507
node_modules/eslint/lib/util/source-code.js
generated
vendored
Normal file
@ -0,0 +1,507 @@
|
||||
/**
|
||||
* @fileoverview Abstraction of JavaScript source code.
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const TokenStore = require("../token-store"),
|
||||
Traverser = require("./traverser"),
|
||||
astUtils = require("../util/ast-utils"),
|
||||
lodash = require("lodash");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Private
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Validates that the given AST has the required information.
 * @param {ASTNode} ast The Program node of the AST to check.
 * @throws {Error} If the AST doesn't contain the correct information.
 * @returns {void}
 * @private
 */
function validate(ast) {
    // Required properties paired with the exact error reported when each is absent.
    const requirements = [
        ["tokens", "AST is missing the tokens array."],
        ["comments", "AST is missing the comments array."],
        ["loc", "AST is missing location information."],
        ["range", "AST is missing range information"]
    ];

    for (const [key, errorMessage] of requirements) {
        if (!ast[key]) {
            throw new Error(errorMessage);
        }
    }
}
|
||||
|
||||
/**
 * Check to see if its a ES6 export declaration.
 * @param {ASTNode} astNode An AST node.
 * @returns {boolean} whether the given node represents an export declaration.
 * @private
 */
function looksLikeExport(astNode) {
    const exportNodeTypes = [
        "ExportDefaultDeclaration",
        "ExportNamedDeclaration",
        "ExportAllDeclaration",
        "ExportSpecifier"
    ];

    return exportNodeTypes.includes(astNode.type);
}
|
||||
|
||||
/**
 * Merges two sorted lists into a larger sorted list in O(n) time.
 * @param {Token[]} tokens The list of tokens.
 * @param {Token[]} comments The list of comments.
 * @returns {Token[]} A sorted list of tokens and comments.
 * @private
 */
function sortedMerge(tokens, comments) {
    const merged = [];
    let t = 0;
    let c = 0;

    while (t < tokens.length || c < comments.length) {
        // Take the token when comments are exhausted, or when the next token starts first.
        if (c >= comments.length || (t < tokens.length && tokens[t].range[0] < comments[c].range[0])) {
            merged.push(tokens[t]);
            t += 1;
        } else {
            merged.push(comments[c]);
            c += 1;
        }
    }

    return merged;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
class SourceCode extends TokenStore {
|
||||
|
||||
/**
|
||||
* Represents parsed source code.
|
||||
* @param {string|Object} textOrConfig - The source code text or config object.
|
||||
* @param {string} textOrConfig.text - The source code text.
|
||||
* @param {ASTNode} textOrConfig.ast - The Program node of the AST representing the code. This AST should be created from the text that BOM was stripped.
|
||||
* @param {Object|null} textOrConfig.parserServices - The parser services.
|
||||
* @param {ScopeManager|null} textOrConfig.scopeManager - The scope of this source code.
|
||||
* @param {Object|null} textOrConfig.visitorKeys - The visitor keys to traverse AST.
|
||||
* @param {ASTNode} [astIfNoConfig] - The Program node of the AST representing the code. This AST should be created from the text that BOM was stripped.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(textOrConfig, astIfNoConfig) {
|
||||
let text, ast, parserServices, scopeManager, visitorKeys;
|
||||
|
||||
// Process overloading.
|
||||
if (typeof textOrConfig === "string") {
|
||||
text = textOrConfig;
|
||||
ast = astIfNoConfig;
|
||||
} else if (typeof textOrConfig === "object" && textOrConfig !== null) {
|
||||
text = textOrConfig.text;
|
||||
ast = textOrConfig.ast;
|
||||
parserServices = textOrConfig.parserServices;
|
||||
scopeManager = textOrConfig.scopeManager;
|
||||
visitorKeys = textOrConfig.visitorKeys;
|
||||
}
|
||||
|
||||
validate(ast);
|
||||
super(ast.tokens, ast.comments);
|
||||
|
||||
/**
|
||||
* The flag to indicate that the source code has Unicode BOM.
|
||||
* @type boolean
|
||||
*/
|
||||
this.hasBOM = (text.charCodeAt(0) === 0xFEFF);
|
||||
|
||||
/**
|
||||
* The original text source code.
|
||||
* BOM was stripped from this text.
|
||||
* @type string
|
||||
*/
|
||||
this.text = (this.hasBOM ? text.slice(1) : text);
|
||||
|
||||
/**
|
||||
* The parsed AST for the source code.
|
||||
* @type ASTNode
|
||||
*/
|
||||
this.ast = ast;
|
||||
|
||||
/**
|
||||
* The parser services of this source code.
|
||||
* @type {Object}
|
||||
*/
|
||||
this.parserServices = parserServices || {};
|
||||
|
||||
/**
|
||||
* The scope of this source code.
|
||||
* @type {ScopeManager|null}
|
||||
*/
|
||||
this.scopeManager = scopeManager || null;
|
||||
|
||||
/**
|
||||
* The visitor keys to traverse AST.
|
||||
* @type {Object}
|
||||
*/
|
||||
this.visitorKeys = visitorKeys || Traverser.DEFAULT_VISITOR_KEYS;
|
||||
|
||||
// Check the source text for the presence of a shebang since it is parsed as a standard line comment.
|
||||
const shebangMatched = this.text.match(astUtils.SHEBANG_MATCHER);
|
||||
const hasShebang = shebangMatched && ast.comments.length && ast.comments[0].value === shebangMatched[1];
|
||||
|
||||
if (hasShebang) {
|
||||
ast.comments[0].type = "Shebang";
|
||||
}
|
||||
|
||||
this.tokensAndComments = sortedMerge(ast.tokens, ast.comments);
|
||||
|
||||
/**
|
||||
* The source code split into lines according to ECMA-262 specification.
|
||||
* This is done to avoid each rule needing to do so separately.
|
||||
* @type string[]
|
||||
*/
|
||||
this.lines = [];
|
||||
this.lineStartIndices = [0];
|
||||
|
||||
const lineEndingPattern = astUtils.createGlobalLinebreakMatcher();
|
||||
let match;
|
||||
|
||||
/*
|
||||
* Previously, this was implemented using a regex that
|
||||
* matched a sequence of non-linebreak characters followed by a
|
||||
* linebreak, then adding the lengths of the matches. However,
|
||||
* this caused a catastrophic backtracking issue when the end
|
||||
* of a file contained a large number of non-newline characters.
|
||||
* To avoid this, the current implementation just matches newlines
|
||||
* and uses match.index to get the correct line start indices.
|
||||
*/
|
||||
while ((match = lineEndingPattern.exec(this.text))) {
|
||||
this.lines.push(this.text.slice(this.lineStartIndices[this.lineStartIndices.length - 1], match.index));
|
||||
this.lineStartIndices.push(match.index + match[0].length);
|
||||
}
|
||||
this.lines.push(this.text.slice(this.lineStartIndices[this.lineStartIndices.length - 1]));
|
||||
|
||||
// Cache for comments found using getComments().
|
||||
this._commentCache = new WeakMap();
|
||||
|
||||
// don't allow modification of this object
|
||||
Object.freeze(this);
|
||||
Object.freeze(this.lines);
|
||||
}
|
||||
|
||||
/**
|
||||
* Split the source code into multiple lines based on the line delimiters.
|
||||
* @param {string} text Source code as a string.
|
||||
* @returns {string[]} Array of source code lines.
|
||||
* @public
|
||||
*/
|
||||
static splitLines(text) {
|
||||
return text.split(astUtils.createGlobalLinebreakMatcher());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the source code for the given node.
|
||||
* @param {ASTNode=} node The AST node to get the text for.
|
||||
* @param {int=} beforeCount The number of characters before the node to retrieve.
|
||||
* @param {int=} afterCount The number of characters after the node to retrieve.
|
||||
* @returns {string} The text representing the AST node.
|
||||
* @public
|
||||
*/
|
||||
getText(node, beforeCount, afterCount) {
|
||||
if (node) {
|
||||
return this.text.slice(Math.max(node.range[0] - (beforeCount || 0), 0),
|
||||
node.range[1] + (afterCount || 0));
|
||||
}
|
||||
return this.text;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the entire source text split into an array of lines.
|
||||
* @returns {Array} The source text as an array of lines.
|
||||
* @public
|
||||
*/
|
||||
getLines() {
|
||||
return this.lines;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves an array containing all comments in the source code.
|
||||
* @returns {ASTNode[]} An array of comment nodes.
|
||||
* @public
|
||||
*/
|
||||
getAllComments() {
|
||||
return this.ast.comments;
|
||||
}
|
||||
|
||||
    /**
     * Gets all comments for the given node, reproducing the legacy
     * leading/trailing comment attachment behavior that Espree used to
     * perform. Results are memoized per node in `this._commentCache`.
     * @param {ASTNode} node The AST node to get the comments for.
     * @returns {Object} An object containing a leading and trailing array
     *      of comments indexed by their position.
     * @public
     */
    getComments(node) {
        if (this._commentCache.has(node)) {
            return this._commentCache.get(node);
        }

        const comments = {
            leading: [],
            trailing: []
        };

        /*
         * Return all comments as leading comments of the Program node when
         * there is no executable code.
         */
        if (node.type === "Program") {
            if (node.body.length === 0) {
                comments.leading = node.comments;
            }
        } else {

            /*
             * Return comments as trailing comments of nodes that only contain
             * comments (to mimic the comment attachment behavior present in Espree).
             */
            if ((node.type === "BlockStatement" || node.type === "ClassBody") && node.body.length === 0 ||
                node.type === "ObjectExpression" && node.properties.length === 0 ||
                node.type === "ArrayExpression" && node.elements.length === 0 ||
                node.type === "SwitchStatement" && node.cases.length === 0
            ) {
                comments.trailing = this.getTokens(node, {
                    includeComments: true,
                    filter: astUtils.isCommentToken
                });
            }

            /*
             * Iterate over tokens before and after node and collect comment tokens.
             * Do not include comments that exist outside of the parent node
             * to avoid duplication.
             */
            let currentToken = this.getTokenBefore(node, { includeComments: true });

            while (currentToken && astUtils.isCommentToken(currentToken)) {

                // NOTE(review): relies on legacy `start`/`end` token properties
                // rather than `range` — presumably set by the parser; confirm.
                if (node.parent && (currentToken.start < node.parent.start)) {
                    break;
                }
                comments.leading.push(currentToken);
                currentToken = this.getTokenBefore(currentToken, { includeComments: true });
            }

            // Comments were collected walking backwards; restore source order.
            comments.leading.reverse();

            currentToken = this.getTokenAfter(node, { includeComments: true });

            while (currentToken && astUtils.isCommentToken(currentToken)) {
                if (node.parent && (currentToken.end > node.parent.end)) {
                    break;
                }
                comments.trailing.push(currentToken);
                currentToken = this.getTokenAfter(currentToken, { includeComments: true });
            }
        }

        this._commentCache.set(node, comments);
        return comments;
    }
|
||||
|
||||
    /**
     * Retrieves the JSDoc comment for a given node.
     * @param {ASTNode} node The AST node to get the comment for.
     * @returns {Token|null} The Block comment token containing the JSDoc comment
     *      for the given node or null if not found.
     * @public
     * @deprecated
     */
    getJSDocComment(node) {

        /**
         * Checks for the presence of a JSDoc comment for the given node and returns it.
         * A JSDoc comment is a Block comment whose value starts with `*` and that
         * ends at most one line above the annotated node.
         * @param {ASTNode} astNode The AST node to get the comment for.
         * @returns {Token|null} The Block comment token containing the JSDoc comment
         *      for the given node or null if not found.
         * @private
         */
        const findJSDocComment = astNode => {
            const tokenBefore = this.getTokenBefore(astNode, { includeComments: true });

            if (
                tokenBefore &&
                astUtils.isCommentToken(tokenBefore) &&
                tokenBefore.type === "Block" &&
                tokenBefore.value.charAt(0) === "*" &&
                astNode.loc.start.line - tokenBefore.loc.end.line <= 1
            ) {
                return tokenBefore;
            }

            return null;
        };
        let parent = node.parent;

        switch (node.type) {
            case "ClassDeclaration":
            case "FunctionDeclaration":

                // For exported declarations the JSDoc sits above the export node.
                return findJSDocComment(looksLikeExport(parent) ? parent : node);

            case "ClassExpression":
                return findJSDocComment(parent.parent);

            case "ArrowFunctionExpression":
            case "FunctionExpression":
                if (parent.type !== "CallExpression" && parent.type !== "NewExpression") {

                    // Walk up to the statement/property that carries the comment.
                    while (
                        !this.getCommentsBefore(parent).length &&
                        !/Function/u.test(parent.type) &&
                        parent.type !== "MethodDefinition" &&
                        parent.type !== "Property"
                    ) {
                        parent = parent.parent;

                        if (!parent) {
                            break;
                        }
                    }

                    if (parent && parent.type !== "FunctionDeclaration" && parent.type !== "Program") {
                        return findJSDocComment(parent);
                    }
                }

                return findJSDocComment(node);

            // falls through
            default:
                return null;
        }
    }
|
||||
|
||||
/**
|
||||
* Gets the deepest node containing a range index.
|
||||
* @param {int} index Range index of the desired node.
|
||||
* @returns {ASTNode} The node if found or null if not found.
|
||||
* @public
|
||||
*/
|
||||
getNodeByRangeIndex(index) {
|
||||
let result = null;
|
||||
|
||||
Traverser.traverse(this.ast, {
|
||||
visitorKeys: this.visitorKeys,
|
||||
enter(node) {
|
||||
if (node.range[0] <= index && index < node.range[1]) {
|
||||
result = node;
|
||||
} else {
|
||||
this.skip();
|
||||
}
|
||||
},
|
||||
leave(node) {
|
||||
if (node === result) {
|
||||
this.break();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if two tokens have at least one whitespace character
|
||||
* between them. This completely disregards comments in making the
|
||||
* determination, so comments count as zero-length substrings.
|
||||
* @param {Token} first The token to check after.
|
||||
* @param {Token} second The token to check before.
|
||||
* @returns {boolean} True if there is only space between tokens, false
|
||||
* if there is anything other than whitespace between tokens.
|
||||
* @public
|
||||
*/
|
||||
isSpaceBetweenTokens(first, second) {
|
||||
const text = this.text.slice(first.range[1], second.range[0]);
|
||||
|
||||
return /\s/u.test(text.replace(/\/\*.*?\*\//gu, ""));
|
||||
}
|
||||
|
||||
    /**
     * Converts a source text index into a (line, column) pair.
     * @param {number} index The index of a character in a file
     * @returns {Object} A {line, column} location object with a 0-indexed column
     * @throws {TypeError} If `index` is not a number.
     * @throws {RangeError} If `index` is outside [0, text.length].
     * @public
     */
    getLocFromIndex(index) {
        if (typeof index !== "number") {
            throw new TypeError("Expected `index` to be a number.");
        }

        if (index < 0 || index > this.text.length) {
            throw new RangeError(`Index out of range (requested index ${index}, but source text has length ${this.text.length}).`);
        }

        /*
         * For an argument of this.text.length, return the location one "spot" past the last character
         * of the file. If the last character is a linebreak, the location will be column 0 of the next
         * line; otherwise, the location will be in the next column on the same line.
         *
         * See getIndexFromLoc for the motivation for this special case.
         */
        if (index === this.text.length) {
            return { line: this.lines.length, column: this.lines[this.lines.length - 1].length };
        }

        /*
         * To figure out which line rangeIndex is on, determine the last index at which rangeIndex could
         * be inserted into lineIndices to keep the list sorted.
         */
        const lineNumber = lodash.sortedLastIndex(this.lineStartIndices, index);

        // Column is the offset from the start index of the (1-based) line.
        return { line: lineNumber, column: index - this.lineStartIndices[lineNumber - 1] };
    }
|
||||
|
||||
    /**
     * Converts a (line, column) pair into a range index.
     * @param {Object} loc A line/column location
     * @param {number} loc.line The line number of the location (1-indexed)
     * @param {number} loc.column The column number of the location (0-indexed)
     * @returns {number} The range index of the location in the file.
     * @throws {TypeError} If `loc` is not an object with numeric line/column.
     * @throws {RangeError} If the line or column is out of range.
     * @public
     */
    getIndexFromLoc(loc) {
        if (typeof loc !== "object" || typeof loc.line !== "number" || typeof loc.column !== "number") {
            throw new TypeError("Expected `loc` to be an object with numeric `line` and `column` properties.");
        }

        if (loc.line <= 0) {
            throw new RangeError(`Line number out of range (line ${loc.line} requested). Line numbers should be 1-based.`);
        }

        if (loc.line > this.lineStartIndices.length) {
            throw new RangeError(`Line number out of range (line ${loc.line} requested, but only ${this.lineStartIndices.length} lines present).`);
        }

        // End of the requested line; the last line ends at the end of the text.
        const lineStartIndex = this.lineStartIndices[loc.line - 1];
        const lineEndIndex = loc.line === this.lineStartIndices.length ? this.text.length : this.lineStartIndices[loc.line];
        const positionIndex = lineStartIndex + loc.column;

        /*
         * By design, getIndexFromLoc({ line: lineNum, column: 0 }) should return the start index of
         * the given line, provided that the line number is valid element of this.lines. Since the
         * last element of this.lines is an empty string for files with trailing newlines, add a
         * special case where getting the index for the first location after the end of the file
         * will return the length of the file, rather than throwing an error. This allows rules to
         * use getIndexFromLoc consistently without worrying about edge cases at the end of a file.
         */
        if (
            loc.line === this.lineStartIndices.length && positionIndex > lineEndIndex ||
            loc.line < this.lineStartIndices.length && positionIndex >= lineEndIndex
        ) {
            throw new RangeError(`Column number out of range (column ${loc.column} requested, but the length of line ${loc.line} is ${lineEndIndex - lineStartIndex}).`);
        }

        return positionIndex;
    }
|
||||
}
|
||||
|
||||
module.exports = SourceCode;
|
||||
139
node_modules/eslint/lib/util/timing.js
generated
vendored
Normal file
139
node_modules/eslint/lib/util/timing.js
generated
vendored
Normal file
@ -0,0 +1,139 @@
|
||||
/**
|
||||
* @fileoverview Tracks performance of individual rules.
|
||||
* @author Brandon Mills
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/* istanbul ignore next */
/**
 * Align the string to left by padding it on the right up to `len` characters.
 * @param {string} str string to evaluate
 * @param {int} len desired total length (must be >= str.length)
 * @param {string} ch pad character (defaults to a space)
 * @returns {string} modified string
 * @private
 */
function alignLeft(str, len, ch) {
    const padChar = ch || " ";

    return str + padChar.repeat(len - str.length);
}
|
||||
|
||||
/* istanbul ignore next */
/**
 * Align the string to right by padding it on the left up to `len` characters.
 * @param {string} str string to evaluate
 * @param {int} len desired total length (must be >= str.length)
 * @param {string} ch pad character (defaults to a space)
 * @returns {string} modified string
 * @private
 */
function alignRight(str, len, ch) {
    const padChar = ch || " ";

    return padChar.repeat(len - str.length) + str;
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Module definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// Timing is opt-in: set the TIMING environment variable to enable reporting.
const enabled = !!process.env.TIMING;

const HEADERS = ["Rule", "Time (ms)", "Relative"];

// Column alignment functions, index-matched to HEADERS.
const ALIGN = [alignLeft, alignRight, alignRight];
|
||||
|
||||
/* istanbul ignore next */
/**
 * Renders the timing data as a Markdown-style table — the ten slowest
 * entries with a relative-percentage column — and prints it to stdout.
 * @param {Object} data Map of rule id to accumulated milliseconds
 * @returns {void}
 * @private
 */
function display(data) {
    let total = 0;

    // Build [key, time] rows, accumulate the grand total over ALL entries,
    // then keep only the ten slowest.
    const rows = Object.keys(data)
        .map(key => {
            const time = data[key];

            total += time;
            return [key, time];
        })
        .sort((a, b) => b[1] - a[1])
        .slice(0, 10);

    // Append the relative percentage and format times to 3 decimal places.
    rows.forEach(row => {
        row.push(`${(row[1] * 100 / total).toFixed(1)}%`);
        row[1] = row[1].toFixed(3);
    });

    rows.unshift(HEADERS);

    // Compute the widest cell per column so columns can be padded evenly.
    const widths = [];

    rows.forEach(row => {
        const len = row.length;

        for (let i = 0; i < len; i++) {
            const n = row[i].length;

            if (!widths[i] || n > widths[i]) {
                widths[i] = n;
            }
        }
    });

    const table = rows.map(row => (
        row
            .map((cell, index) => ALIGN[index](cell, widths[index]))
            .join(" | ")
    ));

    // Insert the Markdown header-separator row (e.g. `:---|---:`) after the
    // header; interior columns get 2 extra dashes to match the " | " joins.
    table.splice(1, 0, widths.map((width, index) => {
        const extraAlignment = index !== 0 && index !== widths.length - 1 ? 2 : 1;

        return ALIGN[index](":", width + extraAlignment, "-");
    }).join("|"));

    console.log(table.join("\n")); // eslint-disable-line no-console
}
|
||||
|
||||
/* istanbul ignore next */
module.exports = (function() {

    // Accumulated time per key (rule id), in milliseconds.
    const data = Object.create(null);

    /**
     * Wraps a function so each call's wall-clock duration is added
     * to `data[key]`.
     * @param {*} key key from the data object
     * @param {Function} fn function to be called
     * @returns {Function} function to be executed
     * @private
     */
    function time(key, fn) {
        if (typeof data[key] === "undefined") {
            data[key] = 0;
        }

        return function(...args) {
            let t = process.hrtime();

            fn(...args);
            t = process.hrtime(t);

            // hrtime yields [seconds, nanoseconds]; convert to milliseconds.
            data[key] += t[0] * 1e3 + t[1] / 1e6;
        };
    }

    // Print the report once at process exit, only when TIMING is set.
    if (enabled) {
        process.on("exit", () => {
            display(data);
        });
    }

    return {
        time,
        enabled
    };

}());
|
||||
193
node_modules/eslint/lib/util/traverser.js
generated
vendored
Normal file
193
node_modules/eslint/lib/util/traverser.js
generated
vendored
Normal file
@ -0,0 +1,193 @@
|
||||
/**
|
||||
* @fileoverview Traverser to traverse AST trees.
|
||||
* @author Nicholas C. Zakas
|
||||
* @author Toru Nagashima
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const vk = require("eslint-visitor-keys");
|
||||
const debug = require("debug")("eslint:traverser");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
 * Do nothing.
 * Used as the default `enter`/`leave` callback for Traverser.
 * @returns {void}
 */
function noop() {

    // do nothing.
}
|
||||
|
||||
/**
 * Check whether the given value is an ASTNode or not: a non-null object
 * with a string `type` property.
 * @param {any} x The value to check.
 * @returns {boolean} `true` if the value is an ASTNode.
 */
function isNode(x) {
    if (x === null) {
        return false;
    }

    return typeof x === "object" && typeof x.type === "string";
}
|
||||
|
||||
/**
 * Get the visitor keys of a given node, falling back to keys estimated
 * from the node's own properties when its type is not in the map.
 * @param {Object} visitorKeys The map of visitor keys.
 * @param {ASTNode} node The node to get their visitor keys.
 * @returns {string[]} The visitor keys of the node.
 */
function getVisitorKeys(visitorKeys, node) {
    const configuredKeys = visitorKeys[node.type];

    if (configuredKeys) {
        return configuredKeys;
    }

    // Unknown node type: estimate keys and log for debugging.
    const estimatedKeys = vk.getKeys(node);

    debug("Unknown node type \"%s\": Estimated visitor keys %j", node.type, estimatedKeys);
    return estimatedKeys;
}
|
||||
|
||||
/**
 * The traverser class to traverse AST trees.
 */
class Traverser {
    constructor() {

        // Traversal state; reset by every call to `traverse()`.
        this._current = null;
        this._parents = [];
        this._skipped = false;
        this._broken = false;
        this._visitorKeys = null;
        this._enter = null;
        this._leave = null;
    }

    /**
     * Gives the node currently being visited.
     * @returns {ASTNode} The current node.
     */
    current() {
        return this._current;
    }

    /**
     * Gives a copy of the ancestor-node stack.
     * @returns {ASTNode[]} The ancestor nodes.
     */
    parents() {
        return this._parents.slice(0);
    }

    /**
     * Break the current traversal.
     * @returns {void}
     */
    break() {
        this._broken = true;
    }

    /**
     * Skip child nodes for the current traversal.
     * @returns {void}
     */
    skip() {
        this._skipped = true;
    }

    /**
     * Traverse the given AST tree.
     * @param {ASTNode} node The root node to traverse.
     * @param {Object} options The option object.
     * @param {Object} [options.visitorKeys=DEFAULT_VISITOR_KEYS] The keys of each node types to traverse child nodes. Default is `./default-visitor-keys.json`.
     * @param {Function} [options.enter=noop] The callback function which is called on entering each node.
     * @param {Function} [options.leave=noop] The callback function which is called on leaving each node.
     * @returns {void}
     */
    traverse(node, options) {
        this._current = null;
        this._parents = [];
        this._skipped = false;
        this._broken = false;
        this._visitorKeys = options.visitorKeys || vk.KEYS;
        this._enter = options.enter || noop;
        this._leave = options.leave || noop;
        this._traverse(node, null);
    }

    /**
     * Traverse the given AST tree recursively (depth-first, pre/post order
     * via the `enter`/`leave` callbacks).
     * @param {ASTNode} node The current node.
     * @param {ASTNode|null} parent The parent node.
     * @returns {void}
     * @private
     */
    _traverse(node, parent) {
        if (!isNode(node)) {
            return;
        }

        this._current = node;
        this._skipped = false;
        this._enter(node, parent);

        // `skip()` prunes this subtree; `break()` halts the whole traversal.
        if (!this._skipped && !this._broken) {
            const keys = getVisitorKeys(this._visitorKeys, node);

            if (keys.length >= 1) {
                this._parents.push(node);
                for (let i = 0; i < keys.length && !this._broken; ++i) {
                    const child = node[keys[i]];

                    if (Array.isArray(child)) {
                        for (let j = 0; j < child.length && !this._broken; ++j) {
                            this._traverse(child[j], node);
                        }
                    } else {
                        this._traverse(child, node);
                    }
                }
                this._parents.pop();
            }
        }

        // `leave` is not called for a node once the traversal has been broken.
        if (!this._broken) {
            this._leave(node, parent);
        }

        this._current = parent;
    }

    /**
     * Calculates the keys to use for traversal.
     * @param {ASTNode} node The node to read keys from.
     * @returns {string[]} An array of keys to visit on the node.
     * @private
     */
    static getKeys(node) {
        return vk.getKeys(node);
    }

    /**
     * Traverse the given AST tree with a one-off Traverser instance.
     * @param {ASTNode} node The root node to traverse.
     * @param {Object} options The option object.
     * @param {Object} [options.visitorKeys=DEFAULT_VISITOR_KEYS] The keys of each node types to traverse child nodes. Default is `./default-visitor-keys.json`.
     * @param {Function} [options.enter=noop] The callback function which is called on entering each node.
     * @param {Function} [options.leave=noop] The callback function which is called on leaving each node.
     * @returns {void}
     */
    static traverse(node, options) {
        new Traverser().traverse(node, options);
    }

    /**
     * The default visitor keys.
     * @type {Object}
     */
    static get DEFAULT_VISITOR_KEYS() {
        return vk.KEYS;
    }
}
|
||||
|
||||
module.exports = Traverser;
|
||||
11
node_modules/eslint/lib/util/unicode/index.js
generated
vendored
Normal file
11
node_modules/eslint/lib/util/unicode/index.js
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
/**
 * @author Toru Nagashima <https://github.com/mysticatea>
 */
"use strict";

// Aggregates the Unicode character-classification helpers from the
// sibling modules into a single export.
module.exports = {
    isCombiningCharacter: require("./is-combining-character"),
    isEmojiModifier: require("./is-emoji-modifier"),
    isRegionalIndicatorSymbol: require("./is-regional-indicator-symbol"),
    isSurrogatePair: require("./is-surrogate-pair")
};
|
||||
13
node_modules/eslint/lib/util/unicode/is-combining-character.js
generated
vendored
Normal file
13
node_modules/eslint/lib/util/unicode/is-combining-character.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
13
node_modules/eslint/lib/util/unicode/is-emoji-modifier.js
generated
vendored
Normal file
13
node_modules/eslint/lib/util/unicode/is-emoji-modifier.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
/**
|
||||
* @author Toru Nagashima <https://github.com/mysticatea>
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Check whether a given character is an emoji modifier.
|
||||
* @param {number} code The character code to check.
|
||||
* @returns {boolean} `true` if the character is an emoji modifier.
|
||||
*/
|
||||
module.exports = function isEmojiModifier(code) {
|
||||
return code >= 0x1F3FB && code <= 0x1F3FF;
|
||||
};
|
||||
13
node_modules/eslint/lib/util/unicode/is-regional-indicator-symbol.js
generated
vendored
Normal file
13
node_modules/eslint/lib/util/unicode/is-regional-indicator-symbol.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
/**
|
||||
* @author Toru Nagashima <https://github.com/mysticatea>
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Check whether a given character is a regional indicator symbol.
|
||||
* @param {number} code The character code to check.
|
||||
* @returns {boolean} `true` if the character is a regional indicator symbol.
|
||||
*/
|
||||
module.exports = function isRegionalIndicatorSymbol(code) {
|
||||
return code >= 0x1F1E6 && code <= 0x1F1FF;
|
||||
};
|
||||
14
node_modules/eslint/lib/util/unicode/is-surrogate-pair.js
generated
vendored
Normal file
14
node_modules/eslint/lib/util/unicode/is-surrogate-pair.js
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
/**
|
||||
* @author Toru Nagashima <https://github.com/mysticatea>
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Check whether given two characters are a surrogate pair.
|
||||
* @param {number} lead The code of the lead character.
|
||||
* @param {number} tail The code of the tail character.
|
||||
* @returns {boolean} `true` if the character pair is a surrogate pair.
|
||||
*/
|
||||
module.exports = function isSurrogatePair(lead, tail) {
|
||||
return lead >= 0xD800 && lead < 0xDC00 && tail >= 0xDC00 && tail < 0xE000;
|
||||
};
|
||||
34
node_modules/eslint/lib/util/xml-escape.js
generated
vendored
Normal file
34
node_modules/eslint/lib/util/xml-escape.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
/**
|
||||
* @fileoverview XML character escaper
|
||||
* @author George Chung
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the escaped value for a character
|
||||
* @param {string} s string to examine
|
||||
* @returns {string} severity level
|
||||
* @private
|
||||
*/
|
||||
module.exports = function(s) {
|
||||
return (`${s}`).replace(/[<>&"'\x00-\x1F\x7F\u0080-\uFFFF]/gu, c => { // eslint-disable-line no-control-regex
|
||||
switch (c) {
|
||||
case "<":
|
||||
return "<";
|
||||
case ">":
|
||||
return ">";
|
||||
case "&":
|
||||
return "&";
|
||||
case "\"":
|
||||
return """;
|
||||
case "'":
|
||||
return "'";
|
||||
default:
|
||||
return `&#${c.charCodeAt(0)};`;
|
||||
}
|
||||
});
|
||||
};
|
||||
Reference in New Issue
Block a user