Codacy style changes (minor)
@@ -48,7 +48,7 @@ let snippetArchiveTokens = {data: snippetsArchiveData.data.map(snippet => {
     meta: {
       hash: snippet.meta.hash
     }
-  }
+  };
 }), meta: { specification: "http://jsonapi.org/format/"}};
 // Write data
 fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetAnalytics.json'), JSON.stringify(snippetTokens, null, 2));

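The hunk above terminates the per-snippet token objects with a semicolon before the combined payload is written out. A minimal sketch of the surrounding shape, assuming a hypothetical `snippetsArchiveData` input, an `id` field and an output filename (none of which are shown in this hunk):

```js
const fs = require('fs');
const path = require('path');

// Hypothetical input shaped like the archive data used above.
const snippetsArchiveData = { data: [{ id: 'anyOf', meta: { hash: 'abc123' } }] };
const OUTPUT_PATH = './output'; // assumed output directory constant
if (!fs.existsSync(OUTPUT_PATH)) fs.mkdirSync(OUTPUT_PATH);

// Rebuild the JSON:API-style payload: one token object per snippet.
let snippetArchiveTokens = {
  data: snippetsArchiveData.data.map(snippet => ({
    id: snippet.id,
    meta: {
      hash: snippet.meta.hash
    }
  })),
  meta: { specification: 'http://jsonapi.org/format/' }
};

// Write data, pretty-printed as in the script above.
fs.writeFileSync(
  path.join(OUTPUT_PATH, 'snippetArchiveAnalytics.json'),
  JSON.stringify(snippetArchiveTokens, null, 2)
);
```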
@@ -15,7 +15,7 @@ if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_M
   console.log(`${chalk.green('NOBUILD')} README build terminated, parent commit is a Travis build!`);
   process.exit(0);
 }
-if (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')){
+if (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')) {
   console.log(`${chalk.green('ARCHIVE')} Cron job or custom build, building archive README!`);
   console.time('Builder');
   let snippets = {};

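The guard above only fires on Travis cron or API-triggered builds. A minimal sketch of how such a check might be wired up, assuming `util.isTravisCI()` simply tests the `TRAVIS` environment variable (an assumption; the real helper may differ):

```js
const chalk = require('chalk');

// Assumed helper: Travis CI sets TRAVIS=true in its build environment.
const isTravisCI = () => process.env['TRAVIS'] === 'true';

const eventType = process.env['TRAVIS_EVENT_TYPE'];
if (isTravisCI() && (eventType === 'cron' || eventType === 'api')) {
  // Cron jobs and API-triggered builds rebuild the archive README.
  console.log(`${chalk.green('ARCHIVE')} Cron job or custom build, building archive README!`);
} else {
  console.log(`${chalk.green('NOBUILD')} Not an archive build, skipping.`);
}
```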
@@ -46,7 +46,7 @@ These snippets, while useful and interesting, didn\'t quite make it into the rep
 for(const snippet of Object.entries(snippets))
   output += `* [\`${snippet[0].slice(0,-3)}\`](#${snippet[0].toLowerCase().slice(0,-3)})\n`;
 output += '\n---\n';
-for (const snippet of Object.entries(snippets)){
+for (const snippet of Object.entries(snippets)) {
   let data = snippet[1];
   data =
     data.slice(0, data.lastIndexOf('```js')) +

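The first loop above builds the table-of-contents entries from `[filename, markdown]` pairs, stripping the `.md` extension with `slice(0, -3)`. A small worked example of the line it emits, using a hypothetical snippet filename:

```js
// Hypothetical entry: key is the snippet's filename, value is its markdown body.
const snippet = ['anagrams.md', '### anagrams\n...'];

// Same expression as in the builder: drop '.md', link to the lowercased anchor.
const tocLine = `* [\`${snippet[0].slice(0, -3)}\`](#${snippet[0].toLowerCase().slice(0, -3)})\n`;

console.log(tocLine); // logs: * [`anagrams`](#anagrams)
```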
@@ -140,7 +140,7 @@ try {
   for (const taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag)) {
     let data = snippets[taggedSnippet[0] + '.md'];
     // Add advanced tag
-    if(taggedSnippet[1].includes('advanced')){
+    if(taggedSnippet[1].includes('advanced')) {
       data = data.split(/\r?\n/);
       data[0] = data[0] +' ';
       data = data.join('\n');

@@ -13,7 +13,7 @@ try {
   const output = glossaryFiles.reduce(
     (accumulator, currentFilename) =>
       accumulator.toLowerCase().replace(/\.[^/.]+$/, "") + "\n" +
-      currentFilename.toLowerCase().replace(/\.[^/.]+$/, ""));
+      currentFilename.toLowerCase().replace(/\.[^/.]+$/, ""))+'\n';
   fs.writeFileSync('glossary/keyword_database', output);
 } catch (err) {
   console.log(`${chalk.red('ERROR!')} During glossary keyword_database generation: ${err}`);

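The change above appends a trailing newline to the reduced keyword list. A worked example of what the reducer produces, assuming `glossaryFiles` is a plain array of glossary filenames (hypothetical values):

```js
// Hypothetical directory listing of glossary term files.
const glossaryFiles = ['Array.md', 'Callback.md', 'Closure.md'];

// Same reduction as in the script: strip extensions, lowercase, join with
// newlines, then add the trailing newline introduced by this commit.
const output = glossaryFiles.reduce(
  (accumulator, currentFilename) =>
    accumulator.toLowerCase().replace(/\.[^/.]+$/, "") + "\n" +
    currentFilename.toLowerCase().replace(/\.[^/.]+$/, "")) + '\n';

console.log(JSON.stringify(output)); // "array\ncallback\nclosure\n"
```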
@@ -23,7 +23,7 @@ locales.forEach(locale => {
   Object.keys(snippets).forEach(snippet => {
     const snippetName = snippet.split('.')[0];
     const snippetHash = util.hashData(snippets[snippet]);
-    if(locData.hasOwnProperty(snippetName)){
+    if(locData.hasOwnProperty(snippetName)) {
       if (locData[snippetName].hash !== snippetHash) {
         existingData = existingData.indexOf(' => '+snippetHash) !== -1 ? existingData : existingData.replace(locData[snippetName].hash, locData[snippetName].hash+' => '+snippetHash);
         hashChanges.push({snippetName, oldHash: locData[snippetName].hash.split(' => ')[0], newHash: snippetHash});

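The block above compares a freshly computed snippet hash against the one recorded for each translation and records an `old => new` transition when they differ. A minimal sketch of that bookkeeping, assuming `util.hashData` is a content hash (approximated here with Node's crypto module) and using hypothetical in-memory data:

```js
const crypto = require('crypto');

// Assumed stand-in for util.hashData: hash the snippet's markdown content.
const hashData = data => crypto.createHash('sha256').update(data).digest('hex');

// Hypothetical state: previously recorded hash vs. current snippet content.
const locData = { anagrams: { hash: 'oldhash123' } };
const snippets = { 'anagrams.md': '### anagrams\nUpdated body...' };
const hashChanges = [];

Object.keys(snippets).forEach(snippet => {
  const snippetName = snippet.split('.')[0];
  const snippetHash = hashData(snippets[snippet]);
  if (locData.hasOwnProperty(snippetName) && locData[snippetName].hash !== snippetHash) {
    // Record the transition so translators can see which snippets changed.
    hashChanges.push({
      snippetName,
      oldHash: locData[snippetName].hash.split(' => ')[0],
      newHash: snippetHash
    });
  }
});

console.log(hashChanges);
```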
@@ -16,7 +16,7 @@ if (!fs.existsSync(DIST)) fs.mkdirSync(DIST);
 const es5 = babel({ presets: [['env', { modules: false }]] });
 const min = minify({ comments: false });
 // Create the bundles
-(async () => {
+(async() => {
   const bundle = await rollup({ input: INPUT_FILE });
   const bundleES5 = await rollup({ input: INPUT_FILE, plugins: [es5] });
   const bundleMin = await rollup({ input: INPUT_FILE, plugins: [min] });

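The async IIFE above produces three bundles from the same entry point. A minimal sketch of the full flow, assuming the bundles are written with `bundle.write()`; the plugin packages, `INPUT_FILE` value, output paths and formats below are assumptions, since this hunk does not show them:

```js
const { rollup } = require('rollup');
const babel = require('rollup-plugin-babel');        // assumed plugin package
const minify = require('rollup-plugin-babel-minify'); // assumed plugin package

const INPUT_FILE = 'imports.js'; // assumed entry point
const es5 = babel({ presets: [['env', { modules: false }]] });
const min = minify({ comments: false });

// Create the bundles: plain ESM build, transpiled ES5 build, minified build.
(async() => {
  const bundle = await rollup({ input: INPUT_FILE });
  const bundleES5 = await rollup({ input: INPUT_FILE, plugins: [es5] });
  const bundleMin = await rollup({ input: INPUT_FILE, plugins: [min] });

  // Hypothetical output targets; the real script's names/formats may differ.
  await bundle.write({ file: 'dist/_30s.esm.js', format: 'es' });
  await bundleES5.write({ file: 'dist/_30s.es5.js', format: 'umd', name: '_30s' });
  await bundleMin.write({ file: 'dist/_30s.min.js', format: 'umd', name: '_30s' });
})();
```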
@@ -359,11 +359,8 @@ try {
   archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
 
-
   archivedOutput += `${archivedEndPart}`;
 
-
-
   // Generate and minify 'archive.html' file
   const minifiedArchivedOutput = minify(archivedOutput, {
     collapseBooleanAttributes: true,

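`util.optimizeNodes` is not shown in this diff; judging from the call above, it merges adjacent keyword token spans in the highlighted HTML. A hedged guess at what such a helper might look like, purely illustrative and not the repository's actual implementation:

```js
// Illustrative only: reapply the replacer until no two adjacent nodes of the
// same token class remain, so chains of spans collapse into one.
const optimizeNodes = (data, regexp, replacer) => {
  while (regexp.test(data)) {
    data = data.replace(regexp, replacer);
    regexp.lastIndex = 0; // reset the global regex between passes
  }
  return data;
};

const html = '<span class="token keyword">const</span> <span class="token keyword">x</span>';
console.log(
  optimizeNodes(
    html,
    /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
    (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
  )
); // <span class="token keyword">const x</span>
```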
@@ -11,9 +11,9 @@ const levenshteinDistance = (string1, string2) => {
   if(string2.length === 0) return string1.length;
   let matrix = Array(string2.length + 1).fill(0).map((x,i) => [i]);
   matrix[0] = Array(string1.length + 1).fill(0).map((x,i) => i);
-  for(let i = 1; i <= string2.length; i++){
-    for(let j = 1; j<=string1.length; j++){
-      if(string2[i-1] === string1[j-1]){
+  for(let i = 1; i <= string2.length; i++) {
+    for(let j = 1; j<=string1.length; j++) {
+      if(string2[i-1] === string1[j-1]) {
       matrix[i][j] = matrix[i-1][j-1];
     }
     else{

@@ -6,5 +6,5 @@ test('cleanObj is a Function', () => {
 });
 const testObj = { a: 1, b: 2, children: { a: 1, b: 2 } };
 test('Removes any properties except the ones specified from a JSON object', () => {
-  expect(cleanObj(testObj, ['a'], 'children')).toEqual({ a: 1, children : { a: 1}});
+  expect(cleanObj(testObj, ['a'], 'children')).toEqual({ a: 1, children: { a: 1}});
 });

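For reference, the assertion above expects `cleanObj` to keep only the listed keys while recursing into the named child property. One way such a function might be written to satisfy this test (a sketch, not necessarily the repository's snippet):

```js
// Keep only keysToKeep; recurse into the property named by childIndicator.
const cleanObj = (obj, keysToKeep = [], childIndicator) => {
  Object.keys(obj).forEach(key => {
    if (key === childIndicator) {
      cleanObj(obj[key], keysToKeep, childIndicator);
    } else if (!keysToKeep.includes(key)) {
      delete obj[key];
    }
  });
  return obj;
};

const testObj = { a: 1, b: 2, children: { a: 1, b: 2 } };
console.log(cleanObj(testObj, ['a'], 'children')); // { a: 1, children: { a: 1 } }
```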
@@ -12,8 +12,8 @@ test('When n is odd, times by 3 and add 1', () => {
 });
 test('Eventually reaches 1', () => {
   let n = 9;
-  while(true){
-    if (n === 1){
+  while(true) {
+    if (n === 1) {
       expect(n).toBe(1);
       break;
     }

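The loop above iterates until the sequence reaches 1; the step function itself sits outside the hunk. A sketch of the kind of step the test titles describe, with the hypothetical name `collatzStep` (the tested snippet's real name is not shown here):

```js
// Hypothetical single step: halve even numbers, otherwise times by 3 and add 1.
const collatzStep = n => (n % 2 === 0 ? n / 2 : 3 * n + 1);

let n = 9;
while (true) {
  if (n === 1) {
    console.log('Reached 1');
    break;
  }
  n = collatzStep(n);
}
```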
@@ -9,5 +9,5 @@ let p1 = Promise.resolve(1);
 let p2 = Promise.resolve(2);
 let p3 = new Promise(resolve => setTimeout(resolve, 2000, 3));
 test('Works with multiple promises', () => {
-  Pall(p1, p2, p3).then(function(val){ expect(val).toBe([1,2,3]);}, function(reason){});
+  Pall(p1, p2, p3).then(function(val) { expect(val).toBe([1, 2, 3]);}, function(reason){});
 });

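`Pall` is the promise helper under test; its implementation is not part of this diff. A minimal sketch consistent with the call above, assuming it simply wraps `Promise.all` over its arguments (hypothetical):

```js
// Hypothetical: collect all arguments and resolve when every promise settles.
const Pall = (...promises) => Promise.all(promises);

const p1 = Promise.resolve(1);
const p2 = Promise.resolve(2);
const p3 = new Promise(resolve => setTimeout(resolve, 200, 3));

Pall(p1, p2, p3).then(val => console.log(val)); // [1, 2, 3]
```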
@@ -2,11 +2,11 @@ const expect = require("expect");
 const dig = require("./dig.js");
 
 const data = {
-  level1:{
-    level2:{
+  level1: {
+    level2: {
       level3: "some data",
       level3f: false,
-      level3a: [1,2,3,4]
+      level3a: [1, 2, 3, 4]
     }
   }
 };

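`dig.js` itself is not shown in the diff; the fixture above suggests a deep-key lookup. A hedged sketch of a `dig`-style helper that would work against that `data` object (not necessarily the repository's implementation):

```js
// Return the value stored under `target` anywhere inside a nested object.
const dig = (obj, target) =>
  target in obj
    ? obj[target]
    : Object.values(obj).reduce((acc, val) => {
        if (acc !== undefined) return acc;
        if (typeof val === 'object' && val !== null) return dig(val, target);
      }, undefined);

const data = {
  level1: {
    level2: {
      level3: 'some data',
      level3f: false,
      level3a: [1, 2, 3, 4]
    }
  }
};

console.log(dig(data, 'level3'));  // 'some data'
console.log(dig(data, 'level3f')); // false
console.log(dig(data, 'missing')); // undefined
```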
@@ -11,7 +11,7 @@ test('isObject([]) is a object', () => {
   expect(isObject([])).toBeTruthy();
 });
 test('isObject({ a:1 }) is a object', () => {
-  expect(isObject({ a:1 })).toBeTruthy();
+  expect(isObject({ a: 1 })).toBeTruthy();
 });
 test('isObject(true) is not a object', () => {
   expect(isObject(true)).toBeFalsy();

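The three assertions above pin down the expected behaviour: arrays and plain objects count as objects, primitives do not. A one-line `isObject` that satisfies them (a sketch; the snippet under test may differ):

```js
// Object(obj) returns obj unchanged only when obj is already an object.
const isObject = obj => obj === Object(obj);

console.log(isObject([]));       // true
console.log(isObject({ a: 1 })); // true
console.log(isObject(true));     // false
```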
@@ -1,11 +1,11 @@
 const levenshteinDistance = (string1, string2) => {
   if(string1.length === 0) return string2.length;
   if(string2.length === 0) return string1.length;
-  let matrix = Array(string2.length + 1).fill(0).map((x,i) => [i]);
-  matrix[0] = Array(string1.length + 1).fill(0).map((x,i) => i);
-  for(let i = 1; i <= string2.length; i++){
-    for(let j = 1; j<=string1.length; j++){
-      if(string2[i-1] === string1[j-1]){
+  let matrix = Array(string2.length + 1).fill(0).map((x, i) => [i]);
+  matrix[0] = Array(string1.length + 1).fill(0).map((x, i) => i);
+  for(let i = 1; i <= string2.length; i++) {
+    for(let j = 1; j<=string1.length; j++) {
+      if(string2[i-1] === string1[j-1]) {
       matrix[i][j] = matrix[i-1][j-1];
     }
     else{

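The hunk above covers most of the function but cuts off before the substitution branch and the return. For readability, here is a complete Levenshtein distance in the style the commit moves toward; the body of the `else` branch is reconstructed from the standard algorithm, not from this diff:

```js
const levenshteinDistance = (string1, string2) => {
  if (string1.length === 0) return string2.length;
  if (string2.length === 0) return string1.length;
  let matrix = Array(string2.length + 1).fill(0).map((x, i) => [i]);
  matrix[0] = Array(string1.length + 1).fill(0).map((x, i) => i);
  for (let i = 1; i <= string2.length; i++) {
    for (let j = 1; j <= string1.length; j++) {
      if (string2[i - 1] === string1[j - 1]) {
        matrix[i][j] = matrix[i - 1][j - 1];
      }
      else {
        // Minimum of substitution, insertion and deletion, each costing 1.
        matrix[i][j] = Math.min(
          matrix[i - 1][j - 1] + 1,
          matrix[i][j - 1] + 1,
          matrix[i - 1][j] + 1
        );
      }
    }
  }
  return matrix[string2.length][string1.length];
};

console.log(levenshteinDistance('kitten', 'sitting')); // 3
```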
@@ -9,7 +9,7 @@ test('pipeAsyncFunctions result should be 15', () => {
     (x) => x + 1,
     (x) => new Promise((resolve) => setTimeout(() => resolve(x + 2), 0)),
     (x) => x + 3,
-    async (x) => await x + 4,
+    async(x) => await x + 4,
   )
   (5)).resolves.toBe(15);
 });
|||||||
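Finally, the test above pipes an initial value of 5 through sync functions, a promise-returning function and an async function, expecting 15. A sketch of a `pipeAsyncFunctions` that behaves this way, assuming the usual reduce-over-`Promise.then` composition (the snippet itself is not part of this diff):

```js
// Left-to-right async composition: each function receives the resolved
// value of the previous one.
const pipeAsyncFunctions = (...fns) =>
  arg => fns.reduce((p, f) => p.then(f), Promise.resolve(arg));

const sum = pipeAsyncFunctions(
  (x) => x + 1,
  (x) => new Promise((resolve) => setTimeout(() => resolve(x + 2), 0)),
  (x) => x + 3,
  async(x) => await x + 4,
);

sum(5).then(result => console.log(result)); // 15
```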