From d9593cdce5acf038cfd114a874399c8fb9bb1569 Mon Sep 17 00:00:00 2001
From: Angelos Chalaris
Date: Fri, 3 Aug 2018 10:49:29 +0300
Subject: [PATCH] Scripts linted

---
 scripts/analyze.js          | 106 +++++++----
 scripts/build.js            |  38 ++--
 scripts/extract.js          |  29 ++-
 scripts/glossary/keyword.js |  17 +-
 scripts/glossary/library.js |  35 ++--
 scripts/lint.js             |   2 +-
 scripts/localize.js         |  59 ++++--
 scripts/module.js           |  15 +-
 scripts/rollup.js           |   2 +-
 scripts/tag.js              |  19 +-
 scripts/tdd.js              |  35 ++--
 scripts/util.js             |  35 ++--
 scripts/web.js              | 366 +++++++++++++++++++++++++++---------
 13 files changed, 525 insertions(+), 233 deletions(-)

diff --git a/scripts/analyze.js b/scripts/analyze.js
index 0accad74d..b9375a479 100644
--- a/scripts/analyze.js
+++ b/scripts/analyze.js
@@ -14,45 +14,73 @@ let snippetsArchiveData = require('../snippet_data/snippetsArchive.json');
 const OUTPUT_PATH = './snippet_data';
 console.time('Analyzer');
 // Read data
-let snippetTokens = {data: snippetsData.data.map(snippet => {
-  let tokens = prism.tokenize(snippet.attributes.codeBlocks[0], prism.languages.javascript, 'javascript');
-  return {
-    id: snippet.id,
-    type: 'snippetAnalysis',
-    attributes: {
-      codeLength: snippet.attributes.codeBlocks[0].trim().length,
-      tokenCount: tokens.length,
-      functionCount: tokens.filter(t => t.type === 'function').length,
-      operatorCount: tokens.filter(t => t.type === 'operator').length,
-      keywordCount: tokens.filter(t => t.type === 'keyword').length,
-      distinctFunctionCount: [...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))].length
-    },
-    meta: {
-      hash: snippet.meta.hash
-    }
-  };
-}), meta: { specification: "http://jsonapi.org/format/"}};
-let snippetArchiveTokens = {data: snippetsArchiveData.data.map(snippet => {
-  let tokens = prism.tokenize(snippet.attributes.codeBlocks[0], prism.languages.javascript, 'javascript');
-  return {
-    id: snippet.id,
-    type: 'snippetAnalysis',
-    attributes: {
-      codeLength: snippet.attributes.codeBlocks[0].trim().length,
-      tokenCount: tokens.length,
-      functionCount: tokens.filter(t => t.type === 'function').length,
-      operatorCount: tokens.filter(t => t.type === 'operator').length,
-      keywordCount: tokens.filter(t => t.type === 'keyword').length,
-      distinctFunctionCount: [...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))].length
-    },
-    meta: {
-      hash: snippet.meta.hash
-    }
-  };
-}), meta: { specification: "http://jsonapi.org/format/"}};
+let snippetTokens = {
+  data: snippetsData.data.map(snippet => {
+    let tokens = prism.tokenize(
+      snippet.attributes.codeBlocks[0],
+      prism.languages.javascript,
+      'javascript'
+    );
+    return {
+      id: snippet.id,
+      type: 'snippetAnalysis',
+      attributes: {
+        codeLength: snippet.attributes.codeBlocks[0].trim().length,
+        tokenCount: tokens.length,
+        functionCount: tokens.filter(t => t.type === 'function').length,
+        operatorCount: tokens.filter(t => t.type === 'operator').length,
+        keywordCount: tokens.filter(t => t.type === 'keyword').length,
+        distinctFunctionCount: [
+          ...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))
+        ].length
+      },
+      meta: {
+        hash: snippet.meta.hash
+      }
+    };
+  }),
+  meta: { specification: 'http://jsonapi.org/format/' }
+};
+let snippetArchiveTokens = {
+  data: snippetsArchiveData.data.map(snippet => {
+    let tokens = prism.tokenize(
+      snippet.attributes.codeBlocks[0],
+      prism.languages.javascript,
+      'javascript'
+    );
+    return {
+      id: snippet.id,
+      type: 'snippetAnalysis',
+      attributes: {
+        codeLength: snippet.attributes.codeBlocks[0].trim().length,
+        tokenCount: tokens.length,
+        functionCount: tokens.filter(t => t.type === 'function').length,
+        operatorCount: tokens.filter(t => t.type === 'operator').length,
+        keywordCount: tokens.filter(t => t.type === 'keyword').length,
+        distinctFunctionCount: [
+          ...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))
+        ].length
+      },
+      meta: {
+        hash: snippet.meta.hash
+      }
+    };
+  }),
+  meta: { specification: 'http://jsonapi.org/format/' }
+};
 // Write data
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetAnalytics.json'), JSON.stringify(snippetTokens, null, 2));
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetArchiveAnalytics.json'), JSON.stringify(snippetArchiveTokens, null, 2));
+fs.writeFileSync(
+  path.join(OUTPUT_PATH, 'snippetAnalytics.json'),
+  JSON.stringify(snippetTokens, null, 2)
+);
+fs.writeFileSync(
+  path.join(OUTPUT_PATH, 'snippetArchiveAnalytics.json'),
+  JSON.stringify(snippetArchiveTokens, null, 2)
+);
 // Display messages and time
-console.log(`${chalk.green('SUCCESS!')} snippetAnalyticss.json and snippetArchiveAnalytics.json files generated!`);
+console.log(
+  `${chalk.green(
+    'SUCCESS!'
+  )} snippetAnalytics.json and snippetArchiveAnalytics.json files generated!`
+);
 console.timeEnd('Analyzer');
diff --git a/scripts/build.js b/scripts/build.js
index f011cc1ba..a85f4fcb8 100644
--- a/scripts/build.js
+++ b/scripts/build.js
@@ -12,10 +12,15 @@ const SNIPPETS_PATH = './snippets';
 const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
 const STATIC_PARTS_PATH = './static-parts';
 if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
-  console.log(`${chalk.green('NOBUILD')} README build terminated, parent commit is a Travis build!`);
+  console.log(
+    `${chalk.green('NOBUILD')} README build terminated, parent commit is a Travis build!`
+  );
   process.exit(0);
 }
-if (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')) {
+if (
+  util.isTravisCI() &&
+  (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')
+) {
   console.log(`${chalk.green('ARCHIVE')} Cron job or custom build, building archive README!`);
   console.time('Builder');
   let snippets = {};
@@ -43,8 +48,8 @@ These snippets, while useful and interesting, didn\'t quite make it into the rep
 ## Table of Contents

 `;
-    for(const snippet of Object.entries(snippets))
-      output += `* [\`${snippet[0].slice(0,-3)}\`](#${snippet[0].toLowerCase().slice(0,-3)})\n`;
+    for (const snippet of Object.entries(snippets))
+      output += `* [\`${snippet[0].slice(0, -3)}\`](#${snippet[0].toLowerCase().slice(0, -3)})\n`;
     output += '\n---\n';
     for (const snippet of Object.entries(snippets)) {
       let data = snippet[1];
@@ -58,7 +63,7 @@ These snippets, while useful and interesting, didn\'t quite make it into the rep
     }

     // Write to the README file of the archive
-    fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH,'README.md'), output);
+    fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH, 'README.md'), output);
   } catch (err) {
     console.log(`${chalk.red('ERROR!')} During README generation for snippets archive: ${err}`);
     process.exit(1);
@@ -113,7 +118,15 @@ try {
       Object.entries(tagDbData)
         .map(t => t[1][0])
         .filter(v => v)
-        .sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b)))
+        .sort(
+          (a, b) =>
+            util.capitalize(a, true) === 'Uncategorized'
+              ? 1
+              : util.capitalize(b, true) === 'Uncategorized'
+                ? -1
+                : a.localeCompare(b)
+        )
+    )
   ];
   console.log(tags);
@@ -124,11 +137,12 @@ try {
   // Loop over tags and snippets to create the table of contents
   for (const tag of tags) {
     const capitalizedTag = util.capitalize(tag, true);
-    output += `### ${
-      EMOJIS[tag] || ''
-    } ${capitalizedTag}\n\n<details>\n<summary>View contents</summary>\n\n`;
+    output += `### ${EMOJIS[tag] ||
+      ''} ${capitalizedTag}\n\n<details>\n<summary>View contents</summary>\n\n`;
     for (const taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag)) {
-      output += `* [\`${taggedSnippet[0]}\`](#${taggedSnippet[0].toLowerCase()}${taggedSnippet[1].includes('advanced')?'-':''})\n`;
+      output += `* [\`${taggedSnippet[0]}\`](#${taggedSnippet[0].toLowerCase()}${
+        taggedSnippet[1].includes('advanced') ? '-' : ''
+      })\n`;
     }
     output += '\n</details>\n\n';
   }
@@ -140,9 +154,9 @@ try {
     for (const taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag)) {
       let data = snippets[taggedSnippet[0] + '.md'];
       // Add advanced tag
-      if(taggedSnippet[1].includes('advanced')) {
+      if (taggedSnippet[1].includes('advanced')) {
         data = data.split(/\r?\n/);
-        data[0] = data[0] +' ![advanced](/advanced.svg)';
+        data[0] = data[0] + ' ![advanced](/advanced.svg)';
         data = data.join('\n');
       }
       data =
diff --git a/scripts/extract.js b/scripts/extract.js
index b4f81930a..985865978 100644
--- a/scripts/extract.js
+++ b/scripts/extract.js
@@ -12,12 +12,18 @@ const SNIPPETS_PATH = './snippets';
 const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
 const OUTPUT_PATH = './snippet_data';
 // Check if running on Travis - only build for cron jobs and custom builds
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
+if (
+  util.isTravisCI() &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
   console.log(`${chalk.green('NOBUILD')} snippet extraction terminated, not a cron or api build!`);
   process.exit(0);
 }
 // Read data
-let snippets = {}, archivedSnippets = {}, tagDbData = {};
+let snippets = {},
+  archivedSnippets = {},
+  tagDbData = {};
 console.time('Extractor');
 snippets = util.readSnippets(SNIPPETS_PATH);
 archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
@@ -26,13 +32,15 @@ tagDbData = util.readTags();
 let snippetData = {
   data: Object.keys(snippets).map(key => {
     return {
-      id: key.slice(0,-3),
+      id: key.slice(0, -3),
       type: 'snippet',
       attributes: {
         fileName: key,
         text: util.getTextualContent(snippets[key]).trim(),
-        codeBlocks: util.getCodeBlocks(snippets[key]).map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
-        tags: tagDbData[key.slice(0,-3)]
+        codeBlocks: util
+          .getCodeBlocks(snippets[key])
+          .map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
+        tags: tagDbData[key.slice(0, -3)]
       },
       meta: {
         archived: false,
@@ -48,12 +56,14 @@ let snippetArchiveData = {
   data: Object.keys(archivedSnippets).map(key => {
     return {
-      id: key.slice(0,-3),
+      id: key.slice(0, -3),
       type: 'snippet',
       attributes: {
         fileName: key,
         text: util.getTextualContent(archivedSnippets[key]).trim(),
-        codeBlocks: util.getCodeBlocks(archivedSnippets[key]).map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
+        codeBlocks: util
+          .getCodeBlocks(archivedSnippets[key])
+          .map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
         tags: []
       },
       meta: {
@@ -68,7 +78,10 @@ let snippetArchiveData = {
 };
 // Write files
 fs.writeFileSync(path.join(OUTPUT_PATH, 'snippets.json'), JSON.stringify(snippetData, null, 2));
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetsArchive.json'), JSON.stringify(snippetArchiveData, null, 2));
+fs.writeFileSync(
+  path.join(OUTPUT_PATH, 'snippetsArchive.json'),
+  JSON.stringify(snippetArchiveData, null, 2)
+);
 // Display messages and time
 console.log(`${chalk.green('SUCCESS!')} snippets.json and snippetsArchive.json files generated!`);
 console.timeEnd('Extractor');
diff --git a/scripts/glossary/keyword.js b/scripts/glossary/keyword.js
index 409b9b0f1..bffa863e1 100644
--- a/scripts/glossary/keyword.js
+++ b/scripts/glossary/keyword.js
@@ -10,12 +10,15 @@ const util = require('../util');
 const glossaryFiles = util.getFilesInDir('./glossary', false);

 try {
-  const output = glossaryFiles.reduce(
-    (accumulator, currentFilename) =>
-      accumulator.toLowerCase().replace(/\.[^/.]+$/, "") + "\n" +
-      currentFilename.toLowerCase().replace(/\.[^/.]+$/, ""))+'\n';
-  fs.writeFileSync('glossary/keyword_database', output);
+  const output =
+    glossaryFiles.reduce(
+      (accumulator, currentFilename) =>
+        accumulator.toLowerCase().replace(/\.[^/.]+$/, '') +
+        '\n' +
+        currentFilename.toLowerCase().replace(/\.[^/.]+$/, '')
+    ) + '\n';
+  fs.writeFileSync('glossary/keyword_database', output);
 } catch (err) {
-  console.log(`${chalk.red('ERROR!')} During glossary keyword_database generation: ${err}`);
+  console.log(`${chalk.red('ERROR!')} During glossary keyword_database generation: ${err}`);
   process.exit(1);
-}
\ No newline at end of file
+}
diff --git a/scripts/glossary/library.js b/scripts/glossary/library.js
index b21b19387..76b5e65cd 100644
--- a/scripts/glossary/library.js
+++ b/scripts/glossary/library.js
@@ -10,27 +10,34 @@ const util = require('../util');
 const glossaryFiles = util.getFilesInDir('./glossary', true, ['keyword_database', 'README.md']);
 const fileTitles = [];

-const getGlossaryTermMarkdownBlock = (fileName) => {
+const getGlossaryTermMarkdownBlock = fileName => {
   let fileContent = fs.readFileSync(fileName, 'utf8');
-
-  let title = fileContent.match(/###[^\n]*/)[0].replace('### ', '').trim();
+
+  let title = fileContent
+    .match(/###[^\n]*/)[0]
+    .replace('### ', '')
+    .trim();
   // let description = fileContent.replace(title, '').trim();
   fileTitles.push(title);
-  return fileContent.trim() + "\n";
+  return fileContent.trim() + '\n';
 };

 const glossaryFilesContentReducer = (accumulator, currentFilename) => {
   // handle first array item
   if (accumulator === glossaryFiles[0]) {
-    return getGlossaryTermMarkdownBlock(accumulator) + "\n" + getGlossaryTermMarkdownBlock(currentFilename);
+    return (
+      getGlossaryTermMarkdownBlock(accumulator) +
+      '\n' +
+      getGlossaryTermMarkdownBlock(currentFilename)
+    );
   }
-  return accumulator + "\n" + getGlossaryTermMarkdownBlock(currentFilename);
+  return accumulator + '\n' + getGlossaryTermMarkdownBlock(currentFilename);
 };

-const getTermLinkMarkdownBlock = (termTitle) => {
+const getTermLinkMarkdownBlock = termTitle => {
   let anchor = util.getMarkDownAnchor(termTitle);
-  return `* [\`${termTitle}\`](#${anchor})` + "\n";
+  return `* [\`${termTitle}\`](#${anchor})` + '\n';
 };

 const glossaryTableOfContentsReducer = (accumulator, currentFile) => {
@@ -42,14 +49,10 @@ const glossaryTableOfContentsReducer = (accumulator, currentFile) => {
 try {
   const fileContents = glossaryFiles.reduce(glossaryFilesContentReducer);
-  const TOC = "## Table of Contents\n\n" + fileTitles.reduce(glossaryTableOfContentsReducer);
-
-  const README =
-    "# 30-seconds-of-code JavaScript Glossary\n\n" +
-    TOC +
-    "\n\n" +
-    fileContents;
-  fs.writeFileSync('glossary/README.md', README);
+  const TOC = '## Table of Contents\n\n' + fileTitles.reduce(glossaryTableOfContentsReducer);
+
+  const README = '# 30-seconds-of-code JavaScript Glossary\n\n' + TOC + '\n\n' + fileContents;
+  fs.writeFileSync('glossary/README.md', README);
 } catch (err) {
   console.log(`${chalk.red('ERROR!')} During glossary README generation: ${err}`);
   process.exit(1);
diff --git a/scripts/lint.js b/scripts/lint.js
index 69838bd3c..4f96de1f3 100644
--- a/scripts/lint.js
+++ b/scripts/lint.js
@@ -10,7 +10,7 @@ const cp = require('child_process');
 const path = require('path');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
+if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
   console.log(`${chalk.green('NOBUILD')} Linting terminated, parent commit is a Travis build!`);
   process.exit(0);
 }
diff --git a/scripts/localize.js b/scripts/localize.js
index 28a621800..ffb0544cc 100644
--- a/scripts/localize.js
+++ b/scripts/localize.js
@@ -16,34 +16,67 @@ let snippets = util.readSnippets(SNIPPETS_PATH);
 const COMMENT_REGEX = /(\/\*[\w\'\s\r\n\*]*\*\/)|(\/\/.*)/g;

 locales.forEach(locale => {
-  const locData = require(path.join('..',LOCALE_PATH,locale));
-  let existingData = fs.readFileSync(path.join(LOCALE_PATH,locale+'.js'), 'utf8');
+  const locData = require(path.join('..', LOCALE_PATH, locale));
+  let existingData = fs.readFileSync(path.join(LOCALE_PATH, locale + '.js'), 'utf8');
   let newData = [];
   let hashChanges = [];
   Object.keys(snippets).forEach(snippet => {
     const snippetName = snippet.split('.')[0];
     const snippetHash = util.hashData(snippets[snippet]);
-    if(locData.hasOwnProperty(snippetName)) {
+    if (locData.hasOwnProperty(snippetName)) {
       if (locData[snippetName].hash !== snippetHash) {
-        existingData = existingData.indexOf(' => '+snippetHash) !== -1 ? existingData : existingData.replace(locData[snippetName].hash, locData[snippetName].hash+' => '+snippetHash);
-        hashChanges.push({snippetName, oldHash: locData[snippetName].hash.split(' => ')[0], newHash: snippetHash});
+        existingData =
+          existingData.indexOf(' => ' + snippetHash) !== -1
+            ? existingData
+            : existingData.replace(
+                locData[snippetName].hash,
+                locData[snippetName].hash + ' => ' + snippetHash
+              );
+        hashChanges.push({
+          snippetName,
+          oldHash: locData[snippetName].hash.split(' => ')[0],
+          newHash: snippetHash
+        });
       }
-    }
-    else {
+    } else {
       newData.push(`\n'${snippetName}' : {
-  'description': \`${snippets[snippet].split('```js')[0].replace(/`/g,'\\`')}\`,
-  'comments': [${(snippets[snippet].match(COMMENT_REGEX) || []).map(v => '`'+v.replace(/`/g,'\\`')+'`')}],
+  'description': \`${snippets[snippet].split('```js')[0].replace(/`/g, '\\`')}\`,
+  'comments': [${(snippets[snippet].match(COMMENT_REGEX) || []).map(
+    v => '`' + v.replace(/`/g, '\\`') + '`'
+  )}],
   'hash': '${snippetHash}'
 }`);
     }
   });
-  if(!fs.existsSync(path.join(LOCALE_PATH,locale+'.js')) || !existingData.length) existingData = `module.exports = {
+  if (!fs.existsSync(path.join(LOCALE_PATH, locale + '.js')) || !existingData.length)
+    existingData = `module.exports = {
   'locale': {
     'locale': '${locale}'
 }};`;
-  fs.writeFileSync(path.join(LOCALE_PATH,locale+'.js'), newData.length ? `${existingData.trim().slice(0,-2)},${newData.join(',')}};` : existingData);
-  fs.writeFileSync(path.join(LOCALE_PATH,locale+'_log'), `${new Date()}
+  fs.writeFileSync(
+    path.join(LOCALE_PATH, locale + '.js'),
+    newData.length ? `${existingData.trim().slice(0, -2)},${newData.join(',')}};` : existingData
+  );
+  fs.writeFileSync(
+    path.join(LOCALE_PATH, locale + '_log'),
+    `${new Date()}
 Hash changes: ${hashChanges.length}
-${hashChanges.length ? hashChanges.map(v => ('Snippet name:' + v.snippetName +'\n Old hash: ' + v.oldHash + '\n New hash: ' + v.newHash + '\n')).join('\n') : ''}`);
+${
+  hashChanges.length
+    ? hashChanges
+        .map(
+          v =>
+            'Snippet name:' +
+            v.snippetName +
+            '\n Old hash: ' +
+            v.oldHash +
+            '\n New hash: ' +
+            v.newHash +
+            '\n'
+        )
+        .join('\n')
+    : ''
+}`
+  );
 });
diff --git a/scripts/module.js b/scripts/module.js
index 0891f52b4..2eed71172 100644
--- a/scripts/module.js
+++ b/scripts/module.js
@@ -7,8 +7,14 @@ const cp = require('child_process');
 const path = require('path');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
-  console.log(`${chalk.green('NOBUILD')} Module build terminated, not a cron job or a custom build!`);
+if (
+  util.isTravisCI() &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
+  console.log(
+    `${chalk.green('NOBUILD')} Module build terminated, not a cron job or a custom build!`
+  );
   process.exit(0);
 }
 // Set variables for paths
@@ -32,10 +38,7 @@ try {
   let exportStr = 'export default {';
   // Read all snippets and store them appropriately
   for (const snippet of snippets) {
-    const snippetData = fs.readFileSync(
-      path.join(SNIPPETS_PATH, snippet),
-      'utf8'
-    );
+    const snippetData = fs.readFileSync(path.join(SNIPPETS_PATH, snippet), 'utf8');
     const snippetName = snippet.replace('.md', '');
     // Check if a snippet is Node-only
     const isNodeSnippet = tagDatabase
diff --git a/scripts/rollup.js b/scripts/rollup.js
index d684f7d68..4fe1951a6 100644
--- a/scripts/rollup.js
+++ b/scripts/rollup.js
@@ -16,7 +16,7 @@ if (!fs.existsSync(DIST)) fs.mkdirSync(DIST);
 const es5 = babel({ presets: [['env', { modules: false }]] });
 const min = minify({ comments: false });
 // Create the bundles
-(async() => {
+(async () => {
   const bundle = await rollup({ input: INPUT_FILE });
   const bundleES5 = await rollup({ input: INPUT_FILE, plugins: [es5] });
   const bundleMin = await rollup({ input: INPUT_FILE, plugins: [min] });
diff --git a/scripts/tag.js b/scripts/tag.js
index 5ef2f535b..bf8d76178 100644
--- a/scripts/tag.js
+++ b/scripts/tag.js
@@ -7,7 +7,7 @@ const fs = require('fs-extra'),
   path = require('path'),
   chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
+if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
   console.log(`${chalk.green('NOBUILD')} Tagging terminated, parent commit is a Travis build!`);
   process.exit(0);
 }
@@ -28,11 +28,10 @@ console.time('Tagger');
 snippets = util.readSnippets(snippetsPath);
 // Load tag data from the database
 tagDbData = util.readTags();
-tagDbStats = Object.entries(tagDbData)
-  .reduce((acc, val) => {
-    val[1].forEach(v => acc.hasOwnProperty(v) ? acc[v]++ : (acc[v] = 1));
-    return acc;
-  }, {});
+tagDbStats = Object.entries(tagDbData).reduce((acc, val) => {
+  val[1].forEach(v => (acc.hasOwnProperty(v) ? acc[v]++ : (acc[v] = 1)));
+  return acc;
+}, {});
 // Update the listing of snippets in tag_database and log the statistics, along with missing scripts
 try {
   for (let snippet of Object.entries(snippets))
     if (
       tagDbData.hasOwnProperty(snippet[0].slice(0, -3)) &&
       tagDbData[snippet[0].slice(0, -3)].join(',').trim()
     )
-      output += `${snippet[0].slice(0, -3)}:${tagDbData[snippet[0].slice(0, -3)].join(',').trim()}\n`;
+      output += `${snippet[0].slice(0, -3)}:${tagDbData[snippet[0].slice(0, -3)]
+        .join(',')
+        .trim()}\n`;
     else {
       output += `${snippet[0].slice(0, -3)}:uncategorized\n`;
       missingTags++;
@@ -55,7 +56,9 @@ try {
 }
 // Log statistics for the tag_database file
 console.log(`\n${chalk.bgWhite(chalk.black('=== TAG STATS ==='))}`);
-for (let tagData of Object.entries(tagDbStats).filter(v => v[0] !== 'undefined').sort((a,b) => a[0].localeCompare(b[0])))
+for (let tagData of Object.entries(tagDbStats)
+  .filter(v => v[0] !== 'undefined')
+  .sort((a, b) => a[0].localeCompare(b[0])))
   console.log(`${chalk.green(tagData[0])}: ${tagData[1]} snippets`);
 console.log(
   `${chalk.blue("New untagged snippets (will be tagged as 'uncategorized'):")} ${missingTags}\n`
diff --git a/scripts/tdd.js b/scripts/tdd.js
index 247aa15ec..fa756134c 100644
--- a/scripts/tdd.js
+++ b/scripts/tdd.js
@@ -4,11 +4,16 @@
  */
 // Load modules
-const fs = require('fs-extra'), path = require('path');
+const fs = require('fs-extra'),
+  path = require('path');
 const childProcess = require('child_process');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
+if (
+  util.isTravisCI() &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
   console.log(`${chalk.green('NOBUILD')} Testing terminated, not a cron job or a custom build!`);
   process.exit(0);
 }
@@ -20,8 +25,11 @@ const TEST_PATH = './test';

 // Array of snippet names
 const snippetFiles = [];
-const snippetFilesActive = fs.readdirSync(SNIPPETS_ACTIVE, 'utf8').map(fileName => fileName.slice(0, -3));
-const snippetFilesArchive = fs.readdirSync(SNIPPETS_ARCHIVE, 'utf8')
+const snippetFilesActive = fs
+  .readdirSync(SNIPPETS_ACTIVE, 'utf8')
+  .map(fileName => fileName.slice(0, -3));
+const snippetFilesArchive = fs
+  .readdirSync(SNIPPETS_ARCHIVE, 'utf8')
   .filter(fileName => !fileName.includes('README')) // -> Filters out main README.md file in Archieve which isn't a snippet
   .map(fileName => fileName.slice(0, -3));
@@ -32,15 +40,17 @@ console.time('Tester');
 snippetFiles
   .map(fileName => {
     // Check if fileName for snippet exist in test/ dir, if doesnt create
-    fs.ensureDirSync(path.join(TEST_PATH,fileName));
+    fs.ensureDirSync(path.join(TEST_PATH, fileName));
     // return fileName for later use
     return fileName;
   })
   .map(fileName => {
-    const activeOrArchive = snippetFilesActive.includes(fileName) ? SNIPPETS_ACTIVE : SNIPPETS_ARCHIVE;
+    const activeOrArchive = snippetFilesActive.includes(fileName)
+      ? SNIPPETS_ACTIVE
+      : SNIPPETS_ARCHIVE;
     // Grab snippetData
-    const fileData = fs.readFileSync(path.join(activeOrArchive,`${fileName}.md`), 'utf8');
+    const fileData = fs.readFileSync(path.join(activeOrArchive, `${fileName}.md`), 'utf8');
     // Grab snippet Code blocks
     const fileCode = fileData.slice(fileData.search(/```\s*js/i), fileData.lastIndexOf('```') + 3);
     // Split code based on code markers
@@ -72,9 +82,9 @@ snippetFiles
     ].join('\n');

     // Write/Update exportFile which is snippetName.js in respective dir
-    fs.writeFileSync(path.join(TEST_PATH,fileName,`${fileName}.js`), exportFile);
+    fs.writeFileSync(path.join(TEST_PATH, fileName, `${fileName}.js`), exportFile);

-    if ( !fs.existsSync(path.join(TEST_PATH,fileName,`${fileName}.test.js`)) ) {
+    if (!fs.existsSync(path.join(TEST_PATH, fileName, `${fileName}.test.js`))) {
       // if snippetName.test.js doesn't exist inrespective dir exportTest
       fs.writeFileSync(`${TEST_PATH}/${fileName}/${fileName}.test.js`, exportTest);
     }
@@ -83,10 +93,9 @@ snippetFiles
     return fileName;
   });
 try {
-  fs.writeFileSync(path.join(TEST_PATH,'testlog'),`Test log for: ${new Date().toString()}\n`);
+  fs.writeFileSync(path.join(TEST_PATH, 'testlog'), `Test log for: ${new Date().toString()}\n`);
   childProcess.execSync(`npm test`);
-}
-catch (e) {
-  fs.appendFileSync(path.join(TEST_PATH,'testlog'));
+} catch (e) {
+  fs.appendFileSync(path.join(TEST_PATH, 'testlog'), `${e}\n`);
 }
 console.timeEnd('Tester');
diff --git a/scripts/util.js b/scripts/util.js
index 60b93d13a..79d53f277 100644
--- a/scripts/util.js
+++ b/scripts/util.js
@@ -3,8 +3,10 @@ const fs = require('fs-extra'),
   chalk = require('chalk'),
   crypto = require('crypto');

-const getMarkDownAnchor = (paragraphTitle) =>
-  paragraphTitle.trim().toLowerCase()
+const getMarkDownAnchor = paragraphTitle =>
+  paragraphTitle
+    .trim()
+    .toLowerCase()
     .replace(/[^\w\- ]+/g, '')
     .replace(/\s/g, '-')
     .replace(/\-+$/, '');
@@ -28,7 +30,7 @@ const getFilesInDir = (directoryPath, withPath, exclude = null) => {
       return fileNames;
     }, []);
   }
-    return directoryFilenames;
+  return directoryFilenames;
 } catch (err) {
   console.log(`${chalk.red('ERROR!')} During snippet loading: ${err}`);
   process.exit(1);
@@ -66,7 +68,6 @@ const readTags = () => {
       return data;
     })
   );
-
 } catch (err) {
   // Handle errors (hopefully not!)
   console.log(`${chalk.red('ERROR!')} During tag database loading: ${err}`);
@@ -102,7 +103,11 @@ const capitalize = (str, lowerRest = false) =>
 // Checks if current environment is Travis CI
 const isTravisCI = () => 'TRAVIS' in process.env && 'CI' in process.env;
 // Creates a hash for a value using the SHA-256 algorithm.
-const hashData = val => crypto.createHash('sha256').update(val).digest('hex');
+const hashData = val =>
+  crypto
+    .createHash('sha256')
+    .update(val)
+    .digest('hex');
 // Gets the code blocks for a snippet file.
 const getCodeBlocks = str => {
   const regex = /```[.\S\s]*?```/g;
@@ -135,15 +140,15 @@ const getTextualContent = str => {
 };
 module.exports = {
   getMarkDownAnchor,
-  getFilesInDir,
-  readSnippets,
-  readTags,
-  optimizeNodes,
-  capitalize,
-  objectFromPairs,
-  isTravisCI,
-  hashData,
-  shuffle,
-  getCodeBlocks,
+  getFilesInDir,
+  readSnippets,
+  readTags,
+  optimizeNodes,
+  capitalize,
+  objectFromPairs,
+  isTravisCI,
+  hashData,
+  shuffle,
+  getCodeBlocks,
   getTextualContent
 };
diff --git a/scripts/web.js b/scripts/web.js
index 441e7eaaf..935297e60 100644
--- a/scripts/web.js
+++ b/scripts/web.js
@@ -19,12 +19,19 @@ const unescapeHTML = str =>
       '&amp;': '&',
       '&lt;': '<',
       '&gt;': '>',
-      '&#39;': '\'',
+      '&#39;': "'",
       '&quot;': '"'
     }[tag] || tag)
   );
-if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE']) && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
-  console.log(`${chalk.green('NOBUILD')} website build terminated, parent commit is a Travis build!`);
+if (
+  util.isTravisCI() &&
+  /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE']) &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
+  console.log(
+    `${chalk.green('NOBUILD')} website build terminated, parent commit is a Travis build!`
+  );
   process.exit(0);
 }
 // Compile the mini.css framework and custom CSS styles, using `node-sass`.
@@ -35,7 +42,7 @@ sass.render(
     outFile: path.join('docs', 'mini.css'),
     outputStyle: 'compressed'
   },
-  function (err, result) {
+  function(err, result) {
     if (!err) {
       fs.writeFile(path.join('docs', 'mini.css'), result.css, function(err2) {
         if (!err2) console.log(`${chalk.green('SUCCESS!')} mini.css file generated!`);
@@ -54,7 +61,32 @@ const snippetsPath = './snippets',
 // Set variables for script
 let snippets = {},
   archivedSnippets = {},
-  beginnerSnippetNames = ['everyNth', 'filterNonUnique', 'last', 'maxN', 'minN', 'nthElement', 'offset', 'sample', 'similarity', 'tail', 'currentURL', 'hasClass', 'getMeridiemSuffixOfInteger', 'factorial', 'fibonacci', 'gcd', 'isDivisible', 'isEven', 'isPrime', 'lcm', 'randomIntegerInRange', 'sum', 'reverseString', 'truncateString'],
+  beginnerSnippetNames = [
+    'everyNth',
+    'filterNonUnique',
+    'last',
+    'maxN',
+    'minN',
+    'nthElement',
+    'offset',
+    'sample',
+    'similarity',
+    'tail',
+    'currentURL',
+    'hasClass',
+    'getMeridiemSuffixOfInteger',
+    'factorial',
+    'fibonacci',
+    'gcd',
+    'isDivisible',
+    'isEven',
+    'isPrime',
+    'lcm',
+    'randomIntegerInRange',
+    'sum',
+    'reverseString',
+    'truncateString'
+  ],
   startPart = '',
   endPart = '',
   output = '',
@@ -64,7 +96,6 @@ let snippets = {},
   archivedStartPart = '',
   archivedEndPart = '',
   archivedOutput = '',
-  indexStaticFile = '',
   pagesOutput = [],
   tagDbData = {};
@@ -74,16 +105,21 @@ console.time('Webber');
 snippets = util.readSnippets(snippetsPath);
 archivedSnippets = util.readSnippets(archivedSnippetsPath);
-
 // Load static parts for all pages
 try {
   startPart = fs.readFileSync(path.join(staticPartsPath, 'page-start.html'), 'utf8');
   endPart = fs.readFileSync(path.join(staticPartsPath, 'page-end.html'), 'utf8');
-  beginnerStartPart = fs.readFileSync(path.join(staticPartsPath, 'beginner-page-start.html'), 'utf8');
+  beginnerStartPart = fs.readFileSync(
+    path.join(staticPartsPath, 'beginner-page-start.html'),
+    'utf8'
+  );
   beginnerEndPart = fs.readFileSync(path.join(staticPartsPath, 'beginner-page-end.html'), 'utf8');
-  archivedStartPart = fs.readFileSync(path.join(staticPartsPath, 'archived-page-start.html'), 'utf8');
+  archivedStartPart = fs.readFileSync(
+    path.join(staticPartsPath, 'archived-page-start.html'),
+    'utf8'
+  );
   archivedEndPart = fs.readFileSync(path.join(staticPartsPath, 'archived-page-end.html'), 'utf8');
   indexStaticFile = fs.readFileSync(path.join(staticPartsPath, 'index.html'), 'utf8');
@@ -95,7 +131,11 @@ try {
 // Load tag data from the database
 tagDbData = util.readTags();
 // Create the output for the index.html file (only locally or on Travis CRON or custom job)
-if(!util.isTravisCI() || (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api'))) {
+if (
+  !util.isTravisCI() ||
+  (util.isTravisCI() &&
+    (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api'))
+) {
   try {
     // Shuffle the array of snippets, pick 3
     let indexDailyPicks = '';
@@ -110,68 +150,123 @@ if(!util.isTravisCI() || (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE']
     // Generate the html for the picked snippets
     for (let snippet of Object.entries(dailyPicks))
       indexDailyPicks +=
-        '
' +
+        '
' +
         md
           .render(`\n${snippets[snippet[0]]}`)
          .replace(/

/g, `${snippet[1].includes('advanced') ? 'advanced' : ''}

`)
+          .replace(
+            /<\/h3>/g,
+            `${snippet[1].includes('advanced') ? 'advanced' : ''}`
+          )
          .replace(/<\/h3>/g, '
')
-          .replace(/
([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `
${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}
`)
+          .replace(
+            /
([^\0]*?)<\/code><\/pre>/gm,
+            (match, p1) =>
+              `
${Prism.highlight(
+                unescapeHTML(p1),
+                Prism.languages.javascript
+              )}
`
+          )
          .replace(/<\/pre>\s+
📋 Copy to clipboard' +
        '
';
     // Select the first snippet from today's picks
     indexDailyPicks = indexDailyPicks.replace('card fluid pick', 'card fluid pick selected');
     // Optimize punctuation nodes
-    indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
+    indexDailyPicks = util.optimizeNodes(
+      indexDailyPicks,
+      /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
+    );
     // Optimize operator nodes
-    indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
+    indexDailyPicks = util.optimizeNodes(
+      indexDailyPicks,
+      /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
+    );
     // Optimize keyword nodes
-    indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
+    indexDailyPicks = util.optimizeNodes(
+      indexDailyPicks,
+      /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
+    );
     // Put the daily picks into the page
     indexStaticFile = indexStaticFile.replace('$daily-picks', indexDailyPicks);
     // Use the Github API to get the needed data
     const githubApi = 'api.github.com';
     const headers = util.isTravisCI()
-      ? { 'User-Agent': '30-seconds-of-code', 'Authorization': 'token ' + process.env['GH_TOKEN']}
-      : { 'User-Agent': '30-seconds-of-code'};
+      ? { 'User-Agent': '30-seconds-of-code', Authorization: 'token ' + process.env['GH_TOKEN'] }
+      : { 'User-Agent': '30-seconds-of-code' };
     // Test the API's rate limit (keep for various reasons)
-    https.get({host: githubApi, path: '/rate_limit?', headers: headers}, res => {
-      res.on('data', function (chunk) {
+    https.get({ host: githubApi, path: '/rate_limit?', headers: headers }, res => {
+      res.on('data', function(chunk) {
        console.log(`Remaining requests: ${JSON.parse(chunk).resources.core.remaining}`);
      });
    });
     // Send requests and wait for responses, write to the page
-    https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/commits?per_page=1', headers: headers}, resCommits => {
-      https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/contributors?per_page=1', headers: headers}, resContributors => {
-        https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/stargazers?per_page=1', headers: headers}, resStars => {
-          let commits = resCommits.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, ''),
-            contribs = resContributors.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, ''),
-            stars = resStars.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, '');
-          indexStaticFile = indexStaticFile.replace(/\$snippet-count/g, Object.keys(snippets).length).replace(/\$commit-count/g, commits).replace(/\$contrib-count/g, contribs).replace(/\$star-count/g, stars);
-          indexStaticFile = minify(indexStaticFile, {
-            collapseBooleanAttributes: true,
-            collapseWhitespace: true,
-            decodeEntities: false,
-            minifyCSS: true,
-            minifyJS: true,
-            keepClosingSlash: true,
-            processConditionalComments: true,
-            removeAttributeQuotes: false,
-            removeComments: true,
-            removeEmptyAttributes: false,
-            removeOptionalTags: false,
-            removeScriptTypeAttributes: false,
-            removeStyleLinkTypeAttributes: false,
-            trimCustomFragments: true
-          });
-          // Generate 'index.html' file
-          fs.writeFileSync(path.join(docsPath, 'index.html'), indexStaticFile);
-          console.log(`${chalk.green('SUCCESS!')} index.html file generated!`);
-        });
-      });
-    });
-
+    https.get(
+      {
+        host: githubApi,
+        path: '/repos/chalarangelo/30-seconds-of-code/commits?per_page=1',
+        headers: headers
+      },
+      resCommits => {
+        https.get(
+          {
+            host: githubApi,
+            path: '/repos/chalarangelo/30-seconds-of-code/contributors?per_page=1',
+            headers: headers
+          },
+          resContributors => {
+            https.get(
+              {
+                host: githubApi,
+                path: '/repos/chalarangelo/30-seconds-of-code/stargazers?per_page=1',
+                headers: headers
+              },
+              resStars => {
+                let commits = resCommits.headers.link
+                    .split('&')
+                    .slice(-1)[0]
+                    .replace(/[^\d]/g, ''),
+                  contribs = resContributors.headers.link
+                    .split('&')
+                    .slice(-1)[0]
+                    .replace(/[^\d]/g, ''),
+                  stars = resStars.headers.link
+                    .split('&')
+                    .slice(-1)[0]
+                    .replace(/[^\d]/g, '');
+                indexStaticFile = indexStaticFile
+                  .replace(/\$snippet-count/g, Object.keys(snippets).length)
+                  .replace(/\$commit-count/g, commits)
+                  .replace(/\$contrib-count/g, contribs)
+                  .replace(/\$star-count/g, stars);
+                indexStaticFile = minify(indexStaticFile, {
+                  collapseBooleanAttributes: true,
+                  collapseWhitespace: true,
+                  decodeEntities: false,
+                  minifyCSS: true,
+                  minifyJS: true,
+                  keepClosingSlash: true,
+                  processConditionalComments: true,
+                  removeAttributeQuotes: false,
+                  removeComments: true,
+                  removeEmptyAttributes: false,
+                  removeOptionalTags: false,
+                  removeScriptTypeAttributes: false,
+                  removeStyleLinkTypeAttributes: false,
+                  trimCustomFragments: true
+                });
+                // Generate 'index.html' file
+                fs.writeFileSync(path.join(docsPath, 'index.html'), indexStaticFile);
+                console.log(`${chalk.green('SUCCESS!')} index.html file generated!`);
+              }
+            );
+          }
+        );
+      }
+    );
   } catch (err) {
     console.log(`${chalk.red('ERROR!')} During index.html generation: ${err}`);
     process.exit(1);
@@ -185,13 +280,21 @@ try {
   // Loop over tags and snippets to create the table of contents
   for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
     .filter(v => v)
-    .sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b))) {
-    output += '

' +
-      md
-        .render(`${util.capitalize(tag, true)}\n`)
-        .replace(/

/g, '')
-        .replace(/<\/p>/g, '') +
-      '

' +
+    .sort(
+      (a, b) =>
+        util.capitalize(a, true) === 'Uncategorized'
+          ? 1
+          : util.capitalize(b, true) === 'Uncategorized'
+            ? -1
+            : a.localeCompare(b)
+    )) {
+    output +=
+      '

' +
+      md
+        .render(`${util.capitalize(tag, true)}\n`)
+        .replace(/

/g, '')
+        .replace(/<\/p>/g, '') +
+      '

';
     for (let taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag))
       output += md
         .render(`[${taggedSnippet[0]}](./${tag}#${taggedSnippet[0].toLowerCase()})\n`)
         .replace(/
'; output += '
';
   // Loop over tags and snippets to create the list of snippets
   for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
     .filter(v => v)
-    .sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b))) {
-    let localOutput = output.replace(/\$tag/g, util.capitalize(tag)).replace(new RegExp(`./${tag}#`, 'g'), '#');
+    .sort(
+      (a, b) =>
+        util.capitalize(a, true) === 'Uncategorized'
+          ? 1
+          : util.capitalize(b, true) === 'Uncategorized'
+            ? -1
+            : a.localeCompare(b)
+    )) {
+    let localOutput = output
+      .replace(/\$tag/g, util.capitalize(tag))
+      .replace(new RegExp(`./${tag}#`, 'g'), '#');
     localOutput += md
       .render(`## ${util.capitalize(tag, true)}\n`)
       .replace(/

/g, '

');
     for (let taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag))
       localOutput +=
         '
' +
         md
           .render(`\n${snippets[taggedSnippet[0] + '.md']}`)
          .replace(/

/g, `${taggedSnippet[1].includes('advanced') ? 'advanced' : ''}

`)
+          .replace(
+            /

/g,
+            `${
+              taggedSnippet[1].includes('advanced') ? 'advanced' : ''
+            }

`
+          )
          .replace(/<\/h3>/g, '

')
-          .replace(/
([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `
${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}
`)
+          .replace(
+            /
([^\0]*?)<\/code><\/pre>/gm,
+            (match, p1) =>
+              `
${Prism.highlight(
+                unescapeHTML(p1),
+                Prism.languages.javascript
+              )}
`
+          )
          .replace(/<\/pre>\s+
📋 Copy to clipboard' +
          '
';
     // Add the ending static part
     localOutput += `\n${endPart + '\n'}`;
     // Optimize punctuation nodes
-    localOutput = util.optimizeNodes(localOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
+    localOutput = util.optimizeNodes(
+      localOutput,
+      /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
+    );
     // Optimize operator nodes
-    localOutput = util.optimizeNodes(localOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
+    localOutput = util.optimizeNodes(
+      localOutput,
+      /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
+    );
     // Optimize keyword nodes
-    localOutput = util.optimizeNodes(localOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
-    pagesOutput.push({'tag': tag, 'content': localOutput});
+    localOutput = util.optimizeNodes(
+      localOutput,
+      /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
+      (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
+    );
+    pagesOutput.push({ tag: tag, content: localOutput });
   }
   // Minify output
   pagesOutput.forEach(page => {
@@ -250,7 +390,7 @@ try {
       removeStyleLinkTypeAttributes: false,
       trimCustomFragments: true
     });
-    fs.writeFileSync(path.join(docsPath, page.tag+'.html'), page.content);
+    fs.writeFileSync(path.join(docsPath, page.tag + '.html'), page.content);
     console.log(`${chalk.green('SUCCESS!')} ${page.tag}.html file generated!`);
   });
 } catch (err) {
@@ -266,7 +406,7 @@ try {
   // Filter begginer snippets
   const filteredBeginnerSnippets = Object.keys(snippets)
-    .filter(key => beginnerSnippetNames.map(name => name+'.md').includes(key))
+    .filter(key => beginnerSnippetNames.map(name => name + '.md').includes(key))
     .reduce((obj, key) => {
       obj[key] = snippets[key];
       return obj;
     }, {});

   for (let snippet of Object.entries(filteredBeginnerSnippets))
     beginnerOutput +=
-      '
' +
-      '
' +
+      '
' +
+      '
' +
       '
' +
       md
         .render(`\n${snippets[snippet[0]]}`)
        .replace(/

/g, `${snippet[1].includes('advanced') ? 'advanced' : ''}

`)
+        .replace(
+          /<\/h3>/g,
+          `${snippet[1].includes('advanced') ? 'advanced' : ''}`
+        )
        .replace(/<\/h3>/g, '
')
-        .replace(/
([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `
${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}
`)
+        .replace(
+          /
([^\0]*?)<\/code><\/pre>/gm,
+          (match, p1) =>
+            `
${Prism.highlight(
+              unescapeHTML(p1),
+              Prism.languages.javascript
+            )}
`
+        )
        .replace(/<\/pre>\s+
📋 Copy to clipboard' +
      '
';
-  // Optimize punctuation nodes
-  beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
-  // Optimize operator nodes
-  beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
-  // Optimize keyword nodes
-  beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
-
+  // Optimize punctuation nodes
+  beginnerOutput = util.optimizeNodes(
+    beginnerOutput,
+    /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
+  );
+  // Optimize operator nodes
+  beginnerOutput = util.optimizeNodes(
+    beginnerOutput,
+    /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
+  );
+  // Optimize keyword nodes
+  beginnerOutput = util.optimizeNodes(
+    beginnerOutput,
+    /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
+  );
   beginnerOutput += `${beginnerEndPart}`;
@@ -316,7 +477,6 @@ try {
   });
   fs.writeFileSync(path.join(docsPath, 'beginner.html'), minifiedBeginnerOutput);
   console.log(`${chalk.green('SUCCESS!')} beginner.html file generated!`);
-
 } catch (err) {
   console.log(`${chalk.red('ERROR!')} During beginner.html generation: ${err}`);
   process.exit(1);
@@ -334,32 +494,51 @@ try {
     .filter(key => !excludeFiles.includes(key))
     .reduce((obj, key) => {
       obj[key] = archivedSnippets[key];
-      return obj;
-    }, {});
+      return obj;
+    }, {});

   // Generate archived snippets from md files
   for (let snippet of Object.entries(filteredArchivedSnippets))
     archivedOutput +=
-      '
' +
-      '
' +
+      '
' +
+      '
' +
       '
' +
       md
         .render(`\n${filteredArchivedSnippets[snippet[0]]}`)
         .replace(/

/g, '

')
-        .replace(/
([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `
${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}
`)
+        .replace(
+          /
([^\0]*?)<\/code><\/pre>/gm,
+          (match, p1) =>
+            `
${Prism.highlight(
+              unescapeHTML(p1),
+              Prism.languages.javascript
+            )}
`
+        )
        .replace(/<\/pre>\s+
📋 Copy to clipboard' +
      '
';
-  // Optimize punctuation nodes
-  archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
-  // Optimize operator nodes
-  archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
-  // Optimize keyword nodes
-  archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
+  // Optimize punctuation nodes
+  archivedOutput = util.optimizeNodes(
+    archivedOutput,
+    /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
+  );
+  // Optimize operator nodes
+  archivedOutput = util.optimizeNodes(
+    archivedOutput,
+    /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
+  );
+  // Optimize keyword nodes
+  archivedOutput = util.optimizeNodes(
+    archivedOutput,
+    /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
+    (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
+  );

-  archivedOutput += `${archivedEndPart}`;
+  archivedOutput += `${archivedEndPart}`;

   // Generate and minify 'archive.html' file
   const minifiedArchivedOutput = minify(archivedOutput, {
@@ -381,7 +560,6 @@ try {
   fs.writeFileSync(path.join(docsPath, 'archive.html'), minifiedArchivedOutput);
   console.log(`${chalk.green('SUCCESS!')} archive.html file generated!`);
-
 } catch (err) {
   console.log(`${chalk.red('ERROR!')} During archive.html generation: ${err}`);
   process.exit(1);