Scripts linted
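The diff below is a formatting-only pass over the repository's build scripts (the snippet analyzer and extractor, the README and glossary builders, the tagger, tester, web page generator and the shared util module): long statements are wrapped at roughly 100 columns, double-quoted strings become single-quoted, a space is added after `if`/`for`, single-parameter arrow functions drop their parentheses, and `} else {` / `} catch {` are joined onto one line. These changes are consistent with a Prettier-style formatter; a minimal configuration that would produce them might look like the sketch below. The file name and exact option values are assumptions inferred from the diff, not taken from the repository.

```js
// prettier.config.js — hypothetical settings inferred from the formatting in this commit
module.exports = {
  printWidth: 100,      // long prism.tokenize(...) / fs.writeFileSync(...) calls are wrapped near 100 columns
  singleQuote: true,    // "http://jsonapi.org/format/" becomes 'http://jsonapi.org/format/'
  arrowParens: 'avoid', // (fileName) => ... becomes fileName => ...
  trailingComma: 'none' // no trailing commas are added to re-wrapped literals
};
```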
@@ -14,8 +14,13 @@ let snippetsArchiveData = require('../snippet_data/snippetsArchive.json');
 const OUTPUT_PATH = './snippet_data';
 console.time('Analyzer');
 // Read data
-let snippetTokens = {data: snippetsData.data.map(snippet => {
-let tokens = prism.tokenize(snippet.attributes.codeBlocks[0], prism.languages.javascript, 'javascript');
+let snippetTokens = {
+data: snippetsData.data.map(snippet => {
+let tokens = prism.tokenize(
+snippet.attributes.codeBlocks[0],
+prism.languages.javascript,
+'javascript'
+);
 return {
 id: snippet.id,
 type: 'snippetAnalysis',
@@ -25,15 +30,24 @@ let snippetTokens = {data: snippetsData.data.map(snippet => {
 functionCount: tokens.filter(t => t.type === 'function').length,
 operatorCount: tokens.filter(t => t.type === 'operator').length,
 keywordCount: tokens.filter(t => t.type === 'keyword').length,
-distinctFunctionCount: [...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))].length
+distinctFunctionCount: [
+...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))
+].length
 },
 meta: {
 hash: snippet.meta.hash
 }
 };
-}), meta: { specification: "http://jsonapi.org/format/"}};
-let snippetArchiveTokens = {data: snippetsArchiveData.data.map(snippet => {
-let tokens = prism.tokenize(snippet.attributes.codeBlocks[0], prism.languages.javascript, 'javascript');
+}),
+meta: { specification: 'http://jsonapi.org/format/' }
+};
+let snippetArchiveTokens = {
+data: snippetsArchiveData.data.map(snippet => {
+let tokens = prism.tokenize(
+snippet.attributes.codeBlocks[0],
+prism.languages.javascript,
+'javascript'
+);
 return {
 id: snippet.id,
 type: 'snippetAnalysis',
@@ -43,16 +57,30 @@ let snippetArchiveTokens = {data: snippetsArchiveData.data.map(snippet => {
 functionCount: tokens.filter(t => t.type === 'function').length,
 operatorCount: tokens.filter(t => t.type === 'operator').length,
 keywordCount: tokens.filter(t => t.type === 'keyword').length,
-distinctFunctionCount: [...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))].length
+distinctFunctionCount: [
+...new Set(tokens.filter(t => t.type === 'function').map(t => t.content))
+].length
 },
 meta: {
 hash: snippet.meta.hash
 }
 };
-}), meta: { specification: "http://jsonapi.org/format/"}};
+}),
+meta: { specification: 'http://jsonapi.org/format/' }
+};
 // Write data
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetAnalytics.json'), JSON.stringify(snippetTokens, null, 2));
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetArchiveAnalytics.json'), JSON.stringify(snippetArchiveTokens, null, 2));
+fs.writeFileSync(
+path.join(OUTPUT_PATH, 'snippetAnalytics.json'),
+JSON.stringify(snippetTokens, null, 2)
+);
+fs.writeFileSync(
+path.join(OUTPUT_PATH, 'snippetArchiveAnalytics.json'),
+JSON.stringify(snippetArchiveTokens, null, 2)
+);
 // Display messages and time
-console.log(`${chalk.green('SUCCESS!')} snippetAnalyticss.json and snippetArchiveAnalytics.json files generated!`);
+console.log(
+`${chalk.green(
+'SUCCESS!'
+)} snippetAnalyticss.json and snippetArchiveAnalytics.json files generated!`
+);
 console.timeEnd('Analyzer');
@@ -12,10 +12,15 @@ const SNIPPETS_PATH = './snippets';
 const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
 const STATIC_PARTS_PATH = './static-parts';
 if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
-console.log(`${chalk.green('NOBUILD')} README build terminated, parent commit is a Travis build!`);
+console.log(
+`${chalk.green('NOBUILD')} README build terminated, parent commit is a Travis build!`
+);
 process.exit(0);
 }
-if (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')) {
+if (
+util.isTravisCI() &&
+(process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api')
+) {
 console.log(`${chalk.green('ARCHIVE')} Cron job or custom build, building archive README!`);
 console.time('Builder');
 let snippets = {};
@@ -43,8 +48,8 @@ These snippets, while useful and interesting, didn\'t quite make it into the rep
 ## Table of Contents

 `;
-for(const snippet of Object.entries(snippets))
-output += `* [\`${snippet[0].slice(0,-3)}\`](#${snippet[0].toLowerCase().slice(0,-3)})\n`;
+for (const snippet of Object.entries(snippets))
+output += `* [\`${snippet[0].slice(0, -3)}\`](#${snippet[0].toLowerCase().slice(0, -3)})\n`;
 output += '\n---\n';
 for (const snippet of Object.entries(snippets)) {
 let data = snippet[1];
@@ -58,7 +63,7 @@ These snippets, while useful and interesting, didn\'t quite make it into the rep
 }

 // Write to the README file of the archive
-fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH,'README.md'), output);
+fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH, 'README.md'), output);
 } catch (err) {
 console.log(`${chalk.red('ERROR!')} During README generation for snippets archive: ${err}`);
 process.exit(1);
@@ -113,7 +118,15 @@ try {
 Object.entries(tagDbData)
 .map(t => t[1][0])
 .filter(v => v)
-.sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b)))
+.sort(
+(a, b) =>
+util.capitalize(a, true) === 'Uncategorized'
+? 1
+: util.capitalize(b, true) === 'Uncategorized'
+? -1
+: a.localeCompare(b)
+)
+)
 ];

 console.log(tags);
@@ -124,11 +137,12 @@ try {
 // Loop over tags and snippets to create the table of contents
 for (const tag of tags) {
 const capitalizedTag = util.capitalize(tag, true);
-output += `### ${
-EMOJIS[tag] || ''
-} ${capitalizedTag}\n\n<details>\n<summary>View contents</summary>\n\n`;
+output += `### ${EMOJIS[tag] ||
+''} ${capitalizedTag}\n\n<details>\n<summary>View contents</summary>\n\n`;
 for (const taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag)) {
-output += `* [\`${taggedSnippet[0]}\`](#${taggedSnippet[0].toLowerCase()}${taggedSnippet[1].includes('advanced')?'-':''})\n`;
+output += `* [\`${taggedSnippet[0]}\`](#${taggedSnippet[0].toLowerCase()}${
+taggedSnippet[1].includes('advanced') ? '-' : ''
+})\n`;
 }
 output += '\n</details>\n\n';
 }
@@ -140,9 +154,9 @@ try {
 for (const taggedSnippet of Object.entries(tagDbData).filter(v => v[1][0] === tag)) {
 let data = snippets[taggedSnippet[0] + '.md'];
 // Add advanced tag
-if(taggedSnippet[1].includes('advanced')) {
+if (taggedSnippet[1].includes('advanced')) {
 data = data.split(/\r?\n/);
-data[0] = data[0] +' ';
+data[0] = data[0] + ' ';
 data = data.join('\n');
 }
 data =
@@ -12,12 +12,18 @@ const SNIPPETS_PATH = './snippets';
 const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
 const OUTPUT_PATH = './snippet_data';
 // Check if running on Travis - only build for cron jobs and custom builds
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
+if (
+util.isTravisCI() &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
 console.log(`${chalk.green('NOBUILD')} snippet extraction terminated, not a cron or api build!`);
 process.exit(0);
 }
 // Read data
-let snippets = {}, archivedSnippets = {}, tagDbData = {};
+let snippets = {},
+archivedSnippets = {},
+tagDbData = {};
 console.time('Extractor');
 snippets = util.readSnippets(SNIPPETS_PATH);
 archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
@@ -26,13 +32,15 @@ tagDbData = util.readTags();
 let snippetData = {
 data: Object.keys(snippets).map(key => {
 return {
-id: key.slice(0,-3),
+id: key.slice(0, -3),
 type: 'snippet',
 attributes: {
 fileName: key,
 text: util.getTextualContent(snippets[key]).trim(),
-codeBlocks: util.getCodeBlocks(snippets[key]).map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
-tags: tagDbData[key.slice(0,-3)]
+codeBlocks: util
+.getCodeBlocks(snippets[key])
+.map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
+tags: tagDbData[key.slice(0, -3)]
 },
 meta: {
 archived: false,
@@ -48,12 +56,14 @@ let snippetData = {
 let snippetArchiveData = {
 data: Object.keys(archivedSnippets).map(key => {
 return {
-id: key.slice(0,-3),
+id: key.slice(0, -3),
 type: 'snippet',
 attributes: {
 fileName: key,
 text: util.getTextualContent(archivedSnippets[key]).trim(),
-codeBlocks: util.getCodeBlocks(archivedSnippets[key]).map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
+codeBlocks: util
+.getCodeBlocks(archivedSnippets[key])
+.map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim()),
 tags: []
 },
 meta: {
@@ -68,7 +78,10 @@ let snippetArchiveData = {
 };
 // Write files
 fs.writeFileSync(path.join(OUTPUT_PATH, 'snippets.json'), JSON.stringify(snippetData, null, 2));
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetsArchive.json'), JSON.stringify(snippetArchiveData, null, 2));
+fs.writeFileSync(
+path.join(OUTPUT_PATH, 'snippetsArchive.json'),
+JSON.stringify(snippetArchiveData, null, 2)
+);
 // Display messages and time
 console.log(`${chalk.green('SUCCESS!')} snippets.json and snippetsArchive.json files generated!`);
 console.timeEnd('Extractor');
@@ -10,10 +10,13 @@ const util = require('../util');
 const glossaryFiles = util.getFilesInDir('./glossary', false);

 try {
-const output = glossaryFiles.reduce(
+const output =
+glossaryFiles.reduce(
 (accumulator, currentFilename) =>
-accumulator.toLowerCase().replace(/\.[^/.]+$/, "") + "\n" +
-currentFilename.toLowerCase().replace(/\.[^/.]+$/, ""))+'\n';
+accumulator.toLowerCase().replace(/\.[^/.]+$/, '') +
+'\n' +
+currentFilename.toLowerCase().replace(/\.[^/.]+$/, '')
+) + '\n';
 fs.writeFileSync('glossary/keyword_database', output);
 } catch (err) {
 console.log(`${chalk.red('ERROR!')} During glossary keyword_database generation: ${err}`);
@@ -10,27 +10,34 @@ const util = require('../util');
 const glossaryFiles = util.getFilesInDir('./glossary', true, ['keyword_database', 'README.md']);
 const fileTitles = [];

-const getGlossaryTermMarkdownBlock = (fileName) => {
+const getGlossaryTermMarkdownBlock = fileName => {
 let fileContent = fs.readFileSync(fileName, 'utf8');

-let title = fileContent.match(/###[^\n]*/)[0].replace('### ', '').trim();
+let title = fileContent
+.match(/###[^\n]*/)[0]
+.replace('### ', '')
+.trim();
 // let description = fileContent.replace(title, '').trim();
 fileTitles.push(title);

-return fileContent.trim() + "\n";
+return fileContent.trim() + '\n';
 };

 const glossaryFilesContentReducer = (accumulator, currentFilename) => {
 // handle first array item
 if (accumulator === glossaryFiles[0]) {
-return getGlossaryTermMarkdownBlock(accumulator) + "\n" + getGlossaryTermMarkdownBlock(currentFilename);
+return (
+getGlossaryTermMarkdownBlock(accumulator) +
+'\n' +
+getGlossaryTermMarkdownBlock(currentFilename)
+);
 }
-return accumulator + "\n" + getGlossaryTermMarkdownBlock(currentFilename);
+return accumulator + '\n' + getGlossaryTermMarkdownBlock(currentFilename);
 };

-const getTermLinkMarkdownBlock = (termTitle) => {
+const getTermLinkMarkdownBlock = termTitle => {
 let anchor = util.getMarkDownAnchor(termTitle);
-return `* [\`${termTitle}\`](#${anchor})` + "\n";
+return `* [\`${termTitle}\`](#${anchor})` + '\n';
 };

 const glossaryTableOfContentsReducer = (accumulator, currentFile) => {
@@ -42,13 +49,9 @@ const glossaryTableOfContentsReducer = (accumulator, currentFile) => {

 try {
 const fileContents = glossaryFiles.reduce(glossaryFilesContentReducer);
-const TOC = "## Table of Contents\n\n" + fileTitles.reduce(glossaryTableOfContentsReducer);
+const TOC = '## Table of Contents\n\n' + fileTitles.reduce(glossaryTableOfContentsReducer);

-const README =
-"# 30-seconds-of-code JavaScript Glossary\n\n" +
-TOC +
-"\n\n" +
-fileContents;
+const README = '# 30-seconds-of-code JavaScript Glossary\n\n' + TOC + '\n\n' + fileContents;
 fs.writeFileSync('glossary/README.md', README);
 } catch (err) {
 console.log(`${chalk.red('ERROR!')} During glossary README generation: ${err}`);
@@ -10,7 +10,7 @@ const cp = require('child_process');
 const path = require('path');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
+if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
 console.log(`${chalk.green('NOBUILD')} Linting terminated, parent commit is a Travis build!`);
 process.exit(0);
 }
@@ -16,34 +16,67 @@ let snippets = util.readSnippets(SNIPPETS_PATH);
 const COMMENT_REGEX = /(\/\*[\w\'\s\r\n\*]*\*\/)|(\/\/.*)/g;

 locales.forEach(locale => {
-const locData = require(path.join('..',LOCALE_PATH,locale));
-let existingData = fs.readFileSync(path.join(LOCALE_PATH,locale+'.js'), 'utf8');
+const locData = require(path.join('..', LOCALE_PATH, locale));
+let existingData = fs.readFileSync(path.join(LOCALE_PATH, locale + '.js'), 'utf8');
 let newData = [];
 let hashChanges = [];
 Object.keys(snippets).forEach(snippet => {
 const snippetName = snippet.split('.')[0];
 const snippetHash = util.hashData(snippets[snippet]);
-if(locData.hasOwnProperty(snippetName)) {
+if (locData.hasOwnProperty(snippetName)) {
 if (locData[snippetName].hash !== snippetHash) {
-existingData = existingData.indexOf(' => '+snippetHash) !== -1 ? existingData : existingData.replace(locData[snippetName].hash, locData[snippetName].hash+' => '+snippetHash);
-hashChanges.push({snippetName, oldHash: locData[snippetName].hash.split(' => ')[0], newHash: snippetHash});
+existingData =
+existingData.indexOf(' => ' + snippetHash) !== -1
+? existingData
+: existingData.replace(
+locData[snippetName].hash,
+locData[snippetName].hash + ' => ' + snippetHash
+);
+hashChanges.push({
+snippetName,
+oldHash: locData[snippetName].hash.split(' => ')[0],
+newHash: snippetHash
+});
 }
-}
-else {
+} else {
 newData.push(`\n'${snippetName}' : {
-'description': \`${snippets[snippet].split('```js')[0].replace(/`/g,'\\`')}\`,
-'comments': [${(snippets[snippet].match(COMMENT_REGEX) || []).map(v => '`'+v.replace(/`/g,'\\`')+'`')}],
+'description': \`${snippets[snippet].split('```js')[0].replace(/`/g, '\\`')}\`,
+'comments': [${(snippets[snippet].match(COMMENT_REGEX) || []).map(
+v => '`' + v.replace(/`/g, '\\`') + '`'
+)}],
 'hash': '${snippetHash}'
 }`);
 }
 });
-if(!fs.existsSync(path.join(LOCALE_PATH,locale+'.js')) || !existingData.length) existingData = `module.exports = {
+if (!fs.existsSync(path.join(LOCALE_PATH, locale + '.js')) || !existingData.length)
+existingData = `module.exports = {
 'locale': {
 'locale': '${locale}'
 }};`;
-fs.writeFileSync(path.join(LOCALE_PATH,locale+'.js'), newData.length ? `${existingData.trim().slice(0,-2)},${newData.join(',')}};` : existingData);
-fs.writeFileSync(path.join(LOCALE_PATH,locale+'_log'), `${new Date()}
+fs.writeFileSync(
+path.join(LOCALE_PATH, locale + '.js'),
+newData.length ? `${existingData.trim().slice(0, -2)},${newData.join(',')}};` : existingData
+);
+fs.writeFileSync(
+path.join(LOCALE_PATH, locale + '_log'),
+`${new Date()}
 Hash changes: ${hashChanges.length}

-${hashChanges.length ? hashChanges.map(v => ('Snippet name:' + v.snippetName +'\n Old hash: ' + v.oldHash + '\n New hash: ' + v.newHash + '\n')).join('\n') : ''}`);
+${
+hashChanges.length
+? hashChanges
+.map(
+v =>
+'Snippet name:' +
+v.snippetName +
+'\n Old hash: ' +
+v.oldHash +
+'\n New hash: ' +
+v.newHash +
+'\n'
+)
+.join('\n')
+: ''
+}`
+);
 });
@@ -7,8 +7,14 @@ const cp = require('child_process');
 const path = require('path');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
-console.log(`${chalk.green('NOBUILD')} Module build terminated, not a cron job or a custom build!`);
+if (
+util.isTravisCI() &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
+console.log(
+`${chalk.green('NOBUILD')} Module build terminated, not a cron job or a custom build!`
+);
 process.exit(0);
 }
 // Set variables for paths
@@ -32,10 +38,7 @@ try {
 let exportStr = 'export default {';
 // Read all snippets and store them appropriately
 for (const snippet of snippets) {
-const snippetData = fs.readFileSync(
-path.join(SNIPPETS_PATH, snippet),
-'utf8'
-);
+const snippetData = fs.readFileSync(path.join(SNIPPETS_PATH, snippet), 'utf8');
 const snippetName = snippet.replace('.md', '');
 // Check if a snippet is Node-only
 const isNodeSnippet = tagDatabase
@@ -16,7 +16,7 @@ if (!fs.existsSync(DIST)) fs.mkdirSync(DIST);
 const es5 = babel({ presets: [['env', { modules: false }]] });
 const min = minify({ comments: false });
 // Create the bundles
-(async() => {
+(async () => {
 const bundle = await rollup({ input: INPUT_FILE });
 const bundleES5 = await rollup({ input: INPUT_FILE, plugins: [es5] });
 const bundleMin = await rollup({ input: INPUT_FILE, plugins: [min] });
@@ -7,7 +7,7 @@ const fs = require('fs-extra'),
 path = require('path'),
 chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
+if (util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])) {
 console.log(`${chalk.green('NOBUILD')} Tagging terminated, parent commit is a Travis build!`);
 process.exit(0);
 }
@@ -28,11 +28,10 @@ console.time('Tagger');
 snippets = util.readSnippets(snippetsPath);
 // Load tag data from the database
 tagDbData = util.readTags();
-tagDbStats = Object.entries(tagDbData)
-.reduce((acc, val) => {
-val[1].forEach(v => acc.hasOwnProperty(v) ? acc[v]++ : (acc[v] = 1));
+tagDbStats = Object.entries(tagDbData).reduce((acc, val) => {
+val[1].forEach(v => (acc.hasOwnProperty(v) ? acc[v]++ : (acc[v] = 1)));
 return acc;
 }, {});
 // Update the listing of snippets in tag_database and log the statistics, along with missing scripts
 try {
 for (let snippet of Object.entries(snippets))
@@ -40,7 +39,9 @@ try {
 tagDbData.hasOwnProperty(snippet[0].slice(0, -3)) &&
 tagDbData[snippet[0].slice(0, -3)].join(',').trim()
 )
-output += `${snippet[0].slice(0, -3)}:${tagDbData[snippet[0].slice(0, -3)].join(',').trim()}\n`;
+output += `${snippet[0].slice(0, -3)}:${tagDbData[snippet[0].slice(0, -3)]
+.join(',')
+.trim()}\n`;
 else {
 output += `${snippet[0].slice(0, -3)}:uncategorized\n`;
 missingTags++;
@@ -55,7 +56,9 @@ try {
 }
 // Log statistics for the tag_database file
 console.log(`\n${chalk.bgWhite(chalk.black('=== TAG STATS ==='))}`);
-for (let tagData of Object.entries(tagDbStats).filter(v => v[0] !== 'undefined').sort((a,b) => a[0].localeCompare(b[0])))
+for (let tagData of Object.entries(tagDbStats)
+.filter(v => v[0] !== 'undefined')
+.sort((a, b) => a[0].localeCompare(b[0])))
 console.log(`${chalk.green(tagData[0])}: ${tagData[1]} snippets`);
 console.log(
 `${chalk.blue("New untagged snippets (will be tagged as 'uncategorized'):")} ${missingTags}\n`
@@ -4,11 +4,16 @@
 */

 // Load modules
-const fs = require('fs-extra'), path = require('path');
+const fs = require('fs-extra'),
+path = require('path');
 const childProcess = require('child_process');
 const chalk = require('chalk');
 const util = require('./util');
-if(util.isTravisCI() && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
+if (
+util.isTravisCI() &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
 console.log(`${chalk.green('NOBUILD')} Testing terminated, not a cron job or a custom build!`);
 process.exit(0);
 }
@@ -20,8 +25,11 @@ const TEST_PATH = './test';
 // Array of snippet names
 const snippetFiles = [];

-const snippetFilesActive = fs.readdirSync(SNIPPETS_ACTIVE, 'utf8').map(fileName => fileName.slice(0, -3));
-const snippetFilesArchive = fs.readdirSync(SNIPPETS_ARCHIVE, 'utf8')
+const snippetFilesActive = fs
+.readdirSync(SNIPPETS_ACTIVE, 'utf8')
+.map(fileName => fileName.slice(0, -3));
+const snippetFilesArchive = fs
+.readdirSync(SNIPPETS_ARCHIVE, 'utf8')
 .filter(fileName => !fileName.includes('README')) // -> Filters out main README.md file in Archieve which isn't a snippet
 .map(fileName => fileName.slice(0, -3));

@@ -32,15 +40,17 @@ console.time('Tester');
 snippetFiles
 .map(fileName => {
 // Check if fileName for snippet exist in test/ dir, if doesnt create
-fs.ensureDirSync(path.join(TEST_PATH,fileName));
+fs.ensureDirSync(path.join(TEST_PATH, fileName));

 // return fileName for later use
 return fileName;
 })
 .map(fileName => {
-const activeOrArchive = snippetFilesActive.includes(fileName) ? SNIPPETS_ACTIVE : SNIPPETS_ARCHIVE;
+const activeOrArchive = snippetFilesActive.includes(fileName)
+? SNIPPETS_ACTIVE
+: SNIPPETS_ARCHIVE;
 // Grab snippetData
-const fileData = fs.readFileSync(path.join(activeOrArchive,`${fileName}.md`), 'utf8');
+const fileData = fs.readFileSync(path.join(activeOrArchive, `${fileName}.md`), 'utf8');
 // Grab snippet Code blocks
 const fileCode = fileData.slice(fileData.search(/```\s*js/i), fileData.lastIndexOf('```') + 3);
 // Split code based on code markers
@@ -72,9 +82,9 @@ snippetFiles
 ].join('\n');

 // Write/Update exportFile which is snippetName.js in respective dir
-fs.writeFileSync(path.join(TEST_PATH,fileName,`${fileName}.js`), exportFile);
+fs.writeFileSync(path.join(TEST_PATH, fileName, `${fileName}.js`), exportFile);

-if ( !fs.existsSync(path.join(TEST_PATH,fileName,`${fileName}.test.js`)) ) {
+if (!fs.existsSync(path.join(TEST_PATH, fileName, `${fileName}.test.js`))) {
 // if snippetName.test.js doesn't exist inrespective dir exportTest
 fs.writeFileSync(`${TEST_PATH}/${fileName}/${fileName}.test.js`, exportTest);
 }
@@ -83,10 +93,9 @@ snippetFiles
 return fileName;
 });
 try {
-fs.writeFileSync(path.join(TEST_PATH,'testlog'),`Test log for: ${new Date().toString()}\n`);
+fs.writeFileSync(path.join(TEST_PATH, 'testlog'), `Test log for: ${new Date().toString()}\n`);
 childProcess.execSync(`npm test`);
-}
-catch (e) {
-fs.appendFileSync(path.join(TEST_PATH,'testlog'));
+} catch (e) {
+fs.appendFileSync(path.join(TEST_PATH, 'testlog'));
 }
 console.timeEnd('Tester');
@@ -3,8 +3,10 @@ const fs = require('fs-extra'),
 chalk = require('chalk'),
 crypto = require('crypto');

-const getMarkDownAnchor = (paragraphTitle) =>
-paragraphTitle.trim().toLowerCase()
+const getMarkDownAnchor = paragraphTitle =>
+paragraphTitle
+.trim()
+.toLowerCase()
 .replace(/[^\w\- ]+/g, '')
 .replace(/\s/g, '-')
 .replace(/\-+$/, '');
@@ -66,7 +68,6 @@ const readTags = () => {
 return data;
 })
 );
-
 } catch (err) {
 // Handle errors (hopefully not!)
 console.log(`${chalk.red('ERROR!')} During tag database loading: ${err}`);
@@ -102,7 +103,11 @@ const capitalize = (str, lowerRest = false) =>
 // Checks if current environment is Travis CI
 const isTravisCI = () => 'TRAVIS' in process.env && 'CI' in process.env;
 // Creates a hash for a value using the SHA-256 algorithm.
-const hashData = val => crypto.createHash('sha256').update(val).digest('hex');
+const hashData = val =>
+crypto
+.createHash('sha256')
+.update(val)
+.digest('hex');
 // Gets the code blocks for a snippet file.
 const getCodeBlocks = str => {
 const regex = /```[.\S\s]*?```/g;
290
scripts/web.js
290
scripts/web.js
@ -19,12 +19,19 @@ const unescapeHTML = str =>
|
|||||||
'&': '&',
|
'&': '&',
|
||||||
'<': '<',
|
'<': '<',
|
||||||
'>': '>',
|
'>': '>',
|
||||||
''': '\'',
|
''': "'",
|
||||||
'"': '"'
|
'"': '"'
|
||||||
}[tag] || tag)
|
}[tag] || tag)
|
||||||
);
|
);
|
||||||
if(util.isTravisCI() && /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE']) && process.env['TRAVIS_EVENT_TYPE'] !== 'cron' && process.env['TRAVIS_EVENT_TYPE'] !== 'api') {
|
if (
|
||||||
console.log(`${chalk.green('NOBUILD')} website build terminated, parent commit is a Travis build!`);
|
util.isTravisCI() &&
|
||||||
|
/^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE']) &&
|
||||||
|
process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
|
||||||
|
process.env['TRAVIS_EVENT_TYPE'] !== 'api'
|
||||||
|
) {
|
||||||
|
console.log(
|
||||||
|
`${chalk.green('NOBUILD')} website build terminated, parent commit is a Travis build!`
|
||||||
|
);
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
}
|
}
|
||||||
// Compile the mini.css framework and custom CSS styles, using `node-sass`.
|
// Compile the mini.css framework and custom CSS styles, using `node-sass`.
|
||||||
@ -35,7 +42,7 @@ sass.render(
|
|||||||
outFile: path.join('docs', 'mini.css'),
|
outFile: path.join('docs', 'mini.css'),
|
||||||
outputStyle: 'compressed'
|
outputStyle: 'compressed'
|
||||||
},
|
},
|
||||||
function (err, result) {
|
function(err, result) {
|
||||||
if (!err) {
|
if (!err) {
|
||||||
fs.writeFile(path.join('docs', 'mini.css'), result.css, function(err2) {
|
fs.writeFile(path.join('docs', 'mini.css'), result.css, function(err2) {
|
||||||
if (!err2) console.log(`${chalk.green('SUCCESS!')} mini.css file generated!`);
|
if (!err2) console.log(`${chalk.green('SUCCESS!')} mini.css file generated!`);
|
||||||
@ -54,7 +61,32 @@ const snippetsPath = './snippets',
|
|||||||
// Set variables for script
|
// Set variables for script
|
||||||
let snippets = {},
|
let snippets = {},
|
||||||
archivedSnippets = {},
|
archivedSnippets = {},
|
||||||
beginnerSnippetNames = ['everyNth', 'filterNonUnique', 'last', 'maxN', 'minN', 'nthElement', 'offset', 'sample', 'similarity', 'tail', 'currentURL', 'hasClass', 'getMeridiemSuffixOfInteger', 'factorial', 'fibonacci', 'gcd', 'isDivisible', 'isEven', 'isPrime', 'lcm', 'randomIntegerInRange', 'sum', 'reverseString', 'truncateString'],
|
beginnerSnippetNames = [
|
||||||
|
'everyNth',
|
||||||
|
'filterNonUnique',
|
||||||
|
'last',
|
||||||
|
'maxN',
|
||||||
|
'minN',
|
||||||
|
'nthElement',
|
||||||
|
'offset',
|
||||||
|
'sample',
|
||||||
|
'similarity',
|
||||||
|
'tail',
|
||||||
|
'currentURL',
|
||||||
|
'hasClass',
|
||||||
|
'getMeridiemSuffixOfInteger',
|
||||||
|
'factorial',
|
||||||
|
'fibonacci',
|
||||||
|
'gcd',
|
||||||
|
'isDivisible',
|
||||||
|
'isEven',
|
||||||
|
'isPrime',
|
||||||
|
'lcm',
|
||||||
|
'randomIntegerInRange',
|
||||||
|
'sum',
|
||||||
|
'reverseString',
|
||||||
|
'truncateString'
|
||||||
|
],
|
||||||
startPart = '',
|
startPart = '',
|
||||||
endPart = '',
|
endPart = '',
|
||||||
output = '',
|
output = '',
|
||||||
@ -64,7 +96,6 @@ let snippets = {},
|
|||||||
archivedStartPart = '',
|
archivedStartPart = '',
|
||||||
archivedEndPart = '',
|
archivedEndPart = '',
|
||||||
archivedOutput = '',
|
archivedOutput = '',
|
||||||
|
|
||||||
indexStaticFile = '',
|
indexStaticFile = '',
|
||||||
pagesOutput = [],
|
pagesOutput = [],
|
||||||
tagDbData = {};
|
tagDbData = {};
|
||||||
@ -74,16 +105,21 @@ console.time('Webber');
|
|||||||
snippets = util.readSnippets(snippetsPath);
|
snippets = util.readSnippets(snippetsPath);
|
||||||
archivedSnippets = util.readSnippets(archivedSnippetsPath);
|
archivedSnippets = util.readSnippets(archivedSnippetsPath);
|
||||||
|
|
||||||
|
|
||||||
// Load static parts for all pages
|
// Load static parts for all pages
|
||||||
try {
|
try {
|
||||||
startPart = fs.readFileSync(path.join(staticPartsPath, 'page-start.html'), 'utf8');
|
startPart = fs.readFileSync(path.join(staticPartsPath, 'page-start.html'), 'utf8');
|
||||||
endPart = fs.readFileSync(path.join(staticPartsPath, 'page-end.html'), 'utf8');
|
endPart = fs.readFileSync(path.join(staticPartsPath, 'page-end.html'), 'utf8');
|
||||||
|
|
||||||
beginnerStartPart = fs.readFileSync(path.join(staticPartsPath, 'beginner-page-start.html'), 'utf8');
|
beginnerStartPart = fs.readFileSync(
|
||||||
|
path.join(staticPartsPath, 'beginner-page-start.html'),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
beginnerEndPart = fs.readFileSync(path.join(staticPartsPath, 'beginner-page-end.html'), 'utf8');
|
beginnerEndPart = fs.readFileSync(path.join(staticPartsPath, 'beginner-page-end.html'), 'utf8');
|
||||||
|
|
||||||
archivedStartPart = fs.readFileSync(path.join(staticPartsPath, 'archived-page-start.html'), 'utf8');
|
archivedStartPart = fs.readFileSync(
|
||||||
|
path.join(staticPartsPath, 'archived-page-start.html'),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
archivedEndPart = fs.readFileSync(path.join(staticPartsPath, 'archived-page-end.html'), 'utf8');
|
archivedEndPart = fs.readFileSync(path.join(staticPartsPath, 'archived-page-end.html'), 'utf8');
|
||||||
|
|
||||||
indexStaticFile = fs.readFileSync(path.join(staticPartsPath, 'index.html'), 'utf8');
|
indexStaticFile = fs.readFileSync(path.join(staticPartsPath, 'index.html'), 'utf8');
|
||||||
@ -95,7 +131,11 @@ try {
|
|||||||
// Load tag data from the database
|
// Load tag data from the database
|
||||||
tagDbData = util.readTags();
|
tagDbData = util.readTags();
|
||||||
// Create the output for the index.html file (only locally or on Travis CRON or custom job)
|
// Create the output for the index.html file (only locally or on Travis CRON or custom job)
|
||||||
if(!util.isTravisCI() || (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api'))) {
|
if (
|
||||||
|
!util.isTravisCI() ||
|
||||||
|
(util.isTravisCI() &&
|
||||||
|
(process.env['TRAVIS_EVENT_TYPE'] === 'cron' || process.env['TRAVIS_EVENT_TYPE'] === 'api'))
|
||||||
|
) {
|
||||||
try {
|
try {
|
||||||
// Shuffle the array of snippets, pick 3
|
// Shuffle the array of snippets, pick 3
|
||||||
let indexDailyPicks = '';
|
let indexDailyPicks = '';
|
||||||
@ -114,41 +154,94 @@ if(!util.isTravisCI() || (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE']
|
|||||||
md
|
md
|
||||||
.render(`\n${snippets[snippet[0]]}`)
|
.render(`\n${snippets[snippet[0]]}`)
|
||||||
.replace(/<h3/g, `<h3 id="${snippet[0].toLowerCase()}" class="section double-padded"`)
|
.replace(/<h3/g, `<h3 id="${snippet[0].toLowerCase()}" class="section double-padded"`)
|
||||||
.replace(/<\/h3>/g, `${snippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''}</h3>`)
|
.replace(
|
||||||
|
/<\/h3>/g,
|
||||||
|
`${snippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''}</h3>`
|
||||||
|
)
|
||||||
.replace(/<\/h3>/g, '</h3><div class="section double-padded">')
|
.replace(/<\/h3>/g, '</h3><div class="section double-padded">')
|
||||||
.replace(/<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `<pre class="language-js">${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}</pre>`)
|
.replace(
|
||||||
|
/<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm,
|
||||||
|
(match, p1) =>
|
||||||
|
`<pre class="language-js">${Prism.highlight(
|
||||||
|
unescapeHTML(p1),
|
||||||
|
Prism.languages.javascript
|
||||||
|
)}</pre>`
|
||||||
|
)
|
||||||
.replace(/<\/pre>\s+<pre/g, '</pre><label class="collapse">Show examples</label><pre') +
|
.replace(/<\/pre>\s+<pre/g, '</pre><label class="collapse">Show examples</label><pre') +
|
||||||
'<button class="primary clipboard-copy">📋 Copy to clipboard</button>' +
|
'<button class="primary clipboard-copy">📋 Copy to clipboard</button>' +
|
||||||
'</div></div>';
|
'</div></div>';
|
||||||
// Select the first snippet from today's picks
|
// Select the first snippet from today's picks
|
||||||
indexDailyPicks = indexDailyPicks.replace('card fluid pick', 'card fluid pick selected');
|
indexDailyPicks = indexDailyPicks.replace('card fluid pick', 'card fluid pick selected');
|
||||||
// Optimize punctuation nodes
|
// Optimize punctuation nodes
|
||||||
indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
|
indexDailyPicks = util.optimizeNodes(
|
||||||
|
indexDailyPicks,
|
||||||
|
/<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
|
||||||
|
(match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
|
||||||
|
);
|
||||||
// Optimize operator nodes
|
// Optimize operator nodes
|
||||||
indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
|
indexDailyPicks = util.optimizeNodes(
|
||||||
|
indexDailyPicks,
|
||||||
|
/<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
|
||||||
|
(match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
|
||||||
|
);
|
||||||
// Optimize keyword nodes
|
// Optimize keyword nodes
|
||||||
indexDailyPicks = util.optimizeNodes(indexDailyPicks, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
|
indexDailyPicks = util.optimizeNodes(
|
||||||
|
indexDailyPicks,
|
||||||
|
/<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
|
||||||
|
(match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
|
||||||
|
);
|
||||||
// Put the daily picks into the page
|
// Put the daily picks into the page
|
||||||
indexStaticFile = indexStaticFile.replace('$daily-picks', indexDailyPicks);
|
indexStaticFile = indexStaticFile.replace('$daily-picks', indexDailyPicks);
|
||||||
// Use the Github API to get the needed data
|
// Use the Github API to get the needed data
|
||||||
const githubApi = 'api.github.com';
|
const githubApi = 'api.github.com';
|
||||||
const headers = util.isTravisCI()
|
const headers = util.isTravisCI()
|
||||||
? { 'User-Agent': '30-seconds-of-code', 'Authorization': 'token ' + process.env['GH_TOKEN']}
|
? { 'User-Agent': '30-seconds-of-code', Authorization: 'token ' + process.env['GH_TOKEN'] }
|
||||||
: { 'User-Agent': '30-seconds-of-code'};
|
: { 'User-Agent': '30-seconds-of-code' };
|
||||||
// Test the API's rate limit (keep for various reasons)
|
// Test the API's rate limit (keep for various reasons)
|
||||||
https.get({host: githubApi, path: '/rate_limit?', headers: headers}, res => {
|
https.get({ host: githubApi, path: '/rate_limit?', headers: headers }, res => {
|
||||||
res.on('data', function (chunk) {
|
res.on('data', function(chunk) {
|
||||||
console.log(`Remaining requests: ${JSON.parse(chunk).resources.core.remaining}`);
|
console.log(`Remaining requests: ${JSON.parse(chunk).resources.core.remaining}`);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
// Send requests and wait for responses, write to the page
|
// Send requests and wait for responses, write to the page
|
||||||
https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/commits?per_page=1', headers: headers}, resCommits => {
|
https.get(
|
||||||
https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/contributors?per_page=1', headers: headers}, resContributors => {
|
{
|
||||||
https.get({host: githubApi, path: '/repos/chalarangelo/30-seconds-of-code/stargazers?per_page=1', headers: headers}, resStars => {
|
host: githubApi,
|
||||||
let commits = resCommits.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, ''),
|
path: '/repos/chalarangelo/30-seconds-of-code/commits?per_page=1',
|
||||||
contribs = resContributors.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, ''),
|
headers: headers
|
||||||
stars = resStars.headers.link.split('&').slice(-1)[0].replace(/[^\d]/g, '');
|
},
|
||||||
indexStaticFile = indexStaticFile.replace(/\$snippet-count/g, Object.keys(snippets).length).replace(/\$commit-count/g, commits).replace(/\$contrib-count/g, contribs).replace(/\$star-count/g, stars);
|
resCommits => {
|
||||||
|
https.get(
|
||||||
|
{
|
||||||
|
host: githubApi,
|
||||||
|
path: '/repos/chalarangelo/30-seconds-of-code/contributors?per_page=1',
|
||||||
|
headers: headers
|
||||||
|
},
|
||||||
|
resContributors => {
|
||||||
|
https.get(
|
||||||
|
{
|
||||||
|
host: githubApi,
|
||||||
|
path: '/repos/chalarangelo/30-seconds-of-code/stargazers?per_page=1',
|
||||||
|
headers: headers
|
||||||
|
},
|
||||||
|
resStars => {
|
||||||
|
let commits = resCommits.headers.link
|
||||||
|
.split('&')
|
||||||
|
.slice(-1)[0]
|
||||||
|
.replace(/[^\d]/g, ''),
|
||||||
|
contribs = resContributors.headers.link
|
||||||
|
.split('&')
|
||||||
|
.slice(-1)[0]
|
||||||
|
.replace(/[^\d]/g, ''),
|
||||||
|
stars = resStars.headers.link
|
||||||
|
.split('&')
|
||||||
|
.slice(-1)[0]
|
||||||
|
.replace(/[^\d]/g, '');
|
||||||
|
indexStaticFile = indexStaticFile
|
||||||
|
.replace(/\$snippet-count/g, Object.keys(snippets).length)
|
||||||
|
.replace(/\$commit-count/g, commits)
|
||||||
|
.replace(/\$contrib-count/g, contribs)
|
||||||
|
.replace(/\$star-count/g, stars);
|
||||||
indexStaticFile = minify(indexStaticFile, {
|
indexStaticFile = minify(indexStaticFile, {
|
||||||
collapseBooleanAttributes: true,
|
collapseBooleanAttributes: true,
|
||||||
collapseWhitespace: true,
|
collapseWhitespace: true,
|
||||||
@ -168,10 +261,12 @@ if(!util.isTravisCI() || (util.isTravisCI() && (process.env['TRAVIS_EVENT_TYPE']
|
|||||||
// Generate 'index.html' file
|
// Generate 'index.html' file
|
||||||
fs.writeFileSync(path.join(docsPath, 'index.html'), indexStaticFile);
|
fs.writeFileSync(path.join(docsPath, 'index.html'), indexStaticFile);
|
||||||
console.log(`${chalk.green('SUCCESS!')} index.html file generated!`);
|
console.log(`${chalk.green('SUCCESS!')} index.html file generated!`);
|
||||||
});
|
}
|
||||||
});
|
);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.log(`${chalk.red('ERROR!')} During index.html generation: ${err}`);
|
console.log(`${chalk.red('ERROR!')} During index.html generation: ${err}`);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
@ -185,8 +280,16 @@ try {
|
|||||||
// Loop over tags and snippets to create the table of contents
|
// Loop over tags and snippets to create the table of contents
|
||||||
for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
|
for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
|
||||||
.filter(v => v)
|
.filter(v => v)
|
||||||
.sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b))) {
|
.sort(
|
||||||
output += '<h3>' +
|
(a, b) =>
|
||||||
|
util.capitalize(a, true) === 'Uncategorized'
|
||||||
|
? 1
|
||||||
|
: util.capitalize(b, true) === 'Uncategorized'
|
||||||
|
? -1
|
||||||
|
: a.localeCompare(b)
|
||||||
|
)) {
|
||||||
|
output +=
|
||||||
|
'<h3>' +
|
||||||
md
|
md
|
||||||
.render(`${util.capitalize(tag, true)}\n`)
|
.render(`${util.capitalize(tag, true)}\n`)
|
||||||
.replace(/<p>/g, '')
|
.replace(/<p>/g, '')
|
||||||
@ -200,13 +303,23 @@ try {
|
|||||||
.replace(/<a/g, `<a class="sublink-1" tags="${taggedSnippet[1].join(',')}"`);
|
.replace(/<a/g, `<a class="sublink-1" tags="${taggedSnippet[1].join(',')}"`);
|
||||||
output += '\n';
|
output += '\n';
|
||||||
}
|
}
|
||||||
output += '</nav><main class="col-sm-12 col-md-8 col-lg-9" style="height: 100%;overflow-y: auto; background: #eceef2; padding: 0;">';
|
output +=
|
||||||
|
'</nav><main class="col-sm-12 col-md-8 col-lg-9" style="height: 100%;overflow-y: auto; background: #eceef2; padding: 0;">';
|
||||||
output += '<a id="top"> </a>';
|
output += '<a id="top"> </a>';
|
||||||
// Loop over tags and snippets to create the list of snippets
|
// Loop over tags and snippets to create the list of snippets
|
||||||
for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
|
for (let tag of [...new Set(Object.entries(tagDbData).map(t => t[1][0]))]
|
||||||
.filter(v => v)
|
.filter(v => v)
|
||||||
.sort((a, b) => util.capitalize(a, true) === 'Uncategorized' ? 1 : util.capitalize(b, true) === 'Uncategorized' ? -1 : a.localeCompare(b))) {
|
.sort(
|
||||||
let localOutput = output.replace(/\$tag/g, util.capitalize(tag)).replace(new RegExp(`./${tag}#`, 'g'), '#');
|
(a, b) =>
|
||||||
|
util.capitalize(a, true) === 'Uncategorized'
|
||||||
|
? 1
|
||||||
|
: util.capitalize(b, true) === 'Uncategorized'
|
||||||
|
? -1
|
||||||
|
: a.localeCompare(b)
|
||||||
|
)) {
|
||||||
|
let localOutput = output
|
||||||
|
.replace(/\$tag/g, util.capitalize(tag))
|
||||||
|
.replace(new RegExp(`./${tag}#`, 'g'), '#');
|
||||||
localOutput += md
|
localOutput += md
|
||||||
.render(`## ${util.capitalize(tag, true)}\n`)
|
.render(`## ${util.capitalize(tag, true)}\n`)
|
||||||
.replace(/<h2>/g, '<h2 style="text-align:center;">');
|
.replace(/<h2>/g, '<h2 style="text-align:center;">');
|
||||||
@ -215,22 +328,49 @@ try {
'<div class="card fluid">' +
md
  .render(`\n${snippets[taggedSnippet[0] + '.md']}`)
.replace(/<h3/g, `<h3 id="${taggedSnippet[0].toLowerCase()}" class="section double-padded"`)
.replace(/<\/h3>/g, `${taggedSnippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''}</h3>`)
  .replace(
    /<h3/g,
    `<h3 id="${taggedSnippet[0].toLowerCase()}" class="section double-padded"`
  )
  .replace(
    /<\/h3>/g,
    `${
      taggedSnippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''
    }</h3>`
  )
  .replace(/<\/h3>/g, '</h3><div class="section double-padded">')
.replace(/<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `<pre class="language-js">${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}</pre>`)
  .replace(
    /<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm,
    (match, p1) =>
      `<pre class="language-js">${Prism.highlight(
        unescapeHTML(p1),
        Prism.languages.javascript
      )}</pre>`
  )
  .replace(/<\/pre>\s+<pre/g, '</pre><label class="collapse">Show examples</label><pre') +
'<button class="primary clipboard-copy">📋 Copy to clipboard</button>' +
'</div></div>';
// Add the ending static part
localOutput += `\n${endPart + '\n'}`;
// Optimize punctuation nodes
localOutput = util.optimizeNodes(localOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
localOutput = util.optimizeNodes(
  localOutput,
  /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
);
// Optimize operator nodes
localOutput = util.optimizeNodes(localOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
localOutput = util.optimizeNodes(
  localOutput,
  /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
);
// Optimize keyword nodes
localOutput = util.optimizeNodes(localOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
pagesOutput.push({'tag': tag, 'content': localOutput});
localOutput = util.optimizeNodes(
  localOutput,
  /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
);
pagesOutput.push({ tag: tag, content: localOutput });
}
// Minify output
pagesOutput.forEach(page => {
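util.optimizeNodes is called repeatedly above but is defined in util.js, which this commit does not touch. Judging from the call sites, it collapses adjacent Prism token spans of the same class into a single span to shrink the generated HTML. A hedged sketch of such a helper, based on that assumption rather than the actual implementation:

// Keeps applying the replacement until the string stops changing, so runs of
// three or more adjacent same-class spans are merged as well.
const optimizeNodes = (data, regexp, replacer) => {
  let output = data;
  let previous;
  do {
    previous = output;
    output = output.replace(regexp, replacer);
  } while (output !== previous);
  return output;
};

It would be invoked exactly as in the diff: optimizeNodes(localOutput, /<span class="token punctuation">.../gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`).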
@ -250,7 +390,7 @@ try {
removeStyleLinkTypeAttributes: false,
trimCustomFragments: true
});
fs.writeFileSync(path.join(docsPath, page.tag+'.html'), page.content);
fs.writeFileSync(path.join(docsPath, page.tag + '.html'), page.content);
console.log(`${chalk.green('SUCCESS!')} ${page.tag}.html file generated!`);
});
} catch (err) {
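Only the tail of the minification options is visible in this hunk. For context, a sketch of what the surrounding html-minifier call typically looks like; the full option set the script actually passes is not shown here, so the first three options and the minifiedContent name are illustrative assumptions:

const minify = require('html-minifier').minify;

const minifiedContent = minify(page.content, {
  collapseWhitespace: true, // assumption: common choice for generated pages
  minifyCSS: true, // assumption
  minifyJS: true, // assumption
  removeStyleLinkTypeAttributes: false, // shown in this hunk
  trimCustomFragments: true // shown in this hunk
});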
@ -266,7 +406,7 @@ try {

// Filter beginner snippets
const filteredBeginnerSnippets = Object.keys(snippets)
.filter(key => beginnerSnippetNames.map(name => name+'.md').includes(key))
  .filter(key => beginnerSnippetNames.map(name => name + '.md').includes(key))
  .reduce((obj, key) => {
    obj[key] = snippets[key];
    return obj;
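The filter/reduce chain above builds a new object containing only the snippets whose names appear in beginnerSnippetNames. The same pattern in isolation, with illustrative sample data:

const snippets = { 'all.md': '...', 'any.md': '...', 'chunk.md': '...' };
const beginnerSnippetNames = ['all', 'any'];

// Keep only the keys on the whitelist, then rebuild the object from those keys.
const filtered = Object.keys(snippets)
  .filter(key => beginnerSnippetNames.map(name => name + '.md').includes(key))
  .reduce((obj, key) => {
    obj[key] = snippets[key];
    return obj;
  }, {});

console.log(Object.keys(filtered)); // [ 'all.md', 'any.md' ]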
@ -280,20 +420,41 @@ try {
md
  .render(`\n${snippets[snippet[0]]}`)
  .replace(/<h3/g, `<h3 id="${snippet[0].toLowerCase()}" class="section double-padded"`)
.replace(/<\/h3>/g, `${snippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''}</h3>`)
  .replace(
    /<\/h3>/g,
    `${snippet[1].includes('advanced') ? '<mark class="tag">advanced</mark>' : ''}</h3>`
  )
  .replace(/<\/h3>/g, '</h3><div class="section double-padded">')
.replace(/<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `<pre class="language-js">${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}</pre>`)
  .replace(
    /<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm,
    (match, p1) =>
      `<pre class="language-js">${Prism.highlight(
        unescapeHTML(p1),
        Prism.languages.javascript
      )}</pre>`
  )
  .replace(/<\/pre>\s+<pre/g, '</pre><label class="collapse">Show examples</label><pre') +
'<button class="primary clipboard-copy">📋 Copy to clipboard</button>' +
'</div></div></div></div>';

// Optimize punctuation nodes
beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
beginnerOutput = util.optimizeNodes(
  beginnerOutput,
  /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
);
// Optimize operator nodes
beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
beginnerOutput = util.optimizeNodes(
  beginnerOutput,
  /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
);
// Optimize keyword nodes
beginnerOutput = util.optimizeNodes(beginnerOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
beginnerOutput = util.optimizeNodes(
  beginnerOutput,
  /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
);

beginnerOutput += `${beginnerEndPart}`;
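The code-block replace used above passes the captured markdown output through an unescapeHTML helper before re-highlighting it with Prism, since the markdown renderer escapes entities inside rendered <code> blocks. That helper is not part of this diff; a minimal sketch of what it presumably does, inferred from its usage:

// Reverses the HTML entity escaping applied by the markdown renderer so that
// Prism.highlight receives plain JavaScript source.
const unescapeHTML = str =>
  str.replace(
    /&amp;|&lt;|&gt;|&#39;|&quot;/g,
    tag =>
      ({
        '&amp;': '&',
        '&lt;': '<',
        '&gt;': '>',
        '&#39;': "'",
        '&quot;': '"'
      }[tag] || tag)
  );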
@ -316,7 +477,6 @@ try {
});
fs.writeFileSync(path.join(docsPath, 'beginner.html'), minifiedBeginnerOutput);
console.log(`${chalk.green('SUCCESS!')} beginner.html file generated!`);

} catch (err) {
console.log(`${chalk.red('ERROR!')} During beginner.html generation: ${err}`);
process.exit(1);
@ -347,17 +507,36 @@ try {
.render(`\n${filteredArchivedSnippets[snippet[0]]}`)
  .replace(/<h3/g, `<h3 id="${snippet[0].toLowerCase()}" class="section double-padded"`)
  .replace(/<\/h3>/g, '</h3><div class="section double-padded">')
.replace(/<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm, (match, p1) => `<pre class="language-js">${Prism.highlight(unescapeHTML(p1), Prism.languages.javascript)}</pre>`)
  .replace(
    /<pre><code class="language-js">([^\0]*?)<\/code><\/pre>/gm,
    (match, p1) =>
      `<pre class="language-js">${Prism.highlight(
        unescapeHTML(p1),
        Prism.languages.javascript
      )}</pre>`
  )
  .replace(/<\/pre>\s+<pre/g, '</pre><label class="collapse">Show examples</label><pre') +
'<button class="primary clipboard-copy">📋 Copy to clipboard</button>' +
'</div></div></div></div>';

// Optimize punctuation nodes
archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`);
archivedOutput = util.optimizeNodes(
  archivedOutput,
  /<span class="token punctuation">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token punctuation">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token punctuation">${p1}${p2}${p3}</span>`
);
// Optimize operator nodes
archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`);
archivedOutput = util.optimizeNodes(
  archivedOutput,
  /<span class="token operator">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token operator">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token operator">${p1}${p2}${p3}</span>`
);
// Optimize keyword nodes
archivedOutput = util.optimizeNodes(archivedOutput, /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm, (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`);
archivedOutput = util.optimizeNodes(
  archivedOutput,
  /<span class="token keyword">([^\0<]*?)<\/span>([\n\r\s]*)<span class="token keyword">([^\0]*?)<\/span>/gm,
  (match, p1, p2, p3) => `<span class="token keyword">${p1}${p2}${p3}</span>`
);

archivedOutput += `${archivedEndPart}`;
@ -381,7 +560,6 @@ try {

fs.writeFileSync(path.join(docsPath, 'archive.html'), minifiedArchivedOutput);
console.log(`${chalk.green('SUCCESS!')} archive.html file generated!`);

} catch (err) {
console.log(`${chalk.red('ERROR!')} During archive.html generation: ${err}`);
process.exit(1);