Cleanup for scripts folder
@@ -47,8 +47,7 @@ if (
   // Store the data read from each snippet in the appropriate object
   for (const name of snippetFilenames.filter(s => s !== 'README.md'))
     snippets[name] = fs.readFileSync(path.join(SNIPPETS_ARCHIVE_PATH, name), 'utf8');
-}
-catch (err) {
+} catch (err) {
   console.log(`${chalk.red('ERROR!')} During snippet loading: ${err}`);
   process.exit(1);
 }
@@ -70,8 +69,7 @@ if (
 
   // Write to the README file of the archive
   fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH, 'README.md'), output);
-}
-catch (err) {
+} catch (err) {
   console.log(`${chalk.red('ERROR!')} During README generation for snippets archive: ${err}`);
   process.exit(1);
 }
@@ -110,8 +108,7 @@ snippets = util.readSnippets(SNIPPETS_PATH);
 try {
   startPart = fs.readFileSync(path.join(STATIC_PARTS_PATH, 'README-start.md'), 'utf8');
   endPart = fs.readFileSync(path.join(STATIC_PARTS_PATH, 'README-end.md'), 'utf8');
-}
-catch (err) {
+} catch (err) {
   console.log(`${chalk.red('ERROR!')} During static part loading: ${err}`);
   process.exit(1);
 }
@@ -171,8 +168,7 @@ try {
   output += `\n${endPart}\n`;
   // Write to the README file
   fs.writeFileSync('README.md', output);
-}
-catch (err) {
+} catch (err) {
   console.log(`${chalk.red('ERROR!')} During README generation: ${err}`);
   process.exit(1);
 }
@@ -1,94 +1,93 @@
-/*
-  This is the extractor script that generates the snippets.json and snippetsArchive.json files.
-  Run using `npm run extractor`.
-*/
-// Load modules
-const fs = require('fs-extra');
-const path = require('path');
-const chalk = require('chalk');
-const util = require('./util');
-// Paths
-const SNIPPETS_PATH = './snippets';
-const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
-const OUTPUT_PATH = './snippet_data';
-// Check if running on Travis - only build for cron jobs and custom builds
-if (
-  util.isTravisCI() &&
-  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
-  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
-) {
-  console.log(`${chalk.green('NOBUILD')} snippet extraction terminated, not a cron or api build!`);
-  process.exit(0);
-}
-// Read data
-let snippets = {},
-  archivedSnippets = {},
-  tagDbData = {};
-console.time('Extractor');
-snippets = util.readSnippets(SNIPPETS_PATH);
-archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
-tagDbData = util.readTags();
-// Extract snippet data
-let snippetData = Object.keys(snippets).map(key => {
-  return {
-    id: key.slice(0, -3),
-    type: 'snippet',
-    attributes: {
-      fileName: key,
-      text: util.getTextualContent(snippets[key]).trim(),
-      codeBlocks: util.getCodeBlocks(snippets[key]),
-      tags: tagDbData[key.slice(0, -3)]
-    },
-    meta: {
-      archived: false,
-      hash: util.hashData(snippets[key])
-    }
-  };
-});
-// Extract archived snippet data
-let snippetArchiveData = Object.keys(archivedSnippets).map(key => {
-  return {
-    id: key.slice(0, -3),
-    type: 'snippet',
-    attributes: {
-      fileName: key,
-      text: util.getTextualContent(archivedSnippets[key]).trim(),
-      codeBlocks: util.getCodeBlocks(archivedSnippets[key]),
-      tags: []
-    },
-    meta: {
-      archived: true,
-      hash: util.hashData(archivedSnippets[key])
-    }
-  };
-});
-const completeData = {
-  data: [...snippetData, ...snippetArchiveData],
-  meta: {
-    specification: 'http://jsonapi.org/format/'
-  }
-};
-let listingData = {
-  data:
-    completeData.data.map(v => ({
-      id: v.id,
-      type: 'snippetListing',
-      attributes: {
-        tags: v.attributes.tags,
-        archived: v.meta.archived
-      },
-      meta: {
-        hash: v.meta.hash
-      }
-    }))
-  ,
-  meta: {
-    specification: 'http://jsonapi.org/format/'
-  }
-};
-// Write files
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippets.json'), JSON.stringify(completeData, null, 2));
-fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetList.json'), JSON.stringify(listingData, null, 2));
-// Display messages and time
-console.log(`${chalk.green('SUCCESS!')} snippets.json and snippetList.json files generated!`);
-console.timeEnd('Extractor');
+/*
+  This is the extractor script that generates the snippets.json and snippetsArchive.json files.
+  Run using `npm run extractor`.
+*/
+// Load modules
+const fs = require('fs-extra');
+const path = require('path');
+const chalk = require('chalk');
+const util = require('./util');
+// Paths
+const SNIPPETS_PATH = './snippets';
+const SNIPPETS_ARCHIVE_PATH = './snippets_archive';
+const OUTPUT_PATH = './snippet_data';
+// Check if running on Travis - only build for cron jobs and custom builds
+if (
+  util.isTravisCI() &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
+  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
+) {
+  console.log(`${chalk.green('NOBUILD')} snippet extraction terminated, not a cron or api build!`);
+  process.exit(0);
+}
+// Read data
+let snippets = {},
+  archivedSnippets = {},
+  tagDbData = {};
+console.time('Extractor');
+snippets = util.readSnippets(SNIPPETS_PATH);
+archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
+tagDbData = util.readTags();
+// Extract snippet data
+let snippetData = Object.keys(snippets).map(key => {
+  return {
+    id: key.slice(0, -3),
+    type: 'snippet',
+    attributes: {
+      fileName: key,
+      text: util.getTextualContent(snippets[key]).trim(),
+      codeBlocks: util.getCodeBlocks(snippets[key]),
+      tags: tagDbData[key.slice(0, -3)]
+    },
+    meta: {
+      archived: false,
+      hash: util.hashData(snippets[key])
+    }
+  };
+});
+// Extract archived snippet data
+let snippetArchiveData = Object.keys(archivedSnippets).map(key => {
+  return {
+    id: key.slice(0, -3),
+    type: 'snippet',
+    attributes: {
+      fileName: key,
+      text: util.getTextualContent(archivedSnippets[key]).trim(),
+      codeBlocks: util.getCodeBlocks(archivedSnippets[key]),
+      tags: []
+    },
+    meta: {
+      archived: true,
+      hash: util.hashData(archivedSnippets[key])
+    }
+  };
+});
+const completeData = {
+  data: [...snippetData, ...snippetArchiveData],
+  meta: {
+    specification: 'http://jsonapi.org/format/'
+  }
+};
+let listingData = {
+  data:
+    completeData.data.map(v => ({
+      id: v.id,
+      type: 'snippetListing',
+      attributes: {
+        tags: v.attributes.tags,
+        archived: v.meta.archived
+      },
+      meta: {
+        hash: v.meta.hash
+      }
+    })),
+
+  meta: {
+    specification: 'http://jsonapi.org/format/'
+  }
+};
+// Write files
+fs.writeFileSync(path.join(OUTPUT_PATH, 'snippets.json'), JSON.stringify(completeData, null, 2));
+fs.writeFileSync(path.join(OUTPUT_PATH, 'snippetList.json'), JSON.stringify(listingData, null, 2));
+// Display messages and time
+console.log(`${chalk.green('SUCCESS!')} snippets.json and snippetList.json files generated!`);
@@ -41,9 +41,9 @@ const getTermLinkMarkdownBlock = termTitle => {
 };
 
 const glossaryTableOfContentsReducer = (accumulator, currentFile) => {
-  if (accumulator === fileTitles[0]) {
+  if (accumulator === fileTitles[0])
     return getTermLinkMarkdownBlock(accumulator) + getTermLinkMarkdownBlock(currentFile);
-  }
 
   return accumulator + getTermLinkMarkdownBlock(currentFile);
 };
@@ -77,8 +77,7 @@ try {
     console.log(`${chalk.green('SUCCESS!')} Snippet files linted!`);
     console.timeEnd('Linter');
   });
-}
-catch (err) {
+} catch (err) {
   console.log(`${chalk.red('ERROR!')} During linting: ${err}`);
   process.exit(1);
 }
@@ -18,19 +18,19 @@ const MODULE_NAME = '_30s';
 const DIST = './dist';
 // Regex for selecting code blocks
 const codeRE = /```\s*js([\s\S]*?)```/;
-// Read snippets, build packages
-(async () => {
+// Read snippets, build packages
+(async() => {
   // Start the timer of the script
   console.time('Packager');
   try {
     const tagDatabase = fs.readFileSync('tag_database', 'utf8');
-    const nodeSnippets = tagDatabase.split('\n').filter(v => v.search(/:.*node/g) !== -1).map(v => v.slice(0,v.indexOf(':')));
+    const nodeSnippets = tagDatabase.split('\n').filter(v => v.search(/:.*node/g) !== -1).map(v => v.slice(0, v.indexOf(':')));
     const snippets = fs.readdirSync(SNIPPETS_PATH);
     const snippetExports = `module.exports = {${snippets.map(v => v.replace('.md', '')).join(',')}}`;
     let requires = [];
     let importData = '';
     const archivedSnippets = fs.readdirSync(SNIPPETS_ARCHIVE_PATH).filter(v => v !== 'README.md');
-    const testExports = `module.exports = {${[...snippets,...archivedSnippets].map(v => v.replace('.md', '')).join(',')}}`;
+    const testExports = `module.exports = {${[...snippets, ...archivedSnippets].map(v => v.replace('.md', '')).join(',')}}`;
     // Create `temp` and `dist` folders if they don't already exist.
     if (!fs.existsSync(DIST)) fs.mkdirSync(DIST);
     // Write `imports.js`
@@ -43,7 +43,7 @@ const codeRE = /```\s*js([\s\S]*?)```/;
       let code = snippetData.match(codeRE)[1].replace('\n', '');
       if (nodeSnippets.includes(snippetName)) {
        requires.push(code.match(/const.*=.*require\(([^\)]*)\);/g));
-        code = code.replace(/const.*=.*require\(([^\)]*)\);/g,'');
+        code = code.replace(/const.*=.*require\(([^\)]*)\);/g, '');
       }
       importData += code;
     });
@@ -67,7 +67,7 @@ const codeRE = /```\s*js([\s\S]*?)```/;
       console.timeEnd('Packager');
       process.exit(0);
     }
-
+
     // Write to the proper files and start the `rollup` script
     const es5 = babel({
       presets: ['@babel/preset-env']
@@ -85,7 +85,7 @@ const codeRE = /```\s*js([\s\S]*?)```/;
      file: `${DIST}/${MODULE_NAME}.esm.js`,
      name: MODULE_NAME,
      format: 'es'
-    });
+    });
    // UMD ES5
    const bundleES5 = await rollup({ input: IMPORTS, plugins: [es5] });
    await bundleES5.write({
@@ -115,4 +115,4 @@ const codeRE = /```\s*js([\s\S]*?)```/;
     console.log(`${chalk.red('ERROR!')} During module creation: ${err}`);
     process.exit(1);
   }
-})();
\ No newline at end of file
+})();
@@ -38,8 +38,7 @@ try {
       output += `${snippet[0].slice(0, -3)}:${tagDbData[snippet[0].slice(0, -3)]
         .join(',')
         .trim()}\n`;
-    }
-    else {
+    } else {
       output += `${snippet[0].slice(0, -3)}:uncategorized\n`;
       missingTags++;
       console.log(`${chalk.yellow('Tagged uncategorized:')} ${snippet[0].slice(0, -3)}`);
@@ -47,8 +46,7 @@ try {
   }
   // Write to tag_database
   fs.writeFileSync('tag_database', output);
-}
-catch (err) {
+} catch (err) {
   // Handle errors (hopefully not!)
   console.log(`${chalk.red('ERROR!')} During tag_database generation: ${err}`);
   process.exit(1);
@@ -28,7 +28,7 @@ try {
   const orphanedTests = [...definedTests.filter(v => ![...snippets, ...archivedSnippets].includes(v))];
   orphanedTests.forEach(snippet => {
     console.log(`${chalk.yellow('WARNING!')} Orphaned test: ${snippet}`);
-  })
+  });
   // Create files for undefined tests
   undefinedTests.forEach(snippet => {
     const exportTest = [
@@ -48,4 +48,4 @@ try {
   console.log(`${chalk.red('ERROR!')} During test runs: ${err}`);
   process.exit(1);
 }
-console.timeEnd('Tester');
\ No newline at end of file
+console.timeEnd('Tester');
@@ -32,8 +32,7 @@ const getFilesInDir = (directoryPath, withPath, exclude = null) => {
      }, []);
    }
    return directoryFilenames;
-  }
-  catch (err) {
+  } catch (err) {
    console.log(`${chalk.red('ERROR!')} During snippet loading: ${err}`);
    process.exit(1);
   }
@@ -47,8 +46,7 @@ const readSnippets = snippetsPath => {
   try {
    for (let snippet of snippetFilenames)
      snippets[snippet] = fs.readFileSync(path.join(snippetsPath, snippet), 'utf8');
-  }
-  catch (err) {
+  } catch (err) {
    console.log(`${chalk.red('ERROR!')} During snippet loading: ${err}`);
    process.exit(1);
   }
@@ -71,8 +69,7 @@ const readTags = () => {
        return data;
      })
    );
-  }
-  catch (err) {
+  } catch (err) {
    // Handle errors (hopefully not!)
    console.log(`${chalk.red('ERROR!')} During tag database loading: ${err}`);
    process.exit(1);
@@ -131,9 +128,9 @@ const getCodeBlocks = str => {
   results = results.map(v => v.replace(/```js([\s\S]*?)```/g, '$1').trim());
   return {
    es6: results[0],
-    es5: babel.transformSync(results[0], { presets: ['@babel/preset-env'] }).code.replace('"use strict";\n\n',''),
+    es5: babel.transformSync(results[0], { presets: ['@babel/preset-env'] }).code.replace('"use strict";\n\n', ''),
    example: results[1]
-  }
+  };
 };
 // Gets the textual content for a snippet file.
 const getTextualContent = str => {
@@ -11,10 +11,10 @@ let snippetsData = require('../snippet_data/snippets.json');
 const OUTPUT_PATH = './vscode_snippets';
 console.time('VSCoder');
 // Read and format data
-let vscodeData = snippetsData.data.filter(v => !v.meta.archived ).reduce((acc,v) => {
+let vscodeData = snippetsData.data.filter(v => !v.meta.archived ).reduce((acc, v) => {
   acc[v.id] = {
     prefix: `30s_${v.id}`,
-    body: v.attributes.codeBlocks.es6.replace(/\r/g,'').split('\n'),
+    body: v.attributes.codeBlocks.es6.replace(/\r/g, '').split('\n'),
     description: v.attributes.text.slice(0, v.attributes.text.indexOf('\r\n\r\n'))
   };
   return acc;
@@ -30,4 +30,4 @@ console.log(
     'SUCCESS!'
   )} vscode_snippets/snippets.json file generated!`
 );
-console.timeEnd('VSCoder');
\ No newline at end of file
+console.timeEnd('VSCoder');
@@ -109,8 +109,7 @@ sass.render(
      if (!err2) console.log(`${chalk.green('SUCCESS!')} style.css file generated!`);
      else console.log(`${chalk.red('ERROR!')} During style.css file generation: ${err}`);
    });
-  }
-  else
+  } else
    console.log(`${chalk.red('ERROR!')} During style.css file generation: ${err}`);
 
  }
@@ -148,8 +147,7 @@ try {
    'static-page-start.html',
    'static-page-end.html'
  ].map(filename => fs.readFileSync(path.join(staticPartsPath, filename), 'utf8'));
-}
-catch (err) {
+} catch (err) {
  // Handle errors (hopefully not!)
  console.log(`${chalk.red('ERROR!')} During static part loading: ${err}`);
  process.exit(1);
@@ -234,14 +232,13 @@ try {
      `${chalk.green('SUCCESS!')} ${page.tag === 'array' ? 'index' : page.tag}.html file generated!`
    );
  });
-}
-catch (err) {
+} catch (err) {
  // Handle errors (hopefully not!)
  console.log(`${chalk.red('ERROR!')} During category page generation: ${err}`);
  process.exit(1);
 }
 
-const generateMenuForStaticPage = (staticPart) => {
+const generateMenuForStaticPage = staticPart => {
  let taggedData = util.prepTaggedData(tagDbData);
  // Add the start static part
  let htmlCode;
@@ -258,7 +255,7 @@ const generateMenuForStaticPage = (staticPart) => {
    htmlCode += md
      .render(
        `[${taggedSnippet[0]}](./${
-          tag === 'array' ? 'index' : tag
+          tag === 'array' ? 'index' : tag
        }#${taggedSnippet[0].toLowerCase()})\n`
      )
      .replace(/<p>/g, '')
@@ -268,7 +265,7 @@ const generateMenuForStaticPage = (staticPart) => {
    htmlCode += '</ul>\n';
  }
  return staticPart.replace('$nav-menu-data', htmlCode);
-}
+};
 
 const staticPageStartGenerator = (staticPart, heading, description) => {
  let taggedData = util.prepTaggedData(tagDbData);
@@ -350,8 +347,7 @@ try {
 
  fs.writeFileSync(path.join(docsPath, 'archive.html'), minifiedArchivedOutput);
  console.log(`${chalk.green('SUCCESS!')} archive.html file generated!`);
-}
-catch (err) {
+} catch (err) {
  console.log(`${chalk.red('ERROR!')} During archive.html generation: ${err}`);
  process.exit(1);
 }
@@ -384,8 +380,7 @@ try {
  const minifiedGlossaryOutput = minifyHTML(glossaryOutput);
  fs.writeFileSync(path.join(docsPath, 'glossary.html'), minifiedGlossaryOutput);
  console.log(`${chalk.green('SUCCESS!')} glossary.html file generated!`);
-}
-catch (err) {
+} catch (err) {
  console.log(`${chalk.red('ERROR!')} During glossary.html generation: ${err}`);
  process.exit(1);
 }
@@ -396,12 +391,10 @@ staticFiles.forEach(f => {
  if(f !== 'array.html') {
    let fileData = fs.readFileSync(path.join(staticPartsPath, f), 'utf8');
    fs.writeFileSync(path.join(docsPath, f), generateMenuForStaticPage(fileData));
-  }
-  else
+  } else
    fs.copyFileSync(path.join(staticPartsPath, f), path.join(docsPath, f));
  console.log(`${chalk.green('SUCCESS!')} ${f} file copied!`);
-}
-catch (err) {
+} catch (err) {
  console.log(`${chalk.red('ERROR!')} During ${f} copying: ${err}`);
  process.exit(1);
 }