Clean scripts and relics

Angelos Chalaris
2019-12-02 22:55:53 +02:00
parent e29c6f1c20
commit 887e438ac1
8 changed files with 4 additions and 477 deletions


@@ -1,252 +0,0 @@
/*
This is the builder script that generates the README files.
Run using `npm run builder`.
*/
// Load modules
const fs = require('fs-extra');
const path = require('path');
const { green, red } = require('kleur');
const util = require('./util');
const markdown = require('markdown-builder');
const { headers, misc, lists } = markdown;
const config = require('../config');
// Paths
const SNIPPETS_PATH = `./${config.snippetPath}`;
const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
const GLOSSARY_PATH = `./${config.glossaryPath}`;
const STATIC_PARTS_PATH = `./${config.staticPartsPath}`;
// Terminate if parent commit is a Travis build
if (
util.isTravisCI() &&
/^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])
) {
console.log(
`${green(
'NOBUILD',
)} README build terminated, parent commit is a Travis build!`,
);
process.exit(0);
}
// Setup everything
let snippets = {},
snippetsArray = [],
startPart = '',
endPart = '',
output = '';
const EMOJIS = {
adapter: '🔌',
array: '📚',
browser: '🌐',
date: '⏱️',
function: '🎛️',
logic: '🔮',
math: '➗',
media: '📺',
node: '📦',
object: '🗃️',
string: '📜',
type: '📃',
utility: '🔧'
};
console.time('Builder');
// Synchronously read all snippets from the snippets folder and sort them as necessary (case-insensitive)
snippets = util.readSnippets(SNIPPETS_PATH);
snippetsArray = Object.keys(snippets).reduce((acc, key) => {
acc.push(snippets[key]);
return acc;
}, []);
// Load static parts for the README file
try {
startPart = fs.readFileSync(
path.join(STATIC_PARTS_PATH, 'README-start.md'),
'utf8',
);
endPart = fs.readFileSync(
path.join(STATIC_PARTS_PATH, 'README-end.md'),
'utf8',
);
} catch (err) {
console.log(`${red('ERROR!')} During static part loading: ${err}`);
process.exit(1);
}
// Create the output for the README file
try {
const tags = util.prepTaggedData(
Object.keys(snippets).reduce((acc, key) => {
acc[key] = snippets[key].attributes.tags;
return acc;
}, {}),
);
output += `${startPart}\n`;
// Loop over tags and snippets to create the table of contents
for (const tag of tags) {
const capitalizedTag = util.capitalize(tag, true);
const taggedSnippets = snippetsArray.filter(
snippet => snippet.attributes.tags[0] === tag,
);
output += headers.h3((EMOJIS[tag] || '') + ' ' + capitalizedTag).trim();
output +=
misc.collapsible(
'View contents',
lists.ul(taggedSnippets, snippet =>
misc.link(
`\`${snippet.title}\``,
`${misc.anchor(snippet.title)}${
snippet.attributes.tags.includes('advanced') ? '-' : ''
}`,
),
),
) + '\n';
}
for (const tag of tags) {
const capitalizedTag = util.capitalize(tag, true);
const taggedSnippets = snippetsArray.filter(
snippet => snippet.attributes.tags[0] === tag,
);
output +=
misc.hr() + headers.h2((EMOJIS[tag] || '') + ' ' + capitalizedTag) + '\n';
for (let snippet of taggedSnippets) {
if (snippet.attributes.tags.includes('advanced'))
output +=
headers.h3(
snippet.title + ' ' + misc.image('advanced', '/advanced.svg'),
) + '\n';
else output += headers.h3(snippet.title) + '\n';
output += snippet.attributes.text;
output += `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.es6}\n\`\`\``;
output += misc.collapsible(
'Examples',
`\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.example}\n\`\`\``,
);
output +=
'\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
}
}
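/*
  Illustrative sketch, not part of the original file: for a snippet titled
  `chunk` with primary tag `array`, the two loops above are assumed to emit
  markdown roughly like this (the exact output of the `markdown-builder`
  helpers may differ in whitespace and anchor format):

  ### 📚 Array
  <details><summary>View contents</summary>

  * [`chunk`](#chunk)

  </details>

  ---
  ## 📚 Array
  ### chunk
  Chunks an array into smaller arrays of a specified size.
  ```js
  const chunk = (arr, size) => ...
  ```
  <details><summary>Examples</summary>

  ```js
  chunk([1, 2, 3, 4, 5], 2); // [[1, 2], [3, 4], [5]]
  ```

  </details>
  <br>[⬆ Back to top](#contents)
*/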
// Add the ending static part
output += `\n${endPart}\n`;
// Write to the README file
fs.writeFileSync('README.md', output);
} catch (err) {
console.log(`${red('ERROR!')} During README generation: ${err}`);
process.exit(1);
}
// Snippets archive README file
output = '';
const archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
const archivedSnippetsArray = Object.keys(archivedSnippets).reduce((acc, key) => {
acc.push(archivedSnippets[key]);
return acc;
}, []);
// Load static parts for the archive README file
try {
startPart = fs.readFileSync(
path.join(STATIC_PARTS_PATH, 'snippets_archive_README-start.md'),
'utf8',
);
} catch (err) {
console.log(`${red('ERROR!')} During static part loading: ${err}`);
process.exit(1);
}
// Create the output for the archive README file
try {
output += `${startPart}\n`;
// Loop over archived snippets to create the table of contents
output += lists.ul(archivedSnippetsArray, snippet =>
misc.link(
`\`${snippet.title}\``,
`${misc.anchor(snippet.title)}`,
),
);
for (let snippet of archivedSnippetsArray) {
output += headers.h3(snippet.title) + '\n';
output += snippet.attributes.text;
output += `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.es6}\n\`\`\``;
output += misc.collapsible(
'Examples',
`\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.example}\n\`\`\``,
);
output +=
'\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
}
// Write to the README file
fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH, 'README.md'), output);
} catch (err) {
console.log(`${red('ERROR!')} During README generation: ${err}`);
process.exit(1);
}
// Glossary README file
output = '';
const glossarySnippets = util.readSnippets(GLOSSARY_PATH);
const glossarySnippetsArray = Object.keys(glossarySnippets).reduce((acc, key) => {
acc.push(glossarySnippets[key]);
return acc;
}, []);
// Load static parts for the glossary README file
try {
startPart = fs.readFileSync(
path.join(STATIC_PARTS_PATH, 'glossary_README-start.md'),
'utf8',
);
} catch (err) {
console.log(`${red('ERROR!')} During static part loading: ${err}`);
process.exit(1);
}
// Create the output for the glossary README file
try {
output += `${startPart}\n`;
// Loop over glossary terms to create the table of contents
output += lists.ul(glossarySnippetsArray, snippet =>
misc.link(
`\`${snippet.title}\``,
`${misc.anchor(snippet.title)}`,
),
);
for (let snippet of glossarySnippetsArray) {
output += headers.h3(snippet.title) + '\n';
output += snippet.attributes.text;
output +=
'\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
}
// Write to the README file
fs.writeFileSync(path.join(GLOSSARY_PATH, 'README.md'), output);
} catch (err) {
console.log(`${red('ERROR!')} During README generation: ${err}`);
process.exit(1);
}
console.log(`${green('SUCCESS!')} README files generated!`);
console.timeEnd('Builder');
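
Both the builder above and the extractor below consume `util.readSnippets`. Its implementation is not part of this diff, but the fields the two scripts access imply records shaped roughly like the following sketch (an inference, with hypothetical values):

// Inferred shape of the object returned by util.readSnippets, keyed by snippet name.
// Field names come from the accesses in the scripts; the values here are made up.
const exampleSnippets = {
  chunk: {
    id: 'chunk',
    title: 'chunk',
    attributes: {
      text: 'Chunks an array into smaller arrays of a specified size.\n',
      tags: ['array', 'beginner'],
      codeBlocks: {
        es6: "const chunk = (arr, size) => { /* ... */ };",
        example: 'chunk([1, 2, 3, 4, 5], 2); // [[1, 2], [3, 4], [5]]'
      }
    },
    meta: { hash: '<content hash>' }
  }
};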


@@ -1,165 +0,0 @@
/*
This is the extractor script that generates the JSON data files (snippets.json, snippetList.json, archivedSnippets.json, archivedSnippetList.json and glossaryTerms.json).
Run using `npm run extractor`.
*/
// Load modules
const fs = require('fs-extra');
const path = require('path');
const { green } = require('kleur');
const util = require('./util');
const config = require('../config');
// Paths
const SNIPPETS_PATH = `./${config.snippetPath}`;
const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
const GLOSSARY_PATH = `./${config.glossaryPath}`;
const OUTPUT_PATH = `./${config.snippetDataPath}`;
// Terminate if parent commit is a Travis build
if (
util.isTravisCI() &&
/^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])
) {
console.log(
`${green(
'NOEXTRACT',
)} Snippet extraction terminated, parent commit is a Travis build!`,
);
process.exit(0);
}
// Setup everything
let snippets = {},
snippetsArray = [],
archivedSnippets = {},
archivedSnippetsArray = [],
glossarySnippets = {},
glossarySnippetsArray = [];
console.time('Extractor');
// Synchronously read all snippets from the snippets, snippets_archive and glossary folders and sort them as necessary (case-insensitive)
snippets = util.readSnippets(SNIPPETS_PATH);
snippetsArray = Object.keys(snippets).reduce((acc, key) => {
acc.push(snippets[key]);
return acc;
}, []);
archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
archivedSnippetsArray = Object.keys(archivedSnippets).reduce((acc, key) => {
acc.push(archivedSnippets[key]);
return acc;
}, []);
glossarySnippets = util.readSnippets(GLOSSARY_PATH);
glossarySnippetsArray = Object.keys(glossarySnippets).reduce((acc, key) => {
acc.push(glossarySnippets[key]);
return acc;
}, []);
const completeData = {
data: [...snippetsArray],
meta: {
specification: 'http://jsonapi.org/format/',
type: 'snippetArray',
scope: SNIPPETS_PATH,
language: config.language
},
};
const listingData = {
data: completeData.data.map(v => ({
id: v.id,
type: 'snippetListing',
title: v.title,
attributes: {
text: v.attributes.text,
tags: v.attributes.tags,
},
meta: {
hash: v.meta.hash,
},
})),
meta: {
specification: 'http://jsonapi.org/format/',
type: 'snippetListingArray',
scope: SNIPPETS_PATH,
language: config.language
},
};
const archiveCompleteData = {
data: [...archivedSnippetsArray],
meta: {
specification: 'http://jsonapi.org/format/',
type: 'snippetArray',
scope: SNIPPETS_ARCHIVE_PATH,
language: config.language
}
};
const archiveListingData = {
data: archiveCompleteData.data.map(v => ({
id: v.id,
type: 'snippetListing',
title: v.title,
attributes: {
text: v.attributes.text,
tags: v.attributes.tags,
},
meta: {
hash: v.meta.hash,
},
})),
meta: {
specification: 'http://jsonapi.org/format/',
type: 'snippetListingArray',
scope: SNIPPETS_ARCHIVE_PATH,
language: config.language
},
};
const glossaryData = {
data: glossarySnippetsArray.map(v => ({
id: v.id,
type: 'glossaryTerm',
title: v.title,
attributes: {
text: v.attributes.text,
tags: v.attributes.tags,
},
meta: {
hash: v.meta.hash,
},
})),
meta: {
specification: 'http://jsonapi.org/format/',
type: 'glossaryTermArray',
scope: GLOSSARY_PATH,
},
};
// Write files
fs.writeFileSync(
path.join(OUTPUT_PATH, 'snippets.json'),
JSON.stringify(completeData, null, 2),
);
fs.writeFileSync(
path.join(OUTPUT_PATH, 'snippetList.json'),
JSON.stringify(listingData, null, 2),
);
fs.writeFileSync(
path.join(OUTPUT_PATH, 'archivedSnippets.json'),
JSON.stringify(archiveCompleteData, null, 2),
);
fs.writeFileSync(
path.join(OUTPUT_PATH, 'archivedSnippetList.json'),
JSON.stringify(archiveListingData, null, 2),
);
fs.writeFileSync(
path.join(OUTPUT_PATH, 'glossaryTerms.json'),
JSON.stringify(glossaryData, null, 2),
);
// Display messages and time
console.log(`${green('SUCCESS!')} JSON data files generated!`);
console.timeEnd('Extractor');
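
For reference, a minimal consumer of the generated `snippetList.json` could look like this sketch (paths and field names follow the structures defined above; nothing here is part of the original scripts):

// Sketch: load the generated listing and print snippet titles grouped by primary tag.
const fs = require('fs-extra');
const path = require('path');
const config = require('../config');

const listing = JSON.parse(
  fs.readFileSync(path.join(`./${config.snippetDataPath}`, 'snippetList.json'), 'utf8')
);
const byTag = listing.data.reduce((acc, snippet) => {
  const tag = snippet.attributes.tags[0];
  (acc[tag] = acc[tag] || []).push(snippet.title);
  return acc;
}, {});
Object.keys(byTag).forEach(tag => console.log(`${tag}: ${byTag[tag].join(', ')}`));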


@@ -1,51 +0,0 @@
/*
This is the TDD script that creates & updates your TDD environment.
Run using `npm run tdd`.
*/
// Load modules
const fs = require('fs-extra');
const path = require('path');
const childProcess = require('child_process');
const { green, yellow, red } = require('kleur');
const util = require('./util');
const config = require('../config');
// Declare paths
const SNIPPETS_PATH = `./${config.snippetPath}`;
const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
const TEST_PATH = `./${config.testPath}`;
console.time('Tester');
try {
// Read snippets, archive and tests, find which tests are not defined
const snippets = fs.readdirSync(SNIPPETS_PATH).map(v => v.replace('.md', ''));
const archivedSnippets = fs.readdirSync(SNIPPETS_ARCHIVE_PATH).filter(v => v !== 'README.md').map(v => v.replace('.md', ''));
const definedTests = fs.readdirSync(TEST_PATH).map(v => v.replace('.test.js', '')).filter(v => v !== '_30s.js' && v !== 'testlog');
const undefinedTests = [...snippets, ...archivedSnippets].filter(v => !definedTests.includes(v));
const orphanedTests = definedTests.filter(v => ![...snippets, ...archivedSnippets].includes(v));
orphanedTests.forEach(snippet => {
console.log(`${yellow('WARNING!')} Orphaned test: ${snippet}`);
});
// Create files for undefined tests
undefinedTests.forEach(snippet => {
const exportTest = [
`const {${snippet}} = require('./_30s.js');`,
`\ntest('${snippet} is a Function', () => {`,
` expect(${snippet}).toBeInstanceOf(Function);`,
`});\n`
].join('\n');
fs.writeFileSync(path.join(TEST_PATH, `${snippet}.test.js`), exportTest);
});
// Run tests
if (util.isTravisCI()) {
process.exit(0);
}
else {
childProcess.execSync('npm test', { stdio: 'inherit' });
}
console.log(`${green('SUCCESS!')} All tests ran successfully!`);
} catch (err) {
console.log(`${red('ERROR!')} During test runs: ${err}`);
process.exit(1);
}
console.timeEnd('Tester');
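
For a snippet file named `chunk.md` (a hypothetical example), the template above writes the following placeholder test to `chunk.test.js` under the test path:

const {chunk} = require('./_30s.js');

test('chunk is a Function', () => {
  expect(chunk).toBeInstanceOf(Function);
});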