Add extractor and builder
Tested and working
This commit is contained in:
291
scripts/build.js
291
scripts/build.js
@ -1,177 +1,142 @@
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const marked = require('marked')
|
||||
const pretty = require('pretty')
|
||||
const caniuseDb = require('caniuse-db/data.json')
|
||||
const sass = require('node-sass')
|
||||
const { toKebabCase, createElement, template, dom, getCode } = require('../utils/utils.js')
|
||||
const { differenceInDays } = require('date-fns')
|
||||
/*
|
||||
This is the builder script that generates the README file.
|
||||
Run using `npm run builder`.
|
||||
*/
|
||||
// Load modules
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { green, red } = require('kleur');
|
||||
const util = require('./util');
|
||||
const markdown = require('markdown-builder');
|
||||
const { headers, misc, lists } = markdown;
|
||||
const config = require('../config');
|
||||
|
||||
// Path to the snippet markdown files, relative to package.json.
const SNIPPETS_PATH = './snippets'
// Snippet categories rendered in the UI; `icon` is a feather-icons name
// (used below via `data-feather` attributes).
// 'all' must stay first — the sidebar builder skips it via TAGS.slice(1).
const TAGS = [
  {
    name: 'all',
    icon: 'check'
  },
  {
    name: 'layout',
    icon: 'layout'
  },
  {
    name: 'visual',
    icon: 'eye'
  },
  {
    name: 'animation',
    icon: 'loader'
  },
  {
    name: 'interactivity',
    icon: 'edit-2'
  },
  {
    name: 'other',
    icon: 'tag'
  }
]
|
||||
// Paths (relative to package.json)
// NOTE(review): SNIPPETS_PATH is also declared with `const` earlier in
// this file — a duplicate declaration throws at load time; this looks
// like residue of two interleaved versions of the script. Confirm which
// declaration should survive.
const SNIPPETS_PATH = `./${config.snippetPath}`;
const STATIC_PARTS_PATH = `./${config.staticPartsPath}`;
|
||||
|
||||
// Custom marked renderer: snippet (h3) headings get a kebab-case anchor
// id, top-level language headings get a data-type attribute, and all
// links open in a new tab.
const renderer = new marked.Renderer()

const LANGUAGE_HEADINGS = ['HTML', 'CSS', 'JavaScript']

renderer.heading = (text, level) => {
  // h3 headings are snippet titles — give them an anchor id.
  if (level === 3) {
    return `<h${level} id="${toKebabCase(text)}"><span>${text}</span></h${level}>`
  }
  const typeAttr = LANGUAGE_HEADINGS.includes(text) ? ` data-type="${text}"` : ''
  return `<h${level}${typeAttr}>${text}</h${level}>`
}

renderer.link = (url, _, text) => `<a href="${url}" target="_blank">${text || url}</a>`
|
||||
|
||||
const document = dom('./src/html/index.html')
|
||||
const components = {
|
||||
backToTopButton: dom('./src/html/components/back-to-top-button.html'),
|
||||
sidebar: dom('./src/html/components/sidebar.html'),
|
||||
header: dom('./src/html/components/header.html'),
|
||||
main: dom('./src/html/components/main.html'),
|
||||
tags: dom('./src/html/components/tags.html')
|
||||
// Terminate if parent commit is a Travis build
// (presumably to stop a CI-authored "Travis build: N" commit from
// triggering yet another build in a loop — confirm against CI config).
// NOTE(review): the /g flag is unnecessary for a single .test() call and
// makes the regex stateful if it is ever reused.
if (
  util.isTravisCI() &&
  /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])
) {
  console.log(
    `${green(
      'NOBUILD',
    )} README build terminated, parent commit is a Travis build!`,
  );
  process.exit(0);
}
|
||||
|
||||
// Containers that snippet cards and sidebar links are appended to.
const snippetContainer = components.main.querySelector('.container')
const sidebarLinkContainer = components.sidebar.querySelector('.sidebar__links')
// One sidebar section per tag; slice(1) skips the synthetic 'all' tag.
TAGS.slice(1).forEach(tag => {
  sidebarLinkContainer.append(
    createElement(`
    <section data-type="${tag.name}" class="sidebar__section">
      <h4 class="sidebar__section-heading">${tag.name}</h4>
    </section>
  `)
  )
})
|
||||
// Setup everything
// Accumulators for the README build, populated by the code below.
let snippets = {},
  snippetsArray = [],
  startPart = '',
  endPart = '',
  output = '';
// Emoji shown next to each tag heading (looked up as EMOJIS[tag]).
// NOTE(review): the map is empty, so headings render without emojis —
// confirm whether entries were meant to be added here.
const EMOJIS = {};
|
||||
|
||||
for (const snippetFile of fs.readdirSync(SNIPPETS_PATH)) {
|
||||
const snippetPath = path.join(SNIPPETS_PATH, snippetFile)
|
||||
const snippetData = fs.readFileSync(snippetPath, 'utf8')
|
||||
console.time('Builder');
|
||||
|
||||
const html = getCode('html', snippetData).trim()
|
||||
const css = getCode('css', snippetData)
|
||||
const scopedCSS = sass.renderSync({
|
||||
data: `[data-scope="${snippetFile}"] { ${css} }`
|
||||
})
|
||||
const js = getCode('js', snippetData)
|
||||
// Synchronously read all snippets from snippets folder and sort them as necessary (case-insensitive)
|
||||
snippets = util.readSnippets(SNIPPETS_PATH);
|
||||
snippetsArray = Object.keys(snippets).reduce((acc, key) => {
|
||||
acc.push(snippets[key]);
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
const demo =
|
||||
`<div class="snippet-demo" data-scope="${snippetFile}">${html}</div>` +
|
||||
`<style>${scopedCSS.css.toString()}</style>` +
|
||||
`${js ? `<script>(function(){${js}})();</script>` : ''}`
|
||||
// Load static parts for the README file
// (README-start.md / README-end.md wrap the generated snippet content).
try {
  startPart = fs.readFileSync(
    path.join(STATIC_PARTS_PATH, 'README-start.md'),
    'utf8',
  );
  endPart = fs.readFileSync(
    path.join(STATIC_PARTS_PATH, 'README-end.md'),
    'utf8',
  );
} catch (err) {
  // Missing static parts make the README unbuildable — fail the build.
  console.log(`${red('ERROR!')} During static part loading: ${err}`);
  process.exit(1);
}
|
||||
|
||||
const markdown = marked(snippetData, { renderer }).replace(
|
||||
'<h4>Demo</h4>',
|
||||
`<h4>Demo</h4>${demo}`
|
||||
)
|
||||
const snippetEl = createElement(`<div class="snippet">${markdown}</div>`)
|
||||
snippetContainer.append(snippetEl)
|
||||
// Create the output for the README file
|
||||
try {
|
||||
const tags = util.prepTaggedData(
|
||||
Object.keys(snippets).reduce((acc, key) => {
|
||||
acc[key] = snippets[key].attributes.tags;
|
||||
return acc;
|
||||
}, {}),
|
||||
);
|
||||
|
||||
// browser support usage
|
||||
const featUsageShares = (snippetData.match(/https?:\/\/caniuse\.com\/#feat=.*/g) || []).map(
|
||||
feat => {
|
||||
const featData = caniuseDb.data[feat.match(/#feat=(.*)/)[1]]
|
||||
// caniuse doesn't count "untracked" users, which makes the overall share appear much lower
|
||||
// than it probably is. Most of these untracked browsers probably support these features.
|
||||
// Currently it's around 5.3% untracked, so we'll use 4% as probably supporting the feature.
|
||||
// Also the npm package appears to be show higher usage % than the main website, this shows
|
||||
// about 0.2% lower than the main website when selecting "tracked users" (as of Feb 2019).
|
||||
const UNTRACKED_PERCENT = 4
|
||||
const usage = featData
|
||||
? Number(featData.usage_perc_y + featData.usage_perc_a) + UNTRACKED_PERCENT
|
||||
: 100
|
||||
return Math.min(100, usage)
|
||||
output += `${startPart}\n`;
|
||||
|
||||
// Loop over tags and snippets to create the table of contents
|
||||
for (const tag of tags) {
|
||||
const capitalizedTag = util.capitalize(tag, true);
|
||||
const taggedSnippets = snippetsArray.filter(
|
||||
snippet => snippet.attributes.tags[0] === tag,
|
||||
);
|
||||
output += headers.h3((EMOJIS[tag] || '') + ' ' + capitalizedTag).trim();
|
||||
|
||||
output +=
|
||||
misc.collapsible(
|
||||
'View contents',
|
||||
lists.ul(taggedSnippets, snippet =>
|
||||
misc.link(
|
||||
`\`${snippet.title}\``,
|
||||
`${misc.anchor(snippet.title)}${
|
||||
snippet.attributes.tags.includes('advanced') ? '-' : ''
|
||||
}`,
|
||||
),
|
||||
),
|
||||
) + '\n';
|
||||
}
|
||||
|
||||
for (const tag of tags) {
|
||||
const capitalizedTag = util.capitalize(tag, true);
|
||||
const taggedSnippets = snippetsArray.filter(
|
||||
snippet => snippet.attributes.tags[0] === tag,
|
||||
);
|
||||
|
||||
output +=
|
||||
misc.hr() + headers.h2((EMOJIS[tag] || '') + ' ' + capitalizedTag) + '\n';
|
||||
|
||||
for (let snippet of taggedSnippets) {
|
||||
if (snippet.attributes.tags.includes('advanced'))
|
||||
output +=
|
||||
headers.h3(
|
||||
snippet.title + ' ' + misc.image('advanced', '/advanced.svg'),
|
||||
) + '\n';
|
||||
else output += headers.h3(snippet.title) + '\n';
|
||||
|
||||
output += snippet.attributes.text;
|
||||
|
||||
output += `\`\`\`${config.secondLanguage}\n${snippet.attributes.codeBlocks.html}\n\`\`\`\n\n`;
|
||||
output += `\`\`\`${config.language}\n${snippet.attributes.codeBlocks.css}\n\`\`\`\n\n`;
|
||||
if (snippet.attributes.codeBlocks.js)
|
||||
output += `\`\`\`${config.optionalLanguage}\n${snippet.attributes.codeBlocks.js}\n\`\`\`\n\n`;
|
||||
|
||||
output += headers.h4('Explanation');
|
||||
output += snippet.attributes.explanation;
|
||||
|
||||
output += headers.h4('Browser support') + '\n';
|
||||
output += snippet.attributes.browserSupport.supportPercentage.toFixed(1) + '%';
|
||||
output += snippet.attributes.browserSupport.text;
|
||||
|
||||
output +=
|
||||
'\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
|
||||
}
|
||||
)
|
||||
const browserSupportHeading = snippetEl.querySelector('h4:last-of-type')
|
||||
browserSupportHeading.after(
|
||||
createElement(`
|
||||
<div>
|
||||
<div class="snippet__browser-support">
|
||||
${featUsageShares.length ? Math.min(...featUsageShares).toPrecision(3) : 100}%
|
||||
</div>
|
||||
</div>
|
||||
`)
|
||||
)
|
||||
|
||||
// sidebar link
|
||||
const link = createElement(
|
||||
`<a class="sidebar__link" href="#${snippetFile.replace('.md', '')}">${
|
||||
snippetEl.querySelector('h3').innerHTML
|
||||
}</a>`
|
||||
)
|
||||
|
||||
// new icon = less than 31 days old
|
||||
const date = (snippetData.match(/<!--\s*date:\s*(.+?)-->/) || [, ''])[1]
|
||||
if (date && differenceInDays(new Date(), new Date(date)) < 31) {
|
||||
const newIcon = '<img alt="New" draggable="false" class="snippet__new" src="./src/img/new.svg">'
|
||||
snippetEl.prepend(createElement(newIcon))
|
||||
link.prepend(createElement(newIcon))
|
||||
}
|
||||
|
||||
// tags
|
||||
const tags = (snippetData.match(/<!--\s*tags:\s*(.+?)-->/) || [, ''])[1]
|
||||
.split(/,\s*/)
|
||||
.forEach(tag => {
|
||||
tag = tag.trim().toLowerCase()
|
||||
snippetEl
|
||||
.querySelector('h3')
|
||||
.append(
|
||||
createElement(
|
||||
`<span class="tags__tag snippet__tag" data-type="${tag}"><i data-feather="${
|
||||
TAGS.find(t => t.name === tag).icon
|
||||
}"></i>${tag}</span>`
|
||||
)
|
||||
)
|
||||
|
||||
sidebarLinkContainer.querySelector(`section[data-type="${tag}"]`).append(link)
|
||||
})
|
||||
// Add the ending static part
|
||||
output += `\n${endPart}\n`;
|
||||
// Write to the README file
|
||||
fs.writeFileSync('README.md', output);
|
||||
} catch (err) {
|
||||
console.log(`${red('ERROR!')} During README generation: ${err}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// build dom
// One filter button per tag; the 'all' button starts active.
TAGS.forEach(tag =>
  components.tags.append(
    createElement(
      `<button class="tags__tag is-large ${tag.name === 'all' ? 'is-active' : ''}" data-type="${
        tag.name
      }"><i data-feather="${tag.icon}"></i>${tag.name}</button>`
    )
  )
)
// Assemble the page around the content wrapper.
const content = document.querySelector('.content-wrapper')
content.before(components.backToTopButton)
content.before(components.sidebar)
content.append(components.header)
content.append(components.main)
components.main.querySelector('.container').prepend(components.tags)

// doctype declaration gets stripped, add it back in
const html = `<!DOCTYPE html>
${pretty(document.documentElement.outerHTML, { ocd: true })}
`

fs.writeFileSync('./index.html', html)
// NOTE(review): this writes index.html, but the success message says
// "README file generated" — confirm which output/message is intended.
console.log(`${green('SUCCESS!')} README file generated!`);
console.timeEnd('Builder');
|
||||
|
||||
80
scripts/extract.js
Normal file
80
scripts/extract.js
Normal file
@ -0,0 +1,80 @@
|
||||
/*
  This is the extractor script that generates the snippets.json file.
  Run using `npm run extractor`.
*/
// Load modules
const fs = require('fs-extra');
const path = require('path');
const { green } = require('kleur');
const util = require('./util');
const config = require('../config');

// Paths (relative to package.json)
const SNIPPETS_PATH = `./${config.snippetPath}`;
const OUTPUT_PATH = `./${config.snippetDataPath}`;

// Check if running on Travis, only build for cron jobs and custom builds
if (
  util.isTravisCI() &&
  process.env['TRAVIS_EVENT_TYPE'] !== 'cron' &&
  process.env['TRAVIS_EVENT_TYPE'] !== 'api'
) {
  console.log(
    `${green(
      'NOBUILD',
    )} snippet extraction terminated, not a cron or api build!`,
  );
  process.exit(0);
}

// Setup everything
let snippets = {},
  snippetsArray = [];
console.time('Extractor');

// Synchronously read all snippets from snippets folder and sort them as necessary (case-insensitive)
snippets = util.readSnippets(SNIPPETS_PATH);
// NOTE(review): this reduce is equivalent to Object.values(snippets).
snippetsArray = Object.keys(snippets).reduce((acc, key) => {
  acc.push(snippets[key]);
  return acc;
}, []);

// Full snippet payload, shaped loosely after the JSON:API spec.
const completeData = {
  data: [...snippetsArray],
  meta: {
    specification: 'http://jsonapi.org/format/',
    type: 'snippetArray',
  },
};
// Slimmed-down listing (id/title/text/tags/hash only) for fast indexes.
let listingData = {
  data: completeData.data.map(v => ({
    id: v.id,
    type: 'snippetListing',
    title: v.title,
    attributes: {
      text: v.attributes.text,
      tags: v.attributes.tags,
    },
    meta: {
      hash: v.meta.hash,
    },
  })),
  meta: {
    specification: 'http://jsonapi.org/format/',
    type: 'snippetListingArray',
  },
};
// Write files
fs.writeFileSync(
  path.join(OUTPUT_PATH, 'snippets.json'),
  JSON.stringify(completeData, null, 2),
);
fs.writeFileSync(
  path.join(OUTPUT_PATH, 'snippetList.json'),
  JSON.stringify(listingData, null, 2),
);
// Display messages and time
console.log(
  `${green('SUCCESS!')} snippets.json and snippetList.json files generated!`,
);
console.timeEnd('Extractor');
|
||||
12
scripts/util/environmentCheck.js
Normal file
12
scripts/util/environmentCheck.js
Normal file
@ -0,0 +1,12 @@
|
||||
// Environment detection helpers for Travis CI builds.

// True when running under Travis CI (both TRAVIS and CI env vars set).
const isTravisCI = () => 'TRAVIS' in process.env && 'CI' in process.env;

// True when the current Travis build was triggered by a cron job or
// via the API (a custom build).
const isTravisCronOrAPI = () => {
  const eventType = process.env['TRAVIS_EVENT_TYPE'];
  return eventType === 'cron' || eventType === 'api';
};

// Convenience negation for call-site readability.
const isNotTravisCronOrAPI = () => !isTravisCronOrAPI();
|
||||
|
||||
// Expose the environment checks to the build/extract scripts.
module.exports = {
  isTravisCI,
  isTravisCronOrAPI,
  isNotTravisCronOrAPI,
};
|
||||
60
scripts/util/helpers.js
Normal file
60
scripts/util/helpers.js
Normal file
@ -0,0 +1,60 @@
|
||||
const config = require('../../config');
|
||||
|
||||
// Converts a paragraph title into its GitHub-style markdown anchor slug:
// trimmed, lowercased, punctuation stripped, spaces become hyphens, and
// trailing hyphens removed.
const getMarkDownAnchor = paragraphTitle => {
  const cleaned = paragraphTitle
    .trim()
    .toLowerCase()
    .replace(/[^\w\- ]+/g, '');
  return cleaned.replace(/\s/g, '-').replace(/\-+$/, '');
};
|
||||
// Builds an object from an array of [key, value] pairs.
const objectFromPairs = arr => {
  const obj = {};
  for (const [key, value] of arr) obj[key] = value;
  return obj;
};
|
||||
// Repeatedly applies `replacer` to `data` until `regexp` no longer
// matches — used to merge/clean adjacent nodes in generated HTML where
// one replacement pass can expose new matches.
// NOTE: `regexp` is expected to carry the /g flag; exec() is used in a
// loop and relies on it advancing lastIndex.
const optimizeNodes = (data, regexp, replacer) => {
  let output = data;
  let remaining = 0;
  do {
    output = output.replace(regexp, replacer);
    // Count matches left after this pass; another pass runs if any.
    remaining = 0;
    while (regexp.exec(output) !== null) remaining += 1;
  } while (remaining > 0);
  return output;
};
|
||||
// Capitalizes the first letter of `str`; when `lowerRest` is true the
// remainder is lowercased, otherwise it is left untouched.
const capitalize = (str, lowerRest = false) => {
  const first = str.slice(0, 1);
  const rest = str.slice(1);
  return first.toUpperCase() + (lowerRest ? rest.toLowerCase() : rest);
};
|
||||
// Returns the unique list of primary (first) tags across all snippets,
// alphabetically sorted, with 'Uncategorized' always pushed to the end.
// Falsy primary tags (untagged snippets) are dropped.
const prepTaggedData = tagDbData => {
  const primaryTags = new Set(Object.values(tagDbData).map(tags => tags[0]));
  return [...primaryTags].filter(Boolean).sort((a, b) => {
    if (capitalize(a, true) === 'Uncategorized') return 1;
    if (capitalize(b, true) === 'Uncategorized') return -1;
    return a.localeCompare(b);
  });
};
|
||||
// Wraps the final code block of a snippet's markdown in a collapsible
// 'Examples' section and appends a back-to-top link.
// NOTE(review): `misc` is not defined in this module — only `config` is
// required at the top of helpers.js (no require('markdown-builder')),
// so calling this function as-is throws a ReferenceError. Confirm the
// missing import against the other scripts that use markdown-builder.
const makeExamples = data => {
  data =
    data.slice(0, data.lastIndexOf(`\`\`\`${config.language}`)).trim() +
    misc.collapsible(
      'Examples',
      data.slice(
        data.lastIndexOf(`\`\`\`${config.language}`),
        data.lastIndexOf('```'),
      ) + data.slice(data.lastIndexOf('```')),
    );
  return `${data}\n<br>${misc.link(
    '⬆ Back to top',
    misc.anchor('Contents'),
  )}\n\n`;
};
|
||||
|
||||
// Public surface of the markdown/string helpers used by the build and
// extraction scripts (re-exported by util/index.js).
module.exports = {
  getMarkDownAnchor,
  objectFromPairs,
  optimizeNodes,
  capitalize,
  prepTaggedData,
  makeExamples,
};
|
||||
37
scripts/util/index.js
Normal file
37
scripts/util/index.js
Normal file
@ -0,0 +1,37 @@
|
||||
// Barrel module: re-exports every util helper so consumers can simply
// require('./util') instead of the individual files.
const {
  isTravisCI,
  isTravisCronOrAPI,
  isNotTravisCronOrAPI,
} = require('./environmentCheck');
const {
  getMarkDownAnchor,
  objectFromPairs,
  optimizeNodes,
  capitalize,
  prepTaggedData,
  makeExamples,
} = require('./helpers');
const {
  getFilesInDir,
  hashData,
  getCodeBlocks,
  getTextualContent,
  readSnippets,
} = require('./snippetParser');

module.exports = {
  isTravisCI,
  isTravisCronOrAPI,
  isNotTravisCronOrAPI,
  getMarkDownAnchor,
  objectFromPairs,
  optimizeNodes,
  capitalize,
  prepTaggedData,
  makeExamples,
  getFilesInDir,
  hashData,
  getCodeBlocks,
  getTextualContent,
  readSnippets,
};
|
||||
198
scripts/util/snippetParser.js
Normal file
198
scripts/util/snippetParser.js
Normal file
@ -0,0 +1,198 @@
|
||||
const fs = require('fs-extra'),
|
||||
path = require('path'),
|
||||
{ red } = require('kleur'),
|
||||
crypto = require('crypto'),
|
||||
frontmatter = require('front-matter');
|
||||
const sass = require('node-sass');
|
||||
const caniuseDb = require('caniuse-db/data.json');
|
||||
const config = require('../../config');
|
||||
|
||||
// Reads all file names in a directory, sorted case-insensitively.
// When `withPath` is true, returns `directoryPath/`-prefixed paths,
// filtered by the optional `exclude` list of file names; otherwise
// returns bare file names with 'README.md' filtered out.
// Exits the process on I/O errors (the build scripts treat this as fatal).
const getFilesInDir = (directoryPath, withPath, exclude = null) => {
  try {
    const directoryFilenames = fs.readdirSync(directoryPath);
    // Case-insensitive alphabetical sort (without mutating the names).
    directoryFilenames.sort((a, b) => {
      const lowerA = a.toLowerCase();
      const lowerB = b.toLowerCase();
      if (lowerA < lowerB) return -1;
      if (lowerA > lowerB) return 1;
      return 0;
    });

    if (withPath) {
      return directoryFilenames
        .filter(fileName => exclude == null || !exclude.includes(fileName))
        .map(fileName => `${directoryPath}/${fileName}`);
    }
    return directoryFilenames.filter(v => v !== 'README.md');
  } catch (err) {
    console.log(`${red('ERROR!')} During snippet loading: ${err}`);
    process.exit(1);
  }
};
|
||||
// Creates a hash for a value using the SHA-256 algorithm; returns the
// hex-encoded digest.
const hashData = val => crypto.createHash('sha256').update(val).digest('hex');
|
||||
// Extracts the fenced code blocks from a snippet's markdown body and
// returns them as { html, css, js } — `js` is '' when the snippet has
// no third code block. The fence language markers come from config.
const getCodeBlocks = str => {
  // Collect every ``` ... ``` fenced region (lazy, so blocks don't merge).
  const rawBlocks = str.match(/```[.\S\s]*?```/g) || [];

  // Build one fence-stripping regex per configured language.
  const stripFence = lang => new RegExp(`\`\`\`${lang}([\\s\\S]*?)\`\`\``, 'g');
  const cssFence = stripFence(config.language);
  const htmlFence = stripFence(config.secondLanguage);
  const jsFence = stripFence(config.optionalLanguage);

  const cleaned = rawBlocks.map(block =>
    block
      .replace(cssFence, '$1')
      .replace(htmlFence, '$1')
      .replace(jsFence, '$1')
      .trim(),
  );

  // Blocks appear in source order: HTML, CSS, then optional JS.
  const [html, css, js = ''] = cleaned;
  return { html, css, js };
};
|
||||
// Gets the snippet's textual description: everything before the first
// fenced code block, with Windows line endings normalized to '\n'.
// Throws (TypeError) when the body contains no fenced block at all,
// matching the original implementation's behavior.
const getTextualContent = str => {
  // The first match's capture group is all text before the first ```.
  const match = /([\s\S]*?)```/.exec(str);
  return match[1].replace(/\r\n/g, '\n');
};
|
||||
|
||||
// Gets the explanation for a snippet file: the text between the
// '#### Explanation' heading and the LAST subsequent '####' marker
// (the greedy [\s\S]* is intentional — with 'Browser support' as the
// final section, this captures everything in between).
// Throws (TypeError) if the snippet has no Explanation section; the
// caller (readSnippets) converts that into a fatal build error.
const getExplanation = str => {
  const match = /####\s*Explanation([\s\S]*)####/.exec(str);
  return match[1].replace(/\r\n/g, '\n');
};
|
||||
|
||||
// Gets the browser support section for a snippet file: returns the raw
// section text plus the minimum caniuse support percentage across all
// caniuse.com feature links found in it. Snippets without any caniuse
// link report 100% support.
const getBrowserSupport = str => {
  // NOTE: the original pattern used [s|S], which also matched a literal '|'.
  const match = /####\s*Browser [sS]upport([\s\S]*)/.exec(str);
  const browserSupportText = match[1].replace(/\r\n/g, '\n');

  const featUsageShares = (
    browserSupportText.match(/https?:\/\/caniuse\.com\/#feat=.*/g) || []
  ).map(feat => {
    const featData = caniuseDb.data[feat.match(/#feat=(.*)/)[1]];
    // caniuse doesn't count "untracked" users, which makes the overall share appear much lower
    // than it probably is. Most of these untracked browsers probably support these features.
    // Currently it's around 5.3% untracked, so we'll use 4% as probably supporting the feature.
    // Also the npm package appears to show higher usage % than the main website; this shows
    // about 0.2% lower than the main website when selecting "tracked users" (as of Feb 2019).
    const UNTRACKED_PERCENT = 4;
    const usage = featData
      ? Number(featData.usage_perc_y + featData.usage_perc_a) + UNTRACKED_PERCENT
      : 100;
    return Math.min(100, usage);
  });

  return {
    text: browserSupportText,
    // BUG FIX: Math.min() over an empty spread is Infinity — fall back
    // to 100 when the section contains no caniuse links.
    supportPercentage: featUsageShares.length
      ? Math.min(...featUsageShares)
      : 100,
  };
};
|
||||
|
||||
// Synchronously read all snippets and sort them as necessary (case-insensitive)
// Returns a map of fileName -> snippet record containing the parsed
// front matter (title, tags), textual content, explanation, browser
// support data, code blocks and a content hash; also precompiles the
// snippet's CSS scoped to its id. Exits the process on any parse error
// (fatal for the build scripts).
const readSnippets = snippetsPath => {
  const snippetFilenames = getFilesInDir(snippetsPath, false);

  let snippets = {};
  try {
    for (let snippet of snippetFilenames) {
      // Split the YAML front matter (attributes) away from the body.
      let data = frontmatter(
        fs.readFileSync(path.join(snippetsPath, snippet), 'utf8'),
      );
      snippets[snippet] = {
        id: snippet.slice(0, -3), // file name without the '.md' extension
        title: data.attributes.title,
        type: 'snippet',
        attributes: {
          fileName: snippet,
          text: getTextualContent(data.body),
          explanation: getExplanation(data.body),
          browserSupport: getBrowserSupport(data.body),
          codeBlocks: getCodeBlocks(data.body),
          tags: data.attributes.tags.split(',').map(t => t.trim()),
        },
        meta: {
          hash: hashData(data.body),
        },
      };
      // Compile the snippet's SCSS with every rule nested under
      // [data-scope="<id>"] so demo styles don't leak across snippets.
      snippets[snippet].attributes.codeBlocks.scopedCss = sass.renderSync({
        data: `[data-scope="${snippets[snippet].id}"] { ${snippets[snippet].attributes.codeBlocks.css} }`
      }).css.toString();
    }
  } catch (err) {
    console.log(`${red('ERROR!')} During snippet loading: ${err}`);
    process.exit(1);
  }
  return snippets;
};
|
||||
|
||||
// Public API of the snippet parsing utilities (re-exported in part by
// util/index.js).
module.exports = {
  getFilesInDir,
  hashData,
  getCodeBlocks,
  getTextualContent,
  getExplanation,
  getBrowserSupport,
  readSnippets,
};
|
||||
Reference in New Issue
Block a user