diff --git a/.travis.yml b/.travis.yml
index 349290617..b33bb033d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,9 +11,8 @@ before_install:
script:
- npm run linter
- npm run extractor
-- npm run builder
- npm run packager
-- npm run tester
+- npm run test
- npm run vscoder
after_success:
- chmod +x .travis/push.sh
diff --git a/netlify.toml b/netlify.toml
index 3bcf87c32..3c3266ab1 100644
--- a/netlify.toml
+++ b/netlify.toml
@@ -1,6 +1,6 @@
[build]
- publish = "public"
- command = "npm run webber"
+ publish = "static"
+ command = "echo 'ok'"
[build.environment]
YARN_VERSION = "1.9.4"
YARN_FLAGS = "--no-ignore-optional"
\ No newline at end of file
diff --git a/package.json b/package.json
index 05e796d38..4a38657fc 100644
--- a/package.json
+++ b/package.json
@@ -8,11 +8,6 @@
"scripts": {
"linter": "node ./scripts/lint.js",
"extractor": "extract-snippet-data config.js",
- "builder": "node ./scripts/build.js",
- "webber": "gatsby build",
- "webber:dev": "gatsby develop",
- "webber:serve": "gatsby serve",
- "tester": "node ./scripts/tdd.js",
"vscoder": "node ./scripts/vscodegen.js",
"packager": "node ./scripts/module.js",
"test": "jest --verbose --coverage --testPathIgnorePatterns=\".cache\""
diff --git a/scripts/build.js b/scripts/build.js
deleted file mode 100644
index 82128d9b0..000000000
--- a/scripts/build.js
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- This is the builder script that generates the README files.
- Run using `npm run builder`.
-*/
-// Load modules
-const fs = require('fs-extra');
-const path = require('path');
-const { green, red } = require('kleur');
-const util = require('./util');
-const markdown = require('markdown-builder');
-const { headers, misc, lists } = markdown;
-const config = require('../config');
-
-// Paths
-const SNIPPETS_PATH = `./${config.snippetPath}`;
-const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
-const GLOSSARY_PATH = `./${config.glossaryPath}`;
-const STATIC_PARTS_PATH = `./${config.staticPartsPath}`;
-
-// Terminate if parent commit is a Travis build
-if (
- util.isTravisCI() &&
- /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])
-) {
- console.log(
- `${green(
- 'NOBUILD',
- )} README build terminated, parent commit is a Travis build!`,
- );
- process.exit(0);
-}
-
-// Setup everything
-let snippets = {},
- snippetsArray = [],
- startPart = '',
- endPart = '',
- output = '';
-const EMOJIS = {
- adapter: '🔌',
- array: '📚',
- browser: '🌐',
- date: '⏱️',
- function: '🎛️',
- logic: '🔮',
- math: '➗',
- media: '📺',
- node: '📦',
- object: '🗃️',
- string: '📜',
- type: '📃',
- utility: '🔧'
-};
-
-console.time('Builder');
-
-// Synchronously read all snippets from snippets folder and sort them as necessary (case-insensitive)
-snippets = util.readSnippets(SNIPPETS_PATH);
-snippetsArray = Object.keys(snippets).reduce((acc, key) => {
- acc.push(snippets[key]);
- return acc;
-}, []);
-
-// Load static parts for the README file
-try {
- startPart = fs.readFileSync(
- path.join(STATIC_PARTS_PATH, 'README-start.md'),
- 'utf8',
- );
- endPart = fs.readFileSync(
- path.join(STATIC_PARTS_PATH, 'README-end.md'),
- 'utf8',
- );
-} catch (err) {
- console.log(`${red('ERROR!')} During static part loading: ${err}`);
- process.exit(1);
-}
-
-// Create the output for the README file
-try {
- const tags = util.prepTaggedData(
- Object.keys(snippets).reduce((acc, key) => {
- acc[key] = snippets[key].attributes.tags;
- return acc;
- }, {}),
- );
-
- output += `${startPart}\n`;
-
- // Loop over tags and snippets to create the table of contents
- for (const tag of tags) {
- const capitalizedTag = util.capitalize(tag, true);
- const taggedSnippets = snippetsArray.filter(
- snippet => snippet.attributes.tags[0] === tag,
- );
- output += headers.h3((EMOJIS[tag] || '') + ' ' + capitalizedTag).trim();
-
- output +=
- misc.collapsible(
- 'View contents',
- lists.ul(taggedSnippets, snippet =>
- misc.link(
- `\`${snippet.title}\``,
- `${misc.anchor(snippet.title)}${
- snippet.attributes.tags.includes('advanced') ? '-' : ''
- }`,
- ),
- ),
- ) + '\n';
- }
-
- for (const tag of tags) {
- const capitalizedTag = util.capitalize(tag, true);
- const taggedSnippets = snippetsArray.filter(
- snippet => snippet.attributes.tags[0] === tag,
- );
-
- output +=
- misc.hr() + headers.h2((EMOJIS[tag] || '') + ' ' + capitalizedTag) + '\n';
-
- for (let snippet of taggedSnippets) {
- if (snippet.attributes.tags.includes('advanced'))
- output +=
- headers.h3(
- snippet.title + ' ' + misc.image('advanced', '/advanced.svg'),
- ) + '\n';
- else output += headers.h3(snippet.title) + '\n';
-
- output += snippet.attributes.text;
-
- output += `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.es6}\n\`\`\``;
-
- output += misc.collapsible(
- 'Examples',
- `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.example}\n\`\`\``,
- );
-
- output +=
-        '\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
- }
- }
-
- // Add the ending static part
- output += `\n${endPart}\n`;
- // Write to the README file
- fs.writeFileSync('README.md', output);
-} catch (err) {
- console.log(`${red('ERROR!')} During README generation: ${err}`);
- process.exit(1);
-}
-
-// Snippets archive README file
-output = '';
-archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
-archivedSnippetsArray = Object.keys(archivedSnippets).reduce((acc, key) => {
- acc.push(archivedSnippets[key]);
- return acc;
-}, []);
-
-// Load static parts for the README file
-try {
- startPart = fs.readFileSync(
- path.join(STATIC_PARTS_PATH, 'snippets_archive_README-start.md'),
- 'utf8',
- );
-} catch (err) {
- console.log(`${red('ERROR!')} During static part loading: ${err}`);
- process.exit(1);
-}
-
-// Create the output for the README file
-try {
- output += `${startPart}\n`;
-
- // Loop over tags and snippets to create the table of contents
- output += lists.ul(archivedSnippetsArray, snippet =>
- misc.link(
- `\`${snippet.title}\``,
- `${misc.anchor(snippet.title)}`,
- ),
- );
-
- for (let snippet of archivedSnippetsArray) {
- output += headers.h3(snippet.title) + '\n';
-
- output += snippet.attributes.text;
-
- output += `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.es6}\n\`\`\``;
-
- output += misc.collapsible(
- 'Examples',
- `\`\`\`${config.language.short}\n${snippet.attributes.codeBlocks.example}\n\`\`\``,
- );
-
- output +=
-      '\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
- }
- // Write to the README file
- fs.writeFileSync(path.join(SNIPPETS_ARCHIVE_PATH, 'README.md'), output);
-} catch (err) {
- console.log(`${red('ERROR!')} During README generation: ${err}`);
- process.exit(1);
-}
-
-// Glossary README file
-output = '';
-glossarySnippets = util.readSnippets(GLOSSARY_PATH);
-glossarySnippetsArray = Object.keys(glossarySnippets).reduce((acc, key) => {
- acc.push(glossarySnippets[key]);
- return acc;
-}, []);
-
-// Load static parts for the README file
-try {
- startPart = fs.readFileSync(
- path.join(STATIC_PARTS_PATH, 'glossary_README-start.md'),
- 'utf8',
- );
-} catch (err) {
- console.log(`${red('ERROR!')} During static part loading: ${err}`);
- process.exit(1);
-}
-
-// Create the output for the README file
-try {
- output += `${startPart}\n`;
-
- // Loop over tags and snippets to create the table of contents
- output += lists.ul(glossarySnippetsArray, snippet =>
- misc.link(
- `\`${snippet.title}\``,
- `${misc.anchor(snippet.title)}`,
- ),
- );
-
- for (let snippet of glossarySnippetsArray) {
- output += headers.h3(snippet.title) + '\n';
-
- output += snippet.attributes.text;
-
- output +=
-      '\n<br>' + misc.link('⬆ Back to top', misc.anchor('Contents')) + '\n';
- }
- // Write to the README file
- fs.writeFileSync(path.join(GLOSSARY_PATH, 'README.md'), output);
-} catch (err) {
- console.log(`${red('ERROR!')} During README generation: ${err}`);
- process.exit(1);
-}
-
-console.log(`${green('SUCCESS!')} README files generated!`);
-console.timeEnd('Builder');
\ No newline at end of file
diff --git a/scripts/extract.js b/scripts/extract.js
deleted file mode 100644
index ec7da9810..000000000
--- a/scripts/extract.js
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- This is the extractor script that generates the snippets.json and snippetsArchive.json files.
- Run using `npm run extractor`.
-*/
-// Load modules
-const fs = require('fs-extra');
-const path = require('path');
-const { green } = require('kleur');
-const util = require('./util');
-const config = require('../config');
-
-// Paths
-const SNIPPETS_PATH = `./${config.snippetPath}`;
-const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
-const GLOSSARY_PATH = `./${config.glossaryPath}`;
-const OUTPUT_PATH = `./${config.snippetDataPath}`;
-
-// Terminate if parent commit is a Travis build
-if (
- util.isTravisCI() &&
- /^Travis build: \d+/g.test(process.env['TRAVIS_COMMIT_MESSAGE'])
-) {
- console.log(
- `${green(
- 'NOEXTRACT',
- )} Snippet extraction terminated, parent commit is a Travis build!`,
- );
- process.exit(0);
-}
-
-// Setup everything
-let snippets = {},
- snippetsArray = [],
- archivedSnippets = {},
- archivedSnippetsArray = [],
- glossarySnippets = {},
- glossarySnippetsArray = [];
-console.time('Extractor');
-
-// Synchronously read all snippets from snippets, snippets_archive and glossary folders and sort them as necessary (case-insensitive)
-snippets = util.readSnippets(SNIPPETS_PATH);
-snippetsArray = Object.keys(snippets).reduce((acc, key) => {
- acc.push(snippets[key]);
- return acc;
-}, []);
-
-archivedSnippets = util.readSnippets(SNIPPETS_ARCHIVE_PATH);
-archivedSnippetsArray = Object.keys(archivedSnippets).reduce((acc, key) => {
- acc.push(archivedSnippets[key]);
- return acc;
-}, []);
-
-glossarySnippets = util.readSnippets(GLOSSARY_PATH);
-glossarySnippetsArray = Object.keys(glossarySnippets).reduce((acc, key) => {
- acc.push(glossarySnippets[key]);
- return acc;
-}, []);
-
-const completeData = {
- data: [...snippetsArray],
- meta: {
- specification: 'http://jsonapi.org/format/',
- type: 'snippetArray',
- scope: SNIPPETS_PATH,
- language: config.language
- },
-};
-const listingData = {
- data: completeData.data.map(v => ({
- id: v.id,
- type: 'snippetListing',
- title: v.title,
- attributes: {
- text: v.attributes.text,
- tags: v.attributes.tags,
- },
- meta: {
- hash: v.meta.hash,
- },
- })),
- meta: {
- specification: 'http://jsonapi.org/format/',
- type: 'snippetListingArray',
- scope: SNIPPETS_PATH,
- language: config.language
- },
-};
-
-const archiveCompleteData = {
- data: [...archivedSnippetsArray],
- meta: {
- specification: 'http://jsonapi.org/format/',
- type: 'snippetArray',
- scope: SNIPPETS_ARCHIVE_PATH,
- language: config.language
- }
-};
-const archiveListingData = {
- data: archiveCompleteData.data.map(v => ({
- id: v.id,
- type: 'snippetListing',
- title: v.title,
- attributes: {
- text: v.attributes.text,
- tags: v.attributes.tags,
- },
- meta: {
- hash: v.meta.hash,
- },
- })),
- meta: {
- specification: 'http://jsonapi.org/format/',
- type: 'snippetListingArray',
- scope: SNIPPETS_ARCHIVE_PATH,
- language: config.language
- },
-};
-
-const glossaryData = {
- data: glossarySnippetsArray.map(v => ({
- id: v.id,
- type: 'glossaryTerm',
- title: v.title,
- attributes: {
- text: v.attributes.text,
- tags: v.attributes.tags,
- },
- meta: {
- hash: v.meta.hash,
- },
- })),
- meta: {
- specification: 'http://jsonapi.org/format/',
- type: 'glossaryTermArray',
- scope: GLOSSARY_PATH,
- },
-};
-
-// Write files
-fs.writeFileSync(
- path.join(OUTPUT_PATH, 'snippets.json'),
- JSON.stringify(completeData, null, 2),
-);
-fs.writeFileSync(
- path.join(OUTPUT_PATH, 'snippetList.json'),
- JSON.stringify(listingData, null, 2),
-);
-
-fs.writeFileSync(
- path.join(OUTPUT_PATH, 'archivedSnippets.json'),
- JSON.stringify(archiveCompleteData, null, 2),
-);
-fs.writeFileSync(
- path.join(OUTPUT_PATH, 'archivedSnippetList.json'),
- JSON.stringify(archiveListingData, null, 2),
-);
-
-fs.writeFileSync(
- path.join(OUTPUT_PATH, 'glossaryTerms.json'),
- JSON.stringify(glossaryData, null, 2),
-);
-
-// Display messages and time
-console.log(`${green('SUCCESS!')} JSON data files generated!`);
-console.timeEnd('Extractor');
diff --git a/scripts/tdd.js b/scripts/tdd.js
deleted file mode 100644
index d06631b74..000000000
--- a/scripts/tdd.js
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- This is the tdd script that creates & updates your TDD environment .
- Run using `npm run tdd`.
-*/
-
-// Load modules
-const fs = require('fs-extra'),
- path = require('path');
-const childProcess = require('child_process');
-const { green, yellow, red } = require('kleur');
-const util = require('./util');
-const config = require('../config');
-// Declare paths
-const SNIPPETS_PATH = `./${config.snippetPath}`;
-const SNIPPETS_ARCHIVE_PATH = `./${config.snippetArchivePath}`;
-const TEST_PATH = `./${config.testPath}`;
-
-console.time('Tester');
-try {
- // Read snippets, archive and tests, find which tests are not defined
- const snippets = fs.readdirSync(SNIPPETS_PATH).map(v => v.replace('.md', ''));
- const archivedSnippets = fs.readdirSync(SNIPPETS_ARCHIVE_PATH).filter(v => v !== 'README.md').map(v => v.replace('.md', ''));
- const definedTests = fs.readdirSync(TEST_PATH).map(v => v.replace('.test.js', '')).filter(v => v !== '_30s.js' && v !== 'testlog');
- const undefinedTests = [...snippets, ...archivedSnippets].filter(v => !definedTests.includes(v));
- const orphanedTests = [...definedTests.filter(v => ![...snippets, ...archivedSnippets].includes(v))];
- orphanedTests.forEach(snippet => {
- console.log(`${yellow('WARNING!')} Orphaned test: ${snippet}`);
- });
- // Create files for undefined tests
- undefinedTests.forEach(snippet => {
- const exportTest = [
- `const {${snippet}} = require('./_30s.js');`,
- `\ntest('${snippet} is a Function', () => {`,
- ` expect(${snippet}).toBeInstanceOf(Function);`,
- `});\n`
- ].join('\n');
- fs.writeFileSync(path.join(TEST_PATH, `${snippet}.test.js`), exportTest);
- });
- // Run tests
- if (util.isTravisCI()) {
- process.exit(0);
- }
- else {
- childProcess.execSync('npm test', { stdio: 'inherit' });
- }
- console.log(`${green('SUCCESS!')} All tests ran successfully!`);
-} catch (err) {
- console.log(`${red('ERROR!')} During test runs: ${err}`);
- process.exit(1);
-}
-console.timeEnd('Tester');
diff --git a/_headers b/static/_headers
similarity index 100%
rename from _headers
rename to static/_headers
diff --git a/static/_redirects b/static/_redirects
new file mode 100644
index 000000000..0f04572b4
--- /dev/null
+++ b/static/_redirects
@@ -0,0 +1 @@
+/ https://30secondsofcode.org 301!
\ No newline at end of file