Added localization extractor
locale/de_DE.js (new file, 3247 lines)
File diff suppressed because it is too large
locale/de_DE_log (new file, 3 lines)
@@ -0,0 +1,3 @@
+Thu Feb 08 2018 13:51:53 GMT+0200 (GTB Standard Time)
+Hash changes: 0
+
locale/el_GR.js (new file, 3247 lines)
File diff suppressed because it is too large
locale/el_GR_log (new file, 3 lines)
@@ -0,0 +1,3 @@
+Thu Feb 08 2018 13:51:53 GMT+0200 (GTB Standard Time)
+Hash changes: 0
+
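Both log files record a run with no hash changes. Going by the template literal in scripts/localize.js further down in this commit, a run that does detect changed snippets appends one block per change below the header; a hypothetical entry (the snippet name and digests are placeholders, not real values) would look like:

Thu Feb 08 2018 13:51:53 GMT+0200 (GTB Standard Time)
Hash changes: 1

Snippet name:<snippetName>
 Old hash: <previous SHA-256 hex digest>
 New hash: <current SHA-256 hex digest>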
package.json
@@ -29,6 +29,7 @@
 "webber": "node ./scripts/web.js",
 "tester": "node ./scripts/tdd.js",
 "packager": "node ./scripts/module.js",
+"localizer": "node ./scripts/localize.js",
 "test": "tape test/**/*.test.js | tap-spec"
 },
 "repository": {
scripts/localize.js (new file, 48 lines)
@@ -0,0 +1,48 @@
+/*
+This is the localizer script that generates the localization files.
+Run using `npm run localizer`.
+*/
+// Load modules
+const fs = require('fs-extra');
+const path = require('path');
+const chalk = require('chalk');
+const util = require('./util');
+
+const LOCALE_PATH = 'locale';
+const SNIPPETS_PATH = './snippets';
+const locales = ['de_DE', 'el_GR'];
+let snippets = util.readSnippets(SNIPPETS_PATH);
+const COMMENT_REGEX = /(\/\*[\w\'\s\r\n\*]*\*\/)|(\/\/.*)/g;
+
+locales.forEach(locale => {
+const locData = require(path.join('..',LOCALE_PATH,locale));
+let existingData = fs.readFileSync(path.join(LOCALE_PATH,locale+'.js'), 'utf8');
+let newData = [];
+let hashChanges = [];
+Object.keys(snippets).forEach(snippet => {
+const snippetName = snippet.split('.')[0];
+const snippetHash = util.hashData(snippets[snippet]);
+if(locData.hasOwnProperty(snippetName)){
+if (locData[snippetName].hash !== snippetHash) {
+existingData = existingData.indexOf(' => '+snippetHash) !== -1 ? existingData : existingData.replace(locData[snippetName].hash, locData[snippetName].hash+' => '+snippetHash);
+hashChanges.push({snippetName, oldHash: locData[snippetName].hash.split(' => ')[0], newHash: snippetHash});
+}
+}
+else {
+newData.push(`\n'${snippetName}' : {
+'description': \`${snippets[snippet].split('```js')[0].replace(/`/g,'\\`')}\`,
+'comments': [${(snippets[snippet].match(COMMENT_REGEX) || []).map(v => '`'+v.replace(/`/g,'\\`')+'`')}],
+'hash': '${snippetHash}'
+}`);
+}
+});
+if(!fs.existsSync(path.join(LOCALE_PATH,locale+'.js')) || !existingData.length) existingData = `module.exports = {
+'locale': {
+'locale': '${locale}'
+}};`;
+fs.writeFileSync(path.join(LOCALE_PATH,locale+'.js'), newData.length ? `${existingData.trim().slice(0,-2)},${newData.join(',')}};` : existingData);
+fs.writeFileSync(path.join(LOCALE_PATH,locale+'_log'), `${new Date()}
+Hash changes: ${hashChanges.length}
+
+${hashChanges.length ? hashChanges.map(v => ('Snippet name:' + v.snippetName +'\n Old hash: ' + v.oldHash + '\n New hash: ' + v.newHash + '\n')).join('\n') : ''}`)
+});
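To make the output format concrete, here is a minimal sketch of what a generated entry in locale/de_DE.js would look like after a run, going only by the template literals above; the snippet name `example`, the description, the comment and the hash value are placeholders, not taken from the actual commit:

module.exports = {
'locale': {
'locale': 'de_DE'
},
'example' : {
'description': `<the snippet's markdown text up to its first js code fence>`,
'comments': [`// a comment matched by COMMENT_REGEX`],
'hash': '<SHA-256 hex digest of the snippet file>'
}};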
scripts/util.js
@@ -1,6 +1,7 @@
 const fs = require('fs-extra'),
 path = require('path'),
-chalk = require('chalk');
+chalk = require('chalk'),
+crypto = require('crypto');
 // Synchronously read all snippets and sort them as necessary (case-insensitive)
 const readSnippets = snippetsPath => {
 let snippets = {};
@@ -66,4 +67,6 @@ const capitalize = (str, lowerRest = false) =>
 str.slice(0, 1).toUpperCase() + (lowerRest ? str.slice(1).toLowerCase() : str.slice(1));
 // Checks if current environment is Travis CI
 const isTravisCI = () => 'TRAVIS' in process.env && 'CI' in process.env;
-module.exports = {readSnippets, readTags, optimizeNodes, capitalize, objectFromPairs, isTravisCI};
+// Creates a hash for a value using the SHA-256 algorithm.
+const hashData = val => crypto.createHash('sha256').update(val).digest('hex');
+module.exports = {readSnippets, readTags, optimizeNodes, capitalize, objectFromPairs, isTravisCI, hashData};
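The new hashData helper is what drives change detection in scripts/localize.js: the SHA-256 digest of a snippet's markdown changes exactly when the snippet's content changes, so comparing the stored hash with a freshly computed one flags translations that need review. A minimal standalone sketch of that idea (the snippet strings here are made up):

const crypto = require('crypto');

// Same shape as the helper added to scripts/util.js: SHA-256 hex digest of a string.
const hashData = val => crypto.createHash('sha256').update(val).digest('hex');

// Hypothetical snippet contents; any edit produces a different digest.
const before = 'const example = () => 42;';
const after = 'const example = () => 43;';

console.log(hashData(before) === hashData(before)); // true  -> unchanged content, hash matches
console.log(hashData(before) === hashData(after));  // false -> content changed, translation is stale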