#! /usr/bin/env node

// Compiled library entry points (built output lives in ../dist).
const Extractor = require('../dist/extractor').Extractor;
const PipeParser = require('../dist/parsers/pipe.parser').PipeParser;
const DirectiveParser = require('../dist/parsers/directive.parser').DirectiveParser;
const ServiceParser = require('../dist/parsers/service.parser').ServiceParser;
const JsonCompiler = require('../dist/compilers/json.compiler').JsonCompiler;
const PoCompiler = require('../dist/compilers/po.compiler').PoCompiler;

// Node built-ins plus the `cli` argument-parsing package.
const fs = require('fs');
const path = require('path');
const cli = require('cli');
// Command-line options. Both directory paths default to the invocation
// directory; process.env.PWD is undefined on Windows (and in shells that
// do not export PWD), so fall back to process.cwd().
const options = cli.parse({
    dir: ['d', 'Directory path you would like to extract strings from', 'dir', process.env.PWD || process.cwd()],
    output: ['o', 'Directory path you would like to save extracted strings', 'dir', process.env.PWD || process.cwd()],
    format: ['f', 'Output format', ['json', 'pot'], 'json'],
    replace: ['r', 'Replace the contents of output file if it exists (merging by default)', 'boolean', false],
    clean: ['c', 'Remove unused keys when merging', 'boolean', false]
});

// Abort early if either supplied directory does not exist.
[options.dir, options.output].forEach(dir => {
    if (!fs.existsSync(dir)) {
        cli.fatal('The directory path you supplied was not found: ' + dir);
    }
});
// Destination file, e.g. <output>/template.json or <output>/template.pot.
const filename = 'template.' + options.format;
const dest = path.join(options.output, filename);

// Parsers that locate translatable strings in templates and sources:
// the translate pipe, the translate directive, and TranslateService calls.
const parsers = [
    new PipeParser(),
    new DirectiveParser(),
    new ServiceParser()
];

// Glob patterns (relative to options.dir) selecting the files to scan.
const patterns = [
    '/**/*.html',
    '/**/*.ts',
    '/**/*.js'
];

const extractor = new Extractor(parsers, patterns);
// Extract strings, optionally merge with an existing template, and save.
try {
    cli.info(`Extracting strings from '${options.dir}'`);
    const extracted = extractor.process(options.dir);
    cli.ok(`* Extracted ${extracted.count()} strings`);

    // Nothing was found; nothing to write.
    if (extracted.isEmpty()) {
        process.exit();
    }

    let collection = extracted;

    // Pick the compiler matching the requested output format (json default).
    let compiler = new JsonCompiler();
    if (options.format === 'pot') {
        compiler = new PoCompiler();
    }

    // Unless --replace was given, merge the newly extracted strings into
    // the existing output file's contents.
    if (!options.replace && fs.existsSync(dest)) {
        const existing = compiler.parse(fs.readFileSync(dest, 'utf-8'));
        collection = extracted.union(existing);
        cli.ok(`* Merged ${existing.count()} existing strings`);

        // With --clean, drop previously saved keys that no longer appear
        // in the scanned sources.
        if (options.clean) {
            const collectionCount = collection.count();
            collection = collection.intersect(extracted);
            const removeCount = collectionCount - collection.count();
            if (removeCount > 0) {
                cli.ok(`* Removed ${removeCount} unused strings`);
            }
        }
    }

    fs.writeFileSync(dest, compiler.compile(collection));
    cli.ok(`* Saved to '${dest}'`);
} catch (e) {
    cli.fatal(e.toString());
}