Refactored Extractor and cli
parent f61cdc4064
commit 4537c1224a
src/cli/cli-options.interface.ts (deleted, 9 lines)
@@ -1,9 +0,0 @@
-export interface CliOptionsInterface {
-	input: string[];
-	output: string[];
-	format: 'json' | 'namespaced-json' | 'pot';
-	replace: boolean;
-	sort: boolean;
-	clean: boolean;
-	help: boolean;
-}
src/cli/cli.ts (130 changed lines)
@@ -1,33 +1,26 @@
-import { Extractor } from '../utils/extractor';
+import { ExtractTask } from './tasks/extract.task';
-import { CliOptionsInterface } from './cli-options.interface';
-import { TranslationCollection } from '../utils/translation.collection';
-import { ParserInterface } from '../parsers/parser.interface';
 import { PipeParser } from '../parsers/pipe.parser';
 import { DirectiveParser } from '../parsers/directive.parser';
 import { ServiceParser } from '../parsers/service.parser';
-import { CompilerInterface } from '../compilers/compiler.interface';
-import { JsonCompiler } from '../compilers/json.compiler';
-import { NamespacedJsonCompiler } from '../compilers/namespaced-json.compiler';
-import { PoCompiler } from '../compilers/po.compiler';
 
 import * as fs from 'fs';
-import * as path from 'path';
-import * as mkdirp from 'mkdirp';
-import * as chalk from 'chalk';
 import * as yargs from 'yargs';
 
-const options: CliOptionsInterface = yargs
+export const cli = yargs
 	.usage('Extract strings from files for translation.\nUsage: $0 [options]')
-	.help('h')
-	.alias('h', 'help')
+	.version('2.0.0') // TODO: Read from package.json
+	.alias('version', 'v')
+	.help('help')
+	.alias('help', 'h')
 	.option('input', {
-		alias: ['i', 'dir', 'd'],
+		alias: 'i',
 		describe: 'Paths you would like to extract strings from. You can use path expansion, glob patterns and multiple paths',
 		default: process.env.PWD,
 		type: 'array',
+		normalize: true
 	})
-	.check((options: CliOptionsInterface) => {
-		options.input.forEach(dir => {
+	.check(options => {
+		options.input.forEach((dir: string) => {
 			if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) {
 				throw new Error(`The path you supplied was not found: '${dir}'`)
 			}
@@ -35,10 +28,17 @@ const options: CliOptionsInterface = yargs
 		});
 		return true;
 	})
+	.option('patterns', {
+		alias: 'p',
+		describe: 'Input file patterns to parse',
+		type: 'array',
+		default: ['/**/*.html', '/**/*.ts']
+	})
 	.option('output', {
 		alias: 'o',
 		describe: 'Paths where you would like to save extracted strings. You can use path expansion, glob patterns and multiple paths',
 		type: 'array',
+		normalize: true,
 		required: true
 	})
 	.option('format', {
@@ -66,88 +66,22 @@ const options: CliOptionsInterface = yargs
 		default: false,
 		type: 'boolean'
 	})
-	.argv;
+	.exitProcess(true)
+	.parse(process.argv);
 
-const patterns: string[] = [
-	'/**/*.html',
-	'/**/*.ts'
-];
-const parsers: ParserInterface[] = [
-	new ServiceParser(),
-	new PipeParser(),
-	new DirectiveParser()
-];
-
-let compiler: CompilerInterface;
-let ext: string;
-switch (options.format) {
-	case 'pot':
-		compiler = new PoCompiler();
-		ext = 'pot';
-		break;
-	case 'json':
-		compiler = new JsonCompiler();
-		ext = 'json';
-		break;
-	case 'namespaced-json':
-		compiler = new NamespacedJsonCompiler();
-		ext = 'json';
-		break;
-}
-
-const extractor: Extractor = new Extractor(parsers, patterns);
-
-let extractedStrings: TranslationCollection = new TranslationCollection();
-
-// Extract strings from paths
-console.log(chalk.bold('Extracting strings from...'));
-options.input.forEach(dir => {
-	const normalizedDir: string = path.resolve(dir);
-	console.log(chalk.gray('- %s'), normalizedDir);
-	extractedStrings = extractedStrings.union(extractor.process(normalizedDir));
-});
-console.log(chalk.green('Extracted %d strings\n'), extractedStrings.count());
-
-// Save extracted strings to output paths
-options.output.forEach(output => {
-	const normalizedOutput: string = path.resolve(output);
-
-	let outputDir: string = normalizedOutput;
-	let outputFilename: string = `template.${ext}`;
-	if (!fs.existsSync(normalizedOutput) || !fs.statSync(normalizedOutput).isDirectory()) {
-		outputDir = path.dirname(normalizedOutput);
-		outputFilename = path.basename(normalizedOutput);
-	}
-	const outputPath: string = path.join(outputDir, outputFilename);
-
-	console.log(chalk.bold('Saving to: %s'), outputPath);
-	if (!fs.existsSync(outputDir)) {
-		console.log(chalk.dim('- Created output dir: %s'), outputDir);
-		mkdirp.sync(outputDir);
-	}
-
-	let processedStrings: TranslationCollection = extractedStrings;
-
-	if (fs.existsSync(outputPath) && !options.replace) {
-		const existingStrings: TranslationCollection = compiler.parse(fs.readFileSync(outputPath, 'utf-8'));
-		if (existingStrings.count() > 0) {
-			processedStrings = processedStrings.union(existingStrings);
-			console.log(chalk.dim('- Merged with %d existing strings'), existingStrings.count());
-		}
-
-		if (options.clean) {
-			const collectionCount = processedStrings.count();
-			processedStrings = processedStrings.intersect(processedStrings);
-			const removeCount = collectionCount - processedStrings.count();
-			console.log(chalk.dim('- Removed %d obsolete strings'), removeCount);
-		}
-	}
-
-	if (options.sort) {
-		processedStrings = processedStrings.sort();
-		console.log(chalk.dim('- Sorted strings'));
-	}
-
-	fs.writeFileSync(outputPath, compiler.compile(processedStrings));
-	console.log(chalk.green('OK!\n'));
-});
+const extractTask = new ExtractTask(cli.input, cli.output, {
+	replace: cli.replace,
+	sort: cli.sort,
+	clean: cli.clean,
+	patterns: cli.patterns
+});
+
+extractTask
+	.setParsers([
+		new ServiceParser(),
+		new PipeParser(),
+		new DirectiveParser()
+	])
+	.setCompiler(cli.format)
+	.execute();
src/cli/tasks/extract.task.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
+import { TranslationCollection } from '../../utils/translation.collection';
+import { TaskInterface } from './task.interface';
+import { ParserInterface } from '../../parsers/parser.interface';
+import { CompilerInterface } from '../../compilers/compiler.interface';
+import { CompilerFactory } from '../../compilers/compiler.factory';
+
+import * as chalk from 'chalk';
+import * as glob from 'glob';
+import * as fs from 'fs';
+import * as path from 'path';
+import * as mkdirp from 'mkdirp';
+
+export interface ExtractTaskOptionsInterface {
+	replace?: boolean;
+	sort?: boolean;
+	clean?: boolean;
+	patterns?: string[];
+}
+
+export class ExtractTask implements TaskInterface {
+
+	protected _options: ExtractTaskOptionsInterface = {
+		replace: false,
+		sort: false,
+		clean: false,
+		patterns: []
+	};
+
+	protected _parsers: ParserInterface[] = [];
+	protected _compiler: CompilerInterface;
+
+	public constructor(protected _input: string[], protected _output: string[], options?: ExtractTaskOptionsInterface) {
+		this._options = Object.assign({}, this._options, options);
+	}
+
+	public execute(): void {
+		if (!this._parsers) {
+			throw new Error('No parsers configured');
+		}
+		if (!this._compiler) {
+			throw new Error('No compiler configured');
+		}
+
+		const collection = this._extract();
+		if (collection.isEmpty()) {
+			this._out(chalk.yellow('Did not find any extractable strings\n'));
+			return;
+		}
+
+		this._out(chalk.green('Extracted %d strings\n'), collection.count());
+		this._save(collection);
+	}
+
+	public setParsers(parsers: ParserInterface[]): this {
+		this._parsers = parsers;
+		return this;
+	}
+
+	public setCompiler(compiler: CompilerInterface | string): this {
+		if (typeof compiler === 'string') {
+			this._compiler = CompilerFactory.create(compiler)
+		} else {
+			this._compiler = compiler;
+		}
+
+		return this;
+	}
+
+	/**
+	 * Extract strings from input dirs using configured parsers
+	 */
+	protected _extract(): TranslationCollection {
+		let collection: TranslationCollection = new TranslationCollection();
+
+		this._out(chalk.bold('Extracting strings...'));
+
+		this._input.forEach(dir => {
+			this._readDir(dir, this._options.patterns).forEach(path => {
+				this._out(chalk.gray('- %s'), path);
+				const contents: string = fs.readFileSync(path, 'utf-8');
+				this._parsers.forEach((parser: ParserInterface) => {
+					collection = collection.union(parser.extract(contents, path));
+				});
+			});
+		});
+		return collection;
+	}
+
+	/**
+	 * Process collection according to options (merge, clean, sort), compile and save
+	 * @param collection
+	 */
+	protected _save(collection: TranslationCollection): void {
+		this._output.forEach(output => {
+			const normalizedOutput: string = path.resolve(output);
+
+			let dir: string = normalizedOutput;
+			let filename: string = `template.${this._compiler.extension}`;
+			if (!fs.existsSync(normalizedOutput) || !fs.statSync(normalizedOutput).isDirectory()) {
+				dir = path.dirname(normalizedOutput);
+				filename = path.basename(normalizedOutput);
+			}
+
+			const outputPath: string = path.join(dir, filename);
+			let processedCollection: TranslationCollection = collection;
+
+			this._out(chalk.bold('\nSaving: %s'), outputPath);
+
+			if (fs.existsSync(outputPath) && !this._options.replace) {
+				const existingCollection: TranslationCollection = this._compiler.parse(fs.readFileSync(outputPath, 'utf-8'));
+				if (!existingCollection.isEmpty()) {
+					processedCollection = processedCollection.union(existingCollection);
+					this._out(chalk.dim('- merged with %d existing strings'), existingCollection.count());
+				}
+
+				if (this._options.clean) {
+					const collectionCount = processedCollection.count();
+					processedCollection = processedCollection.intersect(processedCollection);
+					const removeCount = collectionCount - processedCollection.count();
+					if (removeCount > 0) {
+						this._out(chalk.dim('- removed %d obsolete strings'), removeCount);
+					}
+				}
+			}
+
+			if (this._options.sort) {
+				processedCollection = processedCollection.sort();
+				this._out(chalk.dim('- sorted strings'));
+			}
+
+			if (!fs.existsSync(dir)) {
+				mkdirp.sync(dir);
+				this._out(chalk.dim('- created dir: %s'), dir);
+			}
+			fs.writeFileSync(outputPath, this._compiler.compile(processedCollection));
+
+			this._out(chalk.green('Done!'));
+		});
+	}
+
+	/**
+	 * Get all files in dir matching patterns
+	 */
+	protected _readDir(dir: string, patterns: string[]): string[] {
+		return patterns.reduce((results, pattern) => {
+			return glob.sync(dir + pattern)
+				.filter(path => fs.statSync(path).isFile())
+				.concat(results);
+		}, []);
+	}
+
+	protected _out(...args: any[]): void {
+		console.log.apply(this, arguments);
+	}
+
+}
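
For reference, the new task is consumed fluently, mirroring the refactored cli.ts above; a minimal usage sketch in which the paths, option values and import locations are illustrative only, not part of the commit:

	import { ExtractTask } from './src/cli/tasks/extract.task';
	import { ServiceParser } from './src/parsers/service.parser';
	import { PipeParser } from './src/parsers/pipe.parser';
	import { DirectiveParser } from './src/parsers/directive.parser';

	// Extract from ./src into ./i18n, merging with any existing template file
	const task = new ExtractTask(['./src'], ['./i18n'], {
		replace: false,
		sort: true,
		clean: false,
		patterns: ['/**/*.html', '/**/*.ts']
	});

	task
		.setParsers([new ServiceParser(), new PipeParser(), new DirectiveParser()])
		.setCompiler('json') // a format string resolved via CompilerFactory, or a CompilerInterface instance
		.execute();
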
src/cli/tasks/task.interface.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
+export interface TaskInterface {
+	execute(): void;
+}
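
The interface is the seam that lets the CLI drive any task uniformly; a hypothetical second task (invented for illustration) only needs an execute() method:

	import { TaskInterface } from './src/cli/tasks/task.interface';

	// Hypothetical task: prints the configured output paths and exits
	class ListOutputsTask implements TaskInterface {
		public constructor(protected _output: string[]) {}

		public execute(): void {
			this._output.forEach(output => console.log(output));
		}
	}

	new ListOutputsTask(['./i18n']).execute();
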
src/compilers/compiler.factory.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
+import { CompilerInterface } from '../compilers/compiler.interface';
+import { JsonCompiler } from '../compilers/json.compiler';
+import { NamespacedJsonCompiler } from '../compilers/namespaced-json.compiler';
+import { PoCompiler } from '../compilers/po.compiler';
+
+export class CompilerFactory {
+
+	public static create(format: string): CompilerInterface {
+		switch (format) {
+			case 'pot': return new PoCompiler();
+			case 'json': return new JsonCompiler();
+			case 'namespaced-json': return new NamespacedJsonCompiler();
+			default: throw new Error(`Unknown format: ${format}`);
+		}
+	}
+
+}
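
For illustration, the factory maps the CLI's --format value to a compiler instance; a short sketch with illustrative import paths and format choice:

	import { CompilerInterface } from './src/compilers/compiler.interface';
	import { CompilerFactory } from './src/compilers/compiler.factory';

	// 'json' | 'namespaced-json' | 'pot' map one-to-one to the compilers above
	const compiler: CompilerInterface = CompilerFactory.create('namespaced-json');
	console.log(compiler.extension); // 'json', used for the default output filename

	// Any other value throws, e.g. CompilerFactory.create('yaml') -> Error: Unknown format: yaml
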
src/compilers/compiler.interface.ts
@@ -2,6 +2,8 @@ import { TranslationCollection } from '../utils/translation.collection';
 
 export interface CompilerInterface {
 
+	extension: string;
+
 	compile(collection: TranslationCollection): string;
 
 	parse(contents: string): TranslationCollection;
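
Compilers now also declare the file extension that ExtractTask uses to build the default template.<extension> filename. A hypothetical CsvCompiler (invented for illustration, not part of the commit) shows what the widened interface requires:

	import { TranslationCollection } from '../utils/translation.collection';
	import { CompilerInterface } from './compiler.interface';

	// Hypothetical compiler: one "key","value" row per translation string
	export class CsvCompiler implements CompilerInterface {

		public extension = 'csv';

		public compile(collection: TranslationCollection): string {
			// Assumes collection.values is a flat key -> string map, as the JSON compilers do
			const values: { [key: string]: string } = collection.values;
			return Object.keys(values)
				.map(key => `${JSON.stringify(key)},${JSON.stringify(values[key])}`)
				.join('\n');
		}

		public parse(contents: string): TranslationCollection {
			// Parsing is out of scope for this sketch; start from an empty collection
			return new TranslationCollection();
		}
	}
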
src/compilers/json.compiler.ts
@@ -3,6 +3,8 @@ import { TranslationCollection } from '../utils/translation.collection';
 
 export class JsonCompiler implements CompilerInterface {
 
+	public extension = 'json';
+
 	public compile(collection: TranslationCollection): string {
 		return JSON.stringify(collection.values, null, '\t');
 	}
src/compilers/namespaced-json.compiler.ts
@@ -5,6 +5,8 @@ import * as flat from 'flat';
 
 export class NamespacedJsonCompiler implements CompilerInterface {
 
+	public extension = 'json';
+
 	public compile(collection: TranslationCollection): string {
 		const values: {} = flat.unflatten(collection.values);
 		return JSON.stringify(values, null, '\t');
src/compilers/po.compiler.ts
@@ -5,6 +5,8 @@ import * as gettext from 'gettext-parser';
 
 export class PoCompiler implements CompilerInterface {
 
+	public extension = 'po';
+
 	/**
 	 * Translation domain
 	 */
src/utils/extractor.ts (deleted, 38 lines)
@@ -1,38 +0,0 @@
-import { ParserInterface } from '../parsers/parser.interface';
-import { TranslationCollection } from './translation.collection';
-
-import * as glob from 'glob';
-import * as fs from 'fs';
-
-export class Extractor {
-
-	public constructor(public parsers: ParserInterface[], public patterns: string[]) { }
-
-	/**
-	 * Extract strings from dir
-	 */
-	public process(dir: string): TranslationCollection {
-		let collection: TranslationCollection = new TranslationCollection();
-
-		this._readDir(dir, this.patterns).forEach(path => {
-			const contents: string = fs.readFileSync(path, 'utf-8');
-			this.parsers.forEach((parser: ParserInterface) => {
-				collection = collection.union(parser.extract(contents, path));
-			});
-		});
-
-		return collection;
-	}
-
-	/**
-	 * Get all files in dir matching patterns
-	 */
-	protected _readDir(dir: string, patterns: string[]): string[] {
-		return patterns.reduce((results, pattern) => {
-			return glob.sync(dir + pattern)
-				.filter(path => fs.statSync(path).isFile())
-				.concat(results);
-		}, []);
-	}
-
-}