diff --git a/package.json b/package.json index 038cc9ae..0fc46677 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "scripts": { "clean": "rimraf ./*.log && lerna exec --parallel -- rimraf coverage lib/* temp", - "build": "yarn clean && lerna exec --parallel -- babel src --out-dir lib --config-file ../../.babelrc", + "build": "yarn clean && lerna exec --parallel -- babel src --out-dir lib --config-file ../../.babelrc && lerna run build", "test": "yarn build && yarn lint && lerna exec --concurrency 1 -- nyc mocha --recursive", "type": "lerna exec -- tsc", "type:doc": "lerna exec --scope @ridi/content-parser -- typedoc", @@ -40,7 +40,7 @@ "sinon": "^11.1.1", "typedoc": "^0.20.36", "typedoc-plugin-external-module-name": "^4.0.6", - "typescript": "^4.0.5" + "typescript": "^5.0.4" }, "esm": { "cjs": true diff --git a/packages/comic-parser/package.json b/packages/comic-parser/package.json index 45e981aa..febf63fb 100644 --- a/packages/comic-parser/package.json +++ b/packages/comic-parser/package.json @@ -6,6 +6,9 @@ "name": "Ridibooks Viewer Team", "mail": "viewer.team@ridi.com" }, + "scripts": { + "build": "npx tsc || exit 0" + }, "license": "MIT", "repository": { "type": "git", @@ -17,15 +20,17 @@ "lib", "type" ], - "main": "lib/index.js", - "types": "type/index.d.ts", + "main": "./lib/index.js", + "types": "./lib/index.d.ts", "dependencies": { "@ridi/parser-core": "0.7.4-alpha.1", + "adm-zip": "^0.5.10", "fs-extra": "^10.0.0", "image-size": "^1.0.0", "string-natural-compare": "^3.0.1" }, "devDependencies": { + "@types/adm-zip": "^0.5.0", "@types/node": "^14.14.6" } } diff --git a/packages/comic-parser/src/ComicParser.js b/packages/comic-parser/src/ComicParser.js deleted file mode 100755 index ee2e3ad5..00000000 --- a/packages/comic-parser/src/ComicParser.js +++ /dev/null @@ -1,218 +0,0 @@ -import { - Parser, isString, stringContains, isExists, createError, Errors, -} from '@ridi/parser-core'; -import sizeOf from 'image-size'; -import naturalCompare from 'string-natural-compare'; - -import * as path from 'path'; - -import ComicBook from './model/ComicBook'; -import ComicItem from './model/ComicItem'; -import ComicParseContext from './model/ComicParseContext'; -import ComicReadContext from './model/ComicReadContext'; - -class ComicParser extends Parser { - /** - * Get default values of parse options - */ - static get parseDefaultOptions() { - return { - ...super.parseDefaultOptions, - // File extension to allow when extracting lists. - ext: ['jpg', 'jpeg', 'png', 'bmp', 'gif'], - // If true, image size parse. (parse may be slower.) - parseImageSize: false, - }; - } - - /** - * Get types of parse options - */ - static get parseOptionTypes() { - return { - ...super.parseOptionTypes, - ext: 'Array', - parseImageSize: 'Boolean|Number', - }; - } - - /** - * @typedef {Object} ComicReadOptionExtra - * @property {boolean} base64 - * - * @typedef {import('@ridi/parser-core/type/BaseReadContext').BaseReadOption & ComicReadOptionExtra} ComicReadOption - */ - - /** - * Get default values of read options - * @returns {ComicReadOption} - */ - static get readDefaultOptions() { - return { - ...super.readDefaultOptions, - // If false, reads image into a buffer. 
- base64: false, - }; - } - - /** - * @typedef {Object} ComicReadOptionTypeExtra - * @property {string} base64 - * - * @typedef {import('@ridi/parser-core/type/BaseReadContext').BaseReadOptionType - * & ComicReadOptionTypeExtra} ComicReadOptionType - */ - - /** - * Get types of read option - * @returns {ComicReadOptionType} - */ - static get readOptionTypes() { - return { - ...super.readOptionTypes, - base64: 'Boolean', - }; - } - - /** - * Create new ComicParser - * @param {string} input file or directory - * @param {import('@ridi/parser-core').CryptoProvider} cryptoProvider en/decrypto provider - * @param {import('@ridi/parser-core').LogLevel} logLevel logging level - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EINVAL} invalid input - * @example new ComicParser('./foo/bar.zip' or './foo/bar'); - */ - constructor(input, cryptoProvider, logLevel) { - /* istanbul ignore next */ - super( - input, - isString(cryptoProvider) ? undefined : cryptoProvider, - { - namespace: 'ComicParser', - logLevel: isString(cryptoProvider) ? cryptoProvider : logLevel, - }, - ); - } - - /** - * @returns {ComicParseContext} - */ - _getParseContextClass() { - return ComicParseContext; - } - - /** - * @returns {ComicBook} - */ - _getBookClass() { - return ComicBook; - } - - /** - * @returns {ComicReadContext} - */ - _getReadContextClass() { - return ComicReadContext; - } - - /** - * @returns {ComicItem} - */ - _getReadItemClass() { - return ComicItem; - } - - /** - * @returns {import('@ridi/parser-core/type/Parser').Task[]} return tasks - */ - _parseTasks() { - return [ - ...super._parseTasks(), - { fun: this._parse, name: 'parse' }, - ]; - } - - /** - * extracts only necessary metadata from entries and create item list - * @param {ComicReadContext} context intermediate result - * @returns {Promise} return Context containing item list - * @see ComicParser.parseDefaultOptions.ext - * @see ComicParser.parseDefaultOptions.parseImageSize - */ - async _parse(context) { - const { entries, rawBook, options } = context; - const items = entries.sort((e1, e2) => naturalCompare(e1.entryPath, e2.entryPath)) - .filter(entry => { - const ext = path.extname(entry.entryPath); - return ext.length > 0 && stringContains(options.ext.map(e => `.${e}`), ext); - }); - rawBook.items = []; - await items.reduce((prevPromise, item, index) => prevPromise.then(async () => { - rawBook.items.push({ - index, - path: item.entryPath, - size: item.size, - ...await this._parseImageSize(item, options), - }); - }), Promise.resolve()); - return context; - } - - /** - * - * @typedef {Object} ImageMetaData - * @property {number} width - * @property {number} height - */ - /** - * parse image size from entry - * @param {import('@ridi/parser-core/type/readEntries').EntryBasicInformation} entry image entry - * @param {ComicParser.parseDefaultOptions} options parse options - * @returns {Promise} return image size - */ - async _parseImageSize(entry, options) { - const { parseImageSize } = options; - if (parseImageSize === false) { - return {}; - } - const readOptions = Number.isInteger(parseImageSize) ? 
{ end: parseImageSize } : {}; - const buffer = await entry.getFile(readOptions); - try { - const size = sizeOf(buffer); - return { width: size.width, height: size.height }; - } catch (e) { - this.logger.error(e); - return { width: undefined, height: undefined }; - } - } - - /** - * Contents is read using loader suitable for context - * @param {ComicReadContext} context properties required for reading - * @returns {(string|Buffer)[]} reading results - * @throws {Errors.ENOFILE} no such file - * @see ComicParser.readDefaultOptions.base64 - */ - async _read(context) { - const { items, entries, options } = context; - const results = []; - await items.reduce((prevPromise, item) => prevPromise.then(async () => { - const entry = entries.find(item.path); - /* istanbul ignore next */ - if (!options.force && !isExists(entry)) { - /* istanbul ignore next */ - throw createError(Errors.ENOFILE, item.path); - } - const file = await entry.getFile(); - if (options.base64) { - results.push(`data:${item.mimeType};base64,${file.toString('base64')}`); - } else { - results.push(file); - } - }), Promise.resolve()); - return results; - } -} - -export default ComicParser; diff --git a/packages/comic-parser/src/ComicParser.ts b/packages/comic-parser/src/ComicParser.ts new file mode 100755 index 00000000..ec16805f --- /dev/null +++ b/packages/comic-parser/src/ComicParser.ts @@ -0,0 +1,300 @@ +import { + Parser, + isString, + stringContains, + isExists, + createError, + Errors, + CryptoProvider, + LogLevel, + BaseItem, +} from "@ridi/parser-core"; +import sizeOf from "image-size"; +import naturalCompare from "string-natural-compare"; + +import * as path from "path"; + +import ComicBook from "./model/ComicBook"; +import ComicItem from "./model/ComicItem"; +import ComicParseContext from "./model/ComicParseContext"; +import ComicReadContext from "./model/ComicReadContext"; +import { + BaseReadOption, + BaseReadOptionType, +} from "@ridi/parser-core/lib/BaseReadContext"; +import { Task } from "@ridi/parser-core/lib/Parser"; +import { EntryBasicInformation } from "@ridi/parser-core/lib/readEntries"; +import { PassThrough, Stream } from "stream"; +import AdmZip, { IZipEntry } from "adm-zip"; +import crypto from "crypto"; + +const MODE = "aes-128-cbc"; + +function createStream(data: Buffer | string | null | undefined) { + const rv = new PassThrough(); // PassThrough is also a Readable stream + rv.push(data); + rv.push(null); + return rv; +} + +type ComicReadOptionExtra = { + base64: boolean; +}; + +type ComicReadOption = BaseReadOption & ComicReadOptionExtra; + +type ComicReadOptionTypeExtra = { + base64: string; +}; +type ComicReadOptionType = BaseReadOptionType & ComicReadOptionTypeExtra; + +type ImageMetaData = { + width: number; + height: number; +}; + +interface AltParser { + entries: { + [path: string]: IZipEntry; + }; + + init(input: string, options: { secretKey }): void; + + readStream(item: BaseItem): Stream; +} + +class ComicParser extends Parser implements AltParser { + entries!: { + [entryName: string]: IZipEntry; + }; + /** + * Get default values of parse options + */ + static get parseDefaultOptions() { + return { + ...super.parseDefaultOptions, + // File extension to allow when extracting lists. + ext: ["jpg", "jpeg", "png", "bmp", "gif"], + // If true, image size parse. (parse may be slower.) 
+ parseImageSize: false, + }; + } + + /** + * Get types of parse options + */ + static get parseOptionTypes() { + return { + ...super.parseOptionTypes, + ext: "Array", + parseImageSize: "Boolean|Number", + }; + } + + /** + * Get default values of read options + + */ + static get readDefaultOptions(): ComicReadOption { + return { + ...super.readDefaultOptions, + // If false, reads image into a buffer. + base64: false, + }; + } + /** + * Get types of read option + */ + static get readOptionTypes(): ComicReadOptionType { + return { + ...super.readOptionTypes, + base64: "Boolean", + }; + } + + /** + * Create new ComicParser + * @param {string} input file or directory + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.EINVAL} invalid input + * @example new ComicParser('./foo/bar.zip' or './foo/bar'); + */ + constructor( + input: string, + // FIXME: 추후 수정 + cryptoProvider?: CryptoProvider | LogLevel, + logLevel?: LogLevel + ) { + /* istanbul ignore next */ + super(input, isString(cryptoProvider) ? undefined : cryptoProvider, { + namespace: "ComicParser", + logLevel: isString(cryptoProvider) ? cryptoProvider : logLevel, + }); + } + + _getParseContextClass() { + return ComicParseContext; + } + + _getBookClass() { + return ComicBook; + } + + _getReadContextClass() { + return ComicReadContext; + } + + _getReadItemClass() { + return ComicItem; + } + + _parseTasks(): Task[] { + return [...super._parseTasks(), { fun: this._parse, name: "parse" }]; + } + + /** + * extracts only necessary metadata from entries and create item list + * @param {ComicReadContext} context intermediate result + * @returns {Promise} return Context containing item list + * @see ComicParser.parseDefaultOptions.ext + * @see ComicParser.parseDefaultOptions.parseImageSize + */ + async _parse(context: ComicReadContext) { + const { entries, rawBook, options } = context; + const items = entries + .sort((e1, e2) => naturalCompare(e1.entryPath, e2.entryPath)) + .filter((entry) => { + const ext = path.extname(entry.entryPath); + return ( + ext.length > 0 && + stringContains( + options.ext.map((e) => `.${e}`), + ext + ) + ); + }); + + // FIXME: context와 연결되지 않는 것으로 보임 + rawBook.items = []; + await items.reduce( + (prevPromise, item, index) => + prevPromise.then(async () => { + rawBook.items.push({ + index, + path: item.entryPath, + size: item.size, + ...(await this._parseImageSize(item, options)), + }); + }), + Promise.resolve() + ); + return context; + } + + /** + * parse image size from entry + * @param {import('@ridi/parser-core/type/readEntries').EntryBasicInformation} entry image entry + * @param {ComicParser.parseDefaultOptions} options parse options + * @returns {Promise} return image size + */ + async _parseImageSize( + entry: EntryBasicInformation, + options: Partial + ) { + const { parseImageSize } = options; + if (parseImageSize === false) { + return {}; + } + const readOptions = Number.isInteger(parseImageSize) + ? 
{ end: parseImageSize as unknown as number } + : {}; + const buffer = await entry.getFile(readOptions); + try { + const size = sizeOf(buffer); + return { width: size.width, height: size.height }; + } catch (e) { + this.logger.error(e); + return { width: undefined, height: undefined }; + } + } + + /** + * Contents is read using loader suitable for context + * @param {ComicReadContext} context properties required for reading + * @returns {(string|Buffer)[]} reading results + * @throws {Errors.ENOFILE} no such file + * @see ComicParser.readDefaultOptions.base64 + */ + async _read(context: ComicReadContext) { + const { items, entries, options } = context; + const results = []; + await (items as ComicItem[]).reduce( + (prevPromise, item) => + prevPromise.then(async () => { + const entry = entries.find(item.path); + /* istanbul ignore next */ + if (!options.force && !isExists(entry)) { + /* istanbul ignore next */ + throw createError(Errors.ENOFILE, item.path); + } + const file = await entry.getFile(); + if (options.base64) { + results.push( + `data:${item.mimeType};base64,${file.toString("base64")}` + ); + } else { + results.push(file); + } + }), + Promise.resolve() + ); + return results; + } + + _secretKey: any; + init(input: string, { secretKey }: { secretKey: any }) { + if (this.entries) throw new Error("이미 entries 초기화 됨"); + + this._secretKey = secretKey; + this.entries = new AdmZip(input) + .getEntries() + .reduce((acc, entry) => { + return { + ...acc, + [entry.entryName]: entry, + }; + }, {}); + } + readData(entryName: string) { + if (!this.entries) + throw new Error( + "entries가 초기화 되지 않음. 먼저 this.init()를 호출해주세요." + ); + + const entry = this.entries[entryName]; + + const data = entry.getData(); + const iv = data.slice(0, 16); + + const decipher = crypto.createDecipheriv(MODE, this._secretKey, iv); + + return Buffer.concat([decipher.update(data.slice(16)), decipher.final()]); + } + readStream(entryName: string) { + if (!this.entries) + throw new Error( + "entries가 초기화 되지 않음. 먼저 this.init()를 호출해주세요." 
+ ); + + const entry = this.entries[entryName]; + + const data = entry.getData(); + const iv = data.slice(0, 16); + + const decipher = crypto.createDecipheriv(MODE, this._secretKey, iv); + + return createStream(data.slice(16)).pipe(decipher); + } +} + +export default ComicParser; diff --git a/packages/comic-parser/src/index.js b/packages/comic-parser/src/index.ts similarity index 100% rename from packages/comic-parser/src/index.js rename to packages/comic-parser/src/index.ts diff --git a/packages/comic-parser/src/model/ComicBook.js b/packages/comic-parser/src/model/ComicBook.js deleted file mode 100644 index 754d62f1..00000000 --- a/packages/comic-parser/src/model/ComicBook.js +++ /dev/null @@ -1,24 +0,0 @@ -import { BaseBook } from '@ridi/parser-core'; - -import ComicItem from './ComicItem'; - -class ComicBook extends BaseBook { - /** - * @type {ComicItem[]} - */ - items - - constructor(rawBook = {}) { - super(rawBook); - this.items = (rawBook.items || []).map(rawObj => new ComicItem(rawObj)); - Object.freeze(this); - } - - toRaw() { - return { - items: this.items.map(item => item.toRaw()), - }; - } -} - -export default ComicBook; diff --git a/packages/comic-parser/src/model/ComicBook.ts b/packages/comic-parser/src/model/ComicBook.ts new file mode 100644 index 00000000..fd42f5df --- /dev/null +++ b/packages/comic-parser/src/model/ComicBook.ts @@ -0,0 +1,25 @@ +import { BaseBook } from "@ridi/parser-core"; + +import ComicItem, { ComicItemProperties } from "./ComicItem"; + +type ComicBookProps = { + items?: ComicItemProperties[]; +}; + +class ComicBook extends BaseBook { + items: ComicItem[]; + + constructor(rawBook: ComicBookProps = {}) { + super(); + this.items = (rawBook.items || []).map((rawObj) => new ComicItem(rawObj)); + Object.freeze(this); + } + + toRaw() { + return { + items: this.items.map((item) => item.toRaw()), + }; + } +} + +export default ComicBook; diff --git a/packages/comic-parser/src/model/ComicItem.js b/packages/comic-parser/src/model/ComicItem.ts similarity index 53% rename from packages/comic-parser/src/model/ComicItem.js rename to packages/comic-parser/src/model/ComicItem.ts index c1313451..fdfc4377 100755 --- a/packages/comic-parser/src/model/ComicItem.js +++ b/packages/comic-parser/src/model/ComicItem.ts @@ -1,59 +1,36 @@ -import { BaseItem, isExists } from '@ridi/parser-core'; +import { BaseItem, isExists } from "@ridi/parser-core"; -import path from 'path'; +import path from "path"; -/** - * @typedef {Object} ComicItemProperties - * @property {number} [index] - * @property {string} [path] - * @property {number} [width] - * @property {number} [height] - */ +export type ComicItemProperties = Partial<{ + index: number; + path: string; + width: number; + height: number; + size: number; +}>; class ComicItem extends BaseItem { - /** - * @type {number} - */ - index; + index: number; + path: string; + width: number; + height: number; - /** - * @type {string} - */ - path; - - /** - * @type {number} - */ - width; - - /** - * @type {number} - */ - height; - - /** - * @return {string} - */ get mimeType() { - const ext = path.extname(this.path).toLocaleLowerCase().replace('.', ''); + const ext = path.extname(this.path).toLocaleLowerCase().replace(".", ""); switch (ext) { - case 'jpg': - case 'jpeg': - case 'png': - case 'bmp': - case 'gif': + case "jpg": + case "jpeg": + case "png": + case "bmp": + case "gif": return `image/${ext}`; default: - return ''; + return ""; } } - /** - * - * @param {ComicItemProperties} rawObj - * @param {boolean} freeze - */ - constructor(rawObj = 
{}, freeze = true) { + constructor(rawObj: ComicItemProperties = {}, freeze = true) { super(rawObj); this.index = rawObj.index; this.path = rawObj.path; diff --git a/packages/comic-parser/src/model/ComicParseContext.js b/packages/comic-parser/src/model/ComicParseContext.js deleted file mode 100644 index 3b2fc301..00000000 --- a/packages/comic-parser/src/model/ComicParseContext.js +++ /dev/null @@ -1,14 +0,0 @@ -import { BaseParseContext } from '@ridi/parser-core'; - -import ComicBook from './ComicBook'; - -class ComicParseContext extends BaseParseContext { - constructor() { - super(); - const rawBook = {}; - Object.keys(new ComicBook()).forEach(key => { rawBook[key] = undefined; }); - this.rawBook = rawBook; - } -} - -export default ComicParseContext; diff --git a/packages/comic-parser/src/model/ComicParseContext.ts b/packages/comic-parser/src/model/ComicParseContext.ts new file mode 100644 index 00000000..46a1acbb --- /dev/null +++ b/packages/comic-parser/src/model/ComicParseContext.ts @@ -0,0 +1,18 @@ +import { BaseBook, BaseParseContext } from "@ridi/parser-core"; + +import ComicBook from "./ComicBook"; + +class ComicParseContext extends BaseParseContext { + rawBook: any; + + constructor() { + super(); + const rawBook = {}; + Object.keys(new ComicBook()).forEach((key) => { + rawBook[key] = undefined; + }); + this.rawBook = rawBook; + } +} + +export default ComicParseContext; diff --git a/packages/comic-parser/src/model/ComicReadContext.js b/packages/comic-parser/src/model/ComicReadContext.js deleted file mode 100644 index 46e4c830..00000000 --- a/packages/comic-parser/src/model/ComicReadContext.js +++ /dev/null @@ -1,7 +0,0 @@ -import { BaseReadContext } from '@ridi/parser-core'; - -class ComicReadContext extends BaseReadContext { - -} - -export default ComicReadContext; diff --git a/packages/comic-parser/src/model/ComicReadContext.ts b/packages/comic-parser/src/model/ComicReadContext.ts new file mode 100644 index 00000000..115ad70b --- /dev/null +++ b/packages/comic-parser/src/model/ComicReadContext.ts @@ -0,0 +1,9 @@ +import { BaseReadContext } from "@ridi/parser-core"; +import { ComicItemProperties } from "./ComicItem"; + +class ComicReadContext extends BaseReadContext { + options?: BaseReadContext["options"] & { ext: string[]; base64?: boolean }; + rawBook: { items: ComicItemProperties[] }; +} + +export default ComicReadContext; diff --git a/packages/comic-parser/test/ComicParser.spec.js b/packages/comic-parser/test/ComicParser.spec.js index f1934a4b..c4cc20aa 100755 --- a/packages/comic-parser/test/ComicParser.spec.js +++ b/packages/comic-parser/test/ComicParser.spec.js @@ -5,13 +5,15 @@ import fs from 'fs-extra'; import path from 'path'; import sinon from 'sinon'; -import ComicBook from '../src/model/ComicBook'; -import ComicParser from '../src/ComicParser'; -import ComicItem from '../src/model/ComicItem'; -import ComicReadContext from '../src/model/ComicReadContext'; -import ComicParseContext from '../src/model/ComicParseContext'; +import ComicBook from '../lib/model/ComicBook'; +import ComicParser from '../lib/ComicParser'; +import ComicItem from '../lib/model/ComicItem'; +import ComicReadContext from '../lib/model/ComicReadContext'; +import ComicParseContext from '../lib/model/ComicParseContext'; import Paths from '../../../test/paths'; -import validationBook from './validationBook'; +import validationBook, { Book2 } from './validationBook'; + +console.log('ComicBook === Book2', ComicBook === Book2) chai.use(chaiAsPromised); should(); // Initialize should @@ -61,6 +63,7 @@ 
describe('ComicParser', () => { }); it('Parse with default options from file', () => { + const cp = new ComicParser(Paths.COMIC); return new ComicParser(Paths.COMIC).parse().then(book => { validationBook(book, JSON.parse(fs.readFileSync(Paths.EXPECTED_COMIC_BOOK))); }); @@ -102,7 +105,7 @@ describe('ComicParser', () => { it('parseImageSize return undefined when sizeof throws', async () => { const comicParser = new ComicParser(Paths.COMIC, 'fakeProvider'); - const imageSize = await comicParser._parseImageSize({getFile:sinon.fake.returns(Buffer.from(['f','a','k','e']))}, {parseImageSize: true}); + const imageSize = await comicParser._parseImageSize({ getFile: sinon.fake.returns(Buffer.from(['f', 'a', 'k', 'e'])) }, { parseImageSize: true }); expect(imageSize.height).to.be.undefined; expect(imageSize.width).to.be.undefined; }); diff --git a/packages/comic-parser/test/index.spec.js b/packages/comic-parser/test/index.spec.js index 5744ae76..a52dd2ec 100644 --- a/packages/comic-parser/test/index.spec.js +++ b/packages/comic-parser/test/index.spec.js @@ -8,7 +8,7 @@ import { CryptoProvider, AesCryptor, Hash, -} from '../src/index'; +} from '../lib/index'; describe('comic-parser', () => { it('Check imports', () => { diff --git a/packages/comic-parser/test/model/Item.spec.js b/packages/comic-parser/test/model/Item.spec.js index bc90dc43..fc765bfa 100644 --- a/packages/comic-parser/test/model/Item.spec.js +++ b/packages/comic-parser/test/model/Item.spec.js @@ -1,6 +1,6 @@ import { assert, should } from 'chai'; -import Item from '../../src/model/ComicItem'; +import Item from '../../lib/model/ComicItem'; should(); // Initialize should diff --git a/packages/comic-parser/test/model/ParseContext.spec.js b/packages/comic-parser/test/model/ParseContext.spec.js index 1f8c5019..06e5b274 100644 --- a/packages/comic-parser/test/model/ParseContext.spec.js +++ b/packages/comic-parser/test/model/ParseContext.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import ParseContext from '../../src/model/ComicParseContext'; +import ParseContext from '../../lib/model/ComicParseContext'; should(); // Initialize should diff --git a/packages/comic-parser/test/validationBook.js b/packages/comic-parser/test/validationBook.js index 3c072eff..8197b2dd 100644 --- a/packages/comic-parser/test/validationBook.js +++ b/packages/comic-parser/test/validationBook.js @@ -1,9 +1,10 @@ import { isExists } from '@ridi/parser-core'; -import Book from '../src/model/ComicBook'; -import Item from '../src/model/ComicItem'; +import Book from '../lib/model/ComicBook'; +import Item from '../lib/model/ComicItem'; export default function validationBook(book, expectedBook, parseOptions = {}) { + console.log('book', book.constructor, Book); book.should.be.an.instanceOf(Book); book.items.should.have.lengthOf(expectedBook.items.length); @@ -22,3 +23,4 @@ export default function validationBook(book, expectedBook, parseOptions = {}) { } }); } +export const Book2 = Book diff --git a/packages/comic-parser/tsconfig.json b/packages/comic-parser/tsconfig.json index 9fc4a37a..6128f834 100644 --- a/packages/comic-parser/tsconfig.json +++ b/packages/comic-parser/tsconfig.json @@ -1,17 +1,17 @@ { // Change this to match your project - "include": [ - "src/**/*" - ], + "include": ["src/**/*"], "compilerOptions": { + "target": "es6", + "module": "CommonJS", // Tells TypeScript to read JS files, as // normally they are ignored as source files "allowJs": true, // Generate d.ts files "declaration": true, // This compiler run should - // only output d.ts files - 
"emitDeclarationOnly": true, - "outDir": "type" - }, + "esModuleInterop": true, + "moduleResolution": "node", + "outDir": "lib" + } } diff --git a/packages/comic-parser/type/ComicParser.d.ts b/packages/comic-parser/type/ComicParser.d.ts deleted file mode 100644 index dbe0540f..00000000 --- a/packages/comic-parser/type/ComicParser.d.ts +++ /dev/null @@ -1,89 +0,0 @@ -export default ComicParser; -declare class ComicParser extends Parser { - /** - * Get default values of parse options - */ - static get parseDefaultOptions(): { - ext: string[]; - parseImageSize: boolean; - unzipPath: string; - overwrite: boolean; - }; - /** - * Get types of parse options - */ - static get parseOptionTypes(): { - ext: string; - parseImageSize: string; - unzipPath: string; - overwrite: string; - }; - /** - * @typedef {Object} ComicReadOptionExtra - * @property {boolean} base64 - * - * @typedef {import('@ridi/parser-core/type/BaseReadContext').BaseReadOption & ComicReadOptionExtra} ComicReadOption - */ - /** - * Get default values of read options - * @returns {ComicReadOption} - */ - static get readDefaultOptions(): import("@ridi/parser-core/type/BaseReadContext").BaseReadOption & { - base64: boolean; - }; - /** - * @typedef {Object} ComicReadOptionTypeExtra - * @property {string} base64 - * - * @typedef {import('@ridi/parser-core/type/BaseReadContext').BaseReadOptionType - * & ComicReadOptionTypeExtra} ComicReadOptionType - */ - /** - * Get types of read option - * @returns {ComicReadOptionType} - */ - static get readOptionTypes(): import("@ridi/parser-core/type/BaseReadContext").BaseReadOptionType & { - base64: string; - }; - /** - * Create new ComicParser - * @param {string} input file or directory - * @param {import('@ridi/parser-core').CryptoProvider} cryptoProvider en/decrypto provider - * @param {import('@ridi/parser-core').LogLevel} logLevel logging level - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EINVAL} invalid input - * @example new ComicParser('./foo/bar.zip' or './foo/bar'); - */ - constructor(input: string, cryptoProvider: import('@ridi/parser-core').CryptoProvider, logLevel: import('@ridi/parser-core').LogLevel); - /** - * extracts only necessary metadata from entries and create item list - * @param {ComicReadContext} context intermediate result - * @returns {Promise} return Context containing item list - * @see ComicParser.parseDefaultOptions.ext - * @see ComicParser.parseDefaultOptions.parseImageSize - */ - _parse(context: ComicReadContext): Promise; - /** - * - * @typedef {Object} ImageMetaData - * @property {number} width - * @property {number} height - */ - /** - * parse image size from entry - * @param {import('@ridi/parser-core/type/readEntries').EntryBasicInformation} entry image entry - * @param {ComicParser.parseDefaultOptions} options parse options - * @returns {Promise} return image size - */ - _parseImageSize(entry: import('@ridi/parser-core/type/readEntries').EntryBasicInformation, options: { - ext: string[]; - parseImageSize: boolean; - unzipPath: string; - overwrite: boolean; - }): Promise<{ - width: number; - height: number; - }>; -} -import { Parser } from "@ridi/parser-core"; -import ComicReadContext from "./model/ComicReadContext"; diff --git a/packages/comic-parser/type/index.d.ts b/packages/comic-parser/type/index.d.ts deleted file mode 100644 index 2333a3ef..00000000 --- a/packages/comic-parser/type/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import ComicParser from "./ComicParser"; -import ComicBook from "./model/ComicBook"; -import { Errors } 
from "@ridi/parser-core"; -import { LogLevel } from "@ridi/parser-core"; -import { CryptoProvider } from "@ridi/parser-core"; -import { AesCryptor } from "@ridi/parser-core"; -import { Hash } from "@ridi/parser-core"; -export { ComicParser, ComicBook, Errors, LogLevel, CryptoProvider, AesCryptor, Hash }; diff --git a/packages/comic-parser/type/model/ComicBook.d.ts b/packages/comic-parser/type/model/ComicBook.d.ts deleted file mode 100644 index 151b7c07..00000000 --- a/packages/comic-parser/type/model/ComicBook.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -export default ComicBook; -declare class ComicBook extends BaseBook { - constructor(rawBook?: {}); - /** - * @type {ComicItem[]} - */ - items: ComicItem[]; -} -import { BaseBook } from "@ridi/parser-core"; -import ComicItem from "./ComicItem"; diff --git a/packages/comic-parser/type/model/ComicItem.d.ts b/packages/comic-parser/type/model/ComicItem.d.ts deleted file mode 100644 index dd6ccd93..00000000 --- a/packages/comic-parser/type/model/ComicItem.d.ts +++ /dev/null @@ -1,43 +0,0 @@ -export default ComicItem; -export type ComicItemProperties = { - index?: number; - path?: string; - width?: number; - height?: number; -}; -/** - * @typedef {Object} ComicItemProperties - * @property {number} [index] - * @property {string} [path] - * @property {number} [width] - * @property {number} [height] - */ -declare class ComicItem extends BaseItem { - /** - * - * @param {ComicItemProperties} rawObj - * @param {boolean} freeze - */ - constructor(rawObj?: ComicItemProperties, freeze?: boolean); - /** - * @type {number} - */ - index: number; - /** - * @type {string} - */ - path: string; - /** - * @type {number} - */ - width: number; - /** - * @type {number} - */ - height: number; - /** - * @return {string} - */ - get mimeType(): string; -} -import { BaseItem } from "@ridi/parser-core"; diff --git a/packages/comic-parser/type/model/ComicParseContext.d.ts b/packages/comic-parser/type/model/ComicParseContext.d.ts deleted file mode 100644 index 2143aa40..00000000 --- a/packages/comic-parser/type/model/ComicParseContext.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export default ComicParseContext; -declare class ComicParseContext extends BaseParseContext { -} -import { BaseParseContext } from "@ridi/parser-core"; diff --git a/packages/comic-parser/type/model/ComicReadContext.d.ts b/packages/comic-parser/type/model/ComicReadContext.d.ts deleted file mode 100644 index b386bc93..00000000 --- a/packages/comic-parser/type/model/ComicReadContext.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export default ComicReadContext; -declare class ComicReadContext extends BaseReadContext { -} -import { BaseReadContext } from "@ridi/parser-core"; diff --git a/packages/comic-parser/yarn.lock b/packages/comic-parser/yarn.lock index 098883f7..111a7d54 100644 --- a/packages/comic-parser/yarn.lock +++ b/packages/comic-parser/yarn.lock @@ -2,14 +2,82 @@ # yarn lockfile v1 +"@ridi/parser-core@0.7.4-alpha.1": + version "0.7.4-alpha.1" + resolved "https://registry.yarnpkg.com/@ridi/parser-core/-/parser-core-0.7.4-alpha.1.tgz#0eead7cb3a7c89e66124d1b02bd82c0a93d14f54" + integrity sha512-7BYkCKBw/lbs23+jHNO/r9l+hcRsIa2de7NXU7Ud0qKTx6L2wUOlf5P82Fe/aRzHF90V116R2RjJhN3kGornmQ== + dependencies: + adm-zip "^0.5.5" + crypto-js "^4.0.0" + event-stream "3.3.4" + fs-extra "^10.0.0" + sha1 "^1.1.1" + string-format "^2.0.0" + string-natural-compare "^3.0.1" + +"@types/adm-zip@^0.5.0": + version "0.5.0" + resolved "https://registry.yarnpkg.com/@types/adm-zip/-/adm-zip-0.5.0.tgz#94c90a837ce02e256c7c665a6a1eb295906333c1" + 
integrity sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw== + dependencies: + "@types/node" "*" + +"@types/node@*": + version "18.16.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.16.0.tgz#4668bc392bb6938637b47e98b1f2ed5426f33316" + integrity sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ== + "@types/node@^14.14.6": version "14.14.11" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.11.tgz#fc25a4248a5e8d0837019b1d170146d07334abe0" + resolved "https://registry.npmjs.org/@types/node/-/node-14.14.11.tgz" integrity sha512-BJ97wAUuU3NUiUCp44xzUFquQEvnk1wu7q4CMEUYKJWjdkr0YWYDsm4RFtAvxYsNjLsKcrFt6RvK8r+mnzMbEQ== +adm-zip@^0.5.10, adm-zip@^0.5.5: + version "0.5.10" + resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.5.10.tgz#4a51d5ab544b1f5ce51e1b9043139b639afff45b" + integrity sha512-x0HvcHqVJNTPk/Bw8JbLWlWoo6Wwnsug0fnYYro1HBrjxZ3G7/AZk7Ahv8JwDe1uIcz8eBqvu86FuF1POiG7vQ== + +"charenc@>= 0.0.1": + version "0.0.2" + resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667" + integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA== + +"crypt@>= 0.0.1": + version "0.0.2" + resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" + integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow== + +crypto-js@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.1.1.tgz#9e485bcf03521041bd85844786b83fb7619736cf" + integrity sha512-o2JlM7ydqd3Qk9CA0L4NL6mTzU2sdx96a+oOfPu8Mkl/PK51vSyoi8/rQ8NknZtk44vq15lmhAj9CIAGwgeWKw== + +duplexer@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +event-stream@3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-3.3.4.tgz#4ab4c9a0f5a54db9338b4c34d86bfce8f4b35571" + integrity sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g== + dependencies: + duplexer "~0.1.1" + from "~0" + map-stream "~0.1.0" + pause-stream "0.0.11" + split "0.3" + stream-combiner "~0.0.4" + through "~2.3.1" + +from@~0: + version "0.1.7" + resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" + integrity sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g== + fs-extra@^10.0.0: version "10.0.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.0.0.tgz#9ff61b655dde53fb34a82df84bb214ce802e17c1" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz" integrity sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ== dependencies: graceful-fs "^4.2.0" @@ -18,43 +86,87 @@ fs-extra@^10.0.0: graceful-fs@^4.1.6, graceful-fs@^4.2.0: version "4.2.4" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== image-size@^1.0.0: version "1.0.0" - resolved 
"https://registry.yarnpkg.com/image-size/-/image-size-1.0.0.tgz#58b31fe4743b1cec0a0ac26f5c914d3c5b2f0750" + resolved "https://registry.npmjs.org/image-size/-/image-size-1.0.0.tgz" integrity sha512-JLJ6OwBfO1KcA+TvJT+v8gbE6iWbj24LyDNFgFEN0lzegn6cC6a/p3NIDaepMsJjQjlUWqIC7wJv8lBFxPNjcw== dependencies: queue "6.0.2" inherits@~2.0.3: version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== jsonfile@^6.0.1: version "6.1.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== dependencies: universalify "^2.0.0" optionalDependencies: graceful-fs "^4.1.6" +map-stream@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" + integrity sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g== + +pause-stream@0.0.11: + version "0.0.11" + resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" + integrity sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A== + dependencies: + through "~2.3" + queue@6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/queue/-/queue-6.0.2.tgz#b91525283e2315c7553d2efa18d83e76432fed65" + resolved "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz" integrity sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA== dependencies: inherits "~2.0.3" +sha1@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/sha1/-/sha1-1.1.1.tgz#addaa7a93168f393f19eb2b15091618e2700f848" + integrity sha512-dZBS6OrMjtgVkopB1Gmo4RQCDKiZsqcpAQpkV/aaj+FCrCg8r4I4qMkDPQjBgLIxlmu9k4nUbWq6ohXahOneYA== + dependencies: + charenc ">= 0.0.1" + crypt ">= 0.0.1" + +split@0.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/split/-/split-0.3.3.tgz#cd0eea5e63a211dfff7eb0f091c4133e2d0dd28f" + integrity sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA== + dependencies: + through "2" + +stream-combiner@~0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.0.4.tgz#4d5e433c185261dde623ca3f44c586bcf5c4ad14" + integrity sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw== + dependencies: + duplexer "~0.1.1" + +string-format@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string-format/-/string-format-2.0.0.tgz#f2df2e7097440d3b65de31b6d40d54c96eaffb9b" + integrity sha512-bbEs3scLeYNXLecRRuk6uJxdXUSj6le/8rNPHChIJTn2V79aXVTR1EH2OH5zLKKoz0V02fOUKZZcw01pLUShZA== + string-natural-compare@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + resolved "https://registry.npmjs.org/string-natural-compare/-/string-natural-compare-3.0.1.tgz" integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== +through@2, through@~2.3, through@~2.3.1: + 
version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + universalify@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== diff --git a/packages/content-parser/type/index.d.ts b/packages/content-parser/type/index.d.ts index 53fac77d..89f62d7d 100644 --- a/packages/content-parser/type/index.d.ts +++ b/packages/content-parser/type/index.d.ts @@ -1,12 +1,5 @@ import { EpubParser } from "@ridi/epub-parser"; import { EpubBook } from "@ridi/epub-parser"; -import { ComicParser } from "@ridi/comic-parser"; -import { ComicBook } from "@ridi/comic-parser"; import { PdfParser } from "@ridi/pdf-parser"; import { PdfBook } from "@ridi/pdf-parser"; -import { Errors } from "@ridi/parser-core"; -import { LogLevel } from "@ridi/parser-core"; -import { CryptoProvider } from "@ridi/parser-core"; -import { AesCryptor } from "@ridi/parser-core"; -import { Hash } from "@ridi/parser-core"; export { EpubParser, EpubBook, ComicParser, ComicBook, PdfParser, PdfBook, Errors, LogLevel, CryptoProvider, AesCryptor, Hash }; diff --git a/packages/parser-core/package.json b/packages/parser-core/package.json index eb1865dc..e10ae09f 100644 --- a/packages/parser-core/package.json +++ b/packages/parser-core/package.json @@ -7,6 +7,9 @@ "mail": "viewer.team@ridi.com" }, "license": "MIT", + "scripts": { + "build": "npx tsc || exit 0" + }, "repository": { "type": "git", "url": "https://github.com/ridi/content-parser" @@ -17,8 +20,8 @@ "lib", "type" ], - "main": "lib/index.js", - "types": "type/index.d.ts", + "main": "./lib/index.js", + "types": "./lib/index.d.ts", "dependencies": { "adm-zip": "^0.5.5", "crypto-js": "^4.0.0", diff --git a/packages/parser-core/src/AesCryptor.js b/packages/parser-core/src/AesCryptor.ts similarity index 63% rename from packages/parser-core/src/AesCryptor.js rename to packages/parser-core/src/AesCryptor.ts index 37cdf655..d680bebc 100644 --- a/packages/parser-core/src/AesCryptor.js +++ b/packages/parser-core/src/AesCryptor.ts @@ -1,104 +1,113 @@ import * as CryptoJs from 'crypto-js'; -import { Padding, Encoding } from './cryptoUtil'; +import { Padding, Encoding, PaddingObject, EncodingObject } from './cryptoUtil'; import Errors, { createError } from './errors'; import mergeObjects from './mergeObjects'; import { stringContains } from './stringUtil'; import { isExists, isObject, isString } from './typecheck'; import validateOptions from './validateOptions'; +import { ValueOf } from './helper'; const { mode: aesMode, AES } = CryptoJs; -/** - * @typedef {Object} ModeConfig - * @property {string} key - * @property {string} [iv] -*/ +type WordArray = CryptoJs.lib.WordArray; -/** - * @type {ModeConfig} - */ -const defaultConfigTypes = { +type ModeConfig = { + key: string | WordArray | number[] | Buffer | Uint8Array; + iv?: WordArray | number[] | Buffer; +}; + +const defaultConfigTypes: ModeConfig = { key: 'String|Buffer|Uint8Array|Array', }; -/** - * @typedef {Object} ModeObject - * @property {string} name - * @property {import('../type/CryptoJs').BlockCipherMode} op - * @property {ModeConfig} configTypes - * - * @typedef {Object} ModeList - * @property 
{ModeObject} ECB - * @property {ModeObject} CBC - * @property {ModeObject} CFB - * @property {ModeObject} OFB - * @property {ModeObject} CTR -*/ +type BlockCipherMode = ValueOf; + +type ModeObject = { + name: string; + op: BlockCipherMode; + configTypes: ModeConfig; +}; -/** - * @type {ModeList} -*/ -const Mode = Object.freeze({ - ECB: { // Electronic Codebook (key) +type ModeList = { + ECB: ModeObject; + CBC: ModeObject; + CFB: ModeObject; + OFB: ModeObject; + CTR: ModeObject; +}; + +const Mode: ModeList = { + ECB: { + // Electronic Codebook (key) name: 'ECB', op: aesMode.ECB, configTypes: defaultConfigTypes, }, - CBC: { // Cipher-Block Chaining (key + iv) + CBC: { + // Cipher-Block Chaining (key + iv) name: 'CBC', op: aesMode.CBC, configTypes: mergeObjects(defaultConfigTypes, { iv: 'Buffer|Uint8Array|Array', }), }, - CFB: { // Cipher Feedback (key + iv + {segmentSize}) + CFB: { + // Cipher Feedback (key + iv + {segmentSize}) name: 'CFB', op: aesMode.CFB, configTypes: mergeObjects(defaultConfigTypes, { iv: 'Buffer|Uint8Array|Array', }), }, - OFB: { // Output Feedback (key + iv) + OFB: { + // Output Feedback (key + iv) name: 'OFB', op: aesMode.OFB, configTypes: mergeObjects(defaultConfigTypes, { iv: 'Buffer|Uint8Array|Array', }), }, - CTR: { // Counter (key + iv + {counter}) + CTR: { + // Counter (key + iv + {counter}) name: 'CTR', op: aesMode.CTR, configTypes: mergeObjects(defaultConfigTypes, { iv: 'Buffer|Uint8Array|Array', }), }, -}); +}; + +type EncodeAndDecode = (data: string | WordArray) => WordArray; + +type Operator = { + name: string; + encrypt: EncodeAndDecode; + decrypt: EncodeAndDecode; +}; + +type CryptOption = { + padding: PaddingObject; + encoding: EncodingObject; +}; class AesCryptor { - /** - * @typedef {(data: string | CryptoJs.lib.WordArray) => CryptoJs.lib.WordArray} EncodeAndDecode - * @typedef {Object} Operator - * @property {string} name - * @property {EncodeAndDecode} encrypt - * @property {EncodeAndDecode} decrypt - */ + static Padding = Padding; + static Encoding = Encoding; + static Mode = Mode; - /** - * @private - * @type {Operator} - */ - operator; + private operator: Operator; /** * Construct AesCryptor - * @param {ModeObject} mode Crypto mode - * @param {ModeConfig} config Crypto config */ - constructor(mode, config) { + constructor(mode: ModeObject, config: ModeConfig) { if (!isExists(mode)) { throw createError(Errors.EREQPRM, 'mode'); - } else if (!isObject(mode) || !stringContains(Object.keys(Mode), mode.name)) { + } else if ( + !isObject(mode) || + !stringContains(Object.keys(Mode), mode.name) + ) { throw createError(Errors.EINVAL, 'mode', 'mode', 'use Modes'); } if (!isExists(config)) { @@ -116,7 +125,8 @@ class AesCryptor { throw createError(Errors.EREQPRM, 'config.iv'); } break; - default: break; + default: + break; } validateOptions(config, mode.configTypes, true); this.operator = this.makeOperator(mode, config); @@ -125,12 +135,8 @@ class AesCryptor { /** * Make an operator - * @private - * @param {ModeObject} mode - * @param {ModeConfig} config - * @returns {Operator} Operator */ - makeOperator(mode, config) { + private makeOperator(mode: ModeObject, config: ModeConfig): Operator { let { key, iv } = config; // convert key to WordArray @@ -138,6 +144,7 @@ class AesCryptor { const { length } = key; key = Encoding.UTF8.decode(key); if (length % 16 !== 0) { + // ** 콛, 확인필요 Padding.PKCS7.pad(key); } } else if (Buffer.isBuffer(key)) { @@ -153,43 +160,48 @@ class AesCryptor { iv = Encoding.UINT8.decode(iv); } - const checkType = (data, allow) => { + const 
checkType = (data: T, allow?: string) => { if (!isExists(data)) { - const message = `require Buffer or Uint8Array or Array${isExists(allow) ? ` ${allow}` : ''}`; + const message = `require Buffer or Uint8Array or Array${ + isExists(allow) ? ` ${allow}` : '' + }`; throw createError(Errors.ECRYT, 'data type', 'reason', message); } return data; }; // return operator - const options = { iv, mode: mode.op, padding: Padding.NONE.op }; - return { // Note that all data and return type is a WordArray + const options = { + iv, + mode: mode.op, + padding: Padding.NONE.op, + }; + return { + // Note that all data and return type is a WordArray name: mode.name, - encrypt: data => AES.encrypt(checkType(data, 'or String'), key, options).ciphertext, - decrypt: data => { - const cipherParams = CryptoJs.lib.CipherParams.create({ ciphertext: checkType(data) }); - return AES.decrypt(cipherParams, key, options); + encrypt: (data) => + AES.encrypt(checkType(data, 'or String'), key as any, options) + .ciphertext, + decrypt: (data) => { + const cipherParams = CryptoJs.lib.CipherParams.create({ + ciphertext: checkType(data) as WordArray, + }); + return AES.decrypt(cipherParams, key as any, options); }, }; } - /** - * @typedef {Object} CryptOption - * @property {import('./cryptoUtil').PaddingObject} padding - * @property {import('./cryptoUtil').EncodingObject} encoding - */ - - /** - * Encrypt string - * @param {Buffer | Uint8Array | number[]} data - * @param {CryptOption} options - * @returns {string} encrypted string - */ /* eslint-disable no-param-reassign */ - encrypt(data, options = {}) { + encrypt( + data: string | Buffer | Uint8Array | number[] | WordArray, + options: Partial = {} + ) { const padding = options.padding || Padding.NONE; const encoding = options.encoding || Encoding.BUFFER; - const length = isExists(data) && isExists(data.length) ? data.length : 0; + const length = + isExists(data) && isExists((data as any).length) + ? 
(data as any).length + : 0; // convert data to WordArray if (isString(data)) { @@ -201,7 +213,10 @@ class AesCryptor { } // padding data if needed as padding type - if (padding === Padding.PKCS7 || (padding === Padding.AUTO && length % 16 !== 0)) { + if ( + padding === Padding.PKCS7 || + (padding === Padding.AUTO && length % 16 !== 0) + ) { padding.pad(data); } @@ -212,12 +227,13 @@ class AesCryptor { /** * Decrupt string - * @param {Buffer | Uint8Array | number[]} data - * @param {CryptOption} options * @returns {string} decrypted string */ /* eslint-disable no-param-reassign */ - decrypt(data, options = {}) { + decrypt( + data: string | Buffer | Uint8Array | number[] | WordArray, + options: Partial = {} + ) { const padding = options.padding || Padding.NONE; const encoding = options.encoding || Encoding.BUFFER; @@ -234,7 +250,7 @@ class AesCryptor { // unpadding data if needed as padding type if (padding === Padding.PKCS7 || padding === Padding.AUTO) { try { - const array = Encoding.UINT8.encode(decryptedData); + const array = Encoding.UINT8.encode(decryptedData) as Uint8Array; if (array.length < 16) { throw createError(Errors.ECRYT, 'invalid data length'); } @@ -262,14 +278,6 @@ class AesCryptor { /* eslint-enable no-param-reassign */ } -AesCryptor.Padding = Padding; -AesCryptor.Encoding = Encoding; -AesCryptor.Mode = Mode; - export default AesCryptor; -export { - Padding, - Encoding, - Mode, -}; +export { Padding, Encoding, Mode }; diff --git a/packages/parser-core/src/BaseBook.js b/packages/parser-core/src/BaseBook.js deleted file mode 100644 index d2a2c3c8..00000000 --- a/packages/parser-core/src/BaseBook.js +++ /dev/null @@ -1,16 +0,0 @@ -import { mustOverride } from './errors'; - -/** - * @abstract - * @class - */ -class BaseBook { - /** - * @virtual - * @returns {string} - */ - toRaw() { - mustOverride(); - } -} -export default BaseBook; diff --git a/packages/parser-core/src/BaseBook.ts b/packages/parser-core/src/BaseBook.ts new file mode 100644 index 00000000..54f4edde --- /dev/null +++ b/packages/parser-core/src/BaseBook.ts @@ -0,0 +1,8 @@ +import { mustOverride } from "./errors"; + +abstract class BaseBook { + toRaw?(): any { + return mustOverride(); + } +} +export default BaseBook; diff --git a/packages/parser-core/src/BaseItem.js b/packages/parser-core/src/BaseItem.js deleted file mode 100644 index 6ba05054..00000000 --- a/packages/parser-core/src/BaseItem.js +++ /dev/null @@ -1,31 +0,0 @@ -import { mustOverride } from './errors'; - -/** - * @typedef {Object} BaseItemParam - * @property {number} size -*/ - -class BaseItem { - /** - * @public - * @type {number} - */ - size; - - /** - * @param {BaseItemParam} rawObj - */ - constructor(rawObj) { - this.size = rawObj.size; - } - - /** - * @public - * @virtual - * @returns {string} - */ - toRaw() { - mustOverride(); - } -} -export default BaseItem; diff --git a/packages/parser-core/src/BaseItem.ts b/packages/parser-core/src/BaseItem.ts new file mode 100644 index 00000000..21e82d47 --- /dev/null +++ b/packages/parser-core/src/BaseItem.ts @@ -0,0 +1,18 @@ +import { mustOverride } from "./errors"; + +type BaseItemParam = { + size?: number; +}; + +class BaseItem { + size?: number; + + constructor(rawObj: BaseItemParam) { + this.size = rawObj.size; + } + + toRaw(): any { + return mustOverride(); + } +} +export default BaseItem; diff --git a/packages/parser-core/src/BaseParseContext.js b/packages/parser-core/src/BaseParseContext.js deleted file mode 100644 index edb76292..00000000 --- a/packages/parser-core/src/BaseParseContext.js +++ 
/dev/null @@ -1,42 +0,0 @@ -/** - * @typedef {Object} BaseParserOption - * @property {string} unzipPath If specified, unzip to that path. - * @property {boolean} overwrite If true, overwrite to unzipPath when unzip. (only using if unzipPath specified.) - * - * @typedef {Object} BaseParserOptionType - * @property {string} unzipPath - * @property {string} overwrite -* / - -/** - * @class - */ -class BaseParseContext { - /** - * @public - * @type {BaseParserOption} - */ - options; - - /** - * @public - * @type {import('./readEntries').ReadEntriesReturnType>} - */ - entries; - - /** - * @typedef {Object} RawBookObject - */ - /** - * @public - * @type {RawBookObject} - */ - rawBook; - - constructor() { - this.options = undefined; - this.entries = undefined; - this.rawBook = undefined; - } -} -export default BaseParseContext; diff --git a/packages/parser-core/src/BaseParseContext.ts b/packages/parser-core/src/BaseParseContext.ts new file mode 100644 index 00000000..293effad --- /dev/null +++ b/packages/parser-core/src/BaseParseContext.ts @@ -0,0 +1,32 @@ +import BaseBook from "./BaseBook"; +import readEntries, { ReadEntriesReturnType } from "./readEntries"; + +export type BaseParserOption = { + // If specified, unzip to that path. + unzipPath: string; + // If true, overwrite to unzipPath when unzip. (only using if unzipPath specified.) + overwrite: boolean; +}; + +export type BaseParserOptionType = { + unzipPath: string; + overwrite: string; +}; + +/** + * @class + */ +class BaseParseContext { + options?: BaseParserOption; + + entries?: ReadEntriesReturnType; + + rawBook?: BaseBook; + + constructor() { + this.options = undefined; + this.entries = undefined; + this.rawBook = undefined; + } +} +export default BaseParseContext; diff --git a/packages/parser-core/src/BaseReadContext.js b/packages/parser-core/src/BaseReadContext.js deleted file mode 100644 index de402672..00000000 --- a/packages/parser-core/src/BaseReadContext.js +++ /dev/null @@ -1,34 +0,0 @@ -/** - * @typedef {Object} BaseReadOption - * @property {boolean} force - * - * @typedef {Object} BaseReadOptionType - * @property {string} force - */ - -class BaseReadContext { - /** - * @pblic - * @type {BaseReadOption} - */ - options; - - /** - * @public - * @type {import('./readEntries').ReadEntriesReturnType[]} - */ - entries; - - /** - * @public - * @type {Array} - */ - items; - - constructor() { - this.items = undefined; - this.entries = undefined; - this.options = undefined; - } -} -export default BaseReadContext; diff --git a/packages/parser-core/src/BaseReadContext.ts b/packages/parser-core/src/BaseReadContext.ts new file mode 100644 index 00000000..cfb2dd7b --- /dev/null +++ b/packages/parser-core/src/BaseReadContext.ts @@ -0,0 +1,25 @@ +import BaseBook from "./BaseBook"; +import { ReadEntriesReturnType } from "./readEntries"; + +export type BaseReadOption = { + force?: boolean; +}; + +export type BaseReadOptionType = { + force: string; +}; + +class BaseReadContext { + options?: BaseReadOption; + + entries?: ReadEntriesReturnType; + + items?: BaseBook[]; + + constructor() { + this.items = undefined; + this.entries = undefined; + this.options = undefined; + } +} +export default BaseReadContext; diff --git a/packages/parser-core/src/CryptoProvider.js b/packages/parser-core/src/CryptoProvider.js deleted file mode 100644 index befdb8c5..00000000 --- a/packages/parser-core/src/CryptoProvider.js +++ /dev/null @@ -1,79 +0,0 @@ -// eslint-disable-next-line no-unused-vars -import AesCryptor from './AesCryptor'; -import Errors, { createError, 
mustOverride } from './errors'; - -/** - * @typedef {string} CryptoProviderOption - * - * @typedef {Object} CryptoProviderPurpose - * @property {CryptoProviderOption} READ_IN_ZIP "read_in_zip" - * @property {CryptoProviderOption} READ_IN_DIR "read_in_dir" - * @property {CryptoProviderOption} WRITE "write" -*/ - -/** - * @enum {CryptoProviderPurpose} - */ -const Purpose = Object.freeze({ - READ_IN_ZIP: 'read_in_zip', - READ_IN_DIR: 'read_in_dir', - WRITE: 'write', -}); - -class CryptoProvider { - isStreamMode = true; - - constructor() { - if (this.constructor === CryptoProvider) { - throw createError(Errors.EINTR, 'You must use subclasses.'); - } - } - - /** - * Size of data to process at once - * `null` means use nodejs default (default: 65535) - * @returns {number | null} - */ - get bufferSize() { return null; } - - /** - * Create or reuse AesCryptor by condition - * @abstract - * @param {string} filePath - * @param {string} purpose - * @returns {AesCryptor} - */ - getCryptor(filePath, purpose) { // eslint-disable-line no-unused-vars - // ex) - // if (condition) { - // return new AesCryptor(...); - // } else { - // return new AesCryptor(...); - // } - mustOverride(); - } - - /** - * Should execute encrypt or decrypt by condition if needed - * @abstract - * @param {Buffer} data - * @param {string} filePath - * @param {string} purpose - */ - run(data, filePath, purpose) { // eslint-disable-line no-unused-vars - // ex) - // const cryptor = this.getCryptor(filePath, status); - // const padding = AesCryptor.Padding.PKCS7 - // if (condition1) { - // return cryptor.encrypt(data, padding) - // } else if (condition2) { - // return cryptor.decrypt(data, padding) - // } - // return data; - mustOverride(); - } -} - -CryptoProvider.Purpose = Purpose; - -export default CryptoProvider; diff --git a/packages/parser-core/src/CryptoProvider.ts b/packages/parser-core/src/CryptoProvider.ts new file mode 100644 index 00000000..2ce899fe --- /dev/null +++ b/packages/parser-core/src/CryptoProvider.ts @@ -0,0 +1,63 @@ +// eslint-disable-next-line no-unused-vars +import AesCryptor from "./AesCryptor"; +import Errors, { createError, mustOverride } from "./errors"; + +export type CryptoProviderOption = string; + +enum Purpose { + READ_IN_ZIP = "read_in_zip", + READ_IN_DIR = "read_in_dir", + WRITE = "write", +} + +class CryptoProvider { + static Purpose = Purpose; + isStreamMode = true; + + constructor() { + if (this.constructor === CryptoProvider) { + throw createError(Errors.EINTR, "You must use subclasses."); + } + } + + /** + * Size of data to process at once + * `null` means use nodejs default (default: 65535) + */ + get bufferSize(): number | null { + return null; + } + + /** + * Create or reuse AesCryptor by condition + */ + getCryptor(filePath: string, purpose: string): AesCryptor { + // eslint-disable-line no-unused-vars + // ex) + // if (condition) { + // return new AesCryptor(...); + // } else { + // return new AesCryptor(...); + // } + return mustOverride(); + } + + /** + * Should execute encrypt or decrypt by condition if needed + */ + run(data: Buffer, filePath: string, purpose: string) { + // eslint-disable-line no-unused-vars + // ex) + // const cryptor = this.getCryptor(filePath, status); + // const padding = AesCryptor.Padding.PKCS7 + // if (condition1) { + // return cryptor.encrypt(data, padding) + // } else if (condition2) { + // return cryptor.decrypt(data, padding) + // } + // return data; + mustOverride(); + } +} + +export default CryptoProvider; diff --git a/packages/parser-core/src/Logger.js 
b/packages/parser-core/src/Logger.js deleted file mode 100644 index 686c2c10..00000000 --- a/packages/parser-core/src/Logger.js +++ /dev/null @@ -1,201 +0,0 @@ -/* eslint-disable no-console */ -import { stringContains } from './stringUtil'; -import { isExists } from './typecheck'; - -/** - * @typedef LogLevel - * @property {string} SILENT "silent" - * @property {string} ERROR "error" - * @property {string} WARN "warn" - * @property {string} INFO "info" - * @property {string} DEBUG "debug" - * @property {string} VERBOSE "verbose" - * - * @typedef LoggerOptions - * @property {string} namespace - * @property {LogLevel} logLevel - */ - -/** - * @enum {LogLevel} - */ -const LogLevel = Object.freeze({ - SILENT: 'silent', - ERROR: 'error', - WARN: 'warn', - INFO: 'info', - DEBUG: 'debug', - VERBOSE: 'verbose', -}); - -/** - * Get an order of a log level - * @param {LogLevel} logLevel - * @returns {number} order of a level - */ -const getOrder = logLevel => { - switch (logLevel) { - case LogLevel.ERROR: return 1; - case LogLevel.WARN: return 2; - case LogLevel.INFO: return 3; - case LogLevel.DEBUG: return 4; - case LogLevel.VERBOSE: return 5; - default: return 0; - } -}; - -const touchTime = time => new Date().getTime() - time; - -class Logger { - /** - * @private - */ - _logLevel; - - /** - * @private - */ - namespace; - - /** - * @private - */ - _firstTime; - - get logLevel() { return this._logLevel; } - - set logLevel(level) { this._logLevel = stringContains(Object.values(LogLevel), level) ? level : this.logLevel; } - - /** - * Construct Logger Class; - * @param {string} namespace - * @param {LogLevel} logLevel - */ - constructor(namespace, logLevel) { - this.namespace = namespace || Logger.name; - this._logLevel = stringContains(Object.values(LogLevel), logLevel) ? logLevel : LogLevel.WARN; - this._firstTime = null; - } - - /** - * @param {LogLevel} current - * @param {LogLevel} target - * @returns {boolean} - */ - static confirm(current, target) { - return getOrder(current) >= getOrder(target); - } - - /** - * Log information - * @param {any?} message - * @param {any[]} ...optionalParams - */ - info(message, ...optionalParams) { - /* istanbul ignore else */ - if (Logger.confirm(this.logLevel, LogLevel.INFO)) { - console.info(`[${this.namespace}] ${message}`, ...optionalParams); - } - } - - /** - * Log warning - * @param {any?} message - * @param {any[]} ...optionalParams - */ - warn(message, ...optionalParams) { - /* istanbul ignore else */ - if (Logger.confirm(this.logLevel, LogLevel.WARN)) { - console.warn(`[${this.namespace}] ${message}`, ...optionalParams); - } - } - - /** - * Log error - * @param {any?} message - * @param {any[]} ...optionalParams - */ - error(message, ...optionalParams) { - /* istanbul ignore else */ - if (Logger.confirm(this.logLevel, LogLevel.ERROR)) { - console.error(`[${this.namespace}] ${message}`, ...optionalParams); - } - } - - /** - * Log degug - * @param {string?} message - * @param {any[]} ...optionalParams - */ - debug(message, ...optionalParams) { - /* istanbul ignore else */ - if (Logger.confirm(this.logLevel, LogLevel.DEBUG)) { - console.debug(`[${this.namespace}] ${message}`, ...optionalParams); - } - } - - /** - * @async - * Measure run time onf a function. 
- * @param {(...any)=>Promise} func - * @param {any} thisArg - * @param {any} argsArray - * @param {any} message - * @param {any[]} optionalParams - * @returns {Promise} result of the run - * @template T - */ - async measure(func, thisArg, argsArray, message, ...optionalParams) { - if (Logger.confirm(this.logLevel, LogLevel.INFO)) { - const startTime = new Date().getTime(); - if (!isExists(this._firstTime)) { - this._firstTime = startTime; - } - const result = await func.apply(thisArg, argsArray); - console.log(`[${this.namespace}] ${message}`, ...optionalParams, `(${touchTime(startTime)}ms)`); - return result; - } - const result = await func.apply(thisArg, argsArray); - return result; - } - - /** - * Measure run time of a function - * @param {(...any)=>T} func - * @param {any} thisArg - * @param {any} argsArray - * @param {any} message - * @param {any[]} optionalParams - * @returns {T} result of the function - * @template T - */ - measureSync(func, thisArg, argsArray, message, ...optionalParams) { - if (Logger.confirm(this.logLevel, LogLevel.INFO)) { - const startTime = new Date().getTime(); - if (!isExists(this._firstTime)) { - this._firstTime = startTime; - } - const result = func.apply(thisArg, argsArray); - console.log(`[${this.namespace}] ${message}`, ...optionalParams, `(${touchTime(startTime)}ms)`); - return result; - } - return func.apply(thisArg, argsArray); - } - - /** - * Measure the total time of this.measureSync - * @param {any?} message - * @param {any[]} optionalParams - */ - result(message, ...optionalParams) { - const startTime = this._firstTime || new Date().getTime(); - if (Logger.confirm(this.logLevel, LogLevel.INFO)) { - console.log(`[${this.namespace}] ${message}`, ...optionalParams, `(${touchTime(startTime)}ms)`); - } - this._firstTime = null; - } -} - -export default Logger; - -export { LogLevel }; diff --git a/packages/parser-core/src/Logger.ts b/packages/parser-core/src/Logger.ts new file mode 100644 index 00000000..f5a7208c --- /dev/null +++ b/packages/parser-core/src/Logger.ts @@ -0,0 +1,187 @@ +/* eslint-disable no-console */ +import { stringContains } from "./stringUtil"; +import { isExists } from "./typecheck"; + +enum LogLevel { + SILENT = "silent", + ERROR = "error", + WARN = "warn", + INFO = "info", + DEBUG = "debug", + VERBOSE = "verbose", +} + +export type LoggerOptions = { + namespace: string; + logLevel: LogLevel; +}; + +/** + * Get an order of a log level + * @returns {number} order of a level + */ +const getOrder = (logLevel: LogLevel) => { + switch (logLevel) { + case LogLevel.ERROR: + return 1; + case LogLevel.WARN: + return 2; + case LogLevel.INFO: + return 3; + case LogLevel.DEBUG: + return 4; + case LogLevel.VERBOSE: + return 5; + default: + return 0; + } +}; + +const touchTime = (time: number) => performance.now() - time; + +class Logger { + private _logLevel: LogLevel; + + private namespace; + + private _firstTime; + + get logLevel() { + return this._logLevel; + } + + set logLevel(level: LogLevel) { + this._logLevel = stringContains(Object.values(LogLevel), level) + ? level + : this.logLevel; + } + + /** + * Construct Logger Class; + */ + constructor(namespace: string, logLevel: LogLevel) { + this.namespace = namespace || Logger.name; + this._logLevel = stringContains(Object.values(LogLevel), logLevel) + ? 
logLevel + : LogLevel.WARN; + this._firstTime = null; + } + + static confirm(current: LogLevel, target: LogLevel) { + return getOrder(current) >= getOrder(target); + } + + /** + * Log information + */ + info(message: any, ...optionalParams: any[]) { + /* istanbul ignore else */ + if (Logger.confirm(this.logLevel, LogLevel.INFO)) { + console.info(`[${this.namespace}] ${message}`, ...optionalParams); + } + } + + /** + * Log warning + */ + warn(message: any, ...optionalParams: any[]) { + /* istanbul ignore else */ + if (Logger.confirm(this.logLevel, LogLevel.WARN)) { + console.warn(`[${this.namespace}] ${message}`, ...optionalParams); + } + } + + /** + * Log error + */ + error(message: any, ...optionalParams: any[]) { + /* istanbul ignore else */ + if (Logger.confirm(this.logLevel, LogLevel.ERROR)) { + console.error(`[${this.namespace}] ${message}`, ...optionalParams); + } + } + + /** + * Log degug + */ + debug(message: any, ...optionalParams: any[]) { + /* istanbul ignore else */ + if (Logger.confirm(this.logLevel, LogLevel.DEBUG)) { + console.debug(`[${this.namespace}] ${message}`, ...optionalParams); + } + } + + /** + * Measure run time onf a function. + */ + async measure( + func: (...any: any[]) => Promise, + thisArg: any, + argsArray: any, + message: any, + ...optionalParams: any[] + ): Promise { + if (Logger.confirm(this.logLevel, LogLevel.INFO)) { + const startTime = performance.now(); + if (!isExists(this._firstTime)) { + this._firstTime = startTime; + } + const result = await func.apply(thisArg, argsArray); + console.log( + `[${this.namespace}] ${message}`, + ...optionalParams, + `(${touchTime(startTime)}ms)` + ); + return result; + } + const result = await func.apply(thisArg, argsArray); + return result; + } + + /** + * Measure run time of a function + */ + measureSync( + func: (...any) => T, + thisArg: any, + argsArray: any, + message: any, + ...optionalParams: any[] + ): T { + if (Logger.confirm(this.logLevel, LogLevel.INFO)) { + const startTime = performance.now(); + if (!isExists(this._firstTime)) { + this._firstTime = startTime; + } + const result = func.apply(thisArg, argsArray); + console.log( + `[${this.namespace}] ${message}`, + ...optionalParams, + `(${touchTime(startTime)}ms)` + ); + return result; + } + return func.apply(thisArg, argsArray); + } + + /** + * Measure the total time of this.measureSync + * @param {any?} message + * @param {any[]} optionalParams + */ + result(message: any, ...optionalParams: any[]) { + const startTime = this._firstTime || performance.now(); + if (Logger.confirm(this.logLevel, LogLevel.INFO)) { + console.log( + `[${this.namespace}] ${message}`, + ...optionalParams, + `(${touchTime(startTime)}ms)` + ); + } + this._firstTime = null; + } +} + +export default Logger; + +export { LogLevel }; diff --git a/packages/parser-core/src/Parser.js b/packages/parser-core/src/Parser.js deleted file mode 100644 index 1a24416f..00000000 --- a/packages/parser-core/src/Parser.js +++ /dev/null @@ -1,478 +0,0 @@ -import fs from 'fs'; - -import { removeCacheFile } from './cacheFile'; -import CryptoProvider from './CryptoProvider'; -import Errors, { createError, mustOverride } from './errors'; -import Logger from './Logger'; -import mergeObjects from './mergeObjects'; -import readEntries from './readEntries'; -import { - isArray, - isExists, - isFunc, - isString, -} from './typecheck'; -import validateOptions from './validateOptions'; - -/** - * @typedef {import('./BaseParseContext').default} BaseParseContext - * @typedef 
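// ---- Editor's note: illustrative sketch, not part of the diff. ----
// How the Logger above is typically driven: messages below the configured level are
// dropped, measure() times an async call, and result() reports the accumulated time.
import Logger, { LogLevel } from "./Logger";

const logger = new Logger("ExampleParser", LogLevel.INFO);

async function loadSomething(target: string): Promise<string> {
  return `loaded:${target}`; // stand-in work so there is something to measure
}

async function demo(): Promise<string> {
  logger.info("starting"); // printed: INFO is at or above the configured level
  logger.debug("details"); // suppressed: DEBUG is below INFO
  // measure(func, thisArg, argsArray, message) applies func and logs the elapsed time
  const value = await logger.measure(loadSomething, null, ["./foo"], "load - demo");
  logger.result("total"); // elapsed time since the first measure() call
  return value;
}

demo();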
{import('./BaseParseContext').BaseParserOption} BaseParserOption - * @typedef {import('./BaseParseContext').BaseParserOptionType} BaseParserOptionType - * @typedef {import('./BaseBook').default} BaseBook - * @typedef {import('./BaseItem').default} BaseItem - * @typedef {import('./BaseReadContext').default} BaseReadContext - * @typedef {import('./BaseReadContext').BaseReadOption} BaseReadOption - * @typedef {import('./BaseReadContext').BaseReadOptionType} BaseReadOptionType - */ - -/** - * @typedef {string} ParserAction - * - * @typedef {Object} ActionEnum - * @property {ParserAction} PARSER "parse" - * @property {ParserAction} READ_ITEMS "readItems" - * @property {ParserAction} UNZIP "unzip" - */ - -/** - * @readonly - * @type {ActionEnum} - */ -const Action = Object.freeze({ - PARSE: 'parse', - READ_ITEMS: 'readItems', - UNZIP: 'unzip', -}); - -/** - * @typedef {(step:number, totalStep:number, action:string)=>void} OnProgressCallBack - * - * @typedef {Object} Task - * @property {Function} fun Action executor - * @property {string} name Action name -*/ - -const privateProps = new WeakMap(); - -class Parser { - /** - * Get default values of parse options - * @static - * @return {BaseParserOption} - */ - static get parseDefaultOptions() { - return { - unzipPath: undefined, - overwrite: true, - }; - } - - /** - * Get types of parse options - * @static - * @return {BaseParserOptionType} - */ - static get parseOptionTypes() { - return { - unzipPath: 'String|Undefined', - overwrite: 'Boolean', - }; - } - - /** - * Get default values of read options - * @static - * @returns {BaseReadOption | void} - */ - static get readDefaultOptions() { - return { - force: false, - }; - } - - /** - * Get types of read option - * @static - * @returns {BaseReadOptionType | void} - */ - static get readOptionTypes() { - return { - force: 'Boolean', - }; - } - - /** - * Get file or directory - * @returns {string} - * - */ - get input() { return privateProps.get(this).input; } - - /** - * Get en/decrypto provider - * @returns {CryptoProvider} - */ - get cryptoProvider() { return privateProps.get(this).cryptoProvider; } - - /** - * Get logger - * @returns {Logger} - */ - get logger() { return privateProps.get(this).logger; } - - /** - * Get onProgress callback - * @returns {OnProgressCallBack} - */ - get onProgress() { return privateProps.get(this).onProgress || (() => { }); } - - /** - * Set callback that tells progress of parse and readItems. - * @param {OnProgressCallBack} onProgress - * @example - * parser.onProgress = (step, totalStep, action) => { - * console.log(`[${action}] ${step} / ${totalStep}`); - * } - * @see Parser.Action - */ - set onProgress(onProgress) { - if (!isFunc(onProgress)) { - throw createError(Errors.EINVAL, 'onProgress', 'reason', 'must be function type'); - } - privateProps.set(this, { ...privateProps.get(this), onProgress }); - } - - /** - * Create new Parser - * @param {string} input file or directory - * @param {CryptoProvider} [cryptoProvider] en/decrypto provider - * @param {import('./Logger').LoggerOptions} [loggerOptions] logger options - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EINVAL} invalid input - * @example - * class FooParser extends Parser { - * ... 
- * } - * new FooParser('./foo/bar.zip' or './foo/bar'); - */ - constructor(input, cryptoProvider, loggerOptions = {}) { - if (isString(input)) { - if (!fs.existsSync(input)) { - throw createError(Errors.ENOENT, input); - } - removeCacheFile(input); - } else { - throw createError(Errors.EINVAL, 'input', 'reason', 'must be String type'); - } - if (isExists(cryptoProvider) && !(cryptoProvider instanceof CryptoProvider)) { - throw createError(Errors.EINVAL, 'cryptoProvider', 'reason', 'must be CryptoProvider subclassing type'); - } - const { namespace, logLevel } = loggerOptions; - const logger = new Logger(namespace || Parser.name, logLevel); - logger.debug(`Create new parser with input: '${input}', cryptoProvider: ${isExists(cryptoProvider) ? 'Y' : 'N'}.`); - privateProps.set(this, { input, cryptoProvider, logger }); - } - - /** - * @virtual - * @protected - * @returns {new ()=>BaseParseContext} - */ - _getParseContextClass() { - return mustOverride(); - } - - /** - * @virtual - * @protected - * @returns {new ()=>BaseBook} - */ - _getBookClass() { - return mustOverride(); - } - - /** - * @virtual - * @protected - * @returns {new ()=>BaseReadContext} - */ - _getReadContextClass() { - mustOverride(); - } - - /** - * @virtual - * @protected - * @returns {new ()=>BaseItem} - */ - _getReadItemClass() { - mustOverride(); - } - - /** - * @protected - * @returns {Task[]} return before tasks - */ - _parseBeforeTasks() { - return [ - { fun: this._prepareParse, name: 'prepareParse' }, - { fun: this._unzipIfNeeded, name: 'unzipIfNeeded' }, - ]; - } - - /** - * @protected - * @returns {Task[]} return tasks - */ - _parseTasks() { - return []; - } - - /** - * @protected - * @returns {Task[]} return after tasks - */ - _parseAfterTasks() { - return [ - { fun: this._createBook, name: 'createBook' }, - ]; - } - - /** - * Parse the input - * @async - * @param {BaseParserOption} [options] parse options - * @returns {Promise} return Book - * @see Parser.parseDefaultOptions - * @see Parser.parseOptionTypes - */ - async parse(options = {}) { - const action = Action.PARSE; - const tasks = [].concat( - this._parseBeforeTasks(), - this._parseTasks(), - this._parseAfterTasks(), - ); - let context = options; - this.onProgress(0, tasks.length, action); - await tasks.reduce((prevPromise, task, index) => { - const result = prevPromise.then(async () => { - const { fun, name } = task; - const message = `${action} - ${name}`; - context = await this.logger.measure(fun, this, [context], message); - }); - this.onProgress(index + 1, tasks.length, action); - return result; - }, Promise.resolve()); - this.logger.result(action); - return context; - } - - /** - * Validate parse options and get entries from input - * @async - * @param {BaseParserOption} [options] parse options - * @returns {Promise} return Context containing parse options, entries - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - async _prepareParse(options = {}) { - const { parseOptionTypes, parseDefaultOptions } = this.constructor; - validateOptions(options, parseOptionTypes); - const ParseContext = this._getParseContextClass(); - const context = new ParseContext(); - context.options = mergeObjects(parseDefaultOptions, options); - context.entries = await readEntries(this.input, this.cryptoProvider, this.logger); - this.logger.debug(`Ready to parse with options: ${JSON.stringify(context.options)}.`); - return context; - } - - /** - * Unzipping if zip source 
and unzipPath option specified - * @async - * @param {BaseParseContext} context intermediate result - * @returns {Promise} return Context (no change at this step) - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EEXIST} file or directory already exists - */ - async _unzipIfNeeded(context) { - const { options, entries } = context; - const { unzipPath, overwrite } = options; - if (!isString(entries.source) && isExists(unzipPath)) { - await entries.source.extractAll(unzipPath, overwrite); - privateProps.set(this, { ...privateProps.get(this), input: unzipPath }); - removeCacheFile(this.input); - context.entries = await readEntries(this.input, this.cryptoProvider, this.logger); - } - return context; - } - - /** - * Create new Book from context - * @protected - * @param {BaseParseContext} context intermediate result - * @returns {Promise} return Book - */ - _createBook(context) { - return new Promise(resolve => { - const Book = this._getBookClass(); - resolve(new Book(context.rawBook)); - }); - } - - /** - * @protected - * @returns {Task[]} return before tasks - */ - _readBeforeTasks() { - return [ - { fun: this._prepareRead, name: 'prepareRead' }, - ]; - } - - /** - * @protected - * @returns {Task[]} return tasks - */ - _readTasks() { - return [ - { fun: this._read, name: 'read' }, - ]; - } - - /** - * @protected - * @returns {Task[]} return after tasks - */ - _readAfterTasks() { - return []; - } - - /** - * Reading contents of Item - * @param {BaseItem} item target - * @param {BaseReadOption} [options] read options - * @returns {(string|Buffer)} reading result - * @see Parser.readDefaultOptions - * @see Parser.readOptionTypes - */ - async readItem(item, options = {}) { - const results = await this.readItems([item], options); - return results[0]; - } - - /** - * Reading contents of Items - * @async - * @param {BaseItem[]} items targets - * @param {BaseReadOption} [options] read options - * @returns {(string|Buffer)[]} reading results - * @see Parser.readDefaultOptions - * @see Parser.readOptionTypes - */ - async readItems(items, options = {}) { - const action = Action.READ_ITEMS; - const tasks = [].concat( - this._readBeforeTasks(), - this._readTasks(), - this._readAfterTasks(), - ); - let context = [items, options]; - this.onProgress(0, tasks.length, action); - await tasks.reduce((prevPromise, task, index) => { - const result = prevPromise.then(async () => { - const { fun, name } = task; - const message = `${action}(${items.length}) - ${name}`; - context = await this.logger.measure(fun, this, isArray(context) ? 
context : [context], message); - }); - this.onProgress(index + 1, tasks.length, action); - return result; - }, Promise.resolve()); - this.logger.result(`${action}(${items.length})`); - return context; - } - - /** - * Validate read options and get entries from input - * @async - * @param {Item[]} items targets - * @param {BaseReadOption} [options] read options - * @returns {Promise} returns Context containing target items, read options, entries - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - async _prepareRead(items, options = {}) { - if (!options.force && items.find(item => !(item instanceof this._getReadItemClass()))) { - throw createError(Errors.EINVAL, 'item', 'reason', 'must be Parser._getReadItemClass type'); - } - const { readOptionTypes, readDefaultOptions } = this.constructor; - validateOptions(options, readOptionTypes); - const entries = await readEntries(this.input, this.cryptoProvider, this.logger); - const ReadContext = this._getReadContextClass(); - const context = new ReadContext(); - context.items = items; - context.entries = entries; - context.options = mergeObjects(readDefaultOptions, options); - this.logger.debug(`Ready to read with options: ${JSON.stringify(context.options)}.`); - return context; - } - - /** - * Contents is read using loader suitable for context - * @async - * @override - * @param {ReadContext} context properties required for reading - * @returns {Promise>} reading results - * @throws {Errors.ENOFILE} no such file - * @see Parser.readDefaultOptions.force - */ - // eslint-disable-next-line no-unused-vars - async _read(context) { - return mustOverride(); - } - - /** - * @private - * @returns {Task[]} return tasks - */ - _unzipTasks() { - return [ - { fun: this._prepareParse, name: 'prepareParse' }, - { fun: this._unzipIfNeeded, name: 'unzipIfNeeded' }, - ]; - } - - /** - * Unzip - * @async - * @param {string} unzipPath - * @param {boolean} overwrite - * @returns {Promise} success - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - async unzip(unzipPath, overwrite = true) { - const action = Action.UNZIP; - const tasks = this._unzipTasks(); - let context = { unzipPath, overwrite }; - this.onProgress(0, tasks.length, action); - await tasks.reduce((prevPromise, task, index) => { - const result = prevPromise.then(async () => { - const { fun, name } = task; - const message = `${action} - ${name}`; - context = await this.logger.measure(fun, this, [context], message); - }); - this.onProgress(index + 1, tasks.length, action); - return result; - }, Promise.resolve()); - this.logger.result(action); - return isString(this.input); - } -} - -Parser.Action = Action; - -export default Parser; diff --git a/packages/parser-core/src/Parser.ts b/packages/parser-core/src/Parser.ts new file mode 100644 index 00000000..17941963 --- /dev/null +++ b/packages/parser-core/src/Parser.ts @@ -0,0 +1,447 @@ +import fs from "fs"; + +import { removeCacheFile } from "./cacheFile"; +import CryptoProvider from "./CryptoProvider"; +import Errors, { createError, mustOverride } from "./errors"; +import Logger, { LoggerOptions } from "./Logger"; +import mergeObjects from "./mergeObjects"; +import readEntries from "./readEntries"; +import { isArray, isExists, isFunc, isString } from "./typecheck"; +import validateOptions from "./validateOptions"; +import BaseParseContext, { + 
BaseParserOption, + BaseParserOptionType, +} from "./BaseParseContext"; +import BaseBook from "./BaseBook"; +import BaseItem from "./BaseItem"; +import BaseReadContext, { + BaseReadOption, + BaseReadOptionType, +} from "./BaseReadContext"; + +enum Action { + PARSE = "parse", + READ_ITEMS = "readItems", + UNZIP = "unzip", +} + +type OnProgressCallBack = ( + step: number, + totalStep: number, + action: string +) => void; + +export type Task = { + fun: (...args: any[]) => any | Promise; + name: string; +}; + +const privateProps = new WeakMap< + Parser, + { + input: string; + cryptoProvider: CryptoProvider; + logger: Logger; + onProgress?: OnProgressCallBack; + } +>(); + +class Parser { + static Action = Action; + /** + * Get default values of parse options + */ + static get parseDefaultOptions(): BaseParserOption { + return { + unzipPath: undefined, + overwrite: true, + }; + } + + /** + * Get types of parse options + */ + static get parseOptionTypes(): BaseParserOptionType { + return { + unzipPath: "String|Undefined", + overwrite: "Boolean", + }; + } + + /** + * Get default values of read options + */ + static get readDefaultOptions(): BaseReadOption | undefined { + return { + force: false, + }; + } + + /** + * Get types of read option + */ + static get readOptionTypes(): BaseReadOptionType | undefined { + return { + force: "Boolean", + }; + } + + /** + * Get file or directory + */ + get input() { + return privateProps.get(this).input; + } + + /** + * Get en/decrypto provider + */ + get cryptoProvider() { + return privateProps.get(this).cryptoProvider; + } + + /** + * Get logger + */ + get logger() { + return privateProps.get(this).logger; + } + + /** + * Get onProgress callback + */ + get onProgress() { + return privateProps.get(this).onProgress || (() => {}); + } + + /** + * Set callback that tells progress of parse and readItems. + * @example + * parser.onProgress = (step, totalStep, action) => { + * console.log(`[${action}] ${step} / ${totalStep}`); + * } + * @see Parser.Action + */ + set onProgress(onProgress: OnProgressCallBack) { + if (!isFunc(onProgress)) { + throw createError( + Errors.EINVAL, + "onProgress", + "reason", + "must be function type" + ); + } + privateProps.set(this, { ...privateProps.get(this), onProgress }); + } + + /** + * Create new Parser + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.EINVAL} invalid input + * @example + * class FooParser extends Parser { + * ... + * } + * new FooParser('./foo/bar.zip' or './foo/bar'); + */ + constructor( + input: string, + cryptoProvider?: CryptoProvider, + loggerOptions: Partial = {} + ) { + if (isString(input)) { + if (!fs.existsSync(input)) { + throw createError(Errors.ENOENT, input); + } + removeCacheFile(input); + } else { + throw createError( + Errors.EINVAL, + "input", + "reason", + "must be String type" + ); + } + if ( + isExists(cryptoProvider) && + !(cryptoProvider instanceof CryptoProvider) + ) { + throw createError( + Errors.EINVAL, + "cryptoProvider", + "reason", + "must be CryptoProvider subclassing type" + ); + } + const { namespace, logLevel } = loggerOptions; + const logger = new Logger(namespace || Parser.name, logLevel); + logger.debug( + `Create new parser with input: '${input}', cryptoProvider: ${ + isExists(cryptoProvider) ? 
"Y" : "N" + }.` + ); + privateProps.set(this, { input, cryptoProvider, logger }); + } + + protected _getParseContextClass(): typeof BaseParseContext { + return mustOverride(); + } + + protected _getBookClass(): typeof BaseBook { + return mustOverride(); + } + + protected _getReadContextClass(): typeof BaseReadContext { + return mustOverride(); + } + + protected _getReadItemClass(): typeof BaseItem { + return mustOverride(); + } + + protected _parseBeforeTasks(): Task[] { + return [ + { fun: this._prepareParse, name: "prepareParse" }, + { fun: this._unzipIfNeeded, name: "unzipIfNeeded" }, + ]; + } + + protected _parseTasks(): Task[] { + return []; + } + + protected _parseAfterTasks(): Task[] { + return [{ fun: this._createBook, name: "createBook" }]; + } + + /** + * Parse the input + * @see Parser.parseDefaultOptions + * @see Parser.parseOptionTypes + */ + async parse(options: Partial = {}) { + const action = Action.PARSE; + const tasks = [].concat( + this._parseBeforeTasks(), + this._parseTasks(), + this._parseAfterTasks() + ); + let context = options; + this.onProgress(0, tasks.length, action); + await tasks.reduce((prevPromise, task, index) => { + const result = prevPromise.then(async () => { + const { fun, name } = task; + const message = `${action} - ${name}`; + context = await this.logger.measure(fun, this, [context], message); + }); + this.onProgress(index + 1, tasks.length, action); + return result; + }, Promise.resolve()); + this.logger.result(action); + return context as BaseBook; + } + + /** + * Validate parse options and get entries from input + * @throws {Errors.EINVAL} invalid options or value type + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.ENOFILE} no such file + */ + async _prepareParse( + options: Partial = {} + ): Promise { + const { parseOptionTypes, parseDefaultOptions } = this + .constructor as typeof Parser; + validateOptions(options, parseOptionTypes); + const ParseContext = this._getParseContextClass(); + const context = new ParseContext(); + context.options = mergeObjects(parseDefaultOptions, options); + context.entries = await readEntries( + this.input, + this.cryptoProvider, + this.logger + ); + this.logger.debug( + `Ready to parse with options: ${JSON.stringify(context.options)}.` + ); + return context; + } + + /** + * Unzipping if zip source and unzipPath option specified + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.EEXIST} file or directory already exists + */ + async _unzipIfNeeded(context: BaseParseContext): Promise { + const { options, entries } = context; + const { unzipPath, overwrite } = options; + if (!isString(entries.source) && isExists(unzipPath)) { + await entries.source.extractAll(unzipPath, overwrite); + privateProps.set(this, { ...privateProps.get(this), input: unzipPath }); + removeCacheFile(this.input); + context.entries = await readEntries( + this.input, + this.cryptoProvider, + this.logger + ); + } + return context; + } + + /** + * Create new Book from context + */ + protected _createBook(context: BaseParseContext): Promise { + return new Promise((resolve) => { + const Book = this._getBookClass(); + // 인터페이스로 수정 + resolve(new (Book as any)(context.rawBook)); + }); + } + + protected _readBeforeTasks(): Task[] { + return [{ fun: this._prepareRead, name: "prepareRead" }]; + } + + protected _readTasks(): Task[] { + return [{ fun: this._read, name: "read" }]; + } + + protected _readAfterTasks(): Task[] { + return []; + } + + /** + * Reading contents of Item + * @see 
Parser.readDefaultOptions + * @see Parser.readOptionTypes + */ + async readItem( + item: BaseItem, + options: Partial = {} + ): Promise { + const results = await this.readItems([item], options); + return results[0] as string | Buffer; + } + + /** + * Reading contents of Items + * @see Parser.readDefaultOptions + * @see Parser.readOptionTypes + */ + async readItems( + items: BaseItem[], + options: Partial = {} + ): Promise<(string | Buffer)[]> { + const action = Action.READ_ITEMS; + const tasks = [].concat( + this._readBeforeTasks(), + this._readTasks(), + this._readAfterTasks() + ); + let context = [items, options]; + this.onProgress(0, tasks.length, action); + await tasks.reduce((prevPromise, task, index) => { + const result = prevPromise.then(async () => { + const { fun, name } = task; + const message = `${action}(${items.length}) - ${name}`; + context = await this.logger.measure( + fun, + this, + isArray(context) ? context : [context], + message + ); + }); + this.onProgress(index + 1, tasks.length, action); + return result; + }, Promise.resolve()); + this.logger.result(`${action}(${items.length})`); + return context as (string | Buffer)[]; + } + + /** + * Validate read options and get entries from input + * @throws {Errors.EINVAL} invalid options or value type + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.ENOFILE} no such file + */ + async _prepareRead( + items: BaseItem[], + options: Partial = {} + ): Promise { + if ( + !options.force && + items.find((item) => !(item instanceof this._getReadItemClass())) + ) { + throw createError( + Errors.EINVAL, + "item", + "reason", + "must be Parser._getReadItemClass type" + ); + } + const { readOptionTypes, readDefaultOptions } = this + .constructor as typeof Parser; + validateOptions(options, readOptionTypes); + const entries = await readEntries( + this.input, + this.cryptoProvider, + this.logger + ); + const ReadContext = this._getReadContextClass(); + const context = new ReadContext(); + context.items = items; + context.entries = entries; + context.options = mergeObjects(readDefaultOptions ?? 
{}, options); + this.logger.debug( + `Ready to read with options: ${JSON.stringify(context.options)}.` + ); + return context; + } + + /** + * Contents is read using loader suitable for context + * @throws {Errors.ENOFILE} no such file + * @see Parser.readDefaultOptions.force + */ + // eslint-disable-next-line no-unused-vars + async _read(context: BaseReadContext): Promise> { + return mustOverride(); + } + + private _unzipTasks(): Task[] { + return [ + { fun: this._prepareParse, name: "prepareParse" }, + { fun: this._unzipIfNeeded, name: "unzipIfNeeded" }, + ]; + } + + /** + * Unzip + * @returns {Promise} success + * @throws {Errors.EINVAL} invalid options or value type + * @throws {Errors.ENOENT} no such file or directory + * @throws {Errors.ENOFILE} no such file + */ + async unzip(unzipPath: string, overwrite = true) { + const action = Action.UNZIP; + const tasks = this._unzipTasks(); + let context = { unzipPath, overwrite }; + this.onProgress(0, tasks.length, action); + await tasks.reduce((prevPromise, task, index) => { + const result = prevPromise.then(async () => { + const { fun, name } = task; + const message = `${action} - ${name}`; + context = await this.logger.measure(fun, this, [context], message); + }); + this.onProgress(index + 1, tasks.length, action); + return result; + }, Promise.resolve()); + this.logger.result(action); + return isString(this.input); + } +} + +export default Parser; + +export { default as CryptoProvider } from "./CryptoProvider"; diff --git a/packages/parser-core/src/Version.js b/packages/parser-core/src/Version.ts similarity index 57% rename from packages/parser-core/src/Version.js rename to packages/parser-core/src/Version.ts index 822de6e5..db4a4a88 100644 --- a/packages/parser-core/src/Version.js +++ b/packages/parser-core/src/Version.ts @@ -1,41 +1,28 @@ -const normalize = version => { +function normalize(version: string) { if (!/(?:0|[1-9]\d*)(?:\.(?:0|[1-9]\d*)){0,3}/gm.test(version)) { return '1.0'; } return version; -}; +} class Version { - /** - * @private - */ - major; - - /** - * @private - */ - minor; - - /** - * @private - */ - patch; + private major: number; + private minor: number; + private patch: number; /** * Construct Version object with version. - * @param {string} version */ - constructor(version) { + constructor(version?: string) { const component = normalize(version).split('.'); this.major = parseInt(component[0], 10); - this.minor = parseInt(component[1] || 0, 10) || 0; - this.patch = parseInt(component[2] || 0, 10) || 0; + this.minor = parseInt(component[1] ?? '0', 10) || 0; + this.patch = parseInt(component[2] ?? 
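// ---- Editor's note: illustrative sketch, not part of the diff. ----
// Driving the task pipeline above from a caller. The parser instance is assumed to be
// a concrete subclass (e.g. the FooParser of the constructor's @example) that overrides
// _getParseContextClass/_getBookClass/_getReadContextClass/_getReadItemClass/_read.
import Parser from "./Parser";

async function parseWithProgress(parser: Parser) {
  parser.onProgress = (step, totalStep, action) => {
    console.log(`[${action}] ${step} / ${totalStep}`);
  };
  // parse() runs _parseBeforeTasks -> _parseTasks -> _parseAfterTasks in order,
  // timing each task through Logger.measure and resolving to the created Book.
  const book = await parser.parse({ unzipPath: "./unzipped", overwrite: true });
  return book;
}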
'0', 10) || 0; Object.freeze(this); } /** * Get Version as string - * @returns {string} version */ toString() { return `${this.major}.${this.minor}.${this.patch}`; diff --git a/packages/parser-core/src/bufferUtil.js b/packages/parser-core/src/bufferUtil.ts similarity index 64% rename from packages/parser-core/src/bufferUtil.js rename to packages/parser-core/src/bufferUtil.ts index 6097d311..8b3cce52 100644 --- a/packages/parser-core/src/bufferUtil.js +++ b/packages/parser-core/src/bufferUtil.ts @@ -1,9 +1,7 @@ /** * Removes all the leading non zero buffer chunk - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer */ -export function trimStart(buffer) { +export function trimStart(buffer: Buffer) { let pos = 0; for (let i = 0; i <= buffer.length; i += 1) { if (buffer[i] !== 0x00) { @@ -16,10 +14,8 @@ export function trimStart(buffer) { /** * Removes all the trailing non zero buffer chunk - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer */ -export function trimEnd(buffer) { +export function trimEnd(buffer: Buffer) { let pos = 0; for (let i = buffer.length - 1; i >= 0; i -= 1) { if (buffer[i] !== 0x00) { @@ -32,9 +28,7 @@ export function trimEnd(buffer) { /** * Trim a buffer - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer */ -export function trim(buffer) { +export function trim(buffer: Buffer) { return trimEnd(trimStart(buffer)); } diff --git a/packages/parser-core/src/cacheFile.js b/packages/parser-core/src/cacheFile.ts similarity index 65% rename from packages/parser-core/src/cacheFile.js rename to packages/parser-core/src/cacheFile.ts index 68df6fbf..e69fb91b 100644 --- a/packages/parser-core/src/cacheFile.js +++ b/packages/parser-core/src/cacheFile.ts @@ -1,18 +1,18 @@ -import * as fs from 'fs-extra'; -import sha1 from 'sha1'; +import * as fs from "fs-extra"; +import sha1 from "sha1"; -import os from 'os'; -import path from 'path'; +import os from "os"; +import path from "path"; -import Errors, { createError } from './errors'; -import { isExists, isString } from './typecheck'; +import Errors, { createError } from "./errors"; +import { isExists, isString } from "./typecheck"; /** * Get path to store cache * @returns {string} Path of a temp directory */ export function getCachePath() { - return path.join(os.tmpdir(), 'parser-cache'); + return path.join(os.tmpdir(), "parser-cache"); } /** @@ -20,9 +20,9 @@ export function getCachePath() { * @param {string} key Key of a cache file * @returns {string} Path to a cache file */ -function getCacheFilePath(key) { +function getCacheFilePath(key: string) { if (!isExists(key) || !isString(key)) { - throw createError(Errors.EINVAL, 'key', 'key', key); + throw createError(Errors.EINVAL, "key", "key", key); } return path.join(getCachePath(), `${sha1(key)}.dat`); } @@ -32,7 +32,7 @@ function getCacheFilePath(key) { * @param {string} key Key of a cache file * @returns {void} */ -export function removeCacheFile(key) { +export function removeCacheFile(key: string) { const filePath = getCacheFilePath(key); if (fs.existsSync(filePath)) { fs.removeSync(filePath); @@ -54,12 +54,12 @@ export function removeAllCacheFiles() { * @param {string} key Key of a cache file * @returns {string|null} `null` if cache does not exists, `string` otherwise */ -export function readCacheFile(key) { +export function readCacheFile(key: string): string | null { const filePath = getCacheFilePath(key); if (!fs.existsSync(filePath)) { return null; } - return fs.readFileSync(filePath, { encoding: 'utf8' }); + return fs.readFileSync(filePath, { 
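// ---- Editor's note: illustrative sketch, not part of the diff. ----
// Quick usage of Version and the buffer-trimming helpers above (assuming Version keeps
// its default export after the rename).
import Version from "./Version";
import { trim } from "./bufferUtil";

const version = new Version("2.3"); // missing minor/patch components default to 0
console.log(version.toString());    // "2.3.0"

const padded = Buffer.from([0x00, 0x41, 0x42, 0x00]);
console.log(trim(padded));          // Buffer [0x41, 0x42]: leading/trailing 0x00 removed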
encoding: "utf8" }); } /** @@ -69,11 +69,18 @@ export function readCacheFile(key) { * @param {boolean} [overwrite=false] * @returns {void} */ -export function writeCacheFile(key, message, overwrite = false) { +export function writeCacheFile( + key: string, + message: string | NodeJS.ArrayBufferView, + overwrite = false +) { const filePath = getCacheFilePath(key); const cachePath = path.dirname(filePath); if (!fs.existsSync(cachePath)) { fs.mkdirpSync(cachePath); } - fs.writeFileSync(filePath, message, { flag: overwrite ? 'w' : 'a', encoding: 'utf8' }); + fs.writeFileSync(filePath, message, { + flag: overwrite ? "w" : "a", + encoding: "utf8", + }); } diff --git a/packages/parser-core/src/createCryptoStream.js b/packages/parser-core/src/createCryptoStream.ts similarity index 57% rename from packages/parser-core/src/createCryptoStream.js rename to packages/parser-core/src/createCryptoStream.ts index 49134b41..415b0e29 100644 --- a/packages/parser-core/src/createCryptoStream.js +++ b/packages/parser-core/src/createCryptoStream.ts @@ -1,27 +1,29 @@ -import * as es from 'event-stream'; +import * as es from "event-stream"; // eslint-disable-next-line no-unused-vars -import CryptoProvider from './CryptoProvider'; +import CryptoProvider, { CryptoProviderOption } from "./CryptoProvider"; -/** - * @param {string} filePath - * @param {number} totalSize - * @param {CryptoProvider} cryptoProvider - * @param {import('./CryptoProvider').CryptoProviderOption} purpose - * @returns {es.MapStream} - */ /* eslint-disable no-param-reassign */ -const createCryptoStream = (filePath, totalSize, cryptoProvider, purpose) => { +const createCryptoStream = ( + filePath: string, + totalSize: number, + cryptoProvider: CryptoProvider, + purpose: CryptoProviderOption +) => { let tmpChunk = Buffer.from([]); let pushedSize = 0; return es.map(async (chunk, callback) => { - chunk = Buffer.from(chunk, 'binary'); + chunk = Buffer.from(chunk, "binary"); const subTotalSize = pushedSize + tmpChunk.length + chunk.length; if (subTotalSize < totalSize && subTotalSize % 16 !== 0) { tmpChunk = Buffer.concat([tmpChunk, chunk]); callback(); } else { - chunk = cryptoProvider.run(Buffer.concat([tmpChunk, chunk]), filePath, purpose); + chunk = cryptoProvider.run( + Buffer.concat([tmpChunk, chunk]), + filePath, + purpose + ); if (Promise.resolve(chunk) === chunk) { chunk = await chunk; } diff --git a/packages/parser-core/src/createSliceStream.js b/packages/parser-core/src/createSliceStream.ts similarity index 77% rename from packages/parser-core/src/createSliceStream.js rename to packages/parser-core/src/createSliceStream.ts index bab6833e..1a65d8e8 100644 --- a/packages/parser-core/src/createSliceStream.js +++ b/packages/parser-core/src/createSliceStream.ts @@ -1,15 +1,11 @@ -import * as es from 'event-stream'; +import * as es from "event-stream"; -/** - * @param {number} start=0 - * @param {number} end=Infinity - * @returns {es.MapStream} - */ /* eslint-disable no-param-reassign */ const createSliceStream = (start = 0, end = Infinity) => { let bytesReceived = 0; let finish = false; - return es.map((chunk, callback) => { // eslint-disable-line + return es.map((chunk, callback) => { + // eslint-disable-line bytesReceived += chunk.length; if (!finish && bytesReceived >= start) { if (start - (bytesReceived - chunk.length) > 0) { diff --git a/packages/parser-core/src/cryptoUtil.js b/packages/parser-core/src/cryptoUtil.js deleted file mode 100644 index 29c22ad2..00000000 --- a/packages/parser-core/src/cryptoUtil.js +++ /dev/null @@ -1,179 +0,0 @@ 
-/* eslint-disable no-plusplus */ -/* eslint-disable no-bitwise */ -import * as CryptoJs from 'crypto-js'; - -const { - pad, enc, MD5, SHA1, SHA224, SHA256, SHA384, SHA512, SHA3, RIPEMD160, -} = CryptoJs; -const { Pkcs7 } = pad; -const { Utf8, Hex } = enc; - -/** - * @typedef {Object} PaddingObject - * @property {string} name - * @property {Padding} op - * @property {(data:CryptoJs.lib.WordArray)=>void} pad - * @property {(data:CryptoJs.lib.WordArray)=>void} unpad - */ - -/** - * @typedef {Object} PaddingList - * @property {PaddingObject} AUTO - * @property {PaddingObject} PKCS7 - * @property {PaddingObject} NONE - */ - -/** - * @type {PaddingList} - */ -const Padding = Object.freeze({ - AUTO: { - name: 'auto', - op: Pkcs7, - pad: data => Pkcs7.pad(data, 4), - unpad: Pkcs7.unpad, - }, - PKCS7: { - name: 'pkcs7', - op: pad.Pkcs7, - pad: data => Pkcs7.pad(data, 4), - unpad: Pkcs7.unpad, - }, - NONE: { - name: 'none', - op: pad.NoPadding, - }, -}); - -/** - * @typedef {Uint8Array|Buffer|Array} IterableObject - * @typedef {(uint8ArrayOrBufferOrArray:IterableObject)=>CryptoJs.lib.WordArray} DecodeFunction - * @typedef {(wordArray:CryptoJs.lib.WordArray)=>Uint8Array} EncodeFunction - * @typedef {Object} UINT8Object - * @property {DecodeFunction} decode - * @property {EncodeFunction} encode - */ - -/** - * @type {UINT8Object} - */ -const Uint8 = { - decode: uint8ArrayOrBufferOrArray => { - let uint8Array = uint8ArrayOrBufferOrArray; - if (Buffer.isBuffer(uint8ArrayOrBufferOrArray)) { - const buffer = uint8ArrayOrBufferOrArray; - uint8Array = new Uint8Array(buffer.length); - for (let i = 0; i < buffer.length; i += 1) { - uint8Array[i] = buffer[i]; - } - } else if (uint8ArrayOrBufferOrArray instanceof Array) { - uint8Array = Uint8Array.from(uint8ArrayOrBufferOrArray); - } - return CryptoJs.lib.WordArray.create(uint8Array); - }, - encode: wordArray => { - const { words, sigBytes } = wordArray; - const uint8Array = new Uint8Array(sigBytes); - let offset = 0; - let word; - for (let i = 0; i < words.length; i += 1) { - word = words[i]; - uint8Array[offset++] = word >> 24; - uint8Array[offset++] = (word >> 16) & 0xff; - uint8Array[offset++] = (word >> 8) & 0xff; - uint8Array[offset++] = word & 0xff; - } - return uint8Array; - }, -}; - -/** - * @typedef {Object} EncodingObject - * @property {string} name - * @property {(str: string|IterableObject)=>CryptoJs.lib.WordArray} decode - * @property {(wordArray: CryptoJs.lib.WordArray)=>(string|Uint8Array)} encode - */ - -/** - * @typedef {Object} EncodingList - * @property {EncodingObject} UTF8 - * @property {EncodingObject} HEX - * @property {EncodingObject} UINT8 - * @property {EncodingObject} BUFFER - */ - -/** - * @type {EncodingList} - */ -const Encoding = Object.freeze({ - UTF8: { - name: 'utf8', - decode: Utf8.parse, - encode: Utf8.stringify, - }, - HEX: { - name: 'hex', - decode: Hex.parse, - encode: Hex.stringify, - }, - UINT8: { - name: 'uint8', - decode: Uint8.decode, - encode: Uint8.encode, - }, - BUFFER: { - name: 'buffer', - decode: data => Uint8.decode(data), - encode: data => Buffer.from(Uint8.encode(data)), - }, -}); - -/** - * @param {any} any - * @returns {any} - */ -const prepareHash = any => { - if (Buffer.isBuffer(any)) { - return Encoding.BUFFER.decode(any); - } - if (any instanceof Uint8Array || any instanceof Array) { - return Encoding.UINT8.decode(any); - } - return any; -}; - -/** - * @typedef {(any:any, encoding?:EncodingObject)=>string} HashFunction Use `Encoding` - */ - -/** - * @typedef {Object} HashList - * @property 
{HashFunction} md5 - * @property {HashFunction} sha1 - * @property {HashFunction} sha224 - * @property {HashFunction} sha256 - * @property {HashFunction} sha384 - * @property {HashFunction} sha512 - * @property {HashFunction} sha3 - * @property {HashFunction} ripemd160 - */ - -/** - * @type {HashList} - */ -const Hash = Object.freeze({ - md5: (any, encoding = Encoding.HEX) => encoding.encode(MD5(prepareHash(any))), - sha1: (any, encoding = Encoding.HEX) => encoding.encode(SHA1(prepareHash(any))), - sha224: (any, encoding = Encoding.HEX) => encoding.encode(SHA224(prepareHash(any))), - sha256: (any, encoding = Encoding.HEX) => encoding.encode(SHA256(prepareHash(any))), - sha384: (any, encoding = Encoding.HEX) => encoding.encode(SHA384(prepareHash(any))), - sha512: (any, encoding = Encoding.HEX) => encoding.encode(SHA512(prepareHash(any))), - sha3: (any, size = 512, encoding = Encoding.HEX) => encoding.encode(SHA3(prepareHash(any), { outputLength: size })), - ripemd160: (any, encoding = Encoding.HEX) => encoding.encode(RIPEMD160(prepareHash(any))), -}); - -export { - Padding, - Encoding, - Hash, -}; diff --git a/packages/parser-core/src/cryptoUtil.ts b/packages/parser-core/src/cryptoUtil.ts new file mode 100644 index 00000000..c5592eee --- /dev/null +++ b/packages/parser-core/src/cryptoUtil.ts @@ -0,0 +1,173 @@ +/* eslint-disable no-plusplus */ +/* eslint-disable no-bitwise */ +import * as CryptoJs from "crypto-js"; +import { ValueOf } from "./helper"; + +const { pad, enc, MD5, SHA1, SHA224, SHA256, SHA384, SHA512, SHA3, RIPEMD160 } = + CryptoJs; +const { Pkcs7 } = pad; +const { Utf8, Hex } = enc; + +type Padding = ValueOf; + +export type PaddingObject = { + name: string; + op: Padding; + pad?: (data: CryptoJs.lib.WordArray) => void; + unpad?: (data: CryptoJs.lib.WordArray) => void; +}; + +type PaddingList = { + AUTO: PaddingObject; + PKCS7: PaddingObject; + NONE: PaddingObject; +}; + +const Padding: PaddingList = { + AUTO: { + name: "auto", + op: Pkcs7, + pad: (data) => Pkcs7.pad(data, 4), + unpad: Pkcs7.unpad, + }, + PKCS7: { + name: "pkcs7", + op: pad.Pkcs7, + pad: (data) => Pkcs7.pad(data, 4), + unpad: Pkcs7.unpad, + }, + NONE: { + name: "none", + op: pad.NoPadding, + }, +}; + +type IterableObject = Uint8Array | Buffer | Array; + +type DecodeFunction = ( + uint8ArrayOrBufferOrArray: IterableObject +) => CryptoJs.lib.WordArray; + +type EncodeFunction = (wordArray: CryptoJs.lib.WordArray) => Uint8Array; + +type UINT8Object = { + decode: DecodeFunction; + encode: EncodeFunction; +}; + +const Uint8: UINT8Object = { + decode: (uint8ArrayOrBufferOrArray) => { + let uint8Array = uint8ArrayOrBufferOrArray; + if (Buffer.isBuffer(uint8ArrayOrBufferOrArray)) { + const buffer = uint8ArrayOrBufferOrArray; + uint8Array = new Uint8Array(buffer.length); + for (let i = 0; i < buffer.length; i += 1) { + uint8Array[i] = buffer[i]; + } + } else if (uint8ArrayOrBufferOrArray instanceof Array) { + uint8Array = Uint8Array.from(uint8ArrayOrBufferOrArray); + } + + return CryptoJs.lib.WordArray.create(uint8Array as any); + }, + encode: (wordArray) => { + const { words, sigBytes } = wordArray; + const uint8Array = new Uint8Array(sigBytes); + let offset = 0; + let word: number; + for (let i = 0; i < words.length; i += 1) { + word = words[i]; + uint8Array[offset++] = word >> 24; + uint8Array[offset++] = (word >> 16) & 0xff; + uint8Array[offset++] = (word >> 8) & 0xff; + uint8Array[offset++] = word & 0xff; + } + return uint8Array; + }, +}; + +export type EncodingObject = { + name: string; + decode: (str: string | 
IterableObject) => CryptoJs.lib.WordArray; + encode: (wordArray: CryptoJs.lib.WordArray) => string | Uint8Array; +}; + +type EncodingList = { + UTF8: EncodingObject; + HEX: EncodingObject; + UINT8: EncodingObject; + BUFFER: EncodingObject; +}; + +const Encoding: EncodingList = { + UTF8: { + name: "utf8", + decode: Utf8.parse, + encode: Utf8.stringify, + }, + HEX: { + name: "hex", + decode: Hex.parse, + encode: Hex.stringify, + }, + UINT8: { + name: "uint8", + decode: Uint8.decode, + encode: Uint8.encode, + }, + BUFFER: { + name: "buffer", + decode: (data) => Uint8.decode([...data]), + encode: (data) => Buffer.from(Uint8.encode(data)), + }, +}; + +const prepareHash = (any: any) => { + if (Buffer.isBuffer(any)) { + return Encoding.BUFFER.decode(any); + } + if (any instanceof Uint8Array || any instanceof Array) { + return Encoding.UINT8.decode(any); + } + return any; +}; + +type HashFunction = ( + any: any, + encoding?: EncodingObject +) => string | Uint8Array | Buffer; + +type HashList = { + md5: HashFunction; + sha1: HashFunction; + sha224: HashFunction; + sha256: HashFunction; + sha384: HashFunction; + sha512: HashFunction; + sha3: ( + any: any, + size: number, + encoding?: EncodingObject + ) => ReturnType; + ripemd160: HashFunction; +}; + +const Hash: HashList = { + md5: (any, encoding = Encoding.HEX) => encoding.encode(MD5(prepareHash(any))), + sha1: (any, encoding = Encoding.HEX) => + encoding.encode(SHA1(prepareHash(any))), + sha224: (any, encoding = Encoding.HEX) => + encoding.encode(SHA224(prepareHash(any))), + sha256: (any, encoding = Encoding.HEX) => + encoding.encode(SHA256(prepareHash(any))), + sha384: (any, encoding = Encoding.HEX) => + encoding.encode(SHA384(prepareHash(any))), + sha512: (any, encoding = Encoding.HEX) => + encoding.encode(SHA512(prepareHash(any))), + sha3: (any, size = 512, encoding = Encoding.HEX) => + encoding.encode(SHA3(prepareHash(any), { outputLength: size })), + ripemd160: (any, encoding = Encoding.HEX) => + encoding.encode(RIPEMD160(prepareHash(any))), +}; + +export { Padding, Encoding, Hash }; diff --git a/packages/parser-core/src/errors.js b/packages/parser-core/src/errors.js deleted file mode 100644 index 8c49099c..00000000 --- a/packages/parser-core/src/errors.js +++ /dev/null @@ -1,59 +0,0 @@ -import format from 'string-format'; - -/** - * @typedef ErrorType - * @property {string} code - * @property {string} format -*/ - -/** - * @typedef Errors - * @property {ErrorType} ENOENT - * @property {ErrorType} ENOFILE - * @property {ErrorType} EEXIST - * @property {ErrorType} EINVAL - * @property {ErrorType} ENOELMT - * @property {ErrorType} ENOATTR - * @property {ErrorType} EREQPRM - * @property {ErrorType} EINTR - * @property {ErrorType} ECRYT - * @property {ErrorType} EPDFJS - * @property {ErrorType} ENOIMP - */ - -/** - * @type {Errors} - */ -const Errors = { - ENOENT: { code: 'ENOENT', format: 'ENOENT: no such file or directory. (path: {0})' }, - ENOFILE: { code: 'ENOFILE', format: 'ENOFILE: no such file. (path: {0})' }, - EEXIST: { code: 'EEXIST', format: 'EEXIST: file or directory already exists. (path: {0})' }, - EINVAL: { code: 'EINVAL', format: 'EINVAL: invalid {0}. ({1}: {2})' }, - ENOELMT: { code: 'ENOELMT', format: 'ENOELMT: no such element. (element: {0}, path: {1})' }, - ENOATTR: { code: 'ENOATTR', format: 'ENOATTR: no such attribute. (attribute: {0}, element: {1}, path: {2})' }, - EREQPRM: { code: 'EREQPRM', format: 'EREQPRM: required parameter missing. (name: {0})' }, - EINTR: { code: 'EINTR', format: 'EINTR: interrupted function call. 
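// ---- Editor's note: illustrative sketch, not part of the diff. ----
// The Hash helpers above default to hex output; pass an Encoding to change the output
// form, and give sha3 an explicit output length.
import { Encoding, Hash } from "./cryptoUtil";

const hexDigest = Hash.sha256("hello");                           // hex string (default encoding)
const rawDigest = Hash.md5(Buffer.from("hello"), Encoding.UINT8); // Uint8Array output
const sha3Digest = Hash.sha3("hello", 256);                       // SHA3 with a 256-bit digest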
(detail: {0})' }, - ECRYT: { code: 'ECRYT', format: 'ECRYT: cryptor internal error. (detail: {0})' }, - EPDFJS: { code: 'EPDFJS', format: 'EPDFJS: pdf.js internal error. (detail: {0})' }, - ENOIMP: { code: 'ENOIMP', format: 'ENOIMP: function not implemented.' }, -}; - -export default Errors; -/** - * Create error - * @param {ErrorType} type - * @param {string[]} args - * @returns {Error} - */ -export function createError(type, ...args) { - const error = new Error(format(type.format, ...args)); - error.code = type.code; - return error; -} -/** - * Create MustOverride error - * @returns {Error} - */ -export function mustOverride() { - throw createError(Errors.EINTR, 'You must override in a subclass.'); -} diff --git a/packages/parser-core/src/errors.ts b/packages/parser-core/src/errors.ts new file mode 100644 index 00000000..076dc63e --- /dev/null +++ b/packages/parser-core/src/errors.ts @@ -0,0 +1,64 @@ +import format from "string-format"; + +type ErrorType = { + code: string; + format: string; +}; + +const Errors = { + ENOENT: { + code: "ENOENT", + format: "ENOENT: no such file or directory. (path: {0})", + }, + ENOFILE: { code: "ENOFILE", format: "ENOFILE: no such file. (path: {0})" }, + EEXIST: { + code: "EEXIST", + format: "EEXIST: file or directory already exists. (path: {0})", + }, + EINVAL: { code: "EINVAL", format: "EINVAL: invalid {0}. ({1}: {2})" }, + ENOELMT: { + code: "ENOELMT", + format: "ENOELMT: no such element. (element: {0}, path: {1})", + }, + ENOATTR: { + code: "ENOATTR", + format: + "ENOATTR: no such attribute. (attribute: {0}, element: {1}, path: {2})", + }, + EREQPRM: { + code: "EREQPRM", + format: "EREQPRM: required parameter missing. (name: {0})", + }, + EINTR: { + code: "EINTR", + format: "EINTR: interrupted function call. (detail: {0})", + }, + ECRYT: { + code: "ECRYT", + format: "ECRYT: cryptor internal error. (detail: {0})", + }, + EPDFJS: { + code: "EPDFJS", + format: "EPDFJS: pdf.js internal error. (detail: {0})", + }, + ENOIMP: { code: "ENOIMP", format: "ENOIMP: function not implemented." }, +} as const; + +export default Errors; +/** + * Create error + */ +export function createError(type: ErrorType, ...args: string[]) { + const error = new Error(format(type.format, ...args)); + (error as any).code = type.code; + return error; +} +/** + * Create MustOverride error + * @returns {Error} + */ +export function mustOverride<Type>() { + throw createError(Errors.EINTR, "You must override in a subclass."); + + return {} as Type; +} diff --git a/packages/parser-core/src/helper.ts b/packages/parser-core/src/helper.ts new file mode 100644 index 00000000..6a691eda --- /dev/null +++ b/packages/parser-core/src/helper.ts @@ -0,0 +1 @@ +export type ValueOf<T> = T extends Record<string, infer Value> ? 
Value : T; diff --git a/packages/parser-core/src/index.js b/packages/parser-core/src/index.ts similarity index 100% rename from packages/parser-core/src/index.js rename to packages/parser-core/src/index.ts diff --git a/packages/parser-core/src/mergeObjects.js b/packages/parser-core/src/mergeObjects.ts similarity index 63% rename from packages/parser-core/src/mergeObjects.js rename to packages/parser-core/src/mergeObjects.ts index 569468d8..5e1a830a 100644 --- a/packages/parser-core/src/mergeObjects.js +++ b/packages/parser-core/src/mergeObjects.ts @@ -1,4 +1,4 @@ -import { isExists, isObject } from './typecheck'; +import { isExists, isObject } from "./typecheck"; /** * Merge Objects * @param {T} obj1 @@ -6,15 +6,15 @@ import { isExists, isObject } from './typecheck'; * @returns {K} merged object * @template T,S,K */ -export default function mergeObjects(obj1, obj2) { +export default function mergeObjects(obj1: A, obj2: B): A & B { return [obj1, obj2].reduce((draft, obj) => { - Object.keys(obj).forEach(key => { + Object.keys(obj).forEach((key) => { if (isObject(draft[key]) && isExists(obj[key])) { draft[key] = mergeObjects(draft[key], obj[key]); } else { draft[key] = obj[key]; } }); - return draft; + return draft as any; }, {}); } diff --git a/packages/parser-core/src/parseBool.js b/packages/parser-core/src/parseBool.js deleted file mode 100644 index 2847a427..00000000 --- a/packages/parser-core/src/parseBool.js +++ /dev/null @@ -1,13 +0,0 @@ -import { stringContains } from './stringUtil'; -import { isBool, isString } from './typecheck'; -/** - * Return boolean form of any input - * @param {any} any - * @returns {boolean} - */ -export default function parseBool(any) { - if (isBool(any)) { - return any; - } - return isString(any) ? stringContains(['true', 't', '1', 'yes', 'y'], any) : false; -} diff --git a/packages/parser-core/src/parseBool.ts b/packages/parser-core/src/parseBool.ts new file mode 100644 index 00000000..ac28632e --- /dev/null +++ b/packages/parser-core/src/parseBool.ts @@ -0,0 +1,13 @@ +import { stringContains } from "./stringUtil"; +import { isBool, isString } from "./typecheck"; +/** + * Return boolean form of any input + */ +export default function parseBool(any: any) { + if (isBool(any)) { + return any; + } + return isString(any) + ? stringContains(["true", "t", "1", "yes", "y"], any) + : false; +} diff --git a/packages/parser-core/src/pathUtil.js b/packages/parser-core/src/pathUtil.js deleted file mode 100644 index eb51e7ac..00000000 --- a/packages/parser-core/src/pathUtil.js +++ /dev/null @@ -1,45 +0,0 @@ -import naturalCompare from 'string-natural-compare'; - -import fs from 'fs'; -import path from 'path'; - -import { isString } from './typecheck'; - -/** - * @param {string} target - * @returns {string} - */ -export function safePath(target) { - return target.replace(/\\/g, '/').replace(/(? !isString(component)) >= 0) { - return ''; - } - return safePath(path.join(...components)); -} - -/** - * @param {string} target - * @returns {string[]} - */ -export function getPathes(target) { - return fs.readdirSync(target).reduce((subpathes, subpath) => { - const fullPath = path.join(target, subpath); - const isDirectory = fs.statSync(fullPath).isDirectory(); - return subpathes.concat(isDirectory ? 
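// ---- Editor's note: illustrative sketch, not part of the diff. ----
// The two small option helpers above: deep-merge of plain objects and permissive
// boolean parsing of user-supplied values.
import mergeObjects from "./mergeObjects";
import parseBool from "./parseBool";

const defaults = { overwrite: true, logLevel: "warn" };
const merged = mergeObjects(defaults, { overwrite: false });
// -> { overwrite: false, logLevel: "warn" }

const flag = parseBool("yes"); // true for "true", "t", "1", "yes", "y"; false otherwise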
getPathes(fullPath) : [fullPath]); - }, []).sort(naturalCompare); -} diff --git a/packages/parser-core/src/pathUtil.ts b/packages/parser-core/src/pathUtil.ts new file mode 100644 index 00000000..2265695f --- /dev/null +++ b/packages/parser-core/src/pathUtil.ts @@ -0,0 +1,32 @@ +import naturalCompare from "string-natural-compare"; + +import fs from "fs"; +import path from "path"; + +import { isString } from "./typecheck"; + +export function safePath(target: string) { + return target.replace(/\\/g, "/").replace(/(? !isString(component)) >= 0) { + return ""; + } + return safePath(path.join(...components)); +} + +export function getPathes(target: string): string[] { + return fs + .readdirSync(target) + .reduce((subpathes, subpath) => { + const fullPath = path.join(target, subpath); + const isDirectory = fs.statSync(fullPath).isDirectory(); + return subpathes.concat(isDirectory ? getPathes(fullPath) : [fullPath]); + }, []) + .sort(naturalCompare); +} diff --git a/packages/parser-core/src/readEntries.js b/packages/parser-core/src/readEntries.js deleted file mode 100644 index 2066d270..00000000 --- a/packages/parser-core/src/readEntries.js +++ /dev/null @@ -1,227 +0,0 @@ -import * as fs from 'fs-extra'; - -import path from 'path'; - -import { trimEnd } from './bufferUtil'; -import { removeCacheFile, readCacheFile, writeCacheFile } from './cacheFile'; -import createCryptoStream from './createCryptoStream'; -import createSliceStream from './createSliceStream'; -import CryptoProvider from './CryptoProvider'; -import Errors, { createError } from './errors'; -import { getPathes, safePath } from './pathUtil'; -import { conditionally } from './streamUtil'; -import { safeDecodeURI } from './stringUtil'; -import { isExists } from './typecheck'; -import openZip from './zipUtil'; - -/** - * @typedef {Object} FileEntryObject - * @property {S} first - * @property {number} length - * @property {T} source - * @property {(idx:number)=>S} get - * @property {(entryPath:string,strict:boolean)=>S} find - * @property {(callback:(value: S, index: number, array: S[]) => void)=>void} forEach - * @property {(callback: (value: S, index: number, array: S[]) => any)=>void} map - * @property {(callback: (a: S, b: S) => number)=>void} sort - * @template T, S - */ - -/** - * @param {T} source - * @param {S[]} entries - * @returns {FileEntryObject} - * @template T, S - */ -function create(source, entries) { - return { - first: entries[0], - length: entries.length, - source, - get: idx => entries[idx], - find: (entryPath, strict = true) => entries.find(entry => { - const lhs = strict ? entryPath : safeDecodeURI(entryPath); - const rhs = strict ? 
entry.entryPath : safeDecodeURI(entry.entryPath); - return lhs === rhs; - }), - forEach: callback => entries.forEach(callback), - map: callback => entries.map(callback), - sort: callback => entries.sort(callback), - }; -} - -/** - * @typedef {Object} EntryBasicInformation - * @property {string} entryPath - * @property {number} size - * @property {(options:{endocing:string, end: number})=>(Promise|Buffer)} getFile - * - * @typedef {Object} ZipfileEntryInformation - * @property {string} method - * @property {number} extraFieldLength - * - * @typedef {import('adm-zip').IZipEntry & EntryBasicInformation & ZipfileEntryInformation} IZipEntryPlus -*/ - -/** - * Get FileEntryObject from the zip file - * @param {import('./zipUtil').ZipFileInformation} zip - * @returns {FileEntryObject { - const getFile = (options = {}) => { - let data = file.getData(); - if (options.encoding) { - data = data.toString(options.encoding); - } - if (options.end) { - data = data.slice(0, options.end); - } - return data; - }; - return { - ...file, - getFile, - entryPath: file.entryName, - size: file.header.size, - method: file.header.method, - extraFieldLength: file.extra.length, - }; - }); - return create(zipCopy, zipCopy.files); -} - -/** - * @param {string} dir - * @param {CryptoProvider} cryptoProvider - * @returns {FileEntryObject} - */ -function fromDirectory(dir, cryptoProvider) { - let paths = (() => { - /* istanbul ignore next */ - try { return JSON.parse(readCacheFile(dir) || '[]'); } catch (e) { return []; } - })(); - if (paths.length === 0) { - paths = getPathes(dir); - writeCacheFile(dir, JSON.stringify(paths), true); - } - return create(dir, paths.reduce((entries, fullPath) => { - const subPathOffset = path.normalize(dir).length + path.sep.length; - const size = (() => { - /* istanbul ignore next */ - try { return fs.lstatSync(fullPath).size; } catch (e) { return 0; } - })(); - return entries.concat([{ - entryPath: safePath(fullPath).substring(subPathOffset), - getFile: async (options = {}) => { - const { encoding, end } = options; - let file = await new Promise((resolve, reject) => { - if (fs.existsSync(fullPath)) { - const stream = fs.createReadStream(fullPath); - const totalSize = Math.min(end || Infinity, size); - let data = Buffer.from([]); - stream - .pipe(conditionally(isExists(end), createSliceStream(0, end))) - .pipe(conditionally(cryptoProvider && !!cryptoProvider.isStreamMode, - createCryptoStream(fullPath, totalSize, cryptoProvider, CryptoProvider.Purpose.READ_IN_DIR))) - .on('data', chunk => { - data = Buffer.concat([data, chunk]); - }) - .on('error', e => reject(e)) - .on('end', () => resolve(data)); - } else { - removeCacheFile(dir); - throw createError(Errors.ENOFILE, fullPath); - } - }); - if (cryptoProvider && !cryptoProvider.isStreamMode) { - file = cryptoProvider.run(file, fullPath, CryptoProvider.Purpose.READ_IN_DIR); - if (Promise.resolve(file) === file) { - file = await file; - } - } - if (isExists(encoding)) { - file = trimEnd(file).toString(encoding); - } - return file; - }, - size, - }]); - }, [])); -} - -/** - * @param {string} filePath - * @param {CryptoProvider} cryptoProvider - * @returns {FileEntryObject} - */ -function fromFile(filePath, cryptoProvider) { - const size = (() => { - /* istanbul ignore next */ - try { return fs.lstatSync(filePath).size; } catch (e) { return 0; } - })(); - return create(filePath, [{ - entryPath: filePath, - getFile: async (options = {}) => { - const { encoding, end } = options; - const streamOption = (cryptoProvider && cryptoProvider.bufferSize) 
- ? { highWaterMark: cryptoProvider.bufferSize } - : {}; - let file = await new Promise((resolve, reject) => { - const stream = fs.createReadStream(filePath, streamOption); - let data = Buffer.from([]); - const totalSize = Math.min(end || Infinity, size); - stream - .pipe(conditionally(isExists(end), createSliceStream(0, end))) - .pipe(conditionally(cryptoProvider && !!cryptoProvider.isStreamMode, - createCryptoStream(filePath, totalSize, cryptoProvider, CryptoProvider.Purpose.READ_IN_DIR))) - .on('data', chunk => { data = Buffer.concat([data, chunk]); }) - .on('error', e => reject(e)) - .on('end', () => resolve(data)); - }); - if (cryptoProvider && !cryptoProvider.isStreamMode) { - file = cryptoProvider.run(file, filePath, CryptoProvider.Purpose.READ_IN_DIR); - if (Promise.resolve(file) === file) { - file = await file; - } - } - if (isExists(encoding)) { - file = trimEnd(file).toString(encoding); - } - return file; - }, - size, - }]); -} -/** - * @typedef {FileEntryObject} ReadEntriesReturnType - */ -/** - * @async - * @param {string} input - * @param {CryptoProvider} cryptoProvider - * @param {import('./Logger').default} logger - * @returns {Promise} - */ -/* eslint-disable no-param-reassign */ -export default async function readEntries(input, cryptoProvider, logger) { - if (fs.lstatSync(input).isFile()) { // TODO: When input is Buffer. - if (path.extname(input).toLowerCase() === '.pdf') { - return fromFile(input, cryptoProvider); - } - /* istanbul ignore if */ - if (isExists(cryptoProvider)) { - /* istanbul ignore next */ - input = cryptoProvider.run(fs.readFileSync(input), input, CryptoProvider.Purpose.READ_IN_DIR); - if (Promise.resolve(input) === input) { - input = await input; - } - } - const zip = await openZip(input, cryptoProvider, logger); - return fromZip(zip); - } - return fromDirectory(input, cryptoProvider); -} -/* eslint-enable no-param-reassign */ diff --git a/packages/parser-core/src/readEntries.ts b/packages/parser-core/src/readEntries.ts new file mode 100644 index 00000000..ed08b2ba --- /dev/null +++ b/packages/parser-core/src/readEntries.ts @@ -0,0 +1,285 @@ +import * as fs from "fs-extra"; + +import path from "path"; + +import { trimEnd } from "./bufferUtil"; +import { removeCacheFile, readCacheFile, writeCacheFile } from "./cacheFile"; +import createCryptoStream from "./createCryptoStream"; +import createSliceStream from "./createSliceStream"; +import CryptoProvider from "./CryptoProvider"; +import Errors, { createError } from "./errors"; +import { getPathes, safePath } from "./pathUtil"; +import { conditionally } from "./streamUtil"; +import { safeDecodeURI } from "./stringUtil"; +import { isExists } from "./typecheck"; +import openZip, { ZipFileInformation } from "./zipUtil"; +import { IZipEntry } from "adm-zip"; +import Logger from "./Logger"; +import { Stream, Readable } from "stream"; + +type ArrayItem = Type extends (infer Item)[] ? 
Item : Type; + +type FileEntryObject<Source, Entry> = { + first: Entry; + length: number; + source: Source; + get(index: number): Entry; + find(entryPath: string, strict?: boolean): Entry | undefined; + forEach(f: (value: Entry, index: number, array: Entry[]) => void): void; + map<Return>( + f: (value: Entry, index: number, array: Entry[]) => Return + ): Return[]; + sort(f: (a: Entry, b: Entry) => number): Entry[]; +}; + +function create<Source, Entry extends EntryBasicInformation>( + source: Source, + entries: Entry[] +): FileEntryObject<Source, Entry> { + return { + first: entries[0], + length: entries.length, + source, + get: (idx) => entries[idx], + find: (entryPath, strict = true) => + entries.find((entry) => { + const lhs = strict ? entryPath : safeDecodeURI(entryPath); + const rhs = strict ? entry.entryPath : safeDecodeURI(entry.entryPath); + return lhs === rhs; + }), + forEach: (callback) => entries.forEach(callback), + map: (callback) => entries.map(callback), + sort: (callback) => entries.sort(callback), + }; +} + +export type EntryBasicInformation = { + entryPath: string; + size: number; + getFile(options?: { + encoding?: BufferEncoding; + end?: number; + }): Promise<Buffer | string> | Buffer | string; +}; + +type ZipfileEntryInformation = { + method: number; + extraFieldLength: number; +}; + +type IZipEntryPlus = IZipEntry & + EntryBasicInformation & + ZipfileEntryInformation; + +/** + * Get FileEntryObject from the zip file + */ +function fromZip( + zip: ZipFileInformation +): FileEntryObject<ZipFileInformation, IZipEntryPlus> { + const zipCopy = { ...zip }; + const result = { + ...zipCopy, + files: zip.files.map((file) => { + const getFile: EntryBasicInformation["getFile"] = (options = {}) => { + let data: Buffer | string = file.getData(); + if (options.encoding) { + data = data.toString(options.encoding); + } + if (options.end) { + data = data.slice(0, options.end); + } + return data; + }; + + return { + ...file, + getFile, + entryPath: file.entryName, + size: file.header.size, + method: file.header.method, + extraFieldLength: file.extra.length, + }; + }), + }; + + return create(result, result.files); +} + +function fromDirectory( + dir: string, + cryptoProvider: CryptoProvider +): FileEntryObject<string, EntryBasicInformation> { + let paths: string[] = (() => { + /* istanbul ignore next */ + try { + return JSON.parse(readCacheFile(dir) || "[]"); + } catch (e) { + return []; + } + })(); + if (paths.length === 0) { + paths = getPathes(dir); + writeCacheFile(dir, JSON.stringify(paths), true); + } + return create( + dir, + paths.reduce((entries, fullPath) => { + const subPathOffset = path.normalize(dir).length + path.sep.length; + const size = (() => { + /* istanbul ignore next */ + try { + return fs.lstatSync(fullPath).size; + } catch (e) { + return 0; + } + })(); + return entries.concat([ + { + entryPath: safePath(fullPath).substring(subPathOffset), + getFile: async (options = {}) => { + const { encoding, end } = options; + let file: Buffer | string = await new Promise<Buffer>( + (resolve, reject) => { + if (fs.existsSync(fullPath)) { + const stream = fs.createReadStream(fullPath); + const totalSize = Math.min(end || Infinity, size); + let data = Buffer.from([]); + stream + .pipe( + conditionally(isExists(end), createSliceStream(0, end)) + ) + .pipe( + conditionally( + cryptoProvider && !!cryptoProvider.isStreamMode, + createCryptoStream( + fullPath, + totalSize, + cryptoProvider, + CryptoProvider.Purpose.READ_IN_DIR + ) + ) + ) + .on("data", (chunk) => { + data = Buffer.concat([data, chunk]); + }) + .on("error", (e) => reject(e)) + .on("end", () => resolve(data)); + } else { + removeCacheFile(dir); + throw createError(Errors.ENOFILE, fullPath); 
+ } + } + ); + if (cryptoProvider && !cryptoProvider.isStreamMode) { + file = (await cryptoProvider.run( + file, + fullPath, + CryptoProvider.Purpose.READ_IN_DIR + )) as unknown as Buffer; + } + if (isExists(encoding)) { + file = trimEnd(file).toString(encoding); + } + return file; + }, + size, + }, + ]); + }, []) + ); +} + +function fromFile( + filePath: string, + cryptoProvider: CryptoProvider +): FileEntryObject<string, EntryBasicInformation> { + const size = (() => { + /* istanbul ignore next */ + try { + return fs.lstatSync(filePath).size; + } catch (e) { + return 0; + } + })(); + return create(filePath, [ + { + entryPath: filePath, + getFile: async (options = {}) => { + const { encoding, end } = options; + const streamOption = + cryptoProvider && cryptoProvider.bufferSize + ? { highWaterMark: cryptoProvider.bufferSize } + : {}; + let file: Buffer | string = await new Promise<Buffer>( + (resolve, reject) => { + const stream = fs.createReadStream(filePath, streamOption); + let data = Buffer.from([]); + const totalSize = Math.min(end || Infinity, size); + stream + .pipe(conditionally(isExists(end), createSliceStream(0, end))) + .pipe( + conditionally( + cryptoProvider && !!cryptoProvider.isStreamMode, + createCryptoStream( + filePath, + totalSize, + cryptoProvider, + CryptoProvider.Purpose.READ_IN_DIR + ) + ) + ) + .on("data", (chunk) => { + data = Buffer.concat([data, chunk]); + }) + .on("error", (e) => reject(e)) + .on("end", () => resolve(data)); + } + ); + if (cryptoProvider && !cryptoProvider.isStreamMode) { + file = (await cryptoProvider.run( + file, + filePath, + CryptoProvider.Purpose.READ_IN_DIR + )) as unknown as Buffer; + } + if (isExists(encoding)) { + file = trimEnd(file).toString(encoding); + } + return file; + }, + size, + }, + ]); +} + +export type ReadEntriesReturnType = + | FileEntryObject<string, EntryBasicInformation> + | FileEntryObject<ZipFileInformation, IZipEntryPlus>; + +/* eslint-disable no-param-reassign */ +export default async function readEntries( + input: string, + cryptoProvider: CryptoProvider, + logger: Logger +): Promise<ReadEntriesReturnType> { + if (fs.lstatSync(input).isFile()) { + // TODO: When input is Buffer. + if (path.extname(input).toLowerCase() === ".pdf") { + return fromFile(input, cryptoProvider); + } + /* istanbul ignore if */ + if (isExists(cryptoProvider)) { + /* istanbul ignore next */ + input = (await cryptoProvider.run( + fs.readFileSync(input), + input, + CryptoProvider.Purpose.READ_IN_DIR + )) as unknown as string; + } + const zip = await openZip(input, cryptoProvider, logger); + return fromZip(zip); + } + return fromDirectory(input, cryptoProvider); +} +/* eslint-enable no-param-reassign */ diff --git a/packages/parser-core/src/streamUtil.js b/packages/parser-core/src/streamUtil.js deleted file mode 100644 index 73cc2b97..00000000 --- a/packages/parser-core/src/streamUtil.js +++ /dev/null @@ -1,18 +0,0 @@ -import * as es from 'event-stream'; - -import { isFunc } from './typecheck'; - -/** - * Apply event stream conditionally - * @param {boolean | (()=>boolean)} condition - * @param {es.MapStream} stream - * @returns {es.MapStream} Mapstream - */ -export function conditionally(condition, stream) { - if (isFunc(condition) ? 
condition() : condition) { - return stream; - } - return es.through(function write(data) { - this.emit('data', data); - }); -} diff --git a/packages/parser-core/src/streamUtil.ts b/packages/parser-core/src/streamUtil.ts new file mode 100644 index 00000000..8ac0968f --- /dev/null +++ b/packages/parser-core/src/streamUtil.ts @@ -0,0 +1,16 @@ +import { PassThrough, Stream } from "stream"; +import { isFunc } from "./typecheck"; + +/** + * Apply event stream conditionally + */ +export function conditionally<S extends Stream>( + condition: boolean | (() => boolean), + stream: S +) { + if (isFunc(condition) ? condition() : condition) { + return stream; + } + + return new PassThrough(); +} diff --git a/packages/parser-core/src/stringUtil.js b/packages/parser-core/src/stringUtil.js deleted file mode 100644 index 65892b7d..00000000 --- a/packages/parser-core/src/stringUtil.js +++ /dev/null @@ -1,62 +0,0 @@ -import { isExists } from './typecheck'; - -/** - * @typedef MatchOption - * @property {number} MATCHING "0" - * @property {number} CONTAINING "1" - * @property {number} STARTSWITH "2" - * @property {number} ENDSWITH "3" - */ - -/** - * @type {MatchOption} - */ -const MatchOption = { - MATCHING: 0, - CONTAINING: 1, - STARTSWITH: 2, - ENDSWITH: 3, -}; - -/** - * @param {string[]} array=[] - * @param {string} string='' - * @param {MatchOption} matchOption=MatchOption.MATCHING - * @returns {boolean} - */ -function stringContains(array = [], string = '', matchOption = MatchOption.MATCHING) { - const lString = string.toLowerCase(); - return isExists(array.find(item => { - const lItem = item.toLowerCase(); - switch (matchOption) { - case MatchOption.CONTAINING: - return lItem.includes(lString); - case MatchOption.STARTSWITH: - return lItem.startsWith(lString); - case MatchOption.ENDSWITH: - return lItem.endsWith(lString); - default: - return lItem === lString; - } - })); -} -/** - * Decode URI - * @param {string} uri - */ -function safeDecodeURI(uri) { - try { - return decodeURI(uri); - } catch (e) { - if (e.message === 'URI malformed') { - return uri; - } - throw e; - } -} - -export { - MatchOption, - stringContains, - safeDecodeURI, -}; diff --git a/packages/parser-core/src/stringUtil.ts b/packages/parser-core/src/stringUtil.ts new file mode 100644 index 00000000..c47863cb --- /dev/null +++ b/packages/parser-core/src/stringUtil.ts @@ -0,0 +1,44 @@ +import { isExists } from "./typecheck"; + +enum MatchOption { + MATCHING, + CONTAINING, + STARTSWITH, + ENDSWITH, +} + +function stringContains( + array: string[] = [], + string = "", + matchOption = MatchOption.MATCHING +) { + const lString = string.toLowerCase(); + return isExists( + array.find((item) => { + const lItem = item.toLowerCase(); + switch (matchOption) { + case MatchOption.CONTAINING: + return lItem.includes(lString); + case MatchOption.STARTSWITH: + return lItem.startsWith(lString); + case MatchOption.ENDSWITH: + return lItem.endsWith(lString); + default: + return lItem === lString; + } + }) + ); +} + +function safeDecodeURI(uri: string) { + try { + return decodeURI(uri); + } catch (e) { + if (e.message === "URI malformed") { + return uri; + } + throw e; + } +} + +export { MatchOption, stringContains, safeDecodeURI }; diff --git a/packages/parser-core/src/typecheck.js b/packages/parser-core/src/typecheck.js deleted file mode 100644 index a4c19110..00000000 --- a/packages/parser-core/src/typecheck.js +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Get type as string - * @param {any} any - * @param {boolean} strict=false - * @returns {string} - */ -export function 
getType(any, strict = false) { - const string = Object.prototype.toString.call(any).split(' ')[1]; - const type = string.substr(0, string.length - 1); - if (strict) { - if (type === 'Function') { - return any.name; - } - if (type !== 'Null' && type !== 'Undefined') { - return any.constructor.name; - } - } - return type; -} - -/** - * Return true if passed argument is an array - * @param {any} any - * @returns {boolean} - */ -export function isArray(any) { - if (Array.isArray) { - return Array.isArray(any); - } - return getType(any) === 'Array'; -} - -/** - * Return true if passed argument is a boolean - * @param {any} any - * @returns {boolean} - */ -export function isBool(any) { - return getType(any) === 'Boolean'; -} - -/** - * Return true if passed argument is undefined nor null - * @param {any} any - * @returns {boolean} - */ -export function isExists(any) { - return any !== undefined && any !== null; -} - -/** - * Return true if passed argument is function - * @param {any} any - * @returns {boolean} - */ -export function isFunc(any) { - return getType(any) === 'Function'; -} - -/** - * Return true if passed argument is object - * @param {any} any - * @returns {boolean} - */ -export function isObject(any) { - return getType(any) === 'Object'; -} - -/** - * Return true if passed argument is string - * @param {any} any - * @returns {boolean} - */ -export function isString(any) { - return typeof any === 'string'; -} - -/** - * Return true if passed argument is url - * @param {any} string - * @returns {boolean} - */ -export function isUrl(string) { - return isString(string) && isExists(string.match(/[a-z].*?:\/\//i)); -} diff --git a/packages/parser-core/src/typecheck.ts b/packages/parser-core/src/typecheck.ts new file mode 100644 index 00000000..9091be5d --- /dev/null +++ b/packages/parser-core/src/typecheck.ts @@ -0,0 +1,68 @@ +/** + * Get type as string + */ +export function getType(any: any, strict = false): string { + const string = Object.prototype.toString.call(any).split(" ")[1]; + const type = string.substr(0, string.length - 1); + if (strict) { + if (type === "Function") { + return any.name; + } + if (type !== "Null" && type !== "Undefined") { + return any.constructor.name; + } + } + return type; +} + +/** + * Return true if passed argument is an array + */ +export function isArray(any: any): any is any[] { + if (Array.isArray) { + return Array.isArray(any); + } + return getType(any) === "Array"; +} + +/** + * Return true if passed argument is a boolean + */ +export function isBool(any: any): any is boolean { + return getType(any) === "Boolean"; +} + +/** + * Return true if passed argument is undefined nor null + */ +export function isExists(any: any): any is NonNullable { + return any !== undefined && any !== null; +} + +/** + * Return true if passed argument is function + */ +export function isFunc(any: any): any is Function { + return getType(any) === "Function"; +} + +/** + * Return true if passed argument is object + */ +export function isObject(any: any): any is Record { + return getType(any) === "Object"; +} + +/** + * Return true if passed argument is string + */ +export function isString(any: any): any is string { + return typeof any === "string"; +} + +/** + * Return true if passed argument is url + */ +export function isUrl(string: string) { + return isString(string) && isExists(string.match(/[a-z].*?:\/\//i)); +} diff --git a/packages/parser-core/src/validateOptions.js b/packages/parser-core/src/validateOptions.js deleted file mode 100644 index 00092791..00000000 --- 
a/packages/parser-core/src/validateOptions.js +++ /dev/null @@ -1,25 +0,0 @@ -import Errors, { createError } from './errors'; -import { getType, isExists, isString } from './typecheck'; - -/** - * Validate option with interface. it will return void if it passes, throw error otherwise. - * @param {T} options - * @param {S} types - * @param {boolean} strict - * @template T, S - * @returns {void} - */ -export default function validateOptions(options, types, strict = false) { - Object.keys(options).forEach(key => { - if (!isExists(Object.getOwnPropertyDescriptor(types, key))) { - throw createError(Errors.EINVAL, 'options', 'key', key); - } - if (isString(types[key])) { - if (!isExists(types[key].split('|').find(type => type === getType(options[key], strict)))) { - throw createError(Errors.EINVAL, 'option value', 'reason', `${key} must be ${types[key]} types`); - } - } else { - validateOptions(options[key], types[key]); - } - }); -} diff --git a/packages/parser-core/src/validateOptions.ts b/packages/parser-core/src/validateOptions.ts new file mode 100644 index 00000000..5b5ba68d --- /dev/null +++ b/packages/parser-core/src/validateOptions.ts @@ -0,0 +1,35 @@ +import Errors, { createError } from "./errors"; +import { getType, isExists, isString } from "./typecheck"; + +/** + * Validate option with interface. it will return void if it passes, throw error otherwise. + */ +export default function validateOptions( + options: Options, + types: Types, + strict = false +) { + Object.keys(options).forEach((key) => { + if (!isExists(Object.getOwnPropertyDescriptor(types, key))) { + throw createError(Errors.EINVAL, "options", "key", key); + } + if (isString(types[key])) { + if ( + !isExists( + types[key] + .split("|") + .find((type) => type === getType(options[key], strict)) + ) + ) { + throw createError( + Errors.EINVAL, + "option value", + "reason", + `${key} must be ${types[key]} types` + ); + } + } else { + validateOptions(options[key], types[key]); + } + }); +} diff --git a/packages/parser-core/src/zipUtil.js b/packages/parser-core/src/zipUtil.js deleted file mode 100644 index 6a3ab848..00000000 --- a/packages/parser-core/src/zipUtil.js +++ /dev/null @@ -1,116 +0,0 @@ -import AdmZip from 'adm-zip'; -import fs from 'fs-extra'; - -import { Readable } from 'stream'; - -import { trimEnd } from './bufferUtil'; -import createCryptoStream from './createCryptoStream'; -import createSliceStream from './createSliceStream'; -import CryptoProvider from './CryptoProvider'; -import Errors, { createError } from './errors'; -import { conditionally } from './streamUtil'; -import { isExists } from './typecheck'; - -/** - * @typedef GetFileOptions - * @property {string} encoding - * @property {number} end - */ - -/** - * @typedef {Object} ZipFileInformation - * @property {string} file - * @property {AdmZip.IZipEntry[]} files - * @property {CryptoProvider} cryptoProvider - * @property {(entryPath: string) => AdmZip.IZipEntry} find - * @property {(entry: AdmZip.IZipEntry, options?: GetFileOptions) => Promise} getFile - * @property {(unzipPath: string, overwrite?: boolean) => Promise} extractAll - * @property {import('./Logger').default} logger - */ - -/** - * Find the file with a path. - * @param {string} entryPath File Path - * @returns {AdmZip.IZipEntry | undefined} A file with path or undefined if there is none. 
- * */ -function find(entryPath) { - return this.files.find(entry => entryPath === entry.entryName); -} - -/** - * - * @async - * @this {ZipFileInformation} - * @param {import('adm-zip').IZipEntry} entry - * @param {GetFileOptions} options - * @returns {Promise} String is encoding is provided, Buffer otherwise - */ -async function getFile(entry, options = { encoding: undefined, end: undefined }) { - const { encoding, end } = options; - let file = await new Promise(resolveFile => { - const totalSize = Math.min(end || Infinity, entry.uncompressedSize); - let data = Buffer.from([]); - const readable = Readable.from(entry.getData()); - readable - .pipe(conditionally(isExists(end), createSliceStream(0, end))) - .pipe(conditionally(this.cryptoProvider && !!this.cryptoProvider.isStreamMode, - createCryptoStream(entry.path, totalSize, this.cryptoProvider, CryptoProvider.Purpose.READ_IN_DIR))) - .on('data', chunk => { data = Buffer.concat([data, chunk]); }) - .on('end', () => { resolveFile(data); }); - }); - if (this.cryptoProvider && !this.cryptoProvider.isStreamMode) { - file = this.cryptoProvider.run(file, entry.path, CryptoProvider.Purpose.READ_IN_DIR); - if (Promise.resolve(file) === file) { - file = await file; - } - } - if (isExists(encoding)) { - file = trimEnd(file).toString(encoding); - } - return file; -} - -/** - * Extract zip file to path - * @this {ZipFileInformation} - * @param {string} unzipPath Path where files will be extracted - * @param {boolean} overwrite - * @returns {Promise} - */ -async function extractAll(unzipPath, overwrite = true) { - if (overwrite) { - fs.removeSync(unzipPath); - } - fs.mkdirpSync(unzipPath); - const zip = new AdmZip(); - await Promise.all(this.files.map(async entry => { - if (this.cryptoProvider && !entry.isDirectory) { - entry.setData(await this.cryptoProvider.run(entry.getData(), entry.entryPath, CryptoProvider.Purpose.WRITE)); - } - zip.addFile(entry.entryName, entry.getData()); - })); - zip.extractAllTo(unzipPath); -} - -/** - * @param {string | Buffer} file - * @param {CryptoProvider} cryptoProvider - * @param {Logger} logger - * @returns {ZipFileInformation} - * @throws {Errors.ENOENT} When file can't be found - */ -export default async function openZip(file, cryptoProvider, logger) { - try { - const files = new AdmZip(file).getEntries(); - return { - files, - cryptoProvider, - find, - getFile, - extractAll, - logger, - }; - } catch (err) { - throw createError(Errors.ENOENT, file); - } -} diff --git a/packages/parser-core/src/zipUtil.ts b/packages/parser-core/src/zipUtil.ts new file mode 100644 index 00000000..896d5530 --- /dev/null +++ b/packages/parser-core/src/zipUtil.ts @@ -0,0 +1,136 @@ +import AdmZip, { IZipEntry } from 'adm-zip'; +import fs from 'fs-extra'; + +import { Readable } from 'stream'; + +import { trimEnd } from './bufferUtil'; +import createCryptoStream from './createCryptoStream'; +import createSliceStream from './createSliceStream'; +import CryptoProvider from './CryptoProvider'; +import Errors, { createError } from './errors'; +import { conditionally } from './streamUtil'; +import { isExists } from './typecheck'; +import Logger from './Logger'; + +type GetFileOptions = { + encoding: BufferEncoding; + end?: number; +}; + +export type ZipFileInformation = { + file?: string; + files: IZipEntry[]; + cryptoProvider: CryptoProvider; + find(entryPath: string): IZipEntry; + getFile(entry: IZipEntry, options?: GetFileOptions): Promise; + extractAll(unzipPath: string, overwrite?: boolean): Promise; + logger: Logger; +}; + +/** + * Find 
the file with a path. Returns the file with that path, or undefined if there is none. + * */ +function find(this: ZipFileInformation, entryPath: string) { + return this.files.find((entry) => entryPath === entry.entryName); +} + +/** + * @returns {Promise<Buffer | string>} String if encoding is provided, Buffer otherwise + */ +async function getFile( + this: ZipFileInformation, + entry: IZipEntry, + options: GetFileOptions = { encoding: undefined, end: undefined } +): Promise<Buffer | string> { + const { encoding, end } = options; + let file: Buffer | string = await new Promise<Buffer>((resolveFile) => { + // const totalSize = Math.min(end || Infinity, entry.header.size); // unknown property + const totalSize = Math.min(end || Infinity); + let data = Buffer.from([]); + const readable = Readable.from(entry.getData()); + readable + .pipe(conditionally(isExists(end), createSliceStream(0, end))) + .pipe( + conditionally( + this.cryptoProvider && !!this.cryptoProvider.isStreamMode, + createCryptoStream( + entry.entryName, + totalSize, + this.cryptoProvider, + CryptoProvider.Purpose.READ_IN_DIR + ) + ) + ) + .on('data', (chunk) => { + data = Buffer.concat([data, chunk]); + }) + .on('end', () => { + resolveFile(data); + }); + }); + if (this.cryptoProvider && !this.cryptoProvider.isStreamMode) { + file = (await this.cryptoProvider.run( + file, + entry.entryName, + CryptoProvider.Purpose.READ_IN_DIR + )) as unknown as Buffer; + } + if (isExists(encoding)) { + file = trimEnd(file).toString(encoding); + } + return file; +} + +/** + * Extract zip file to path + * @param {string} unzipPath Path where files will be extracted + */ +async function extractAll( + this: ZipFileInformation, + unzipPath: string, + overwrite = true +) { + if (overwrite) { + fs.removeSync(unzipPath); + } + fs.mkdirpSync(unzipPath); + const zip = new AdmZip(); + await Promise.all( + this.files.map(async (entry) => { + if (this.cryptoProvider && !entry.isDirectory) { + entry.setData( + (await this.cryptoProvider.run( + entry.getData(), + entry.entryName, + CryptoProvider.Purpose.WRITE + )) as unknown as string | Buffer + ); + } + zip.addFile(entry.entryName, entry.getData()); + }) + ); + zip.extractAllTo(unzipPath); +} + +/** + * @throws {Errors.ENOENT} When file can't be found + */ +export default async function openZip( + file: string | Buffer, + cryptoProvider?: CryptoProvider, + logger?: Logger +): Promise<ZipFileInformation> { + try { + const files = new AdmZip(file).getEntries(); + return { + files, + cryptoProvider, + find, + getFile, + extractAll, + logger, + }; + } catch (err) { + throw createError(Errors.ENOENT, file.toString()); + } +} diff --git a/packages/parser-core/test/AesCryptor.spec.js b/packages/parser-core/test/AesCryptor.spec.js index 610c8043..a8e94212 100644 --- a/packages/parser-core/test/AesCryptor.spec.js +++ b/packages/parser-core/test/AesCryptor.spec.js @@ -4,8 +4,8 @@ import AesCryptor, { Padding, Encoding, Mode, -} from '../src/AesCryptor'; -import Errors from '../src/errors'; +} from '../lib/AesCryptor'; +import Errors from '../lib/errors'; should(); // Initialize should @@ -224,12 +224,12 @@ describe('AesCryptor', () => { it('Length is divisible by 16', () => { // An example string key const key = '1234567890123456'; - + const text = 'TextMustBe16Byte'; const cryptor = new AesCryptor(Mode.ECB, { key }); - + cryptor.encrypt(text, { encoding: Encoding.HEX }).should.equal('3ba6941b0b398d96e87e34660ecd435f'); - + const encryptedBytes = cryptor.encrypt(text); cryptor.decrypt(encryptedBytes, { encoding: Encoding.UTF8 }).should.equal(text); }); diff --git 
a/packages/parser-core/test/BaseBook.spec.js b/packages/parser-core/test/BaseBook.spec.js index 5fe551fa..b06ebb35 100644 --- a/packages/parser-core/test/BaseBook.spec.js +++ b/packages/parser-core/test/BaseBook.spec.js @@ -1,11 +1,11 @@ import { expect, should } from 'chai'; -import BaseBook from '../src/BaseBook'; +import BaseBook from '../lib/BaseBook'; should(); // Initialize should describe('BaseBook', () => { const baseBook = new BaseBook(); - it('toRaw() should throw an error', ()=>{ + it('toRaw() should throw an error', () => { expect(baseBook.toRaw).to.throw(Error, 'You must override in a subclass.'); }) }); diff --git a/packages/parser-core/test/BaseItem.spec.js b/packages/parser-core/test/BaseItem.spec.js index 4dafa052..073ad1d9 100644 --- a/packages/parser-core/test/BaseItem.spec.js +++ b/packages/parser-core/test/BaseItem.spec.js @@ -1,13 +1,13 @@ import { expect, should } from 'chai'; -import BaseItem from '../src/BaseItem'; +import BaseItem from '../lib/BaseItem'; should(); // Initialize should describe('BaseItem', () => { const defaultSize = 10; - const baseItemParam = {size: 10}; + const baseItemParam = { size: 10 }; const baseBook = new BaseItem(baseItemParam); - it('toRaw() should throw an error', ()=>{ + it('toRaw() should throw an error', () => { expect(baseBook.toRaw).to.throw(Error, 'You must override in a subclass.'); }) it('constructed will copy values', () => { diff --git a/packages/parser-core/test/BaseParseContext.spec.js b/packages/parser-core/test/BaseParseContext.spec.js index 2e2104a2..4985f8ee 100644 --- a/packages/parser-core/test/BaseParseContext.spec.js +++ b/packages/parser-core/test/BaseParseContext.spec.js @@ -1,5 +1,5 @@ import { expect, should } from 'chai'; -import BaseParseContext from '../src/BaseParseContext'; +import BaseParseContext from '../lib/BaseParseContext'; should(); // Initialize should diff --git a/packages/parser-core/test/BaseReadContext.spec.js b/packages/parser-core/test/BaseReadContext.spec.js index 9d5692bc..19c2719b 100644 --- a/packages/parser-core/test/BaseReadContext.spec.js +++ b/packages/parser-core/test/BaseReadContext.spec.js @@ -1,5 +1,5 @@ import { expect, should } from 'chai'; -import BaseReadContext from '../src/BaseReadContext'; +import BaseReadContext from '../lib/BaseReadContext'; should(); // Initialize should diff --git a/packages/parser-core/test/CryptoProvider.spec.js b/packages/parser-core/test/CryptoProvider.spec.js index 3177d8d2..c8845be0 100644 --- a/packages/parser-core/test/CryptoProvider.spec.js +++ b/packages/parser-core/test/CryptoProvider.spec.js @@ -1,7 +1,7 @@ import { should, assert } from 'chai'; -import Errors from '../src/errors'; -import CryptoProvider from '../src/CryptoProvider'; +import Errors from '../lib/errors'; +import CryptoProvider from '../lib/CryptoProvider'; should(); // Initialize should diff --git a/packages/parser-core/test/Logger.spec.js b/packages/parser-core/test/Logger.spec.js index 597e4e66..c099b1c4 100644 --- a/packages/parser-core/test/Logger.spec.js +++ b/packages/parser-core/test/Logger.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import Logger, { LogLevel } from '../src/Logger'; +import Logger, { LogLevel } from '../lib/Logger'; should(); // Initialize should @@ -122,7 +122,7 @@ describe('Logger', () => { logger.info('info', 'test', 'log'); current.should.equal(`[Logger] info test log`); }); - + it('warn test', () => { const logger = new Logger(); logger.logLevel = LogLevel.WARN; @@ -133,7 +133,7 @@ describe('Logger', () => { logger.warn('warn', 'test', 
'log'); current.should.equal(`[Logger] warn test log`); }); - + it('error test', () => { const logger = new Logger(); logger.logLevel = LogLevel.ERROR; diff --git a/packages/parser-core/test/Parser.spec.js b/packages/parser-core/test/Parser.spec.js index 073d9b0d..ffba700e 100644 --- a/packages/parser-core/test/Parser.spec.js +++ b/packages/parser-core/test/Parser.spec.js @@ -1,16 +1,16 @@ import { should, assert } from 'chai'; import path from 'path'; -import CryptoProvider from '../src/CryptoProvider'; -import Errors from '../src/errors'; -import { LogLevel } from '../src/Logger'; -import Parser from '../src/Parser'; +import CryptoProvider from '../lib/CryptoProvider'; +import Errors from '../lib/errors'; +import { LogLevel } from '../lib/Logger'; +import Parser from '../lib/Parser'; import Paths from '../../../test/paths'; -import { isString } from '../src/typecheck'; +import { isString } from '../lib/typecheck'; should(); // Initialize should -class TestCryptoProvider extends CryptoProvider {} +class TestCryptoProvider extends CryptoProvider { } class Item { constructor(rawObj) { diff --git a/packages/parser-core/test/Version.spec.js b/packages/parser-core/test/Version.spec.js index 6e3fa960..9607201b 100644 --- a/packages/parser-core/test/Version.spec.js +++ b/packages/parser-core/test/Version.spec.js @@ -1,6 +1,6 @@ import { assert, should } from 'chai'; -import Version from '../src/Version'; +import Version from '../lib/Version'; should(); // Initialize should diff --git a/packages/parser-core/test/bufferUtil.spec.js b/packages/parser-core/test/bufferUtil.spec.js index e1d5568d..0b89207b 100644 --- a/packages/parser-core/test/bufferUtil.spec.js +++ b/packages/parser-core/test/bufferUtil.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import { trim, trimStart, trimEnd } from '../src/bufferUtil'; +import { trim, trimStart, trimEnd } from '../lib/bufferUtil'; should(); // Initialize should diff --git a/packages/parser-core/test/cacheFile.spec.js b/packages/parser-core/test/cacheFile.spec.js index e4d26a48..0433d82f 100644 --- a/packages/parser-core/test/cacheFile.spec.js +++ b/packages/parser-core/test/cacheFile.spec.js @@ -7,9 +7,9 @@ import { removeAllCacheFiles, readCacheFile, writeCacheFile, -} from '../src/cacheFile'; +} from '../lib/cacheFile'; -import Errors from '../src/errors'; +import Errors from '../lib/errors'; should(); // Initialize should diff --git a/packages/parser-core/test/createCryptoStream.spec.js b/packages/parser-core/test/createCryptoStream.spec.js index 759fec17..2166007a 100644 --- a/packages/parser-core/test/createCryptoStream.spec.js +++ b/packages/parser-core/test/createCryptoStream.spec.js @@ -1,7 +1,7 @@ import { assert } from 'chai'; import fs from 'fs'; -import createCryptoStream from '../src/createCryptoStream'; +import createCryptoStream from '../lib/createCryptoStream'; import Paths from '../../../test/paths'; import TestCryptoProvider from '../../epub-parser/test/TestCryptoProvider'; import TestPromiseCryptoProvider from '../../epub-parser/test/TestPromiseCryptoProvider'; diff --git a/packages/parser-core/test/createSliceStream.spec.js b/packages/parser-core/test/createSliceStream.spec.js index 44c7b8f6..6ac90156 100644 --- a/packages/parser-core/test/createSliceStream.spec.js +++ b/packages/parser-core/test/createSliceStream.spec.js @@ -1,7 +1,7 @@ import { assert } from 'chai'; import fs from 'fs'; -import createSliceStream from '../src/createSliceStream'; +import createSliceStream from '../lib/createSliceStream'; import Paths from 
'../../../test/paths'; describe('Util - createSliceStream', () => { diff --git a/packages/parser-core/test/cryptoUtil.spec.js b/packages/parser-core/test/cryptoUtil.spec.js index 0f0feca6..e76a690a 100644 --- a/packages/parser-core/test/cryptoUtil.spec.js +++ b/packages/parser-core/test/cryptoUtil.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import { Encoding, Hash } from '../src/cryptoUtil'; +import { Encoding, Hash } from '../lib/cryptoUtil'; should(); // Initialize should diff --git a/packages/parser-core/test/index.spec.js b/packages/parser-core/test/index.spec.js index 2c1c4477..c3bc8441 100644 --- a/packages/parser-core/test/index.spec.js +++ b/packages/parser-core/test/index.spec.js @@ -15,7 +15,7 @@ import { getType, isArray, isBool, isExists, isFunc, isObject, isString, isUrl, validateOptions, openZip, -} from '../src/index'; +} from '../lib/index'; describe('parser-core', () => { it('Check imports', () => { diff --git a/packages/parser-core/test/mergeObjects.spec.js b/packages/parser-core/test/mergeObjects.spec.js index acfba213..bf251b7a 100644 --- a/packages/parser-core/test/mergeObjects.spec.js +++ b/packages/parser-core/test/mergeObjects.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import mergeObjects from '../src/mergeObjects'; +import mergeObjects from '../lib/mergeObjects'; should(); // Initialize should diff --git a/packages/parser-core/test/parseBool.spec.js b/packages/parser-core/test/parseBool.spec.js index 48bc22e4..e41fa6ff 100644 --- a/packages/parser-core/test/parseBool.spec.js +++ b/packages/parser-core/test/parseBool.spec.js @@ -1,6 +1,6 @@ import { should } from 'chai'; -import parseBool from '../src/parseBool'; +import parseBool from '../lib/parseBool'; should(); // Initialize should diff --git a/packages/parser-core/test/pathUtil.spec.js b/packages/parser-core/test/pathUtil.spec.js index 20f51f43..6b44a20c 100644 --- a/packages/parser-core/test/pathUtil.spec.js +++ b/packages/parser-core/test/pathUtil.spec.js @@ -1,7 +1,7 @@ import { should } from 'chai'; import path from 'path'; -import { getPathes, safeDirname, safePathJoin } from '../src/pathUtil'; +import { getPathes, safeDirname, safePathJoin } from '../lib/pathUtil'; import Paths from '../../../test/paths'; should(); // Initialize should diff --git a/packages/parser-core/test/readEntries.spec.js b/packages/parser-core/test/readEntries.spec.js index 8b2e0cf6..772bdd8c 100644 --- a/packages/parser-core/test/readEntries.spec.js +++ b/packages/parser-core/test/readEntries.spec.js @@ -2,11 +2,11 @@ import { should, expect } from 'chai'; import fs from 'fs'; import path from 'path'; -import { readCacheFile, writeCacheFile } from '../src/cacheFile'; -import Errors from '../src/errors'; -import { isExists, isString } from '../src/typecheck'; -import readEntries from '../src/readEntries'; -import { stringContains } from '../src/stringUtil'; +import { readCacheFile, writeCacheFile } from '../lib/cacheFile'; +import Errors from '../lib/errors'; +import { isExists, isString } from '../lib/typecheck'; +import readEntries from '../lib/readEntries'; +import { stringContains } from '../lib/stringUtil'; import Paths from '../../../test/paths'; import TestSyncStreamCryptoProvider from './testSyncStreamCryptoProvider'; import TestSyncCryptoProvider from './testSyncCryptoProvider'; @@ -252,7 +252,7 @@ describe('Util - entry manager', () => { const entry = entries.first; entry.entryPath.should.equal(Paths.PDF); - const file = await entry.getFile({encoding: 'utf-8'}); + const file = await entry.getFile({ 
encoding: 'utf-8' }); file.should.not.null; }); }); diff --git a/packages/parser-core/test/streamUtil.spec.js b/packages/parser-core/test/streamUtil.spec.js index abb118b5..69fc2368 100644 --- a/packages/parser-core/test/streamUtil.spec.js +++ b/packages/parser-core/test/streamUtil.spec.js @@ -1,8 +1,8 @@ import { should } from 'chai'; import fs from 'fs'; -import createSliceStream from '../src/createSliceStream'; -import { conditionally } from '../src/streamUtil'; +import createSliceStream from '../lib/createSliceStream'; +import { conditionally } from '../lib/streamUtil'; import Paths from '../../../test/paths'; should(); // Initialize should @@ -30,7 +30,7 @@ describe('Util - stream', () => { }); it('conditionally test (true)', () => { - return test(()=> true).then((data) => { + return test(() => true).then((data) => { data.length.should.equal(150); }); }); @@ -42,7 +42,7 @@ describe('Util - stream', () => { }); it('conditionally test (false)', () => { - return test(()=>false).then((data) => { + return test(() => false).then((data) => { data.length.should.equal(1222); }); }); diff --git a/packages/parser-core/test/stringUtil.spec.js b/packages/parser-core/test/stringUtil.spec.js index af216c2b..672abed9 100644 --- a/packages/parser-core/test/stringUtil.spec.js +++ b/packages/parser-core/test/stringUtil.spec.js @@ -1,6 +1,6 @@ import { should, expect } from 'chai'; -import { MatchOption, stringContains, safeDecodeURI } from '../src/stringUtil'; +import { MatchOption, stringContains, safeDecodeURI } from '../lib/stringUtil'; should(); // Initialize should diff --git a/packages/parser-core/test/testAsyncCryptoProvider.js b/packages/parser-core/test/testAsyncCryptoProvider.js index 7d2d14ba..68ff9cab 100644 --- a/packages/parser-core/test/testAsyncCryptoProvider.js +++ b/packages/parser-core/test/testAsyncCryptoProvider.js @@ -1,9 +1,9 @@ import fs from 'fs'; import Paths from '../../../test/paths'; -import AesCryptor from '../src/AesCryptor'; -import CryptoProvider from '../src/CryptoProvider'; -import { Hash } from '../src/cryptoUtil'; +import AesCryptor from '../lib/AesCryptor'; +import CryptoProvider from '../lib/CryptoProvider'; +import { Hash } from '../lib/cryptoUtil'; const { Purpose } = CryptoProvider; const { Mode, Padding } = AesCryptor; @@ -11,7 +11,7 @@ const { Mode, Padding } = AesCryptor; class TestAsyncCryptoProvider extends CryptoProvider { isStreamMode = false; - bufferSize = 1024; + get bufferSize() { return 1024 }; constructor(key) { super(); diff --git a/packages/parser-core/test/testSyncCryptoProvider.js b/packages/parser-core/test/testSyncCryptoProvider.js index dff96333..5810a49a 100644 --- a/packages/parser-core/test/testSyncCryptoProvider.js +++ b/packages/parser-core/test/testSyncCryptoProvider.js @@ -1,9 +1,9 @@ import fs from 'fs'; import Paths from '../../../test/paths'; -import AesCryptor from '../src/AesCryptor'; -import CryptoProvider from '../src/CryptoProvider'; -import { Hash } from '../src/cryptoUtil'; +import AesCryptor from '../lib/AesCryptor'; +import CryptoProvider from '../lib/CryptoProvider'; +import { Hash } from '../lib/cryptoUtil'; const { Purpose } = CryptoProvider; const { Mode, Padding } = AesCryptor; diff --git a/packages/parser-core/test/testSyncStreamCryptoProvider.js b/packages/parser-core/test/testSyncStreamCryptoProvider.js index 4a60c5f7..6ce15bbd 100644 --- a/packages/parser-core/test/testSyncStreamCryptoProvider.js +++ b/packages/parser-core/test/testSyncStreamCryptoProvider.js @@ -1,9 +1,9 @@ import fs from 'fs'; import Paths from 
'../../../test/paths'; -import AesCryptor from '../src/AesCryptor'; -import CryptoProvider from '../src/CryptoProvider'; -import { Hash } from '../src/cryptoUtil'; +import AesCryptor from '../lib/AesCryptor'; +import CryptoProvider from '../lib/CryptoProvider'; +import { Hash } from '../lib/cryptoUtil'; const { Purpose } = CryptoProvider; const { Mode, Padding } = AesCryptor; diff --git a/packages/parser-core/test/typecheck.spec.js b/packages/parser-core/test/typecheck.spec.js index 40c2599a..ea2214a3 100644 --- a/packages/parser-core/test/typecheck.spec.js +++ b/packages/parser-core/test/typecheck.spec.js @@ -1,11 +1,11 @@ import { should } from 'chai'; import EpubBook from '../../epub-parser/src/model/EpubBook'; -import { isArray, isBool, isExists, isFunc, isObject, isString, isUrl, getType } from '../src/typecheck'; +import { isArray, isBool, isExists, isFunc, isObject, isString, isUrl, getType } from '../lib/typecheck'; should(); // Initialize should -function test() {} +function test() { } describe('Util - Type check', () => { it('getType strict mode test', () => { @@ -19,7 +19,7 @@ describe('Util - Type check', () => { getType(false, true).should.equal('Boolean'); getType(null, true).should.equal('Null'); getType(undefined, true).should.equal('Undefined'); - getType(() => {}, true).should.equal(''); + getType(() => { }, true).should.equal(''); getType(test, true).should.equal('test'); }); @@ -34,7 +34,7 @@ describe('Util - Type check', () => { isArray(false).should.be.false; isArray(null).should.be.false; isArray(undefined).should.be.false; - isArray(() => {}).should.be.false; + isArray(() => { }).should.be.false; isArray(test).should.be.false; const temp = Array.isArray; @@ -52,7 +52,7 @@ describe('Util - Type check', () => { isBool(false).should.be.true; isBool(null).should.be.false; isBool(undefined).should.be.false; - isBool(() => {}).should.be.false; + isBool(() => { }).should.be.false; isBool(test).should.be.false; }); @@ -65,7 +65,7 @@ describe('Util - Type check', () => { isExists(false).should.be.true; isExists(null).should.be.false; isExists(undefined).should.be.false; - isExists(() => {}).should.be.true; + isExists(() => { }).should.be.true; isExists(test).should.be.true; }); @@ -78,7 +78,7 @@ describe('Util - Type check', () => { isFunc(false).should.be.false; isFunc(null).should.be.false; isFunc(undefined).should.be.false; - isFunc(() => {}).should.be.true; + isFunc(() => { }).should.be.true; isFunc(test).should.be.true; }); @@ -91,7 +91,7 @@ describe('Util - Type check', () => { isObject(false).should.be.false; isObject(null).should.be.false; isObject(undefined).should.be.false; - isObject(() => {}).should.be.false; + isObject(() => { }).should.be.false; isObject(test).should.be.false; }); @@ -104,7 +104,7 @@ describe('Util - Type check', () => { isString(false).should.be.false; isString(null).should.be.false; isString(undefined).should.be.false; - isString(() => {}).should.be.false; + isString(() => { }).should.be.false; isString(test).should.be.false; }); diff --git a/packages/parser-core/test/validateOptions.spec.js b/packages/parser-core/test/validateOptions.spec.js index abcb80c0..342695bd 100644 --- a/packages/parser-core/test/validateOptions.spec.js +++ b/packages/parser-core/test/validateOptions.spec.js @@ -1,7 +1,7 @@ import { should } from 'chai'; -import Errors from '../src/errors'; -import validateOptions from '../src/validateOptions'; +import Errors from '../lib/errors'; +import validateOptions from '../lib/validateOptions'; should(); // Initialize should 
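A minimal usage sketch of the relocated validateOptions helper that the spec above now imports from lib/ (not part of the diff; the option names unzipPath and overwrite are taken from BaseParserOption, and the error handling shown is an assumption):

import { validateOptions, Errors } from '@ridi/parser-core';

const options = { unzipPath: './temp', overwrite: true };
const optionTypes = { unzipPath: 'String', overwrite: 'Boolean' };

try {
  // Returns void when every key exists in optionTypes and every value matches its declared type.
  validateOptions(options, optionTypes);
} catch (e: any) {
  // An unknown key or a type mismatch surfaces as an EINVAL error built by createError.
  console.error(e.code === Errors.EINVAL.code ? e.message : e);
}
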
diff --git a/packages/parser-core/test/zipUtil.spec.js b/packages/parser-core/test/zipUtil.spec.js index 8d132bdb..30151a77 100644 --- a/packages/parser-core/test/zipUtil.spec.js +++ b/packages/parser-core/test/zipUtil.spec.js @@ -1,8 +1,8 @@ import { expect, should } from 'chai'; import fs from 'fs-extra'; -import Errors from '../src/errors'; -import openZip from '../src/zipUtil'; +import Errors from '../lib/errors'; +import openZip from '../lib/zipUtil'; import Paths from '../../../test/paths'; import TestSyncCryptoProvider from './testSyncCryptoProvider'; import TestSyncStreamCryptoProvider from './testSyncStreamCryptoProvider'; diff --git a/packages/parser-core/tsconfig.json b/packages/parser-core/tsconfig.json index 9fc4a37a..6128f834 100644 --- a/packages/parser-core/tsconfig.json +++ b/packages/parser-core/tsconfig.json @@ -1,17 +1,17 @@ { // Change this to match your project - "include": [ - "src/**/*" - ], + "include": ["src/**/*"], "compilerOptions": { + "target": "es6", + "module": "CommonJS", // Tells TypeScript to read JS files, as // normally they are ignored as source files "allowJs": true, // Generate d.ts files "declaration": true, // This compiler run should - // only output d.ts files - "emitDeclarationOnly": true, - "outDir": "type" - }, + "esModuleInterop": true, + "moduleResolution": "node", + "outDir": "lib" + } } diff --git a/packages/parser-core/type/AesCryptor.d.ts b/packages/parser-core/type/AesCryptor.d.ts deleted file mode 100644 index 945ecfab..00000000 --- a/packages/parser-core/type/AesCryptor.d.ts +++ /dev/null @@ -1,95 +0,0 @@ -export default AesCryptor; -export type ModeConfig = { - key: string; - iv?: string; -}; -export type ModeObject = { - name: string; - op: any; - configTypes: ModeConfig; -}; -export type ModeList = { - ECB: ModeObject; - CBC: ModeObject; - CFB: ModeObject; - OFB: ModeObject; - CTR: ModeObject; -}; -declare class AesCryptor { - /** - * Construct AesCryptor - * @param {ModeObject} mode Crypto mode - * @param {ModeConfig} config Crypto config - */ - constructor(mode: ModeObject, config: ModeConfig); - /** - * @typedef {(data: string | CryptoJs.lib.WordArray) => CryptoJs.lib.WordArray} EncodeAndDecode - * @typedef {Object} Operator - * @property {string} name - * @property {EncodeAndDecode} encrypt - * @property {EncodeAndDecode} decrypt - */ - /** - * @private - * @type {Operator} - */ - private operator; - /** - * Make an operator - * @private - * @param {ModeObject} mode - * @param {ModeConfig} config - * @returns {Operator} Operator - */ - private makeOperator; - /** - * @typedef {Object} CryptOption - * @property {import('./cryptoUtil').PaddingObject} padding - * @property {import('./cryptoUtil').EncodingObject} encoding - */ - /** - * Encrypt string - * @param {Buffer | Uint8Array | number[]} data - * @param {CryptOption} options - * @returns {string} encrypted string - */ - encrypt(data: Buffer | Uint8Array | number[], options?: { - padding: import('./cryptoUtil').PaddingObject; - encoding: import('./cryptoUtil').EncodingObject; - }): string; - /** - * Decrupt string - * @param {Buffer | Uint8Array | number[]} data - * @param {CryptOption} options - * @returns {string} decrypted string - */ - decrypt(data: Buffer | Uint8Array | number[], options?: { - padding: import('./cryptoUtil').PaddingObject; - encoding: import('./cryptoUtil').EncodingObject; - }): string; -} -declare namespace AesCryptor { - export { Padding }; - export { Encoding }; - export { Mode }; -} -import { Padding } from "./cryptoUtil"; -import { Encoding } from 
"./cryptoUtil"; -/** - * @typedef {Object} ModeObject - * @property {string} name - * @property {import('../type/CryptoJs').BlockCipherMode} op - * @property {ModeConfig} configTypes - * - * @typedef {Object} ModeList - * @property {ModeObject} ECB - * @property {ModeObject} CBC - * @property {ModeObject} CFB - * @property {ModeObject} OFB - * @property {ModeObject} CTR -*/ -/** - * @type {ModeList} -*/ -export const Mode: ModeList; -export { Padding, Encoding }; diff --git a/packages/parser-core/type/BaseBook.d.ts b/packages/parser-core/type/BaseBook.d.ts deleted file mode 100644 index bd1a74f2..00000000 --- a/packages/parser-core/type/BaseBook.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -export default BaseBook; -/** - * @abstract - * @class - */ -declare class BaseBook { - /** - * @virtual - * @returns {string} - */ - toRaw(): string; -} diff --git a/packages/parser-core/type/BaseItem.d.ts b/packages/parser-core/type/BaseItem.d.ts deleted file mode 100644 index fe2270dd..00000000 --- a/packages/parser-core/type/BaseItem.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -export default BaseItem; -export type BaseItemParam = { - size: number; -}; -/** - * @typedef {Object} BaseItemParam - * @property {number} size -*/ -declare class BaseItem { - /** - * @param {BaseItemParam} rawObj - */ - constructor(rawObj: BaseItemParam); - /** - * @public - * @type {number} - */ - public size: number; - /** - * @public - * @virtual - * @returns {string} - */ - public toRaw(): string; -} diff --git a/packages/parser-core/type/BaseParseContext.d.ts b/packages/parser-core/type/BaseParseContext.d.ts deleted file mode 100644 index b758fed0..00000000 --- a/packages/parser-core/type/BaseParseContext.d.ts +++ /dev/null @@ -1,55 +0,0 @@ -export default BaseParseContext; -export type BaseParserOption = { - /** - * If specified, unzip to that path. - */ - unzipPath: string; - /** - * If true, overwrite to unzipPath when unzip. (only using if unzipPath specified.) - */ - overwrite: boolean; -}; -export type BaseParserOptionType = { - unzipPath: string; - /** - * / - * - * /** - */ - overwrite: string; -}; -/** - * @typedef {Object} BaseParserOption - * @property {string} unzipPath If specified, unzip to that path. - * @property {boolean} overwrite If true, overwrite to unzipPath when unzip. (only using if unzipPath specified.) 
- * - * @typedef {Object} BaseParserOptionType - * @property {string} unzipPath - * @property {string} overwrite -* / - -/** - * @class - */ -declare class BaseParseContext { - /** - * @public - * @type {BaseParserOption} - */ - public options: BaseParserOption; - /** - * @public - * @type {import('./readEntries').ReadEntriesReturnType>} - */ - public entries: import('./readEntries').ReadEntriesReturnType; - /** - * @typedef {Object} RawBookObject - */ - /** - * @public - * @type {RawBookObject} - */ - public rawBook: { - [x: string]: import("./BaseBook").default; - }; -} diff --git a/packages/parser-core/type/BaseReadContext.d.ts b/packages/parser-core/type/BaseReadContext.d.ts deleted file mode 100644 index 303c3920..00000000 --- a/packages/parser-core/type/BaseReadContext.d.ts +++ /dev/null @@ -1,31 +0,0 @@ -export default BaseReadContext; -export type BaseReadOption = { - force: boolean; -}; -export type BaseReadOptionType = { - force: string; -}; -/** - * @typedef {Object} BaseReadOption - * @property {boolean} force - * - * @typedef {Object} BaseReadOptionType - * @property {string} force - */ -declare class BaseReadContext { - /** - * @pblic - * @type {BaseReadOption} - */ - options: BaseReadOption; - /** - * @public - * @type {import('./readEntries').ReadEntriesReturnType[]} - */ - public entries: import('./readEntries').ReadEntriesReturnType[]; - /** - * @public - * @type {Array} - */ - public items: Array; -} diff --git a/packages/parser-core/type/CryptoProvider.d.ts b/packages/parser-core/type/CryptoProvider.d.ts deleted file mode 100644 index e23877c2..00000000 --- a/packages/parser-core/type/CryptoProvider.d.ts +++ /dev/null @@ -1,69 +0,0 @@ -export default CryptoProvider; -export type CryptoProviderOption = string; -export type CryptoProviderPurpose = { - /** - * "read_in_zip" - */ - READ_IN_ZIP: CryptoProviderOption; - /** - * "read_in_dir" - */ - READ_IN_DIR: CryptoProviderOption; - /** - * "write" - */ - WRITE: CryptoProviderOption; -}; -export type Purpose = { - /** - * "read_in_zip" - */ - READ_IN_ZIP: CryptoProviderOption; - /** - * "read_in_dir" - */ - READ_IN_DIR: CryptoProviderOption; - /** - * "write" - */ - WRITE: CryptoProviderOption; -}; -declare class CryptoProvider { - isStreamMode: boolean; - /** - * Create or reuse AesCryptor by condition - * @abstract - * @param {string} filePath - * @param {string} purpose - * @returns {AesCryptor} - */ - getCryptor(filePath: string, purpose: string): AesCryptor; - /** - * Should execute encrypt or decrypt by condition if needed - * @abstract - * @param {Buffer} data - * @param {string} filePath - * @param {string} purpose - */ - run(data: Buffer, filePath: string, purpose: string): void; -} -declare namespace CryptoProvider { - export { Purpose }; -} -import AesCryptor from "./AesCryptor"; -/** - * @typedef {string} CryptoProviderOption - * - * @typedef {Object} CryptoProviderPurpose - * @property {CryptoProviderOption} READ_IN_ZIP "read_in_zip" - * @property {CryptoProviderOption} READ_IN_DIR "read_in_dir" - * @property {CryptoProviderOption} WRITE "write" -*/ -/** - * @enum {CryptoProviderPurpose} - */ -declare const Purpose: Readonly<{ - READ_IN_ZIP: string; - READ_IN_DIR: string; - WRITE: string; -}>; diff --git a/packages/parser-core/type/Logger.d.ts b/packages/parser-core/type/Logger.d.ts deleted file mode 100644 index ccb5cffe..00000000 --- a/packages/parser-core/type/Logger.d.ts +++ /dev/null @@ -1,136 +0,0 @@ -export default Logger; -export type LogLevel = { - /** - * "silent" - */ - SILENT: string; - /** - * 
"error" - */ - ERROR: string; - /** - * "warn" - */ - WARN: string; - /** - * "info" - */ - INFO: string; - /** - * "debug" - */ - DEBUG: string; - /** - * "verbose" - */ - VERBOSE: string; -}; -export type LoggerOptions = { - namespace: string; - logLevel: LogLevel; -}; -declare class Logger { - /** - * @param {LogLevel} current - * @param {LogLevel} target - * @returns {boolean} - */ - static confirm(current: LogLevel, target: LogLevel): boolean; - /** - * Construct Logger Class; - * @param {string} namespace - * @param {LogLevel} logLevel - */ - constructor(namespace: string, logLevel: LogLevel); - /** - * @private - */ - private _logLevel; - /** - * @private - */ - private namespace; - /** - * @private - */ - private _firstTime; - set logLevel(arg: string | LogLevel); - get logLevel(): string | LogLevel; - /** - * Log information - * @param {any?} message - * @param {any[]} ...optionalParams - */ - info(message: any | null, ...optionalParams: any[]): void; - /** - * Log warning - * @param {any?} message - * @param {any[]} ...optionalParams - */ - warn(message: any | null, ...optionalParams: any[]): void; - /** - * Log error - * @param {any?} message - * @param {any[]} ...optionalParams - */ - error(message: any | null, ...optionalParams: any[]): void; - /** - * Log degug - * @param {string?} message - * @param {any[]} ...optionalParams - */ - debug(message: string | null, ...optionalParams: any[]): void; - /** - * @async - * Measure run time onf a function. - * @param {(...any)=>Promise} func - * @param {any} thisArg - * @param {any} argsArray - * @param {any} message - * @param {any[]} optionalParams - * @returns {Promise} result of the run - * @template T - */ - measure(func: (...any: any[]) => Promise, thisArg: any, argsArray: any, message: any, ...optionalParams: any[]): Promise; - /** - * Measure run time of a function - * @param {(...any)=>T} func - * @param {any} thisArg - * @param {any} argsArray - * @param {any} message - * @param {any[]} optionalParams - * @returns {T} result of the function - * @template T - */ - measureSync(func: (...any: any[]) => T_2, thisArg: any, argsArray: any, message: any, ...optionalParams: any[]): T_2; - /** - * Measure the total time of this.measureSync - * @param {any?} message - * @param {any[]} optionalParams - */ - result(message: any | null, ...optionalParams: any[]): void; -} -/** - * @typedef LogLevel - * @property {string} SILENT "silent" - * @property {string} ERROR "error" - * @property {string} WARN "warn" - * @property {string} INFO "info" - * @property {string} DEBUG "debug" - * @property {string} VERBOSE "verbose" - * - * @typedef LoggerOptions - * @property {string} namespace - * @property {LogLevel} logLevel - */ -/** - * @enum {LogLevel} - */ -export const LogLevel: Readonly<{ - SILENT: string; - ERROR: string; - WARN: string; - INFO: string; - DEBUG: string; - VERBOSE: string; -}>; diff --git a/packages/parser-core/type/Parser.d.ts b/packages/parser-core/type/Parser.d.ts deleted file mode 100644 index bff7366a..00000000 --- a/packages/parser-core/type/Parser.d.ts +++ /dev/null @@ -1,300 +0,0 @@ -export default Parser; -export type BaseParseContext = import("./BaseParseContext").default; -export type BaseParserOption = { - /** - * If specified, unzip to that path. - */ - unzipPath: string; - /** - * If true, overwrite to unzipPath when unzip. (only using if unzipPath specified.) 
- */ - overwrite: boolean; -}; -export type BaseParserOptionType = { - unzipPath: string; - /** - * / - * - * /** - */ - overwrite: string; -}; -export type BaseBook = import("./BaseBook").default; -export type BaseItem = import("./BaseItem").default; -export type BaseReadContext = import("./BaseReadContext").default; -export type BaseReadOption = { - force: boolean; -}; -export type BaseReadOptionType = { - force: string; -}; -export type ParserAction = string; -export type ActionEnum = { - /** - * "parse" - */ - PARSER: ParserAction; - /** - * "readItems" - */ - READ_ITEMS: ParserAction; - /** - * "unzip" - */ - UNZIP: ParserAction; -}; -export type OnProgressCallBack = (step: number, totalStep: number, action: string) => void; -export type Task = { - /** - * Action executor - */ - fun: Function; - /** - * Action name - */ - name: string; -}; -declare class Parser { - /** - * Get default values of parse options - * @static - * @return {BaseParserOption} - */ - static get parseDefaultOptions(): import("./BaseParseContext").BaseParserOption; - /** - * Get types of parse options - * @static - * @return {BaseParserOptionType} - */ - static get parseOptionTypes(): import("./BaseParseContext").BaseParserOptionType; - /** - * Get default values of read options - * @static - * @returns {BaseReadOption | void} - */ - static get readDefaultOptions(): void | import("./BaseReadContext").BaseReadOption; - /** - * Get types of read option - * @static - * @returns {BaseReadOptionType | void} - */ - static get readOptionTypes(): void | import("./BaseReadContext").BaseReadOptionType; - /** - * Create new Parser - * @param {string} input file or directory - * @param {CryptoProvider} [cryptoProvider] en/decrypto provider - * @param {import('./Logger').LoggerOptions} [loggerOptions] logger options - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EINVAL} invalid input - * @example - * class FooParser extends Parser { - * ... - * } - * new FooParser('./foo/bar.zip' or './foo/bar'); - */ - constructor(input: string, cryptoProvider?: CryptoProvider, loggerOptions?: import('./Logger').LoggerOptions); - /** - * Get file or directory - * @returns {string} - * - */ - get input(): string; - /** - * Get en/decrypto provider - * @returns {CryptoProvider} - */ - get cryptoProvider(): CryptoProvider; - /** - * Get logger - * @returns {Logger} - */ - get logger(): Logger; - /** - * Set callback that tells progress of parse and readItems. 
- * @param {OnProgressCallBack} onProgress - * @example - * parser.onProgress = (step, totalStep, action) => { - * console.log(`[${action}] ${step} / ${totalStep}`); - * } - * @see Parser.Action - */ - set onProgress(arg: (step: number, totalStep: number, action: string) => void); - /** - * Get onProgress callback - * @returns {OnProgressCallBack} - */ - get onProgress(): (step: number, totalStep: number, action: string) => void; - /** - * @virtual - * @protected - * @returns {new ()=>BaseParseContext} - */ - protected _getParseContextClass(): new () => BaseParseContext; - /** - * @virtual - * @protected - * @returns {new ()=>BaseBook} - */ - protected _getBookClass(): new () => BaseBook; - /** - * @virtual - * @protected - * @returns {new ()=>BaseReadContext} - */ - protected _getReadContextClass(): new () => BaseReadContext; - /** - * @virtual - * @protected - * @returns {new ()=>BaseItem} - */ - protected _getReadItemClass(): new () => BaseItem; - /** - * @protected - * @returns {Task[]} return before tasks - */ - protected _parseBeforeTasks(): Task[]; - /** - * @protected - * @returns {Task[]} return tasks - */ - protected _parseTasks(): Task[]; - /** - * @protected - * @returns {Task[]} return after tasks - */ - protected _parseAfterTasks(): Task[]; - /** - * Parse the input - * @async - * @param {BaseParserOption} [options] parse options - * @returns {Promise} return Book - * @see Parser.parseDefaultOptions - * @see Parser.parseOptionTypes - */ - parse(options?: BaseParserOption): Promise; - /** - * Validate parse options and get entries from input - * @async - * @param {BaseParserOption} [options] parse options - * @returns {Promise} return Context containing parse options, entries - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - _prepareParse(options?: BaseParserOption): Promise; - /** - * Unzipping if zip source and unzipPath option specified - * @async - * @param {BaseParseContext} context intermediate result - * @returns {Promise} return Context (no change at this step) - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.EEXIST} file or directory already exists - */ - _unzipIfNeeded(context: BaseParseContext): Promise; - /** - * Create new Book from context - * @protected - * @param {BaseParseContext} context intermediate result - * @returns {Promise} return Book - */ - protected _createBook(context: BaseParseContext): Promise; - /** - * @protected - * @returns {Task[]} return before tasks - */ - protected _readBeforeTasks(): Task[]; - /** - * @protected - * @returns {Task[]} return tasks - */ - protected _readTasks(): Task[]; - /** - * @protected - * @returns {Task[]} return after tasks - */ - protected _readAfterTasks(): Task[]; - /** - * Reading contents of Item - * @param {BaseItem} item target - * @param {BaseReadOption} [options] read options - * @returns {(string|Buffer)} reading result - * @see Parser.readDefaultOptions - * @see Parser.readOptionTypes - */ - readItem(item: BaseItem, options?: BaseReadOption): (string | Buffer); - /** - * Reading contents of Items - * @async - * @param {BaseItem[]} items targets - * @param {BaseReadOption} [options] read options - * @returns {(string|Buffer)[]} reading results - * @see Parser.readDefaultOptions - * @see Parser.readOptionTypes - */ - readItems(items: BaseItem[], options?: BaseReadOption): (string | Buffer)[]; - /** - * Validate read options and get entries from input - * @async - * @param 
{Item[]} items targets - * @param {BaseReadOption} [options] read options - * @returns {Promise} returns Context containing target items, read options, entries - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - _prepareRead(items: any[], options?: BaseReadOption): Promise; - /** - * Contents is read using loader suitable for context - * @async - * @override - * @param {ReadContext} context properties required for reading - * @returns {Promise>} reading results - * @throws {Errors.ENOFILE} no such file - * @see Parser.readDefaultOptions.force - */ - _read(context: any): Promise>; - /** - * @private - * @returns {Task[]} return tasks - */ - private _unzipTasks; - /** - * Unzip - * @async - * @param {string} unzipPath - * @param {boolean} overwrite - * @returns {Promise} success - * @throws {Errors.EINVAL} invalid options or value type - * @throws {Errors.ENOENT} no such file or directory - * @throws {Errors.ENOFILE} no such file - */ - unzip(unzipPath: string, overwrite?: boolean): Promise; -} -declare namespace Parser { - export { Action }; -} -import CryptoProvider from "./CryptoProvider"; -import Logger from "./Logger"; -/** - * @typedef {import('./BaseParseContext').default} BaseParseContext - * @typedef {import('./BaseParseContext').BaseParserOption} BaseParserOption - * @typedef {import('./BaseParseContext').BaseParserOptionType} BaseParserOptionType - * @typedef {import('./BaseBook').default} BaseBook - * @typedef {import('./BaseItem').default} BaseItem - * @typedef {import('./BaseReadContext').default} BaseReadContext - * @typedef {import('./BaseReadContext').BaseReadOption} BaseReadOption - * @typedef {import('./BaseReadContext').BaseReadOptionType} BaseReadOptionType - */ -/** - * @typedef {string} ParserAction - * - * @typedef {Object} ActionEnum - * @property {ParserAction} PARSER "parse" - * @property {ParserAction} READ_ITEMS "readItems" - * @property {ParserAction} UNZIP "unzip" - */ -/** - * @readonly - * @type {ActionEnum} - */ -declare const Action: ActionEnum; diff --git a/packages/parser-core/type/Version.d.ts b/packages/parser-core/type/Version.d.ts deleted file mode 100644 index bd5efb32..00000000 --- a/packages/parser-core/type/Version.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -export default Version; -declare class Version { - /** - * Construct Version object with version. 
- * @param {string} version - */ - constructor(version: string); - /** - * @private - */ - private major; - /** - * @private - */ - private minor; - /** - * @private - */ - private patch; - /** - * Get Version as string - * @returns {string} version - */ - toString(): string; -} diff --git a/packages/parser-core/type/bufferUtil.d.ts b/packages/parser-core/type/bufferUtil.d.ts deleted file mode 100644 index 5dbaf130..00000000 --- a/packages/parser-core/type/bufferUtil.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -/** - * Removes all the leading non zero buffer chunk - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer - */ -export function trimStart(buffer: Buffer): Buffer; -/** - * Removes all the trailing non zero buffer chunk - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer - */ -export function trimEnd(buffer: Buffer): Buffer; -/** - * Trim a buffer - * @param {Buffer} buffer - * @returns {Buffer} Trimmed buffer - */ -export function trim(buffer: Buffer): Buffer; diff --git a/packages/parser-core/type/cacheFile.d.ts b/packages/parser-core/type/cacheFile.d.ts deleted file mode 100644 index e0a12947..00000000 --- a/packages/parser-core/type/cacheFile.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Get path to store cache - * @returns {string} Path of a temp directory - */ -export function getCachePath(): string; -/** - * Remove the cache file with a key - * @param {string} key Key of a cache file - * @returns {void} - */ -export function removeCacheFile(key: string): void; -/** - * Remove every cache file - * @returns {void} - */ -export function removeAllCacheFiles(): void; -/** - * Read the cache file with a key - * @param {string} key Key of a cache file - * @returns {string|null} `null` if cache does not exists, `string` otherwise - */ -export function readCacheFile(key: string): string | null; -/** - * Write cache file - * @param {string} key Key of a cache file - * @param {string | NodeJS.ArrayBufferView} message Message to store - * @param {boolean} [overwrite=false] - * @returns {void} - */ -export function writeCacheFile(key: string, message: string | NodeJS.ArrayBufferView, overwrite?: boolean): void; diff --git a/packages/parser-core/type/createCryptoStream.d.ts b/packages/parser-core/type/createCryptoStream.d.ts deleted file mode 100644 index 22be02dd..00000000 --- a/packages/parser-core/type/createCryptoStream.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -export default createCryptoStream; -/** - * @param {string} filePath - * @param {number} totalSize - * @param {CryptoProvider} cryptoProvider - * @param {import('./CryptoProvider').CryptoProviderOption} purpose - * @returns {es.MapStream} - */ -declare function createCryptoStream(filePath: string, totalSize: number, cryptoProvider: CryptoProvider, purpose: import('./CryptoProvider').CryptoProviderOption): es.MapStream; -import CryptoProvider from "./CryptoProvider"; -import * as es from "event-stream"; diff --git a/packages/parser-core/type/createSliceStream.d.ts b/packages/parser-core/type/createSliceStream.d.ts deleted file mode 100644 index 51012ea0..00000000 --- a/packages/parser-core/type/createSliceStream.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -export default createSliceStream; -/** - * @param {number} start=0 - * @param {number} end=Infinity - * @returns {es.MapStream} - */ -declare function createSliceStream(start?: number, end?: number): es.MapStream; -import * as es from "event-stream"; diff --git a/packages/parser-core/type/cryptoUtil.d.ts b/packages/parser-core/type/cryptoUtil.d.ts deleted file mode 100644 index 
4a0c46a2..00000000 --- a/packages/parser-core/type/cryptoUtil.d.ts +++ /dev/null @@ -1,96 +0,0 @@ -export type PaddingObject = { - name: string; - op: PaddingList; - pad: (data: CryptoJs.lib.WordArray) => void; - unpad: (data: CryptoJs.lib.WordArray) => void; -}; -export type PaddingList = { - AUTO: PaddingObject; - PKCS7: PaddingObject; - NONE: PaddingObject; -}; -export type IterableObject = any[] | Uint8Array | Buffer; -export type DecodeFunction = (uint8ArrayOrBufferOrArray: IterableObject) => CryptoJs.lib.WordArray; -export type EncodeFunction = (wordArray: CryptoJs.lib.WordArray) => Uint8Array; -export type UINT8Object = { - decode: DecodeFunction; - encode: EncodeFunction; -}; -export type EncodingObject = { - name: string; - decode: (str: string | IterableObject) => CryptoJs.lib.WordArray; - encode: (wordArray: CryptoJs.lib.WordArray) => (string | Uint8Array); -}; -export type EncodingList = { - UTF8: EncodingObject; - HEX: EncodingObject; - UINT8: EncodingObject; - BUFFER: EncodingObject; -}; -/** - * Use `Encoding` - */ -export type HashFunction = (any: any, encoding?: EncodingObject) => string; -export type HashList = { - md5: HashFunction; - sha1: HashFunction; - sha224: HashFunction; - sha256: HashFunction; - sha384: HashFunction; - sha512: HashFunction; - sha3: HashFunction; - ripemd160: HashFunction; -}; -/** - * @typedef {Object} PaddingObject - * @property {string} name - * @property {Padding} op - * @property {(data:CryptoJs.lib.WordArray)=>void} pad - * @property {(data:CryptoJs.lib.WordArray)=>void} unpad - */ -/** - * @typedef {Object} PaddingList - * @property {PaddingObject} AUTO - * @property {PaddingObject} PKCS7 - * @property {PaddingObject} NONE - */ -/** - * @type {PaddingList} - */ -export const Padding: PaddingList; -/** - * @typedef {Object} EncodingObject - * @property {string} name - * @property {(str: string|IterableObject)=>CryptoJs.lib.WordArray} decode - * @property {(wordArray: CryptoJs.lib.WordArray)=>(string|Uint8Array)} encode - */ -/** - * @typedef {Object} EncodingList - * @property {EncodingObject} UTF8 - * @property {EncodingObject} HEX - * @property {EncodingObject} UINT8 - * @property {EncodingObject} BUFFER - */ -/** - * @type {EncodingList} - */ -export const Encoding: EncodingList; -/** - * @typedef {(any:any, encoding?:EncodingObject)=>string} HashFunction Use `Encoding` - */ -/** - * @typedef {Object} HashList - * @property {HashFunction} md5 - * @property {HashFunction} sha1 - * @property {HashFunction} sha224 - * @property {HashFunction} sha256 - * @property {HashFunction} sha384 - * @property {HashFunction} sha512 - * @property {HashFunction} sha3 - * @property {HashFunction} ripemd160 - */ -/** - * @type {HashList} - */ -export const Hash: HashList; -import * as CryptoJs from "crypto-js"; diff --git a/packages/parser-core/type/errors.d.ts b/packages/parser-core/type/errors.d.ts deleted file mode 100644 index 70fc67bf..00000000 --- a/packages/parser-core/type/errors.d.ts +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Create error - * @param {ErrorType} type - * @param {string[]} args - * @returns {Error} - */ -export function createError(type: ErrorType, ...args: string[]): Error; -/** - * Create MustOverride error - * @returns {Error} - */ -export function mustOverride(): Error; -export type Errors = { - ENOENT: ErrorType; - ENOFILE: ErrorType; - EEXIST: ErrorType; - EINVAL: ErrorType; - ENOELMT: ErrorType; - ENOATTR: ErrorType; - EREQPRM: ErrorType; - EINTR: ErrorType; - ECRYT: ErrorType; - EPDFJS: ErrorType; - ENOIMP: ErrorType; -}; 
-export type ErrorType = { - code: string; - format: string; -}; -export namespace Errors { - namespace ENOENT { - const code: string; - const format: string; - } - namespace ENOFILE { - const code_1: string; - export { code_1 as code }; - const format_1: string; - export { format_1 as format }; - } - namespace EEXIST { - const code_2: string; - export { code_2 as code }; - const format_2: string; - export { format_2 as format }; - } - namespace EINVAL { - const code_3: string; - export { code_3 as code }; - const format_3: string; - export { format_3 as format }; - } - namespace ENOELMT { - const code_4: string; - export { code_4 as code }; - const format_4: string; - export { format_4 as format }; - } - namespace ENOATTR { - const code_5: string; - export { code_5 as code }; - const format_5: string; - export { format_5 as format }; - } - namespace EREQPRM { - const code_6: string; - export { code_6 as code }; - const format_6: string; - export { format_6 as format }; - } - namespace EINTR { - const code_7: string; - export { code_7 as code }; - const format_7: string; - export { format_7 as format }; - } - namespace ECRYT { - const code_8: string; - export { code_8 as code }; - const format_8: string; - export { format_8 as format }; - } - namespace EPDFJS { - const code_9: string; - export { code_9 as code }; - const format_9: string; - export { format_9 as format }; - } - namespace ENOIMP { - const code_10: string; - export { code_10 as code }; - const format_10: string; - export { format_10 as format }; - } -} diff --git a/packages/parser-core/type/index.d.ts b/packages/parser-core/type/index.d.ts deleted file mode 100644 index 1167fa46..00000000 --- a/packages/parser-core/type/index.d.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { trim } from "./bufferUtil"; -import { trimStart } from "./bufferUtil"; -import { trimEnd } from "./bufferUtil"; -import { getCachePath } from "./cacheFile"; -import { removeCacheFile } from "./cacheFile"; -import { removeAllCacheFiles } from "./cacheFile"; -import { readCacheFile } from "./cacheFile"; -import { writeCacheFile } from "./cacheFile"; -import BaseBook from "./BaseBook"; -import BaseItem from "./BaseItem"; -import BaseParseContext from "./BaseParseContext"; -import BaseReadContext from "./BaseReadContext"; -import AesCryptor from "./AesCryptor"; -import CryptoProvider from "./CryptoProvider"; -import { Errors } from "./errors"; -import { createError } from "./errors"; -import { mustOverride } from "./errors"; -import { Hash } from "./cryptoUtil"; -import Logger from "./Logger"; -import { LogLevel } from "./Logger"; -import mergeObjects from "./mergeObjects"; -import parseBool from "./parseBool"; -import Parser from "./Parser"; -import { safeDirname } from "./pathUtil"; -import { safePath } from "./pathUtil"; -import { safePathJoin } from "./pathUtil"; -import { getPathes } from "./pathUtil"; -import readEntries from "./readEntries"; -import { conditionally } from "./streamUtil"; -import { MatchOption } from "./stringUtil"; -import { stringContains } from "./stringUtil"; -import { safeDecodeURI } from "./stringUtil"; -import { getType } from "./typecheck"; -import { isArray } from "./typecheck"; -import { isBool } from "./typecheck"; -import { isExists } from "./typecheck"; -import { isFunc } from "./typecheck"; -import { isObject } from "./typecheck"; -import { isString } from "./typecheck"; -import { isUrl } from "./typecheck"; -import validateOptions from "./validateOptions"; -import openZip from "./zipUtil"; -import Version from "./Version"; 
-export { trim, trimStart, trimEnd, getCachePath, removeCacheFile, removeAllCacheFiles, readCacheFile, writeCacheFile, BaseBook, BaseItem, BaseParseContext, BaseReadContext, AesCryptor, CryptoProvider, Errors, createError, mustOverride, Hash, Logger, LogLevel, mergeObjects, parseBool, Parser, safeDirname, safePath, safePathJoin, getPathes, readEntries, conditionally, MatchOption, stringContains, safeDecodeURI, getType, isArray, isBool, isExists, isFunc, isObject, isString, isUrl, validateOptions, openZip, Version }; diff --git a/packages/parser-core/type/mergeObjects.d.ts b/packages/parser-core/type/mergeObjects.d.ts deleted file mode 100644 index ee886627..00000000 --- a/packages/parser-core/type/mergeObjects.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Merge Objects - * @param {T} obj1 - * @param {S} obj2 - * @returns {K} merged object - * @template T,S,K - */ -export default function mergeObjects(obj1: T, obj2: S): K; diff --git a/packages/parser-core/type/parseBool.d.ts b/packages/parser-core/type/parseBool.d.ts deleted file mode 100644 index f516a57f..00000000 --- a/packages/parser-core/type/parseBool.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Return boolean form of any input - * @param {any} any - * @returns {boolean} - */ -export default function parseBool(any: any): boolean; diff --git a/packages/parser-core/type/pathUtil.d.ts b/packages/parser-core/type/pathUtil.d.ts deleted file mode 100644 index b1b21477..00000000 --- a/packages/parser-core/type/pathUtil.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/** - * @param {string} target - * @returns {string} - */ -export function safePath(target: string): string; -/** - * @param {string} target - * @returns {string} - */ -export function safeDirname(target: string): string; -/** - * @param {string[]} components - * @returns {string} - */ -export function safePathJoin(...components: string[]): string; -/** - * @param {string} target - * @returns {string[]} - */ -export function getPathes(target: string): string[]; diff --git a/packages/parser-core/type/readEntries.d.ts b/packages/parser-core/type/readEntries.d.ts deleted file mode 100644 index 7584d982..00000000 --- a/packages/parser-core/type/readEntries.d.ts +++ /dev/null @@ -1,45 +0,0 @@ -/** - * @typedef {FileEntryObject} ReadEntriesReturnType - */ -/** - * @async - * @param {string} input - * @param {CryptoProvider} cryptoProvider - * @param {import('./Logger').default} logger - * @returns {Promise} - */ -export default function readEntries(input: string, cryptoProvider: CryptoProvider, logger: import('./Logger').default): Promise; -export type FileEntryObject = { - first: S; - length: number; - source: T; - get: (idx: number) => S; - find: (entryPath: string, strict: boolean) => S; - forEach: (callback: (value: S, index: number, array: S[]) => void) => void; - map: (callback: (value: S, index: number, array: S[]) => any) => void; - sort: (callback: (a: S, b: S) => number) => void; -}; -export type EntryBasicInformation = { - entryPath: string; - size: number; - getFile: (options: { - endocing: string; - end: number; - }) => (Promise | Buffer); -}; -export type ZipfileEntryInformation = { - method: string; - extraFieldLength: number; -}; -export type IZipEntryPlus = import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation; -export type ReadEntriesReturnType = { - first: EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation); - length: number; - source: string; - get: (idx: number) => EntryBasicInformation | 
(import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation); - find: (entryPath: string, strict: boolean) => EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation); - forEach: (callback: (value: EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation), index: number, array: (EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation))[]) => void) => void; - map: (callback: (value: EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation), index: number, array: (EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation))[]) => any) => void; - sort: (callback: (a: EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation), b: EntryBasicInformation | (import("adm-zip").IZipEntry & EntryBasicInformation & ZipfileEntryInformation)) => number) => void; -}; -import CryptoProvider from "./CryptoProvider"; diff --git a/packages/parser-core/type/streamUtil.d.ts b/packages/parser-core/type/streamUtil.d.ts deleted file mode 100644 index eacc01fb..00000000 --- a/packages/parser-core/type/streamUtil.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Apply event stream conditionally - * @param {boolean | (()=>boolean)} condition - * @param {es.MapStream} stream - * @returns {es.MapStream} Mapstream - */ -export function conditionally(condition: boolean | (() => boolean), stream: es.MapStream): es.MapStream; -import * as es from "event-stream"; diff --git a/packages/parser-core/type/stringUtil.d.ts b/packages/parser-core/type/stringUtil.d.ts deleted file mode 100644 index b8393ea5..00000000 --- a/packages/parser-core/type/stringUtil.d.ts +++ /dev/null @@ -1,41 +0,0 @@ -export type MatchOption = { - /** - * "0" - */ - MATCHING: number; - /** - * "1" - */ - CONTAINING: number; - /** - * "2" - */ - STARTSWITH: number; - /** - * "3" - */ - ENDSWITH: number; -}; -/** - * @typedef MatchOption - * @property {number} MATCHING "0" - * @property {number} CONTAINING "1" - * @property {number} STARTSWITH "2" - * @property {number} ENDSWITH "3" - */ -/** - * @type {MatchOption} - */ -export const MatchOption: MatchOption; -/** - * @param {string[]} array=[] - * @param {string} string='' - * @param {MatchOption} matchOption=MatchOption.MATCHING - * @returns {boolean} - */ -export function stringContains(array?: string[], string?: string, matchOption?: MatchOption): boolean; -/** - * Decode URI - * @param {string} uri - */ -export function safeDecodeURI(uri: string): string; diff --git a/packages/parser-core/type/typecheck.d.ts b/packages/parser-core/type/typecheck.d.ts deleted file mode 100644 index d7e04457..00000000 --- a/packages/parser-core/type/typecheck.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Get type as string - * @param {any} any - * @param {boolean} strict=false - * @returns {string} - */ -export function getType(any: any, strict?: boolean): string; -/** - * Return true if passed argument is an array - * @param {any} any - * @returns {boolean} - */ -export function isArray(any: any): boolean; -/** - * Return true if passed argument is a boolean - * @param {any} any - * @returns {boolean} - */ -export function isBool(any: any): boolean; -/** - * Return true if passed argument is undefined nor null - * @param {any} any - * @returns {boolean} - */ -export function isExists(any: any): boolean; -/** - * Return true if passed 
argument is function - * @param {any} any - * @returns {boolean} - */ -export function isFunc(any: any): boolean; -/** - * Return true if passed argument is object - * @param {any} any - * @returns {boolean} - */ -export function isObject(any: any): boolean; -/** - * Return true if passed argument is string - * @param {any} any - * @returns {boolean} - */ -export function isString(any: any): boolean; -/** - * Return true if passed argument is url - * @param {any} string - * @returns {boolean} - */ -export function isUrl(string: any): boolean; diff --git a/packages/parser-core/type/validateOptions.d.ts b/packages/parser-core/type/validateOptions.d.ts deleted file mode 100644 index c388f716..00000000 --- a/packages/parser-core/type/validateOptions.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Validate option with interface. it will return void if it passes, throw error otherwise. - * @param {T} options - * @param {S} types - * @param {boolean} strict - * @template T, S - * @returns {void} - */ -export default function validateOptions(options: T, types: S, strict?: boolean): void; diff --git a/packages/parser-core/type/zipUtil.d.ts b/packages/parser-core/type/zipUtil.d.ts deleted file mode 100644 index 9336d68c..00000000 --- a/packages/parser-core/type/zipUtil.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * @param {string | Buffer} file - * @param {CryptoProvider} cryptoProvider - * @param {Logger} logger - * @returns {ZipFileInformation} - * @throws {Errors.ENOENT} When file can't be found - */ -export default function openZip(file: string | Buffer, cryptoProvider: CryptoProvider, logger: any): ZipFileInformation; -export type GetFileOptions = { - encoding: string; - end: number; -}; -export type ZipFileInformation = { - file: string; - files: any[]; - cryptoProvider: CryptoProvider; - find: (entryPath: string) => any; - getFile: (entry: any, options?: GetFileOptions) => Promise; - extractAll: (unzipPath: string, overwrite?: boolean) => Promise; - logger: import('./Logger').default; -}; -import CryptoProvider from "./CryptoProvider"; diff --git a/packages/parser-core/yarn.lock b/packages/parser-core/yarn.lock index 4cedcff9..85621ba2 100644 --- a/packages/parser-core/yarn.lock +++ b/packages/parser-core/yarn.lock @@ -2,6 +2,174 @@ # yarn lockfile v1 +"@esbuild/android-arm64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.17.tgz#164b054d58551f8856285f386e1a8f45d9ba3a31" + integrity sha512-jaJ5IlmaDLFPNttv0ofcwy/cfeY4bh/n705Tgh+eLObbGtQBK3EPAu+CzL95JVE4nFAliyrnEu0d32Q5foavqg== + +"@esbuild/android-arm@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.17.tgz#1b3b5a702a69b88deef342a7a80df4c894e4f065" + integrity sha512-E6VAZwN7diCa3labs0GYvhEPL2M94WLF8A+czO8hfjREXxba8Ng7nM5VxV+9ihNXIY1iQO1XxUU4P7hbqbICxg== + +"@esbuild/android-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.17.tgz#6781527e3c4ea4de532b149d18a2167f06783e7f" + integrity sha512-446zpfJ3nioMC7ASvJB1pszHVskkw4u/9Eu8s5yvvsSDTzYh4p4ZIRj0DznSl3FBF0Z/mZfrKXTtt0QCoFmoHA== + +"@esbuild/darwin-arm64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.17.tgz#c5961ef4d3c1cc80dafe905cc145b5a71d2ac196" + integrity sha512-m/gwyiBwH3jqfUabtq3GH31otL/0sE0l34XKpSIqR7NjQ/XHQ3lpmQHLHbG8AHTGCw8Ao059GvV08MS0bhFIJQ== + +"@esbuild/darwin-x64@0.17.17": + version "0.17.17" + resolved 
"https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.17.tgz#b81f3259cc349691f67ae30f7b333a53899b3c20" + integrity sha512-4utIrsX9IykrqYaXR8ob9Ha2hAY2qLc6ohJ8c0CN1DR8yWeMrTgYFjgdeQ9LIoTOfLetXjuCu5TRPHT9yKYJVg== + +"@esbuild/freebsd-arm64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.17.tgz#db846ad16cf916fd3acdda79b85ea867cb100e87" + integrity sha512-4PxjQII/9ppOrpEwzQ1b0pXCsFLqy77i0GaHodrmzH9zq2/NEhHMAMJkJ635Ns4fyJPFOlHMz4AsklIyRqFZWA== + +"@esbuild/freebsd-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.17.tgz#4dd99acbaaba00949d509e7c144b1b6ef9e1815b" + integrity sha512-lQRS+4sW5S3P1sv0z2Ym807qMDfkmdhUYX30GRBURtLTrJOPDpoU0kI6pVz1hz3U0+YQ0tXGS9YWveQjUewAJw== + +"@esbuild/linux-arm64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.17.tgz#7f9274140b2bb9f4230dbbfdf5dc2761215e30f6" + integrity sha512-2+pwLx0whKY1/Vqt8lyzStyda1v0qjJ5INWIe+d8+1onqQxHLLi3yr5bAa4gvbzhZqBztifYEu8hh1La5+7sUw== + +"@esbuild/linux-arm@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.17.tgz#5c8e44c2af056bb2147cf9ad13840220bcb8948b" + integrity sha512-biDs7bjGdOdcmIk6xU426VgdRUpGg39Yz6sT9Xp23aq+IEHDb/u5cbmu/pAANpDB4rZpY/2USPhCA+w9t3roQg== + +"@esbuild/linux-ia32@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.17.tgz#18a6b3798658be7f46e9873fa0c8d4bec54c9212" + integrity sha512-IBTTv8X60dYo6P2t23sSUYym8fGfMAiuv7PzJ+0LcdAndZRzvke+wTVxJeCq4WgjppkOpndL04gMZIFvwoU34Q== + +"@esbuild/linux-loong64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.17.tgz#a8d93514a47f7b4232716c9f02aeb630bae24c40" + integrity sha512-WVMBtcDpATjaGfWfp6u9dANIqmU9r37SY8wgAivuKmgKHE+bWSuv0qXEFt/p3qXQYxJIGXQQv6hHcm7iWhWjiw== + +"@esbuild/linux-mips64el@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.17.tgz#4784efb1c3f0eac8133695fa89253d558149ee1b" + integrity sha512-2kYCGh8589ZYnY031FgMLy0kmE4VoGdvfJkxLdxP4HJvWNXpyLhjOvxVsYjYZ6awqY4bgLR9tpdYyStgZZhi2A== + +"@esbuild/linux-ppc64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.17.tgz#ef6558ec5e5dd9dc16886343e0ccdb0699d70d3c" + integrity sha512-KIdG5jdAEeAKogfyMTcszRxy3OPbZhq0PPsW4iKKcdlbk3YE4miKznxV2YOSmiK/hfOZ+lqHri3v8eecT2ATwQ== + +"@esbuild/linux-riscv64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.17.tgz#13a87fdbcb462c46809c9d16bcf79817ecf9ce6f" + integrity sha512-Cj6uWLBR5LWhcD/2Lkfg2NrkVsNb2sFM5aVEfumKB2vYetkA/9Uyc1jVoxLZ0a38sUhFk4JOVKH0aVdPbjZQeA== + +"@esbuild/linux-s390x@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.17.tgz#83cb16d1d3ac0dca803b3f031ba3dc13f1ec7ade" + integrity sha512-lK+SffWIr0XsFf7E0srBjhpkdFVJf3HEgXCwzkm69kNbRar8MhezFpkIwpk0qo2IOQL4JE4mJPJI8AbRPLbuOQ== + +"@esbuild/linux-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.17.tgz#7bc400568690b688e20a0c94b2faabdd89ae1a79" + integrity sha512-XcSGTQcWFQS2jx3lZtQi7cQmDYLrpLRyz1Ns1DzZCtn898cWfm5Icx/DEWNcTU+T+tyPV89RQtDnI7qL2PObPg== + +"@esbuild/netbsd-x64@0.17.17": + version "0.17.17" + resolved 
"https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.17.tgz#1b5dcfbc4bfba80e67a11e9148de836af5b58b6c" + integrity sha512-RNLCDmLP5kCWAJR+ItLM3cHxzXRTe4N00TQyQiimq+lyqVqZWGPAvcyfUBM0isE79eEZhIuGN09rAz8EL5KdLA== + +"@esbuild/openbsd-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.17.tgz#e275098902291149a5dcd012c9ea0796d6b7adff" + integrity sha512-PAXswI5+cQq3Pann7FNdcpSUrhrql3wKjj3gVkmuz6OHhqqYxKvi6GgRBoaHjaG22HV/ZZEgF9TlS+9ftHVigA== + +"@esbuild/sunos-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.17.tgz#10603474866f64986c0370a2d4fe5a2bb7fee4f5" + integrity sha512-V63egsWKnx/4V0FMYkr9NXWrKTB5qFftKGKuZKFIrAkO/7EWLFnbBZNM1CvJ6Sis+XBdPws2YQSHF1Gqf1oj/Q== + +"@esbuild/win32-arm64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.17.tgz#521a6d97ee0f96b7c435930353cc4e93078f0b54" + integrity sha512-YtUXLdVnd6YBSYlZODjWzH+KzbaubV0YVd6UxSfoFfa5PtNJNaW+1i+Hcmjpg2nEe0YXUCNF5bkKy1NnBv1y7Q== + +"@esbuild/win32-ia32@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.17.tgz#56f88462ebe82dad829dc2303175c0e0ccd8e38e" + integrity sha512-yczSLRbDdReCO74Yfc5tKG0izzm+lPMYyO1fFTcn0QNwnKmc3K+HdxZWLGKg4pZVte7XVgcFku7TIZNbWEJdeQ== + +"@esbuild/win32-x64@0.17.17": + version "0.17.17" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.17.tgz#2b577b976e6844106715bbe0cdc57cd1528063f9" + integrity sha512-FNZw7H3aqhF9OyRQbDDnzUApDXfC1N6fgBhkqEO2jvYCJ+DxMTfZVqg3AX0R1khg1wHTBRD5SdcibSJ+XF6bFg== + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/sourcemap-codec@1.4.14": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.18" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + integrity 
sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + "@types/adm-zip@^0.4.34": version "0.4.34" resolved "https://registry.yarnpkg.com/@types/adm-zip/-/adm-zip-0.4.34.tgz#62ac859eb2af6024362a1b3e43527ab79e0c624e" @@ -55,11 +223,100 @@ adm-zip@^0.5.5: resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.5.5.tgz#b6549dbea741e4050309f1bb4d47c47397ce2c4f" integrity sha512-IWwXKnCbirdbyXSfUDvCCrmYrOHANRZcc8NcRrvTlIApdl7PwE9oGcsYvNeJPAVY1M+70b4PxXGKIf8AEuiQ6w== +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity 
sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +bundle-require@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/bundle-require/-/bundle-require-4.0.1.tgz#2cc1ad76428043d15e0e7f30990ee3d5404aa2e3" + integrity sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ== + dependencies: + load-tsconfig "^0.2.3" + +cac@^6.7.12: + version "6.7.14" + resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" + integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== + "charenc@>= 0.0.1": version "0.0.2" resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667" integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc= +chokidar@^3.5.1: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +commander@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + "crypt@>= 0.0.1": version "0.0.2" resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" @@ -70,11 +327,53 @@ crypto-js@^4.0.0: resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.0.0.tgz#2904ab2677a9d042856a2ea2ef80de92e4a36dcc" integrity sha512-bzHZN8Pn+gS7DQA6n+iUmBfl0hO5DJq++QP3U6uTucDtk/0iGpXd/Gg7CGR0p8tJhofJyaKoWBuJI4eAO00BBg== +debug@^4.3.1: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + duplexer@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== +esbuild@^0.17.6: + version "0.17.17" + resolved 
"https://registry.yarnpkg.com/esbuild/-/esbuild-0.17.17.tgz#fa906ab11b11d2ed4700f494f4f764229b25c916" + integrity sha512-/jUywtAymR8jR4qsa2RujlAF7Krpt5VWi72Q2yuLD4e/hvtNcFQ0I1j8m/bxq238pf3/0KO5yuXNpuLx8BE1KA== + optionalDependencies: + "@esbuild/android-arm" "0.17.17" + "@esbuild/android-arm64" "0.17.17" + "@esbuild/android-x64" "0.17.17" + "@esbuild/darwin-arm64" "0.17.17" + "@esbuild/darwin-x64" "0.17.17" + "@esbuild/freebsd-arm64" "0.17.17" + "@esbuild/freebsd-x64" "0.17.17" + "@esbuild/linux-arm" "0.17.17" + "@esbuild/linux-arm64" "0.17.17" + "@esbuild/linux-ia32" "0.17.17" + "@esbuild/linux-loong64" "0.17.17" + "@esbuild/linux-mips64el" "0.17.17" + "@esbuild/linux-ppc64" "0.17.17" + "@esbuild/linux-riscv64" "0.17.17" + "@esbuild/linux-s390x" "0.17.17" + "@esbuild/linux-x64" "0.17.17" + "@esbuild/netbsd-x64" "0.17.17" + "@esbuild/openbsd-x64" "0.17.17" + "@esbuild/sunos-x64" "0.17.17" + "@esbuild/win32-arm64" "0.17.17" + "@esbuild/win32-ia32" "0.17.17" + "@esbuild/win32-x64" "0.17.17" + event-stream@3.3.4: version "3.3.4" resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-3.3.4.tgz#4ab4c9a0f5a54db9338b4c34d86bfce8f4b35571" @@ -88,6 +387,46 @@ event-stream@3.3.4: stream-combiner "~0.0.4" through "~2.3.1" +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fastq@^1.6.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + dependencies: + reusify "^1.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + from@~0: version "0.1.7" resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" @@ -102,11 +441,119 @@ fs-extra@^10.0.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +get-stream@^6.0.0: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globby@^11.0.3: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + graceful-fs@^4.1.6, graceful-fs@^4.2.0: version "4.2.4" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +ignore@^5.2.0: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-glob@^4.0.1, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + 
is-extglob "^2.1.1" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +joycon@^3.0.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" + integrity sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== + jsonfile@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" @@ -116,11 +563,121 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" +lilconfig@^2.0.5: + version "2.1.0" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +load-tsconfig@^0.2.3: + version "0.2.5" + resolved "https://registry.yarnpkg.com/load-tsconfig/-/load-tsconfig-0.2.5.tgz#453b8cd8961bfb912dea77eb6c168fe8cca3d3a1" + integrity sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + map-stream@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" integrity sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ= +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mimic-fn@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimatch@^3.0.4: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +object-assign@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + pause-stream@0.0.11: version "0.0.11" resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" @@ -128,6 +685,65 @@ pause-stream@0.0.11: 
dependencies: through "~2.3" +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.1: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +postcss-load-config@^3.0.1: + version "3.1.4" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +punycode@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rollup@^3.2.5: + version "3.20.6" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-3.20.6.tgz#53c0fd73e397269d2ce5f0ec12851457dd53cacd" + integrity sha512-2yEB3nQXp/tBQDN0hJScJQheXdvU2wFhh6ld7K/aiZ1vYcak6N/BKjY1QrU6BvO2JWYS8bEs14FRaxXosxy2zw== + optionalDependencies: + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + sha1@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/sha1/-/sha1-1.1.1.tgz#addaa7a93168f393f19eb2b15091618e2700f848" @@ -136,6 +752,35 @@ sha1@^1.1.1: charenc ">= 0.0.1" crypt ">= 0.0.1" +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +signal-exit@^3.0.3: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map@0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + split@0.3: version "0.3.3" resolved "https://registry.yarnpkg.com/split/-/split-0.3.3.tgz#cd0eea5e63a211dfff7eb0f091c4133e2d0dd28f" @@ -160,12 +805,119 @@ string-natural-compare@^3.0.1: resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +sucrase@^3.20.3: + version "3.32.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" + integrity sha512-ydQOU34rpSyj2TGyz4D2p8rbktIOZ8QY9s+DGLvFU1i5pWJE8vkpruCjGCMHsdXwnD7JDcS+noSwM/a7zyNFDQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.2" + commander "^4.0.0" + glob "7.1.6" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + through@2, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity 
sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tree-kill@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + +tsup@^6.7.0: + version "6.7.0" + resolved "https://registry.yarnpkg.com/tsup/-/tsup-6.7.0.tgz#416f350f32a07b6ae86792ad7e52b0cafc566d64" + integrity sha512-L3o8hGkaHnu5TdJns+mCqFsDBo83bJ44rlK7e6VdanIvpea4ArPcU3swWGsLVbXak1PqQx/V+SSmFPujBK+zEQ== + dependencies: + bundle-require "^4.0.0" + cac "^6.7.12" + chokidar "^3.5.1" + debug "^4.3.1" + esbuild "^0.17.6" + execa "^5.0.0" + globby "^11.0.3" + joycon "^3.0.1" + postcss-load-config "^3.0.1" + resolve-from "^5.0.0" + rollup "^3.2.5" + source-map "0.8.0-beta.0" + sucrase "^3.20.3" + tree-kill "^1.2.2" + universalify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yaml@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== diff --git a/yarn.lock b/yarn.lock index c074148d..8f1ed8b2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8110,10 +8110,10 @@ typedoc@^0.20.36: shiki "^0.9.3" typedoc-default-themes "^0.12.10" -typescript@^4.0.5: - version "4.0.5" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.5.tgz#ae9dddfd1069f1cb5beb3ef3b2170dd7c1332389" - integrity sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ== +typescript@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.0.4.tgz#b217fd20119bd61a94d4011274e0ab369058da3b" + integrity 
sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw== uglify-js@^3.1.4: version "3.13.5"