-rw-r--r--   dev/dictionary-archive-util.js          110
-rw-r--r--   dev/dictionary-validate.js                39
-rw-r--r--   dev/lib/zip.js                            14
-rw-r--r--   dev/util.js                               47
-rw-r--r--   test/database.test.js                     29
-rw-r--r--   test/dictionary-data-validate.test.js     15
-rw-r--r--   test/fixtures/translator-test.js           7
-rw-r--r--   test/playwright/integration.spec.js        7
8 files changed, 166 insertions, 102 deletions
diff --git a/dev/dictionary-archive-util.js b/dev/dictionary-archive-util.js
new file mode 100644
index 00000000..ead5790b
--- /dev/null
+++ b/dev/dictionary-archive-util.js
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2024  Yomitan Authors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+import {BlobWriter, TextReader, TextWriter, Uint8ArrayReader, ZipReader, ZipWriter} from '@zip.js/zip.js';
+import {readFileSync, readdirSync} from 'fs';
+import {join} from 'path';
+import {parseJson} from './json.js';
+
+/**
+ * Creates a zip archive from the given dictionary directory.
+ * @param {string} dictionaryDirectory
+ * @param {string} [dictionaryName]
+ * @returns {Promise<ArrayBuffer>}
+ */
+export async function createDictionaryArchiveData(dictionaryDirectory, dictionaryName) {
+    const fileNames = readdirSync(dictionaryDirectory);
+    const zipFileWriter = new BlobWriter();
+    // Level 0 compression used since decompression in the node environment is not supported.
+    // See dev/lib/zip.js for more details.
+    const zipWriter = new ZipWriter(zipFileWriter, {
+        level: 0
+    });
+    for (const fileName of fileNames) {
+        if (/\.json$/.test(fileName)) {
+            const content = readFileSync(join(dictionaryDirectory, fileName), {encoding: 'utf8'});
+            /** @type {import('dictionary-data').Index} */
+            const json = parseJson(content);
+            if (fileName === 'index.json' && typeof dictionaryName === 'string') {
+                json.title = dictionaryName;
+            }
+            await zipWriter.add(fileName, new TextReader(JSON.stringify(json, null, 0)));
+        } else {
+            const content = readFileSync(join(dictionaryDirectory, fileName), {encoding: null});
+            await zipWriter.add(fileName, new Blob([content]).stream());
+        }
+    }
+    const blob = await zipWriter.close();
+    return await blob.arrayBuffer();
+}
+
+/**
+ * @param {import('@zip.js/zip.js').Entry} entry
+ * @returns {Promise<string>}
+ */
+export async function readArchiveEntryDataString(entry) {
+    if (typeof entry.getData === 'undefined') { throw new Error('Cannot get index data'); }
+    return await entry.getData(new TextWriter());
+}
+
+/**
+ * @template [T=unknown]
+ * @param {import('@zip.js/zip.js').Entry} entry
+ * @returns {Promise<T>}
+ */
+export async function readArchiveEntryDataJson(entry) {
+    const indexContent = await readArchiveEntryDataString(entry);
+    return parseJson(indexContent);
+}
+
+/**
+ * @param {ArrayBuffer} data
+ * @returns {Promise<import('@zip.js/zip.js').Entry[]>}
+ */
+export async function getDictionaryArchiveEntries(data) {
+    const zipFileReader = new Uint8ArrayReader(new Uint8Array(data));
+    const zipReader = new ZipReader(zipFileReader);
+    return await zipReader.getEntries();
+}
+
+/**
+ * @template T
+ * @param {import('@zip.js/zip.js').Entry[]} entries
+ * @param {string} fileName
+ * @returns {Promise<T>}
+ */
+export async function getDictionaryArchiveJson(entries, fileName) {
+    const entry = entries.find((item) => item.filename === fileName);
+    if (typeof entry === 'undefined') { throw new Error(`File not found: ${fileName}`); }
+    return await readArchiveEntryDataJson(entry);
+}
+
+/**
+ * @returns {string}
+ */
+export function getIndexFileName() {
+    return 'index.json';
+}
+
+/**
+ * @param {ArrayBuffer} data
+ * @returns {Promise<import('dictionary-data').Index>}
+ */
+export async function getDictionaryArchiveIndex(data) {
+    const entries = await getDictionaryArchiveEntries(data);
+    return await getDictionaryArchiveJson(entries, getIndexFileName());
+}
diff --git a/dev/dictionary-validate.js b/dev/dictionary-validate.js
index 18bba99e..b770f311 100644
--- a/dev/dictionary-validate.js
+++ b/dev/dictionary-validate.js
@@ -17,10 +17,10 @@
  */
 
 import fs from 'fs';
-import JSZip from 'jszip';
 import path from 'path';
 import {performance} from 'perf_hooks';
 import {fileURLToPath} from 'url';
+import {getDictionaryArchiveEntries, getDictionaryArchiveJson, getIndexFileName, readArchiveEntryDataJson} from './dictionary-archive-util.js';
 import {parseJson} from './json.js';
 import {createJsonSchema} from './schema-validate.js';
 import {toError} from './to-error.js';
@@ -39,30 +39,31 @@ function readSchema(relativeFileName) {
 
 /**
  * @param {import('dev/schema-validate').ValidateMode} mode
- * @param {import('jszip')} zip
+ * @param {import('@zip.js/zip.js').Entry[]} entries
  * @param {import('dev/dictionary-validate').SchemasDetails} schemasDetails
  */
-async function validateDictionaryBanks(mode, zip, schemasDetails) {
-    for (const [fileName, file] of Object.entries(zip.files)) {
+async function validateDictionaryBanks(mode, entries, schemasDetails) {
+    for (const entry of entries) {
+        const {filename} = entry;
         for (const [fileNameFormat, schema] of schemasDetails) {
-            if (!fileNameFormat.test(fileName)) { continue; }
+            if (!fileNameFormat.test(filename)) { continue; }
             let jsonSchema;
             try {
                 jsonSchema = createJsonSchema(mode, schema);
             } catch (e) {
                 const e2 = toError(e);
-                e2.message += `\n(in file ${fileName})}`;
+                e2.message += `\n(in file ${filename})`;
                 throw e2;
             }
-            const data = parseJson(await file.async('string'));
+            const data = await readArchiveEntryDataJson(entry);
             try {
                 jsonSchema.validate(data);
             } catch (e) {
                 const e2 = toError(e);
-                e2.message += `\n(in file ${fileName})}`;
+                e2.message += `\n(in file ${filename})`;
                 throw e2;
             }
             break;
         }
     }
 }
@@ -73,18 +74,13 @@ async function validateDictionaryBanks(mode, zip, schemasDetails) {
 /**
  * Validates a dictionary from its zip archive.
  * @param {import('dev/schema-validate').ValidateMode} mode
- * @param {import('jszip')} archive
+ * @param {ArrayBuffer} archiveData
 * @param {import('dev/dictionary-validate').Schemas} schemas
  */
-export async function validateDictionary(mode, archive, schemas) {
-    const indexFileName = 'index.json';
-    const indexFile = archive.files[indexFileName];
-    if (!indexFile) {
-        throw new Error('No dictionary index found in archive');
-    }
-
-    /** @type {import('dictionary-data').Index} */
-    const index = parseJson(await indexFile.async('string'));
+export async function validateDictionary(mode, archiveData, schemas) {
+    const entries = await getDictionaryArchiveEntries(archiveData);
+    const indexFileName = getIndexFileName();
+    const index = await getDictionaryArchiveJson(entries, indexFileName);
 
     const version = index.format || index.version;
     try {
@@ -92,7 +88,7 @@
         jsonSchema.validate(index);
     } catch (e) {
         const e2 = toError(e);
-        e2.message += `\n(in file ${indexFileName})}`;
+        e2.message += `\n(in file ${indexFileName})`;
         throw e2;
     }
 
@@ -105,7 +101,7 @@
         [/^tag_bank_(\d+)\.json$/, schemas.tagBankV3]
     ];
 
-    await validateDictionaryBanks(mode, archive, schemasDetails);
+    await validateDictionaryBanks(mode, entries, schemasDetails);
 }
 
 /**
@@ -138,8 +134,7 @@ export async function testDictionaryFiles(mode, dictionaryFileNames) {
         try {
             console.log(`Validating ${dictionaryFileName}...`);
             const source = fs.readFileSync(dictionaryFileName);
-            const archive = await JSZip.loadAsync(source);
-            await validateDictionary(mode, archive, schemas);
+            await validateDictionary(mode, source.buffer, schemas);
             const end = performance.now();
             console.log(`No issues detected (${((end - start) / 1000).toFixed(2)}s)`);
         } catch (e) {
diff --git a/dev/lib/zip.js b/dev/lib/zip.js
index 62d0784c..ee603c25 100644
--- a/dev/lib/zip.js
+++ b/dev/lib/zip.js
@@ -15,4 +15,18 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
+/**
+ * This script is importing a file within the '@zip.js/zip.js' dependency rather than
+ * simply importing '@zip.js/zip.js'.
+ *
+ * This is done in order to only import the subset of functionality that the extension needs.
+ *
+ * In this case, this subset only includes the components to support decompression using web workers.
+ *
+ * Therefore, if this file or the built library file is imported in a development, testing, or
+ * benchmark script, it will not be able to properly decompress the data of compressed zip files.
+ *
+ * As a workaround, testing zip data can be generated using {level: 0} compression.
+ */
+
 export * from '@zip.js/zip.js/lib/zip.js';
diff --git a/dev/util.js b/dev/util.js
index 5a4c5788..67d8d9d8 100644
--- a/dev/util.js
+++ b/dev/util.js
@@ -17,9 +17,7 @@
  */
 
 import fs from 'fs';
-import JSZip from 'jszip';
 import path from 'path';
-import {parseJson} from './json.js';
 
 /**
  * @param {string} baseDirectory
@@ -47,48 +45,3 @@ export function getAllFiles(baseDirectory, predicate = null) {
     }
     return results;
 }
-
-/**
- * Creates a zip archive from the given dictionary directory.
- * @param {string} dictionaryDirectory
- * @param {string} [dictionaryName]
- * @returns {import('jszip')}
- */
-export function createDictionaryArchive(dictionaryDirectory, dictionaryName) {
-    const fileNames = fs.readdirSync(dictionaryDirectory);
-
-    // Const zipFileWriter = new BlobWriter();
-    // const zipWriter = new ZipWriter(zipFileWriter);
-    const archive = new JSZip();
-
-    for (const fileName of fileNames) {
-        if (/\.json$/.test(fileName)) {
-            const content = fs.readFileSync(path.join(dictionaryDirectory, fileName), {encoding: 'utf8'});
-            const json = parseJson(content);
-            if (fileName === 'index.json' && typeof dictionaryName === 'string') {
-                /** @type {import('dictionary-data').Index} */ (json).title = dictionaryName;
-            }
-            archive.file(fileName, JSON.stringify(json, null, 0));
-
-            // Await zipWriter.add(fileName, new TextReader(JSON.stringify(json, null, 0)));
-        } else {
-            const content = fs.readFileSync(path.join(dictionaryDirectory, fileName), {encoding: null});
-            archive.file(fileName, content);
-
-            // console.log('adding');
-            // const r = new TextReader(content);
-            // console.log(r.readUint8Array(0, 10));
-            // console.log('reader done');
-            // await zipWriter.add(fileName, r);
-            // console.log('??');
-        }
-    }
-    // Await zipWriter.close();
-
-    // Retrieves the Blob object containing the zip content into `zipFileBlob`. It
-    // is also returned by zipWriter.close() for more convenience.
-    // const zipFileBlob = await zipFileWriter.getData();
-    return archive;
-
-    // Return zipFileBlob;
-}
diff --git a/test/database.test.js b/test/database.test.js
index 5dbc1040..a930d68d 100644
--- a/test/database.test.js
+++ b/test/database.test.js
@@ -21,8 +21,8 @@ import {readFileSync} from 'node:fs';
 import {fileURLToPath} from 'node:url';
 import {join, dirname as pathDirname} from 'path';
 import {beforeEach, describe, test, vi} from 'vitest';
+import {createDictionaryArchiveData, getDictionaryArchiveIndex} from '../dev/dictionary-archive-util.js';
 import {parseJson} from '../dev/json.js';
-import {createDictionaryArchive} from '../dev/util.js';
 import {DictionaryDatabase} from '../ext/js/dictionary/dictionary-database.js';
 import {DictionaryImporter} from '../ext/js/dictionary/dictionary-importer.js';
 import {DictionaryImporterMediaLoader} from './mocks/dictionary-importer-media-loader.js';
@@ -34,11 +34,11 @@ vi.stubGlobal('IDBKeyRange', IDBKeyRange);
 /**
  * @param {string} dictionary
  * @param {string} [dictionaryName]
- * @returns {import('jszip')}
+ * @returns {Promise<ArrayBuffer>}
  */
-function createTestDictionaryArchive(dictionary, dictionaryName) {
+async function createTestDictionaryArchiveData(dictionary, dictionaryName) {
     const dictionaryDirectory = join(dirname, 'data', 'dictionaries', dictionary);
-    return createDictionaryArchive(dictionaryDirectory, dictionaryName);
+    return await createDictionaryArchiveData(dictionaryDirectory, dictionaryName);
 }
 
 /**
@@ -110,10 +110,8 @@ describe('Database', () => {
     });
 
     test('Database invalid usage', async ({expect}) => {
         // Load dictionary data
-        const testDictionary = createTestDictionaryArchive('valid-dictionary1');
-        const testDictionarySource = await testDictionary.generateAsync({type: 'arraybuffer'});
-        /** @type {import('dictionary-data').Index} */
-        const testDictionaryIndex = parseJson(await testDictionary.files['index.json'].async('string'));
+        const testDictionarySource = await createTestDictionaryArchiveData('valid-dictionary1');
+        const testDictionaryIndex = await getDictionaryArchiveIndex(testDictionarySource);
         const title = testDictionaryIndex.title;
         const titles = new Map([
@@ -165,8 +163,7 @@
                 const dictionaryDatabase = new DictionaryDatabase();
                 await dictionaryDatabase.prepare();
 
-                const testDictionary = createTestDictionaryArchive(name);
-                const testDictionarySource = await testDictionary.generateAsync({type: 'arraybuffer'});
+                const testDictionarySource = await createTestDictionaryArchiveData(name);
 
                 /** @type {import('dictionary-importer').ImportDetails} */
                 const detaultImportDetails = {prefixWildcardsSupported: false};
@@ -183,10 +180,8 @@
             const fakeImportDate = testData.expectedSummary.importDate;
 
             // Load dictionary data
-            const testDictionary = createTestDictionaryArchive('valid-dictionary1');
-            const testDictionarySource = await testDictionary.generateAsync({type: 'arraybuffer'});
-            /** @type {import('dictionary-data').Index} */
-            const testDictionaryIndex = parseJson(await testDictionary.files['index.json'].async('string'));
+            const testDictionarySource = await createTestDictionaryArchiveData('valid-dictionary1');
+            const testDictionaryIndex = await getDictionaryArchiveIndex(testDictionarySource);
             const title = testDictionaryIndex.title;
             const titles = new Map([
@@ -315,10 +310,8 @@
         describe.each(cleanupTestCases)('Testing cleanup method $clearMethod', ({clearMethod}) => {
             test('Import data and test', async ({expect}) => {
                 // Load dictionary data
-                const testDictionary = createTestDictionaryArchive('valid-dictionary1');
-                const testDictionarySource = await testDictionary.generateAsync({type: 'arraybuffer'});
-                /** @type {import('dictionary-data').Index} */
-                const testDictionaryIndex = parseJson(await testDictionary.files['index.json'].async('string'));
+                const testDictionarySource = await createTestDictionaryArchiveData('valid-dictionary1');
+                const testDictionaryIndex = await getDictionaryArchiveIndex(testDictionarySource);
 
                 // Setup database
                 const dictionaryDatabase = new DictionaryDatabase();
diff --git a/test/dictionary-data-validate.test.js b/test/dictionary-data-validate.test.js
index b09e4ae0..a8d9f1ab 100644
--- a/test/dictionary-data-validate.test.js
+++ b/test/dictionary-data-validate.test.js
@@ -19,19 +19,19 @@
 import {fileURLToPath} from 'node:url';
 import path from 'path';
 import {describe, it} from 'vitest';
+import {createDictionaryArchiveData} from '../dev/dictionary-archive-util.js';
 import * as dictionaryValidate from '../dev/dictionary-validate.js';
-import {createDictionaryArchive} from '../dev/util.js';
 
 const dirname = path.dirname(fileURLToPath(import.meta.url));
 
 /**
  * @param {string} dictionary
  * @param {string} [dictionaryName]
- * @returns {import('jszip')}
+ * @returns {Promise<ArrayBuffer>}
  */
-function createTestDictionaryArchive(dictionary, dictionaryName) {
+async function createTestDictionaryArchiveData(dictionary, dictionaryName) {
     const dictionaryDirectory = path.join(dirname, 'data', 'dictionaries', dictionary);
-    return createDictionaryArchive(dictionaryDirectory, dictionaryName);
+    return await createDictionaryArchiveData(dictionaryDirectory, dictionaryName);
 }
 
 describe('Dictionary validation', () => {
@@ -47,11 +47,12 @@ describe('Dictionary validation', () => {
     const schemas = dictionaryValidate.getSchemas();
     describe.each(testCases)('Test dictionary $name', ({name, valid}) => {
         it(`should be ${valid ? 'valid' : 'invalid'}`, async ({expect}) => {
-            const archive = createTestDictionaryArchive(name);
+            const archive = await createTestDictionaryArchiveData(name);
+            const promise = dictionaryValidate.validateDictionary(null, archive, schemas);
             await (
                 valid ?
-                expect(dictionaryValidate.validateDictionary(null, archive, schemas)).resolves.not.toThrow() :
-                expect(dictionaryValidate.validateDictionary(null, archive, schemas)).rejects.toThrow()
+                expect(promise).resolves.not.toThrow() :
+                expect(promise).rejects.toThrow()
             );
         });
     });
diff --git a/test/fixtures/translator-test.js b/test/fixtures/translator-test.js
index 95218830..ff3782a7 100644
--- a/test/fixtures/translator-test.js
+++ b/test/fixtures/translator-test.js
@@ -21,8 +21,8 @@ import {readFileSync} from 'fs';
 import {fileURLToPath} from 'node:url';
 import {dirname, join} from 'path';
 import {expect, vi} from 'vitest';
+import {createDictionaryArchiveData} from '../../dev/dictionary-archive-util.js';
 import {parseJson} from '../../dev/json.js';
-import {createDictionaryArchive} from '../../dev/util.js';
 import {DictionaryDatabase} from '../../ext/js/dictionary/dictionary-database.js';
 import {DictionaryImporter} from '../../ext/js/dictionary/dictionary-importer.js';
 import {Translator} from '../../ext/js/language/translator.js';
@@ -45,8 +45,7 @@ vi.stubGlobal('chrome', chrome);
  */
 export async function createTranslatorContext(dictionaryDirectory, dictionaryName) {
     // Dictionary
-    const testDictionary = createDictionaryArchive(dictionaryDirectory, dictionaryName);
-    const testDictionaryContent = await testDictionary.generateAsync({type: 'arraybuffer'});
+    const testDictionaryData = await createDictionaryArchiveData(dictionaryDirectory, dictionaryName);
 
     // Setup database
     const dictionaryImporterMediaLoader = new DictionaryImporterMediaLoader();
@@ -56,7 +55,7 @@
 
     const {errors} = await dictionaryImporter.importDictionary(
         dictionaryDatabase,
-        testDictionaryContent,
+        testDictionaryData,
         {prefixWildcardsSupported: true}
     );
 
diff --git a/test/playwright/integration.spec.js b/test/playwright/integration.spec.js
index 8e641397..bdbe80e3 100644
--- a/test/playwright/integration.spec.js
+++ b/test/playwright/integration.spec.js
@@ -16,7 +16,7 @@
  */
 
 import path from 'path';
-import {createDictionaryArchive} from '../../dev/util.js';
+import {createDictionaryArchiveData} from '../../dev/dictionary-archive-util.js';
 import {deferPromise} from '../../ext/js/core/utilities.js';
 import {
     expect,
@@ -67,12 +67,11 @@ test('anki add', async ({context, page, extensionId}) => {
     await page.goto(`chrome-extension://${extensionId}/settings.html`);
 
     // Load in test dictionary
-    const dictionary = createDictionaryArchive(path.join(root, 'test/data/dictionaries/valid-dictionary1'), 'valid-dictionary1');
-    const testDictionarySource = await dictionary.generateAsync({type: 'arraybuffer'});
+    const dictionary = await createDictionaryArchiveData(path.join(root, 'test/data/dictionaries/valid-dictionary1'), 'valid-dictionary1');
     await page.locator('input[id="dictionary-import-file-input"]').setInputFiles({
        name: 'valid-dictionary1.zip',
        mimeType: 'application/x-zip',
-        buffer: Buffer.from(testDictionarySource)
+        buffer: Buffer.from(dictionary)
     });
 
     await expect(page.locator('id=dictionaries')).toHaveText('Dictionaries (1 installed, 1 enabled)', {timeout: 5 * 60 * 1000});
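
Usage sketch (not taken from the repository; a minimal illustration of how the new helpers in dev/dictionary-archive-util.js compose with dev/dictionary-validate.js, mirroring the test code above). The fixture path and running the script as an ES module from the repository root are assumptions; error handling is omitted.

import {join} from 'path';
import {createDictionaryArchiveData, getDictionaryArchiveIndex} from './dev/dictionary-archive-util.js';
import * as dictionaryValidate from './dev/dictionary-validate.js';

// Build an in-memory archive (stored entries, level 0) from a dictionary directory,
// overriding the index title the same way the tests do.
const data = await createDictionaryArchiveData(join('test', 'data', 'dictionaries', 'valid-dictionary1'), 'valid-dictionary1');

// The result is a plain ArrayBuffer, so it can be handed directly to
// DictionaryImporter.importDictionary() or inspected via the entry helpers.
const index = await getDictionaryArchiveIndex(data);
console.log(index.title); // 'valid-dictionary1'

// validateDictionary() now accepts the ArrayBuffer directly instead of a JSZip instance.
const schemas = dictionaryValidate.getSchemas();
await dictionaryValidate.validateDictionary(null, data, schemas);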
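The comment added to dev/lib/zip.js is the reason createDictionaryArchiveData passes {level: 0}: the re-exported subset used by development and test code cannot inflate compressed entries, so archives built for tests must contain stored entries. A small round-trip sketch with the full '@zip.js/zip.js' package follows; the toy index object is illustrative only, and the calls mirror those in dev/dictionary-archive-util.js.

import {BlobWriter, TextReader, TextWriter, Uint8ArrayReader, ZipReader, ZipWriter} from '@zip.js/zip.js';

// Write a single stored (level 0) entry; deflated entries would not be readable
// through the worker-less subset re-exported by dev/lib/zip.js.
const zipWriter = new ZipWriter(new BlobWriter(), {level: 0});
await zipWriter.add('index.json', new TextReader(JSON.stringify({title: 'example', format: 3})));
const blob = await zipWriter.close();

// Read it back the same way getDictionaryArchiveEntries() does.
const zipReader = new ZipReader(new Uint8ArrayReader(new Uint8Array(await blob.arrayBuffer())));
const [entry] = await zipReader.getEntries();
const text = typeof entry.getData !== 'undefined' ? await entry.getData(new TextWriter()) : null;
console.log(text); // '{"title":"example","format":3}'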