Diffstat (limited to 'ext/js/language')
 ext/js/language/deinflector.js                       |   39
 ext/js/language/dictionary-database.js               |  255
 ext/js/language/dictionary-importer-media-loader.js  |   10
 ext/js/language/dictionary-importer.js               |  338
 ext/js/language/dictionary-worker-handler.js         |   38
 ext/js/language/dictionary-worker-media-loader.js    |   16
 ext/js/language/dictionary-worker.js                 |  110
 ext/js/language/sandbox/dictionary-data-util.js      |  161
 ext/js/language/sandbox/japanese-util.js             | 1371
 ext/js/language/text-scanner.js                      |  534
 ext/js/language/translator.js                        |  788
 11 files changed, 2655 insertions(+), 1005 deletions(-)
diff --git a/ext/js/language/deinflector.js b/ext/js/language/deinflector.js
index 3012c29a..b7a235d0 100644
--- a/ext/js/language/deinflector.js
+++ b/ext/js/language/deinflector.js
@@ -17,10 +17,18 @@
*/
export class Deinflector {
+ /**
+ * @param {import('deinflector').ReasonsRaw} reasons
+ */
constructor(reasons) {
+ /** @type {import('deinflector').Reason[]} */
this.reasons = Deinflector.normalizeReasons(reasons);
}
+ /**
+ * @param {string} source
+ * @returns {import('translation-internal').Deinflection[]}
+ */
deinflect(source) {
const results = [this._createDeinflection(source, 0, [])];
for (let i = 0; i < results.length; ++i) {
@@ -46,13 +54,25 @@ export class Deinflector {
return results;
}
+ /**
+ * @param {string} term
+ * @param {import('translation-internal').DeinflectionRuleFlags} rules
+ * @param {string[]} reasons
+ * @returns {import('translation-internal').Deinflection}
+ */
_createDeinflection(term, rules, reasons) {
return {term, rules, reasons};
}
+ /**
+ * @param {import('deinflector').ReasonsRaw} reasons
+ * @returns {import('deinflector').Reason[]}
+ */
static normalizeReasons(reasons) {
+ /** @type {import('deinflector').Reason[]} */
const normalizedReasons = [];
for (const [reason, reasonInfo] of Object.entries(reasons)) {
+ /** @type {import('deinflector').ReasonVariant[]} */
const variants = [];
for (const {kanaIn, kanaOut, rulesIn, rulesOut} of reasonInfo) {
variants.push([
@@ -67,6 +87,10 @@ export class Deinflector {
return normalizedReasons;
}
+ /**
+ * @param {string[]} rules
+ * @returns {import('translation-internal').DeinflectionRuleFlags}
+ */
static rulesToRuleFlags(rules) {
const ruleTypes = this._ruleTypes;
let value = 0;
@@ -79,13 +103,14 @@ export class Deinflector {
}
}
+/** @type {Map<string, import('translation-internal').DeinflectionRuleFlags>} */
// eslint-disable-next-line no-underscore-dangle
Deinflector._ruleTypes = new Map([
- ['v1', 0b00000001], // Verb ichidan
- ['v5', 0b00000010], // Verb godan
- ['vs', 0b00000100], // Verb suru
- ['vk', 0b00001000], // Verb kuru
- ['vz', 0b00010000], // Verb zuru
- ['adj-i', 0b00100000], // Adjective i
- ['iru', 0b01000000] // Intermediate -iru endings for progressive or perfect tense
+ ['v1', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00000001)], // Verb ichidan
+ ['v5', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00000010)], // Verb godan
+ ['vs', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00000100)], // Verb suru
+ ['vk', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00001000)], // Verb kuru
+ ['vz', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00010000)], // Verb zuru
+ ['adj-i', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b00100000)], // Adjective i
+ ['iru', /** @type {import('translation-internal').DeinflectionRuleFlags} */ (0b01000000)] // Intermediate -iru endings for progressive or perfect tense
]);
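
The rule flags above are plain powers of two, so a list of rule names collapses into a single integer via bitwise OR and two rule sets can be compared with a single AND. A minimal sketch of that idea, using a trimmed-down flag map and helper names (RULE_FLAGS, rulesToFlags, rulesMatch) that are illustrative rather than the commit's own:

    // Illustrative sketch: combining rule names into one bitmask and checking
    // compatibility, mirroring the flag values defined above.
    const RULE_FLAGS = new Map([
        ['v1', 0b00000001],
        ['v5', 0b00000010],
        ['adj-i', 0b00100000]
    ]);

    // OR together the flags for each known rule name; unknown names are skipped.
    function rulesToFlags(rules) {
        let value = 0;
        for (const rule of rules) {
            const bits = RULE_FLAGS.get(rule);
            if (typeof bits !== 'undefined') { value |= bits; }
        }
        return value;
    }

    // Two rule sets are compatible when their masks share at least one bit,
    // or when the current set carries no restrictions (mask of 0).
    function rulesMatch(currentRules, requiredRules) {
        return currentRules === 0 || (currentRules & requiredRules) !== 0;
    }

    console.log(rulesToFlags(['v1', 'adj-i']).toString(2)); // "100001"
    console.log(rulesMatch(0b00000001, 0b00100001));        // true
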
diff --git a/ext/js/language/dictionary-database.js b/ext/js/language/dictionary-database.js
index da365da7..c47e1e90 100644
--- a/ext/js/language/dictionary-database.js
+++ b/ext/js/language/dictionary-database.js
@@ -21,29 +21,45 @@ import {Database} from '../data/database.js';
export class DictionaryDatabase {
constructor() {
+ /** @type {Database<import('dictionary-database').ObjectStoreName>} */
this._db = new Database();
+ /** @type {string} */
this._dbName = 'dict';
- this._schemas = new Map();
+ /** @type {import('dictionary-database').CreateQuery<string>} */
this._createOnlyQuery1 = (item) => IDBKeyRange.only(item);
+ /** @type {import('dictionary-database').CreateQuery<import('dictionary-database').DictionaryAndQueryRequest>} */
this._createOnlyQuery2 = (item) => IDBKeyRange.only(item.query);
+ /** @type {import('dictionary-database').CreateQuery<import('dictionary-database').TermExactRequest>} */
this._createOnlyQuery3 = (item) => IDBKeyRange.only(item.term);
+ /** @type {import('dictionary-database').CreateQuery<import('dictionary-database').MediaRequest>} */
this._createOnlyQuery4 = (item) => IDBKeyRange.only(item.path);
+ /** @type {import('dictionary-database').CreateQuery<string>} */
this._createBoundQuery1 = (item) => IDBKeyRange.bound(item, `${item}\uffff`, false, false);
+ /** @type {import('dictionary-database').CreateQuery<string>} */
this._createBoundQuery2 = (item) => { item = stringReverse(item); return IDBKeyRange.bound(item, `${item}\uffff`, false, false); };
- this._createTermBind1 = this._createTerm.bind(this, 'term', 'exact');
- this._createTermBind2 = this._createTerm.bind(this, 'sequence', 'exact');
+ /** @type {import('dictionary-database').CreateResult<import('dictionary-database').TermExactRequest, import('dictionary-database').DatabaseTermEntryWithId, import('dictionary-database').TermEntry>} */
+ this._createTermBind1 = this._createTermExact.bind(this);
+ /** @type {import('dictionary-database').CreateResult<import('dictionary-database').DictionaryAndQueryRequest, import('dictionary-database').DatabaseTermEntryWithId, import('dictionary-database').TermEntry>} */
+ this._createTermBind2 = this._createTermSequenceExact.bind(this);
+ /** @type {import('dictionary-database').CreateResult<string, import('dictionary-database').DatabaseTermMeta, import('dictionary-database').TermMeta>} */
this._createTermMetaBind = this._createTermMeta.bind(this);
+ /** @type {import('dictionary-database').CreateResult<string, import('dictionary-database').DatabaseKanjiEntry, import('dictionary-database').KanjiEntry>} */
this._createKanjiBind = this._createKanji.bind(this);
+ /** @type {import('dictionary-database').CreateResult<string, import('dictionary-database').DatabaseKanjiMeta, import('dictionary-database').KanjiMeta>} */
this._createKanjiMetaBind = this._createKanjiMeta.bind(this);
+ /** @type {import('dictionary-database').CreateResult<import('dictionary-database').MediaRequest, import('dictionary-database').MediaDataArrayBufferContent, import('dictionary-database').Media>} */
this._createMediaBind = this._createMedia.bind(this);
}
+ /** */
async prepare() {
await this._db.open(
this._dbName,
60,
- [
- {
+ /** @type {import('database').StructureDefinition<import('dictionary-database').ObjectStoreName>[]} */
+ ([
+ /** @type {import('database').StructureDefinition<import('dictionary-database').ObjectStoreName>} */
+ ({
version: 20,
stores: {
terms: {
@@ -63,7 +79,7 @@ export class DictionaryDatabase {
indices: ['title', 'version']
}
}
- },
+ }),
{
version: 30,
stores: {
@@ -108,18 +124,25 @@ export class DictionaryDatabase {
}
}
}
- ]
+ ])
);
}
+ /** */
async close() {
this._db.close();
}
+ /**
+ * @returns {boolean}
+ */
isPrepared() {
return this._db.isOpen();
}
+ /**
+ * @returns {Promise<boolean>}
+ */
async purge() {
if (this._db.isOpening()) {
throw new Error('Cannot purge database while opening');
@@ -138,14 +161,13 @@ export class DictionaryDatabase {
return result;
}
+ /**
+ * @param {string} dictionaryName
+ * @param {number} progressRate
+ * @param {import('dictionary-database').DeleteDictionaryProgressCallback} onProgress
+ */
async deleteDictionary(dictionaryName, progressRate, onProgress) {
- if (typeof progressRate !== 'number') {
- progressRate = 1;
- }
- if (typeof onProgress !== 'function') {
- onProgress = () => {};
- }
-
+ /** @type {[objectStoreName: import('dictionary-database').ObjectStoreName, key: string][][]} */
const targetGroups = [
[
['kanji', 'dictionary'],
@@ -165,6 +187,7 @@ export class DictionaryDatabase {
storeCount += targets.length;
}
+ /** @type {import('dictionary-database').DeleteDictionaryProgressData} */
const progressData = {
count: 0,
processed: 0,
@@ -172,6 +195,10 @@ export class DictionaryDatabase {
storesProcesed: 0
};
+ /**
+ * @param {IDBValidKey[]} keys
+ * @returns {IDBValidKey[]}
+ */
const filterKeys = (keys) => {
++progressData.storesProcesed;
progressData.count += keys.length;
@@ -197,8 +224,15 @@ export class DictionaryDatabase {
}
}
+ /**
+ * @param {string[]} termList
+ * @param {import('dictionary-database').DictionarySet} dictionaries
+ * @param {import('dictionary-database').MatchType} matchType
+ * @returns {Promise<import('dictionary-database').TermEntry[]>}
+ */
findTermsBulk(termList, dictionaries, matchType) {
const visited = new Set();
+ /** @type {import('dictionary-database').FindPredicate<string, import('dictionary-database').DatabaseTermEntryWithId>} */
const predicate = (row) => {
if (!dictionaries.has(row.dictionary)) { return false; }
const {id} = row;
@@ -224,54 +258,106 @@ export class DictionaryDatabase {
return this._findMultiBulk('terms', indexNames, termList, createQuery, predicate, createResult);
}
+ /**
+ * @param {import('dictionary-database').TermExactRequest[]} termList
+ * @param {import('dictionary-database').DictionarySet} dictionaries
+ * @returns {Promise<import('dictionary-database').TermEntry[]>}
+ */
findTermsExactBulk(termList, dictionaries) {
+ /** @type {import('dictionary-database').FindPredicate<import('dictionary-database').TermExactRequest, import('dictionary-database').DatabaseTermEntry>} */
const predicate = (row, item) => (row.reading === item.reading && dictionaries.has(row.dictionary));
return this._findMultiBulk('terms', ['expression'], termList, this._createOnlyQuery3, predicate, this._createTermBind1);
}
+ /**
+ * @param {import('dictionary-database').DictionaryAndQueryRequest[]} items
+ * @returns {Promise<import('dictionary-database').TermEntry[]>}
+ */
findTermsBySequenceBulk(items) {
+ /** @type {import('dictionary-database').FindPredicate<import('dictionary-database').DictionaryAndQueryRequest, import('dictionary-database').DatabaseTermEntry>} */
const predicate = (row, item) => (row.dictionary === item.dictionary);
return this._findMultiBulk('terms', ['sequence'], items, this._createOnlyQuery2, predicate, this._createTermBind2);
}
+ /**
+ * @param {string[]} termList
+ * @param {import('dictionary-database').DictionarySet} dictionaries
+ * @returns {Promise<import('dictionary-database').TermMeta[]>}
+ */
findTermMetaBulk(termList, dictionaries) {
+ /** @type {import('dictionary-database').FindPredicate<string, import('dictionary-database').DatabaseTermMeta>} */
const predicate = (row) => dictionaries.has(row.dictionary);
return this._findMultiBulk('termMeta', ['expression'], termList, this._createOnlyQuery1, predicate, this._createTermMetaBind);
}
+ /**
+ * @param {string[]} kanjiList
+ * @param {import('dictionary-database').DictionarySet} dictionaries
+ * @returns {Promise<import('dictionary-database').KanjiEntry[]>}
+ */
findKanjiBulk(kanjiList, dictionaries) {
+ /** @type {import('dictionary-database').FindPredicate<string, import('dictionary-database').DatabaseKanjiEntry>} */
const predicate = (row) => dictionaries.has(row.dictionary);
return this._findMultiBulk('kanji', ['character'], kanjiList, this._createOnlyQuery1, predicate, this._createKanjiBind);
}
+ /**
+ * @param {string[]} kanjiList
+ * @param {import('dictionary-database').DictionarySet} dictionaries
+ * @returns {Promise<import('dictionary-database').KanjiMeta[]>}
+ */
findKanjiMetaBulk(kanjiList, dictionaries) {
+ /** @type {import('dictionary-database').FindPredicate<string, import('dictionary-database').DatabaseKanjiMeta>} */
const predicate = (row) => dictionaries.has(row.dictionary);
return this._findMultiBulk('kanjiMeta', ['character'], kanjiList, this._createOnlyQuery1, predicate, this._createKanjiMetaBind);
}
+ /**
+ * @param {import('dictionary-database').DictionaryAndQueryRequest[]} items
+ * @returns {Promise<(import('dictionary-database').Tag|undefined)[]>}
+ */
findTagMetaBulk(items) {
+ /** @type {import('dictionary-database').FindPredicate<import('dictionary-database').DictionaryAndQueryRequest, import('dictionary-database').Tag>} */
const predicate = (row, item) => (row.dictionary === item.dictionary);
return this._findFirstBulk('tagMeta', 'name', items, this._createOnlyQuery2, predicate);
}
- findTagForTitle(name, title) {
+ /**
+ * @param {string} name
+ * @param {string} dictionary
+ * @returns {Promise<?import('dictionary-database').Tag>}
+ */
+ findTagForTitle(name, dictionary) {
const query = IDBKeyRange.only(name);
- return this._db.find('tagMeta', 'name', query, (row) => (row.dictionary === title), null, null);
+ return this._db.find('tagMeta', 'name', query, (row) => (/** @type {import('dictionary-database').Tag} */ (row).dictionary === dictionary), null, null);
}
+ /**
+ * @param {import('dictionary-database').MediaRequest[]} items
+ * @returns {Promise<import('dictionary-database').Media[]>}
+ */
getMedia(items) {
+ /** @type {import('dictionary-database').FindPredicate<import('dictionary-database').MediaRequest, import('dictionary-database').MediaDataArrayBufferContent>} */
const predicate = (row, item) => (row.dictionary === item.dictionary);
return this._findMultiBulk('media', ['path'], items, this._createOnlyQuery4, predicate, this._createMediaBind);
}
+ /**
+ * @returns {Promise<import('dictionary-importer').Summary[]>}
+ */
getDictionaryInfo() {
return new Promise((resolve, reject) => {
const transaction = this._db.transaction(['dictionaries'], 'readonly');
const objectStore = transaction.objectStore('dictionaries');
- this._db.getAll(objectStore, null, resolve, reject);
+ this._db.getAll(objectStore, null, resolve, reject, null);
});
}
+ /**
+ * @param {string[]} dictionaryNames
+ * @param {boolean} getTotal
+ * @returns {Promise<import('dictionary-database').DictionaryCounts>}
+ */
getDictionaryCounts(dictionaryNames, getTotal) {
return new Promise((resolve, reject) => {
const targets = [
@@ -290,10 +376,11 @@ export class DictionaryDatabase {
return {objectStore, index};
});
+ /** @type {import('database').CountTarget[]} */
const countTargets = [];
if (getTotal) {
for (const {objectStore} of databaseTargets) {
- countTargets.push([objectStore, null]);
+ countTargets.push([objectStore, void 0]);
}
}
for (const dictionaryName of dictionaryNames) {
@@ -303,18 +390,23 @@ export class DictionaryDatabase {
}
}
+ /**
+ * @param {number[]} results
+ */
const onCountComplete = (results) => {
const resultCount = results.length;
const targetCount = targets.length;
+ /** @type {import('dictionary-database').DictionaryCountGroup[]} */
const counts = [];
for (let i = 0; i < resultCount; i += targetCount) {
+ /** @type {import('dictionary-database').DictionaryCountGroup} */
const countGroup = {};
for (let j = 0; j < targetCount; ++j) {
countGroup[targets[j][0]] = results[i + j];
}
counts.push(countGroup);
}
- const total = getTotal ? counts.shift() : null;
+ const total = getTotal ? /** @type {import('dictionary-database').DictionaryCountGroup} */ (counts.shift()) : null;
resolve({total, counts});
};
@@ -322,22 +414,47 @@ export class DictionaryDatabase {
});
}
+ /**
+ * @param {string} title
+ * @returns {Promise<boolean>}
+ */
async dictionaryExists(title) {
const query = IDBKeyRange.only(title);
const result = await this._db.find('dictionaries', 'title', query, null, null, void 0);
return typeof result !== 'undefined';
}
+ /**
+ * @template {import('dictionary-database').ObjectStoreName} T
+ * @param {T} objectStoreName
+ * @param {import('dictionary-database').ObjectStoreData<T>[]} items
+ * @param {number} start
+ * @param {number} count
+ * @returns {Promise<void>}
+ */
bulkAdd(objectStoreName, items, start, count) {
return this._db.bulkAdd(objectStoreName, items, start, count);
}
// Private
+ /**
+ * @template [TRow=unknown]
+ * @template [TItem=unknown]
+ * @template [TResult=unknown]
+ * @param {import('dictionary-database').ObjectStoreName} objectStoreName
+ * @param {string[]} indexNames
+ * @param {TItem[]} items
+ * @param {import('dictionary-database').CreateQuery<TItem>} createQuery
+ * @param {import('dictionary-database').FindPredicate<TItem, TRow>} predicate
+ * @param {import('dictionary-database').CreateResult<TItem, TRow, TResult>} createResult
+ * @returns {Promise<TResult[]>}
+ */
_findMultiBulk(objectStoreName, indexNames, items, createQuery, predicate, createResult) {
return new Promise((resolve, reject) => {
const itemCount = items.length;
const indexCount = indexNames.length;
+ /** @type {TResult[]} */
const results = [];
if (itemCount === 0 || indexCount === 0) {
resolve(results);
@@ -352,6 +469,10 @@ export class DictionaryDatabase {
}
let completeCount = 0;
const requiredCompleteCount = itemCount * indexCount;
+ /**
+ * @param {TRow[]} rows
+ * @param {import('dictionary-database').FindMultiBulkData<TItem>} data
+ */
const onGetAll = (rows, data) => {
for (const row of rows) {
if (predicate(row, data.item)) {
@@ -366,15 +487,28 @@ export class DictionaryDatabase {
const item = items[i];
const query = createQuery(item);
for (let j = 0; j < indexCount; ++j) {
- this._db.getAll(indexList[j], query, onGetAll, reject, {item, itemIndex: i, indexIndex: j});
+ /** @type {import('dictionary-database').FindMultiBulkData<TItem>} */
+ const data = {item, itemIndex: i, indexIndex: j};
+ this._db.getAll(indexList[j], query, onGetAll, reject, data);
}
}
});
}
+ /**
+ * @template [TRow=unknown]
+ * @template [TItem=unknown]
+ * @param {import('dictionary-database').ObjectStoreName} objectStoreName
+ * @param {string} indexName
+ * @param {TItem[]} items
+ * @param {import('dictionary-database').CreateQuery<TItem>} createQuery
+ * @param {import('dictionary-database').FindPredicate<TItem, TRow>} predicate
+ * @returns {Promise<(TRow|undefined)[]>}
+ */
_findFirstBulk(objectStoreName, indexName, items, createQuery, predicate) {
return new Promise((resolve, reject) => {
const itemCount = items.length;
+ /** @type {(TRow|undefined)[]} */
const results = new Array(itemCount);
if (itemCount === 0) {
resolve(results);
@@ -385,6 +519,10 @@ export class DictionaryDatabase {
const objectStore = transaction.objectStore(objectStoreName);
const index = objectStore.index(indexName);
let completeCount = 0;
+ /**
+ * @param {TRow|undefined} row
+ * @param {number} itemIndex
+ */
const onFind = (row, itemIndex) => {
results[itemIndex] = row;
if (++completeCount >= itemCount) {
@@ -399,16 +537,47 @@ export class DictionaryDatabase {
});
}
+ /**
+ * @param {import('dictionary-database').MatchType} matchType
+ * @param {import('dictionary-database').DatabaseTermEntryWithId} row
+ * @param {import('dictionary-database').FindMultiBulkData<string>} data
+ * @returns {import('dictionary-database').TermEntry}
+ */
_createTermGeneric(matchType, row, data) {
const matchSourceIsTerm = (data.indexIndex === 0);
const matchSource = (matchSourceIsTerm ? 'term' : 'reading');
if ((matchSourceIsTerm ? row.expression : row.reading) === data.item) {
matchType = 'exact';
}
- return this._createTerm(matchSource, matchType, row, data);
+ return this._createTerm(matchSource, matchType, row, data.itemIndex);
+ }
+
+ /**
+ * @param {import('dictionary-database').DatabaseTermEntryWithId} row
+ * @param {import('dictionary-database').FindMultiBulkData<import('dictionary-database').TermExactRequest>} data
+ * @returns {import('dictionary-database').TermEntry}
+ */
+ _createTermExact(row, data) {
+ return this._createTerm('term', 'exact', row, data.itemIndex);
}
- _createTerm(matchSource, matchType, row, {itemIndex: index}) {
+ /**
+ * @param {import('dictionary-database').DatabaseTermEntryWithId} row
+ * @param {import('dictionary-database').FindMultiBulkData<import('dictionary-database').DictionaryAndQueryRequest>} data
+ * @returns {import('dictionary-database').TermEntry}
+ */
+ _createTermSequenceExact(row, data) {
+ return this._createTerm('sequence', 'exact', row, data.itemIndex);
+ }
+
+ /**
+ * @param {import('dictionary-database').MatchSource} matchSource
+ * @param {import('dictionary-database').MatchType} matchType
+ * @param {import('dictionary-database').DatabaseTermEntryWithId} row
+ * @param {number} index
+ * @returns {import('dictionary-database').TermEntry}
+ */
+ _createTerm(matchSource, matchType, row, index) {
const {sequence} = row;
return {
index,
@@ -427,7 +596,13 @@ export class DictionaryDatabase {
};
}
+ /**
+ * @param {import('dictionary-database').DatabaseKanjiEntry} row
+ * @param {import('dictionary-database').FindMultiBulkData<string>} data
+ * @returns {import('dictionary-database').KanjiEntry}
+ */
_createKanji(row, {itemIndex: index}) {
+ const {stats} = row;
return {
index,
character: row.character,
@@ -435,23 +610,51 @@ export class DictionaryDatabase {
kunyomi: this._splitField(row.kunyomi),
tags: this._splitField(row.tags),
definitions: row.meanings,
- stats: row.stats,
+ stats: typeof stats === 'object' && stats !== null ? stats : {},
dictionary: row.dictionary
};
}
+ /**
+ * @param {import('dictionary-database').DatabaseTermMeta} row
+ * @param {import('dictionary-database').FindMultiBulkData<string>} data
+ * @returns {import('dictionary-database').TermMeta}
+ * @throws {Error}
+ */
_createTermMeta({expression: term, mode, data, dictionary}, {itemIndex: index}) {
- return {term, mode, data, dictionary, index};
+ switch (mode) {
+ case 'freq':
+ return {index, term, mode, data, dictionary};
+ case 'pitch':
+ return {index, term, mode, data, dictionary};
+ default:
+ throw new Error(`Unknown mode: ${mode}`);
+ }
}
+ /**
+ * @param {import('dictionary-database').DatabaseKanjiMeta} row
+ * @param {import('dictionary-database').FindMultiBulkData<string>} data
+ * @returns {import('dictionary-database').KanjiMeta}
+ */
_createKanjiMeta({character, mode, data, dictionary}, {itemIndex: index}) {
- return {character, mode, data, dictionary, index};
+ return {index, character, mode, data, dictionary};
}
+ /**
+ * @param {import('dictionary-database').MediaDataArrayBufferContent} row
+ * @param {import('dictionary-database').FindMultiBulkData<import('dictionary-database').MediaRequest>} data
+ * @returns {import('dictionary-database').Media}
+ */
_createMedia(row, {itemIndex: index}) {
- return Object.assign({}, row, {index});
+ const {dictionary, path, mediaType, width, height, content} = row;
+ return {index, dictionary, path, mediaType, width, height, content};
}
+ /**
+ * @param {unknown} field
+ * @returns {string[]}
+ */
_splitField(field) {
return typeof field === 'string' && field.length > 0 ? field.split(' ') : [];
}
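
The bulk lookup methods above all share one shape: a list of items, a query factory, a row predicate, and a result factory, fanned out across a single IndexedDB transaction. A hypothetical usage sketch under those signatures; the import path and the dictionary name are placeholders, and the destructured fields follow the _createTerm result shape shown above:

    // Hypothetical usage sketch (not part of the commit): bulk term lookup
    // against a set of enabled dictionaries.
    import {DictionaryDatabase} from './ext/js/language/dictionary-database.js';

    async function lookupExample() {
        const db = new DictionaryDatabase();
        await db.prepare();

        // The DictionarySet only needs a has(dictionaryName) method; a Set works.
        const enabledDictionaries = new Set(['Example Dictionary']);

        // matchType is 'exact', 'prefix', or 'suffix'; each result carries the
        // index of the input term it matched.
        const entries = await db.findTermsBulk(['読む', '読み'], enabledDictionaries, 'exact');
        for (const {index, term, reading, dictionary} of entries) {
            console.log(index, term, reading, dictionary);
        }

        await db.close();
    }
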
diff --git a/ext/js/language/dictionary-importer-media-loader.js b/ext/js/language/dictionary-importer-media-loader.js
index 7d4f798c..a5857dce 100644
--- a/ext/js/language/dictionary-importer-media-loader.js
+++ b/ext/js/language/dictionary-importer-media-loader.js
@@ -22,15 +22,7 @@ import {EventListenerCollection} from '../core.js';
* Class used for loading and validating media during the dictionary import process.
*/
export class DictionaryImporterMediaLoader {
- /**
- * Attempts to load an image using an ArrayBuffer and a media type to return details about it.
- * @param {ArrayBuffer} content The binary content for the image, encoded as an ArrayBuffer.
- * @param {string} mediaType The media type for the image content.
- * @param {Transferable[]} [transfer] An optional array of data that should be transferred in `postMessage` calls.
- * When the resulting promise resolves, this array will contain the `content` object.
- * @returns {Promise<{content: ArrayBuffer, width: number, height: number}>} Details about the requested image content.
- * @throws {Error} An error can be thrown if the image fails to load.
- */
+ /** @type {import('dictionary-importer-media-loader').GetImageDetailsFunction} */
getImageDetails(content, mediaType, transfer) {
return new Promise((resolve, reject) => {
const image = new Image();
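
The change in this file drops a hand-written JSDoc block in favour of a single @type tag that points at a shared function type, so the signature is declared once and reused by every implementation. A small sketch of that pattern, with a local @typedef standing in for the commit's 'dictionary-importer-media-loader' type file:

    /**
     * Illustrative typedef standing in for the shared GetImageDetailsFunction type.
     * @typedef {(content: ArrayBuffer, mediaType: string, transfer?: Transferable[]) =>
     *     Promise<{content: ArrayBuffer, width: number, height: number}>} GetImageDetailsFunction
     */

    class ExampleMediaLoader {
        /** @type {GetImageDetailsFunction} */
        getImageDetails(content, mediaType, transfer) {
            // Parameter and return types all flow from the single @type tag above.
            return Promise.resolve({content, width: 0, height: 0});
        }
    }
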
diff --git a/ext/js/language/dictionary-importer.js b/ext/js/language/dictionary-importer.js
index 791d1a77..aa6d7ae6 100644
--- a/ext/js/language/dictionary-importer.js
+++ b/ext/js/language/dictionary-importer.js
@@ -20,13 +20,27 @@ import * as ajvSchemas from '../../lib/validate-schemas.js';
import {BlobWriter, TextWriter, Uint8ArrayReader, ZipReader, configure} from '../../lib/zip.js';
import {stringReverse} from '../core.js';
import {MediaUtil} from '../media/media-util.js';
+
export class DictionaryImporter {
+ /**
+ * @param {import('dictionary-importer-media-loader').GenericMediaLoader} mediaLoader
+ * @param {import('dictionary-importer').OnProgressCallback} [onProgress]
+ */
constructor(mediaLoader, onProgress) {
+ /** @type {import('dictionary-importer-media-loader').GenericMediaLoader} */
this._mediaLoader = mediaLoader;
+ /** @type {import('dictionary-importer').OnProgressCallback} */
this._onProgress = typeof onProgress === 'function' ? onProgress : () => {};
- this._progressData = null;
+ /** @type {import('dictionary-importer').ProgressData} */
+ this._progressData = this._createProgressData();
}
+ /**
+ * @param {DictionaryDatabase} dictionaryDatabase
+ * @param {ArrayBuffer} archiveContent
+ * @param {import('dictionary-importer').ImportDetails} details
+ * @returns {Promise<import('dictionary-importer').ImportResult>}
+ */
async importDictionary(dictionaryDatabase, archiveContent, details) {
if (!dictionaryDatabase) {
throw new Error('Invalid database');
@@ -69,9 +83,9 @@ export class DictionaryImporter {
}
const dictionaryTitle = index.title;
- const version = index.format || index.version;
+ const version = typeof index.format === 'number' ? index.format : index.version;
- if (!dictionaryTitle || !index.revision) {
+ if (typeof version !== 'number' || !dictionaryTitle || !index.revision) {
throw new Error('Unrecognized dictionary format');
}
@@ -80,13 +94,6 @@ export class DictionaryImporter {
throw new Error('Dictionary is already imported');
}
- // Data format converters
- const convertTermBankEntry = (version === 1 ? this._convertTermBankEntryV1.bind(this) : this._convertTermBankEntryV3.bind(this));
- const convertTermMetaBankEntry = this._convertTermMetaBankEntry.bind(this);
- const convertKanjiBankEntry = (version === 1 ? this._convertKanjiBankEntryV1.bind(this) : this._convertKanjiBankEntryV3.bind(this));
- const convertKanjiMetaBankEntry = this._convertKanjiMetaBankEntry.bind(this);
- const convertTagBankEntry = this._convertTagBankEntry.bind(this);
-
// Load schemas
this._progressNextStep(0);
const dataBankSchemas = this._getDataBankSchemas(version);
@@ -100,11 +107,19 @@ export class DictionaryImporter {
// Load data
this._progressNextStep(termFiles.length + termMetaFiles.length + kanjiFiles.length + kanjiMetaFiles.length + tagFiles.length);
- const termList = await this._readFileSequence(termFiles, convertTermBankEntry, dataBankSchemas[0], dictionaryTitle);
- const termMetaList = await this._readFileSequence(termMetaFiles, convertTermMetaBankEntry, dataBankSchemas[1], dictionaryTitle);
- const kanjiList = await this._readFileSequence(kanjiFiles, convertKanjiBankEntry, dataBankSchemas[2], dictionaryTitle);
- const kanjiMetaList = await this._readFileSequence(kanjiMetaFiles, convertKanjiMetaBankEntry, dataBankSchemas[3], dictionaryTitle);
- const tagList = await this._readFileSequence(tagFiles, convertTagBankEntry, dataBankSchemas[4], dictionaryTitle);
+ const termList = await (
+ version === 1 ?
+ this._readFileSequence(termFiles, this._convertTermBankEntryV1.bind(this), dataBankSchemas[0], dictionaryTitle) :
+ this._readFileSequence(termFiles, this._convertTermBankEntryV3.bind(this), dataBankSchemas[0], dictionaryTitle)
+ );
+ const termMetaList = await this._readFileSequence(termMetaFiles, this._convertTermMetaBankEntry.bind(this), dataBankSchemas[1], dictionaryTitle);
+ const kanjiList = await (
+ version === 1 ?
+ this._readFileSequence(kanjiFiles, this._convertKanjiBankEntryV1.bind(this), dataBankSchemas[2], dictionaryTitle) :
+ this._readFileSequence(kanjiFiles, this._convertKanjiBankEntryV3.bind(this), dataBankSchemas[2], dictionaryTitle)
+ );
+ const kanjiMetaList = await this._readFileSequence(kanjiMetaFiles, this._convertKanjiMetaBankEntry.bind(this), dataBankSchemas[3], dictionaryTitle);
+ const tagList = await this._readFileSequence(tagFiles, this._convertTagBankEntry.bind(this), dataBankSchemas[4], dictionaryTitle);
this._addOldIndexTags(index, tagList, dictionaryTitle);
// Prefix wildcard support
@@ -119,6 +134,7 @@ export class DictionaryImporter {
// Extended data support
this._progressNextStep(termList.length);
const formatProgressInterval = 1000;
+ /** @type {import('dictionary-importer').ImportRequirement[]} */
const requirements = [];
for (let i = 0, ii = termList.length; i < ii; ++i) {
const entry = termList[i];
@@ -142,6 +158,7 @@ export class DictionaryImporter {
// Add dictionary descriptor
this._progressNextStep(termList.length + termMetaList.length + kanjiList.length + kanjiMetaList.length + tagList.length + media.length);
+ /** @type {import('dictionary-importer').SummaryCounts} */
const counts = {
terms: {total: termList.length},
termMeta: this._getMetaCounts(termMetaList),
@@ -154,9 +171,15 @@ export class DictionaryImporter {
dictionaryDatabase.bulkAdd('dictionaries', [summary], 0, 1);
// Add data
+ /** @type {Error[]} */
const errors = [];
const maxTransactionLength = 1000;
+ /**
+ * @template {import('dictionary-database').ObjectStoreName} T
+ * @param {T} objectStoreName
+ * @param {import('dictionary-database').ObjectStoreData<T>[]} entries
+ */
const bulkAdd = async (objectStoreName, entries) => {
const ii = entries.length;
for (let i = 0; i < ii; i += maxTransactionLength) {
@@ -165,7 +188,7 @@ export class DictionaryImporter {
try {
await dictionaryDatabase.bulkAdd(objectStoreName, entries, i, count);
} catch (e) {
- errors.push(e);
+ errors.push(e instanceof Error ? e : new Error(`${e}`));
}
this._progressData.index += count;
@@ -185,16 +208,27 @@ export class DictionaryImporter {
return {result: summary, errors};
}
- _progressReset() {
- this._progressData = {
+ /**
+ * @returns {import('dictionary-importer').ProgressData}
+ */
+ _createProgressData() {
+ return {
stepIndex: 0,
stepCount: 6,
index: 0,
count: 0
};
+ }
+
+ /** */
+ _progressReset() {
+ this._progressData = this._createProgressData();
this._progress();
}
+ /**
+ * @param {number} count
+ */
_progressNextStep(count) {
++this._progressData.stepIndex;
this._progressData.index = 0;
@@ -202,17 +236,31 @@ export class DictionaryImporter {
this._progress();
}
+ /** */
_progress() {
this._onProgress(this._progressData);
}
+ /**
+ * @param {string} dictionaryTitle
+ * @param {number} version
+ * @param {import('dictionary-data').Index} index
+ * @param {{prefixWildcardsSupported: boolean, counts: import('dictionary-importer').SummaryCounts}} details
+ * @returns {import('dictionary-importer').Summary}
+ */
_createSummary(dictionaryTitle, version, index, details) {
+ const indexSequenced = index.sequenced;
+ const {prefixWildcardsSupported, counts} = details;
+
+ /** @type {import('dictionary-importer').Summary} */
const summary = {
title: dictionaryTitle,
revision: index.revision,
- sequenced: index.sequenced,
+ sequenced: typeof indexSequenced === 'boolean' && indexSequenced,
version,
- importDate: Date.now()
+ importDate: Date.now(),
+ prefixWildcardsSupported,
+ counts
};
const {author, url, description, attribution, frequencyMode} = index;
@@ -222,11 +270,14 @@ export class DictionaryImporter {
if (typeof attribution === 'string') { summary.attribution = attribution; }
if (typeof frequencyMode === 'string') { summary.frequencyMode = frequencyMode; }
- Object.assign(summary, details);
-
return summary;
}
+ /**
+ *
+ * @param schema
+ * @param fileName
+ */
_formatAjvSchemaError(schema, fileName) {
const e2 = new Error(`Dictionary has invalid data in '${fileName}'`);
e2.data = schema.errors;
@@ -234,6 +285,10 @@ export class DictionaryImporter {
return e2;
}
+ /**
+ *
+ * @param version
+ */
_getDataBankSchemas(version) {
const termBank = (
version === 1 ?
@@ -252,6 +307,13 @@ export class DictionaryImporter {
return [termBank, termMetaBank, kanjiBank, kanjiMetaBank, tagBank];
}
+ /**
+ * @param {import('dictionary-data').TermGlossaryText|import('dictionary-data').TermGlossaryImage|import('dictionary-data').TermGlossaryStructuredContent} data
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @param {import('dictionary-importer').ImportRequirement[]} requirements
+ * @returns {import('dictionary-data').TermGlossary}
+ * @throws {Error}
+ */
_formatDictionaryTermGlossaryObject(data, entry, requirements) {
switch (data.type) {
case 'text':
@@ -261,16 +323,32 @@ export class DictionaryImporter {
case 'structured-content':
return this._formatStructuredContent(data, entry, requirements);
default:
- throw new Error(`Unhandled data type: ${data.type}`);
+ throw new Error(`Unhandled data type: ${/** @type {import('core').SerializableObject} */ (data).type}`);
}
}
+ /**
+ * @param {import('dictionary-data').TermGlossaryImage} data
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @param {import('dictionary-importer').ImportRequirement[]} requirements
+ * @returns {import('dictionary-data').TermGlossaryImage}
+ */
_formatDictionaryTermGlossaryImage(data, entry, requirements) {
- const target = {};
- requirements.push({type: 'image', target, args: [data, entry]});
+ /** @type {import('dictionary-data').TermGlossaryImage} */
+ const target = {
+ type: 'image',
+ path: '' // Will be populated during requirement resolution
+ };
+ requirements.push({type: 'image', target, source: data, entry});
return target;
}
+ /**
+ * @param {import('dictionary-data').TermGlossaryStructuredContent} data
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @param {import('dictionary-importer').ImportRequirement[]} requirements
+ * @returns {import('dictionary-data').TermGlossaryStructuredContent}
+ */
_formatStructuredContent(data, entry, requirements) {
const content = this._prepareStructuredContent(data.content, entry, requirements);
return {
@@ -279,6 +357,12 @@ export class DictionaryImporter {
};
}
+ /**
+ * @param {import('structured-content').Content} content
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @param {import('dictionary-importer').ImportRequirement[]} requirements
+ * @returns {import('structured-content').Content}
+ */
_prepareStructuredContent(content, entry, requirements) {
if (typeof content === 'string' || !(typeof content === 'object' && content !== null)) {
return content;
@@ -301,12 +385,27 @@ export class DictionaryImporter {
return content;
}
+ /**
+ * @param {import('structured-content').ImageElement} content
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @param {import('dictionary-importer').ImportRequirement[]} requirements
+ * @returns {import('structured-content').ImageElement}
+ */
_prepareStructuredContentImage(content, entry, requirements) {
- const target = {};
- requirements.push({type: 'structured-content-image', target, args: [content, entry]});
+ /** @type {import('structured-content').ImageElement} */
+ const target = {
+ tag: 'img',
+ path: '' // Will be populated during requirement resolution
+ };
+ requirements.push({type: 'structured-content-image', target, source: content, entry});
return target;
}
+ /**
+ *
+ * @param requirements
+ * @param zipEntriesObject
+ */
async _resolveAsyncRequirements(requirements, zipEntriesObject) {
const media = new Map();
const context = {zipEntriesObject, media};
@@ -320,37 +419,65 @@ export class DictionaryImporter {
};
}
+ /**
+ * @param {import('dictionary-importer').ImportRequirementContext} context
+ * @param {import('dictionary-importer').ImportRequirement} requirement
+ */
async _resolveAsyncRequirement(context, requirement) {
- const {type, target, args} = requirement;
- let result;
- switch (type) {
+ switch (requirement.type) {
case 'image':
- result = await this._resolveDictionaryTermGlossaryImage(context, ...args);
+ await this._resolveDictionaryTermGlossaryImage(
+ context,
+ requirement.target,
+ requirement.source,
+ requirement.entry
+ );
break;
case 'structured-content-image':
- result = await this._resolveStructuredContentImage(context, ...args);
+ await this._resolveStructuredContentImage(
+ context,
+ requirement.target,
+ requirement.source,
+ requirement.entry
+ );
break;
default:
return;
}
- Object.assign(target, result);
++this._progressData.index;
this._progress();
}
- async _resolveDictionaryTermGlossaryImage(context, data, entry) {
- return await this._createImageData(context, data, entry, {type: 'image'});
- }
-
- async _resolveStructuredContentImage(context, content, entry) {
- const {verticalAlign, sizeUnits} = content;
- const result = await this._createImageData(context, content, entry, {tag: 'img'});
- if (typeof verticalAlign === 'string') { result.verticalAlign = verticalAlign; }
- if (typeof sizeUnits === 'string') { result.sizeUnits = sizeUnits; }
- return result;
- }
-
- async _createImageData(context, data, entry, attributes) {
+ /**
+ * @param {import('dictionary-importer').ImportRequirementContext} context
+ * @param {import('dictionary-data').TermGlossaryImage} target
+ * @param {import('dictionary-data').TermGlossaryImage} source
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ */
+ async _resolveDictionaryTermGlossaryImage(context, target, source, entry) {
+ await this._createImageData(context, target, source, entry);
+ }
+
+ /**
+ * @param {import('dictionary-importer').ImportRequirementContext} context
+ * @param {import('structured-content').ImageElement} target
+ * @param {import('structured-content').ImageElement} source
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ */
+ async _resolveStructuredContentImage(context, target, source, entry) {
+ const {verticalAlign, sizeUnits} = source;
+ await this._createImageData(context, target, source, entry);
+ if (typeof verticalAlign === 'string') { target.verticalAlign = verticalAlign; }
+ if (typeof sizeUnits === 'string') { target.sizeUnits = sizeUnits; }
+ }
+
+ /**
+ * @param {import('dictionary-importer').ImportRequirementContext} context
+ * @param {import('structured-content').ImageElementBase} target
+ * @param {import('structured-content').ImageElementBase} source
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ */
+ async _createImageData(context, target, source, entry) {
const {
path,
width: preferredWidth,
@@ -363,26 +490,37 @@ export class DictionaryImporter {
background,
collapsed,
collapsible
- } = data;
+ } = source;
const {width, height} = await this._getImageMedia(context, path, entry);
- const newData = Object.assign({}, attributes, {path, width, height});
- if (typeof preferredWidth === 'number') { newData.preferredWidth = preferredWidth; }
- if (typeof preferredHeight === 'number') { newData.preferredHeight = preferredHeight; }
- if (typeof title === 'string') { newData.title = title; }
- if (typeof description === 'string') { newData.description = description; }
- if (typeof pixelated === 'boolean') { newData.pixelated = pixelated; }
- if (typeof imageRendering === 'string') { newData.imageRendering = imageRendering; }
- if (typeof appearance === 'string') { newData.appearance = appearance; }
- if (typeof background === 'boolean') { newData.background = background; }
- if (typeof collapsed === 'boolean') { newData.collapsed = collapsed; }
- if (typeof collapsible === 'boolean') { newData.collapsible = collapsible; }
- return newData;
- }
-
+ target.path = path;
+ target.width = width;
+ target.height = height;
+ if (typeof preferredWidth === 'number') { target.preferredWidth = preferredWidth; }
+ if (typeof preferredHeight === 'number') { target.preferredHeight = preferredHeight; }
+ if (typeof title === 'string') { target.title = title; }
+ if (typeof description === 'string') { target.description = description; }
+ if (typeof pixelated === 'boolean') { target.pixelated = pixelated; }
+ if (typeof imageRendering === 'string') { target.imageRendering = imageRendering; }
+ if (typeof appearance === 'string') { target.appearance = appearance; }
+ if (typeof background === 'boolean') { target.background = background; }
+ if (typeof collapsed === 'boolean') { target.collapsed = collapsed; }
+ if (typeof collapsible === 'boolean') { target.collapsible = collapsible; }
+ }
+
+ /**
+ * @param {import('dictionary-importer').ImportRequirementContext} context
+ * @param {string} path
+ * @param {import('dictionary-database').DatabaseTermEntry} entry
+ * @returns {Promise<import('dictionary-database').MediaDataArrayBufferContent>}
+ */
async _getImageMedia(context, path, entry) {
const {media} = context;
const {dictionary} = entry;
+ /**
+ * @param {string} message
+ * @returns {Error}
+ */
const createError = (message) => {
const {expression, reading} = entry;
const readingSource = reading.length > 0 ? ` (${reading})`: '';
@@ -437,6 +575,10 @@ export class DictionaryImporter {
return mediaData;
}
+ /**
+ * @param {string} url
+ * @returns {Promise<unknown>}
+ */
async _fetchJsonAsset(url) {
const response = await fetch(url, {
method: 'GET',
@@ -452,6 +594,11 @@ export class DictionaryImporter {
return await response.json();
}
+ /**
+ * @param {import('dictionary-data').TermV1} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseTermEntry}
+ */
_convertTermBankEntryV1(entry, dictionary) {
let [expression, reading, definitionTags, rules, score, ...glossary] = entry;
expression = this._normalizeTermOrReading(expression);
@@ -459,6 +606,11 @@ export class DictionaryImporter {
return {expression, reading, definitionTags, rules, score, glossary, dictionary};
}
+ /**
+ * @param {import('dictionary-data').TermV3} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseTermEntry}
+ */
_convertTermBankEntryV3(entry, dictionary) {
let [expression, reading, definitionTags, rules, score, glossary, sequence, termTags] = entry;
expression = this._normalizeTermOrReading(expression);
@@ -466,40 +618,75 @@ export class DictionaryImporter {
return {expression, reading, definitionTags, rules, score, glossary, sequence, termTags, dictionary};
}
+ /**
+ * @param {import('dictionary-data').TermMeta} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseTermMeta}
+ */
_convertTermMetaBankEntry(entry, dictionary) {
const [expression, mode, data] = entry;
- return {expression, mode, data, dictionary};
+ return /** @type {import('dictionary-database').DatabaseTermMeta} */ ({expression, mode, data, dictionary});
}
+ /**
+ * @param {import('dictionary-data').KanjiV1} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseKanjiEntry}
+ */
_convertKanjiBankEntryV1(entry, dictionary) {
const [character, onyomi, kunyomi, tags, ...meanings] = entry;
return {character, onyomi, kunyomi, tags, meanings, dictionary};
}
+ /**
+ * @param {import('dictionary-data').KanjiV3} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseKanjiEntry}
+ */
_convertKanjiBankEntryV3(entry, dictionary) {
const [character, onyomi, kunyomi, tags, meanings, stats] = entry;
return {character, onyomi, kunyomi, tags, meanings, stats, dictionary};
}
+ /**
+ * @param {import('dictionary-data').KanjiMeta} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').DatabaseKanjiMeta}
+ */
_convertKanjiMetaBankEntry(entry, dictionary) {
const [character, mode, data] = entry;
return {character, mode, data, dictionary};
}
+ /**
+ * @param {import('dictionary-data').Tag} entry
+ * @param {string} dictionary
+ * @returns {import('dictionary-database').Tag}
+ */
_convertTagBankEntry(entry, dictionary) {
const [name, category, order, notes, score] = entry;
return {name, category, order, notes, score, dictionary};
}
+ /**
+ * @param {import('dictionary-data').Index} index
+ * @param {import('dictionary-database').Tag[]} results
+ * @param {string} dictionary
+ */
_addOldIndexTags(index, results, dictionary) {
const {tagMeta} = index;
if (typeof tagMeta !== 'object' || tagMeta === null) { return; }
- for (const name of Object.keys(tagMeta)) {
- const {category, order, notes, score} = tagMeta[name];
+ for (const [name, value] of Object.entries(tagMeta)) {
+ const {category, order, notes, score} = value;
results.push({name, category, order, notes, score, dictionary});
}
}
+ /**
+ *
+ * @param zipEntriesObject
+ * @param fileNameFormat
+ */
_getArchiveFiles(zipEntriesObject, fileNameFormat) {
const indexPosition = fileNameFormat.indexOf('?');
const prefix = fileNameFormat.substring(0, indexPosition);
@@ -513,6 +700,13 @@ export class DictionaryImporter {
return results;
}
+ /**
+ *
+ * @param files
+ * @param convertEntry
+ * @param schemaName
+ * @param dictionaryTitle
+ */
async _readFileSequence(files, convertEntry, schemaName, dictionaryTitle) {
const progressData = this._progressData;
let startIndex = 0;
@@ -534,20 +728,28 @@ export class DictionaryImporter {
progressData.index = startIndex + 1;
this._progress();
- for (const entry of entries) {
- results.push(convertEntry(entry, dictionaryTitle));
+ if (Array.isArray(entries)) {
+ for (const entry of entries) {
+ results.push(convertEntry(/** @type {TEntry} */ (entry), dictionaryTitle));
+ }
}
}
return results;
}
+ /**
+ * @param {import('dictionary-database').DatabaseTermMeta[]|import('dictionary-database').DatabaseKanjiMeta[]} metaList
+ * @returns {import('dictionary-importer').SummaryMetaCount}
+ */
_getMetaCounts(metaList) {
+ /** @type {Map<string, number>} */
const countsMap = new Map();
for (const {mode} of metaList) {
let count = countsMap.get(mode);
count = typeof count !== 'undefined' ? count + 1 : 1;
countsMap.set(mode, count);
}
+ /** @type {import('dictionary-importer').SummaryMetaCount} */
const counts = {total: metaList.length};
for (const [key, value] of countsMap.entries()) {
if (Object.prototype.hasOwnProperty.call(counts, key)) { continue; }
@@ -556,6 +758,10 @@ export class DictionaryImporter {
return counts;
}
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
_normalizeTermOrReading(text) {
// Note: this function should not perform String.normalize on the text,
        // as it will alter characters in an undesirable way.
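
The importer no longer Object.assigns a computed result onto an empty placeholder object; each requirement now carries a correctly shaped target that is filled in place once its media has loaded. A compact sketch of that deferred-fill pattern; the function names and the loadImageSize callback are illustrative, not the commit's actual helpers:

    // Sketch of the deferred-resolution pattern used for glossary images above.
    function collectImage(source, requirements) {
        // The target starts out with a valid shape so it can be stored immediately.
        const target = {type: 'image', path: ''};
        requirements.push({type: 'image', target, source});
        return target;
    }

    async function resolveRequirements(requirements, loadImageSize) {
        for (const {target, source} of requirements) {
            const {width, height} = await loadImageSize(source.path);
            // The placeholder is mutated in place rather than replaced, so
            // references held elsewhere keep pointing at the same object.
            target.path = source.path;
            target.width = width;
            target.height = height;
        }
    }

    // Usage:
    // const requirements = [];
    // const entry = collectImage({type: 'image', path: 'img/example.png'}, requirements);
    // await resolveRequirements(requirements, async () => ({width: 32, height: 32}));
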
diff --git a/ext/js/language/dictionary-worker-handler.js b/ext/js/language/dictionary-worker-handler.js
index b8c41b26..8ac342b2 100644
--- a/ext/js/language/dictionary-worker-handler.js
+++ b/ext/js/language/dictionary-worker-handler.js
@@ -23,17 +23,22 @@ import {DictionaryWorkerMediaLoader} from './dictionary-worker-media-loader.js';
export class DictionaryWorkerHandler {
constructor() {
+ /** @type {DictionaryWorkerMediaLoader} */
this._mediaLoader = new DictionaryWorkerMediaLoader();
}
+ /** */
prepare() {
self.addEventListener('message', this._onMessage.bind(this), false);
}
// Private
- _onMessage(e) {
- const {action, params} = e.data;
+ /**
+ * @param {MessageEvent<import('dictionary-worker-handler').Message>} event
+ */
+ _onMessage(event) {
+ const {action, params} = event.data;
switch (action) {
case 'importDictionary':
this._onMessageWithProgress(params, this._importDictionary.bind(this));
@@ -50,7 +55,15 @@ export class DictionaryWorkerHandler {
}
}
+ /**
+ * @template [T=unknown]
+ * @param {T} params
+ * @param {(details: T, onProgress: import('dictionary-worker-handler').OnProgressCallback) => Promise<unknown>} handler
+ */
async _onMessageWithProgress(params, handler) {
+ /**
+ * @param {...unknown} args
+ */
const onProgress = (...args) => {
self.postMessage({
action: 'progress',
@@ -62,11 +75,16 @@ export class DictionaryWorkerHandler {
const result = await handler(params, onProgress);
response = {result};
} catch (e) {
- response = {error: serializeError(e)};
+ response = {error: ExtensionError.serialize(e)};
}
self.postMessage({action: 'complete', params: response});
}
+ /**
+ * @param {import('dictionary-worker-handler').ImportDictionaryMessageParams} details
+ * @param {import('dictionary-worker-handler').OnProgressCallback} onProgress
+ * @returns {Promise<import('dictionary-worker').MessageCompleteResultSerialized>}
+ */
async _importDictionary({details, archiveContent}, onProgress) {
const dictionaryDatabase = await this._getPreparedDictionaryDatabase();
try {
@@ -74,13 +92,18 @@ export class DictionaryWorkerHandler {
const {result, errors} = await dictionaryImporter.importDictionary(dictionaryDatabase, archiveContent, details);
return {
result,
- errors: errors.map((error) => serializeError(error))
+ errors: errors.map((error) => ExtensionError.serialize(error))
};
} finally {
dictionaryDatabase.close();
}
}
+ /**
+ * @param {import('dictionary-worker-handler').DeleteDictionaryMessageParams} details
+ * @param {import('dictionary-database').DeleteDictionaryProgressCallback} onProgress
+ * @returns {Promise<void>}
+ */
async _deleteDictionary({dictionaryTitle}, onProgress) {
const dictionaryDatabase = await this._getPreparedDictionaryDatabase();
try {
@@ -90,6 +113,10 @@ export class DictionaryWorkerHandler {
}
}
+ /**
+ * @param {import('dictionary-worker-handler').GetDictionaryCountsMessageParams} details
+ * @returns {Promise<import('dictionary-database').DictionaryCounts>}
+ */
async _getDictionaryCounts({dictionaryNames, getTotal}) {
const dictionaryDatabase = await this._getPreparedDictionaryDatabase();
try {
@@ -99,6 +126,9 @@ export class DictionaryWorkerHandler {
}
}
+ /**
+ * @returns {Promise<DictionaryDatabase>}
+ */
async _getPreparedDictionaryDatabase() {
const dictionaryDatabase = new DictionaryDatabase();
await dictionaryDatabase.prepare();
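
Each worker operation is wrapped so that progress callbacks become 'progress' messages and the final value, or a serialized error, becomes a single 'complete' message. A minimal sketch of that wrapper; serializeError here is a stand-in for ExtensionError.serialize, which is not shown in this diff:

    // Minimal sketch of the message-with-progress wrapper used above.
    const serializeError = (e) => ({message: e instanceof Error ? e.message : String(e)});

    async function runWithProgress(params, handler) {
        const onProgress = (...args) => {
            self.postMessage({action: 'progress', params: {args}});
        };
        let response;
        try {
            const result = await handler(params, onProgress);
            response = {result};
        } catch (e) {
            response = {error: serializeError(e)};
        }
        self.postMessage({action: 'complete', params: response});
    }
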
diff --git a/ext/js/language/dictionary-worker-media-loader.js b/ext/js/language/dictionary-worker-media-loader.js
index d58e46c5..9e3fd67e 100644
--- a/ext/js/language/dictionary-worker-media-loader.js
+++ b/ext/js/language/dictionary-worker-media-loader.js
@@ -27,12 +27,13 @@ export class DictionaryWorkerMediaLoader {
* Creates a new instance of the media loader.
*/
constructor() {
+ /** @type {Map<string, {resolve: (result: import('dictionary-worker-media-loader').ImageDetails) => void, reject: (reason?: import('core').RejectionReason) => void}>} */
this._requests = new Map();
}
/**
* Handles a response message posted to the worker thread.
- * @param {{id: string, error: object|undefined, result: any|undefined}} params Details of the response.
+ * @param {import('dictionary-worker-media-loader').HandleMessageParams} params Details of the response.
*/
handleMessage(params) {
const {id} = params;
@@ -41,24 +42,19 @@ export class DictionaryWorkerMediaLoader {
this._requests.delete(id);
const {error} = params;
if (typeof error !== 'undefined') {
- request.reject(deserializeError(error));
+ request.reject(ExtensionError.deserialize(error));
} else {
request.resolve(params.result);
}
}
- /**
- * Attempts to load an image using an ArrayBuffer and a media type to return details about it.
- * @param {ArrayBuffer} content The binary content for the image, encoded as an ArrayBuffer.
- * @param {string} mediaType The media type for the image content.
- * @returns {Promise<{content: ArrayBuffer, width: number, height: number}>} Details about the requested image content.
- * @throws {Error} An error can be thrown if the image fails to load.
- */
+ /** @type {import('dictionary-importer-media-loader').GetImageDetailsFunction} */
getImageDetails(content, mediaType) {
return new Promise((resolve, reject) => {
const id = generateId(16);
this._requests.set(id, {resolve, reject});
- self.postMessage({
+ // This is executed in a Worker context, so the self needs to be force cast
+ /** @type {Worker} */ (/** @type {unknown} */ (self)).postMessage({
action: 'getImageDetails',
params: {id, content, mediaType}
}, [content]);
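
Inside the worker, getImageDetails cannot touch the DOM, so the request is forwarded to the main thread and matched back up by id through a map of pending promises. A generic sketch of that bridge; makeId and the message shapes are illustrative stand-ins for generateId and the extension's actual protocol:

    // Sketch of the pending-request map pattern used above.
    const makeId = () => Math.random().toString(16).slice(2);

    class RequestBridge {
        constructor() {
            // Maps request id -> {resolve, reject} for in-flight requests.
            this._requests = new Map();
        }

        send(action, params, post) {
            return new Promise((resolve, reject) => {
                const id = makeId();
                this._requests.set(id, {resolve, reject});
                post({action, params: {id, ...params}});
            });
        }

        handleResponse({id, error, result}) {
            const request = this._requests.get(id);
            if (typeof request === 'undefined') { return; }
            this._requests.delete(id);
            if (typeof error !== 'undefined') {
                request.reject(new Error(error.message));
            } else {
                request.resolve(result);
            }
        }
    }
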
diff --git a/ext/js/language/dictionary-worker.js b/ext/js/language/dictionary-worker.js
index 18c300af..b9d0236c 100644
--- a/ext/js/language/dictionary-worker.js
+++ b/ext/js/language/dictionary-worker.js
@@ -16,37 +16,64 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-import {deserializeError, serializeError} from '../core.js';
import {DictionaryImporterMediaLoader} from './dictionary-importer-media-loader.js';
export class DictionaryWorker {
constructor() {
+ /** @type {DictionaryImporterMediaLoader} */
this._dictionaryImporterMediaLoader = new DictionaryImporterMediaLoader();
}
+ /**
+ * @param {ArrayBuffer} archiveContent
+ * @param {import('dictionary-importer').ImportDetails} details
+ * @param {?import('dictionary-worker').ImportProgressCallback} onProgress
+ * @returns {Promise<import('dictionary-importer').ImportResult>}
+ */
importDictionary(archiveContent, details, onProgress) {
return this._invoke(
'importDictionary',
{details, archiveContent},
[archiveContent],
onProgress,
- this._formatimportDictionaryResult.bind(this)
+ this._formatImportDictionaryResult.bind(this)
);
}
+ /**
+ * @param {string} dictionaryTitle
+ * @param {?import('dictionary-worker').DeleteProgressCallback} onProgress
+ * @returns {Promise<void>}
+ */
deleteDictionary(dictionaryTitle, onProgress) {
- return this._invoke('deleteDictionary', {dictionaryTitle}, [], onProgress);
+ return this._invoke('deleteDictionary', {dictionaryTitle}, [], onProgress, null);
}
+ /**
+ * @param {string[]} dictionaryNames
+ * @param {boolean} getTotal
+ * @returns {Promise<import('dictionary-database').DictionaryCounts>}
+ */
getDictionaryCounts(dictionaryNames, getTotal) {
- return this._invoke('getDictionaryCounts', {dictionaryNames, getTotal}, [], null);
+ return this._invoke('getDictionaryCounts', {dictionaryNames, getTotal}, [], null, null);
}
// Private
+ /**
+ * @template [TParams=import('core').SerializableObject]
+ * @template [TResponseRaw=unknown]
+ * @template [TResponse=unknown]
+ * @param {string} action
+ * @param {TParams} params
+ * @param {Transferable[]} transfer
+ * @param {?(arg: import('core').SafeAny) => void} onProgress
+ * @param {?(result: TResponseRaw) => TResponse} formatResult
+ */
_invoke(action, params, transfer, onProgress, formatResult) {
return new Promise((resolve, reject) => {
const worker = new Worker('/js/language/dictionary-worker-main.js', {type: 'module'});
+ /** @type {import('dictionary-worker').InvokeDetails<TResponseRaw, TResponse>} */
const details = {
complete: false,
worker,
@@ -56,20 +83,29 @@ export class DictionaryWorker {
onProgress,
formatResult
};
- const onMessage = this._onMessage.bind(this, details);
+ // Ugly typecast below due to not being able to explicitly state the template types
+ /** @type {(event: MessageEvent<import('dictionary-worker').MessageData<TResponseRaw>>) => void} */
+ const onMessage = /** @type {(details: import('dictionary-worker').InvokeDetails<TResponseRaw, TResponse>, event: MessageEvent<import('dictionary-worker').MessageData<TResponseRaw>>) => void} */ (this._onMessage).bind(this, details);
details.onMessage = onMessage;
worker.addEventListener('message', onMessage);
worker.postMessage({action, params}, transfer);
});
}
- _onMessage(details, e) {
+ /**
+ * @template [TResponseRaw=unknown]
+ * @template [TResponse=unknown]
+ * @param {import('dictionary-worker').InvokeDetails<TResponseRaw, TResponse>} details
+ * @param {MessageEvent<import('dictionary-worker').MessageData<TResponseRaw>>} event
+ */
+ _onMessage(details, event) {
if (details.complete) { return; }
- const {action, params} = e.data;
+ const {action, params} = event.data;
switch (action) {
case 'complete':
{
const {worker, resolve, reject, onMessage, formatResult} = details;
+ if (worker === null || onMessage === null || resolve === null || reject === null) { return; }
details.complete = true;
details.worker = null;
details.resolve = null;
@@ -86,50 +122,82 @@ export class DictionaryWorker {
this._onMessageProgress(params, details.onProgress);
break;
case 'getImageDetails':
- this._onMessageGetImageDetails(params, details.worker);
+ {
+ const {worker} = details;
+ if (worker === null) { return; }
+ this._onMessageGetImageDetails(params, worker);
+ }
break;
}
}
+ /**
+ * @template [TResponseRaw=unknown]
+ * @template [TResponse=unknown]
+ * @param {import('dictionary-worker').MessageCompleteParams<TResponseRaw>} params
+ * @param {(result: TResponse) => void} resolve
+ * @param {(reason?: import('core').RejectionReason) => void} reject
+ * @param {?(result: TResponseRaw) => TResponse} formatResult
+ */
_onMessageComplete(params, resolve, reject, formatResult) {
const {error} = params;
if (typeof error !== 'undefined') {
- reject(deserializeError(error));
+ reject(ExtensionError.deserialize(error));
} else {
- let {result} = params;
- try {
- if (typeof formatResult === 'function') {
- result = formatResult(result);
+ const {result} = params;
+ if (typeof formatResult === 'function') {
+ let result2;
+ try {
+ result2 = formatResult(result);
+ } catch (e) {
+ reject(e);
+ return;
}
- } catch (e) {
- reject(e);
- return;
+ resolve(result2);
+ } else {
+ // If formatResult is not provided, the response is assumed to be the same type
+ resolve(/** @type {TResponse} */ (/** @type {unknown} */ (result)));
}
- resolve(result);
}
}
+ /**
+ * @param {import('dictionary-worker').MessageProgressParams} params
+ * @param {?(...args: unknown[]) => void} onProgress
+ */
_onMessageProgress(params, onProgress) {
if (typeof onProgress !== 'function') { return; }
const {args} = params;
onProgress(...args);
}
+ /**
+ * @param {import('dictionary-worker').MessageGetImageDetailsParams} params
+ * @param {Worker} worker
+ */
async _onMessageGetImageDetails(params, worker) {
const {id, content, mediaType} = params;
+ /** @type {Transferable[]} */
const transfer = [];
let response;
try {
const result = await this._dictionaryImporterMediaLoader.getImageDetails(content, mediaType, transfer);
response = {id, result};
} catch (e) {
- response = {id, error: serializeError(e)};
+ response = {id, error: ExtensionError.serialize(e)};
}
worker.postMessage({action: 'getImageDetails.response', params: response}, transfer);
}
- _formatimportDictionaryResult(result) {
- result.errors = result.errors.map((error) => deserializeError(error));
- return result;
+ /**
+ * @param {import('dictionary-worker').MessageCompleteResultSerialized} response
+ * @returns {import('dictionary-worker').MessageCompleteResult}
+ */
+ _formatImportDictionaryResult(response) {
+ const {result, errors} = response;
+ return {
+ result,
+ errors: errors.map((error) => ExtensionError.deserialize(error))
+ };
}
}
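For illustration, the promise-wrapped worker round trip that _invoke and _onMessage implement can be sketched standalone as follows; this is a simplified sketch that ignores the progress and getImageDetails messages and uses a hypothetical call site, not the extension's exact API:

function invokeWorker(action, params, transfer = []) {
    return new Promise((resolve, reject) => {
        const worker = new Worker('/js/language/dictionary-worker-main.js', {type: 'module'});
        const onMessage = (event) => {
            const {action: responseAction, params: responseParams} = event.data;
            // Progress and getImageDetails messages are omitted in this sketch
            if (responseAction !== 'complete') { return; }
            worker.removeEventListener('message', onMessage);
            worker.terminate();
            if (typeof responseParams.error !== 'undefined') {
                reject(responseParams.error);
            } else {
                resolve(responseParams.result);
            }
        };
        worker.addEventListener('message', onMessage);
        worker.postMessage({action, params}, transfer);
    });
}

// Usage sketch (hypothetical):
// invokeWorker('getDictionaryCounts', {dictionaryNames: [], getTotal: true}).then((counts) => console.log(counts));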
diff --git a/ext/js/language/sandbox/dictionary-data-util.js b/ext/js/language/sandbox/dictionary-data-util.js
index 1b71346a..a54b043b 100644
--- a/ext/js/language/sandbox/dictionary-data-util.js
+++ b/ext/js/language/sandbox/dictionary-data-util.js
@@ -17,6 +17,10 @@
*/
export class DictionaryDataUtil {
+ /**
+ * @param {import('dictionary').TermDictionaryEntry} dictionaryEntry
+ * @returns {import('dictionary-data-util').TagGroup[]}
+ */
static groupTermTags(dictionaryEntry) {
const {headwords} = dictionaryEntry;
const headwordCount = headwords.length;
@@ -27,8 +31,8 @@ export class DictionaryDataUtil {
const {tags} = headwords[i];
for (const tag of tags) {
if (uniqueCheck) {
- const {name, category, notes, dictionary} = tag;
- const key = this._createMapKey([name, category, notes, dictionary]);
+ const {name, category, content, dictionaries} = tag;
+ const key = this._createMapKey([name, category, content, dictionaries]);
const index = resultsIndexMap.get(key);
if (typeof index !== 'undefined') {
const existingItem = results[index];
@@ -45,11 +49,16 @@ export class DictionaryDataUtil {
return results;
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry} dictionaryEntry
+ * @returns {import('dictionary-data-util').DictionaryFrequency<import('dictionary-data-util').TermFrequency>[]}
+ */
static groupTermFrequencies(dictionaryEntry) {
- const {headwords, frequencies} = dictionaryEntry;
+ const {headwords, frequencies: sourceFrequencies} = dictionaryEntry;
+ /** @type {import('dictionary-data-util').TermFrequenciesMap1} */
const map1 = new Map();
- for (const {headwordIndex, dictionary, hasReading, frequency, displayValue} of frequencies) {
+ for (const {headwordIndex, dictionary, hasReading, frequency, displayValue} of sourceFrequencies) {
const {term, reading} = headwords[headwordIndex];
let map2 = map1.get(dictionary);
@@ -68,12 +77,30 @@ export class DictionaryDataUtil {
frequencyData.values.set(this._createMapKey([frequency, displayValue]), {frequency, displayValue});
}
- return this._createFrequencyGroupsFromMap(map1);
+
+ const results = [];
+ for (const [dictionary, map2] of map1.entries()) {
+ const frequencies = [];
+ for (const {term, reading, values} of map2.values()) {
+ frequencies.push({
+ term,
+ reading,
+ values: [...values.values()]
+ });
+ }
+ results.push({dictionary, frequencies});
+ }
+ return results;
}
- static groupKanjiFrequencies(frequencies) {
+ /**
+ * @param {import('dictionary').KanjiFrequency[]} sourceFrequencies
+ * @returns {import('dictionary-data-util').DictionaryFrequency<import('dictionary-data-util').KanjiFrequency>[]}
+ */
+ static groupKanjiFrequencies(sourceFrequencies) {
+ /** @type {import('dictionary-data-util').KanjiFrequenciesMap1} */
const map1 = new Map();
- for (const {dictionary, character, frequency, displayValue} of frequencies) {
+ for (const {dictionary, character, frequency, displayValue} of sourceFrequencies) {
let map2 = map1.get(dictionary);
if (typeof map2 === 'undefined') {
map2 = new Map();
@@ -88,9 +115,25 @@ export class DictionaryDataUtil {
frequencyData.values.set(this._createMapKey([frequency, displayValue]), {frequency, displayValue});
}
- return this._createFrequencyGroupsFromMap(map1);
+
+ const results = [];
+ for (const [dictionary, map2] of map1.entries()) {
+ const frequencies = [];
+ for (const {character, values} of map2.values()) {
+ frequencies.push({
+ character,
+ values: [...values.values()]
+ });
+ }
+ results.push({dictionary, frequencies});
+ }
+ return results;
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry} dictionaryEntry
+ * @returns {import('dictionary-data-util').DictionaryGroupedPronunciations[]}
+ */
static getGroupedPronunciations(dictionaryEntry) {
const {headwords, pronunciations} = dictionaryEntry;
@@ -101,6 +144,7 @@ export class DictionaryDataUtil {
allReadings.add(reading);
}
+ /** @type {Map<string, import('dictionary-data-util').GroupedPronunciationInternal[]>} */
const groupedPronunciationsMap = new Map();
for (const {headwordIndex, dictionary, pitches} of pronunciations) {
const {term, reading} = headwords[headwordIndex];
@@ -118,9 +162,7 @@ export class DictionaryDataUtil {
position,
nasalPositions,
devoicePositions,
- tags,
- exclusiveTerms: [],
- exclusiveReadings: []
+ tags
};
dictionaryGroupedPronunciationList.push(groupedPronunciation);
}
@@ -128,27 +170,39 @@ export class DictionaryDataUtil {
}
}
+ /** @type {import('dictionary-data-util').DictionaryGroupedPronunciations[]} */
+ const results2 = [];
const multipleReadings = (allReadings.size > 1);
- for (const dictionaryGroupedPronunciationList of groupedPronunciationsMap.values()) {
+ for (const [dictionary, dictionaryGroupedPronunciationList] of groupedPronunciationsMap.entries()) {
+ /** @type {import('dictionary-data-util').GroupedPronunciation[]} */
+ const pronunciations2 = [];
for (const groupedPronunciation of dictionaryGroupedPronunciationList) {
- const {terms, reading, exclusiveTerms, exclusiveReadings} = groupedPronunciation;
- if (!this._areSetsEqual(terms, allTerms)) {
- exclusiveTerms.push(...this._getSetIntersection(terms, allTerms));
- }
+ const {terms, reading, position, nasalPositions, devoicePositions, tags} = groupedPronunciation;
+ const exclusiveTerms = !this._areSetsEqual(terms, allTerms) ? this._getSetIntersection(terms, allTerms) : [];
+ const exclusiveReadings = [];
if (multipleReadings) {
exclusiveReadings.push(reading);
}
- groupedPronunciation.terms = [...terms];
+ pronunciations2.push({
+ terms: [...terms],
+ reading,
+ position,
+ nasalPositions,
+ devoicePositions,
+ tags,
+ exclusiveTerms,
+ exclusiveReadings
+ });
}
- }
-
- const results2 = [];
- for (const [dictionary, pronunciations2] of groupedPronunciationsMap.entries()) {
results2.push({dictionary, pronunciations: pronunciations2});
}
return results2;
}
+ /**
+ * @param {import('dictionary').Tag[]|import('anki-templates').Tag[]} termTags
+ * @returns {import('dictionary-data-util').TermFrequencyType}
+ */
static getTermFrequency(termTags) {
let totalScore = 0;
for (const {score} of termTags) {
@@ -163,10 +217,19 @@ export class DictionaryDataUtil {
}
}
+ /**
+ * @param {import('dictionary').TermHeadword[]} headwords
+ * @param {number[]} headwordIndices
+ * @param {Set<string>} allTermsSet
+ * @param {Set<string>} allReadingsSet
+ * @returns {string[]}
+ */
static getDisambiguations(headwords, headwordIndices, allTermsSet, allReadingsSet) {
if (allTermsSet.size <= 1 && allReadingsSet.size <= 1) { return []; }
+ /** @type {Set<string>} */
const terms = new Set();
+ /** @type {Set<string>} */
const readings = new Set();
for (const headwordIndex of headwordIndices) {
const {term, reading} = headwords[headwordIndex];
@@ -174,6 +237,7 @@ export class DictionaryDataUtil {
readings.add(reading);
}
+ /** @type {string[]} */
const disambiguations = [];
const addTerms = !this._areSetsEqual(terms, allTermsSet);
const addReadings = !this._areSetsEqual(readings, allReadingsSet);
@@ -191,6 +255,10 @@ export class DictionaryDataUtil {
return disambiguations;
}
+ /**
+ * @param {string[]} wordClasses
+ * @returns {boolean}
+ */
static isNonNounVerbOrAdjective(wordClasses) {
let isVerbOrAdjective = false;
let isSuruVerb = false;
@@ -218,19 +286,15 @@ export class DictionaryDataUtil {
// Private
- static _createFrequencyGroupsFromMap(map) {
- const results = [];
- for (const [dictionary, map2] of map.entries()) {
- const frequencies = [];
- for (const frequencyData of map2.values()) {
- frequencyData.values = [...frequencyData.values.values()];
- frequencies.push(frequencyData);
- }
- results.push({dictionary, frequencies});
- }
- return results;
- }
-
+ /**
+ * @param {string} reading
+ * @param {number} position
+ * @param {number[]} nasalPositions
+ * @param {number[]} devoicePositions
+ * @param {import('dictionary').Tag[]} tags
+ * @param {import('dictionary-data-util').GroupedPronunciationInternal[]} groupedPronunciationList
+ * @returns {?import('dictionary-data-util').GroupedPronunciationInternal}
+ */
static _findExistingGroupedPronunciation(reading, position, nasalPositions, devoicePositions, tags, groupedPronunciationList) {
for (const pitchInfo of groupedPronunciationList) {
if (
@@ -246,6 +310,12 @@ export class DictionaryDataUtil {
return null;
}
+ /**
+ * @template [T=unknown]
+ * @param {T[]} array1
+ * @param {T[]} array2
+ * @returns {boolean}
+ */
static _areArraysEqual(array1, array2) {
const ii = array1.length;
if (ii !== array2.length) { return false; }
@@ -255,6 +325,11 @@ export class DictionaryDataUtil {
return true;
}
+ /**
+ * @param {import('dictionary').Tag[]} tagList1
+ * @param {import('dictionary').Tag[]} tagList2
+ * @returns {boolean}
+ */
static _areTagListsEqual(tagList1, tagList2) {
const ii = tagList1.length;
if (tagList2.length !== ii) { return false; }
@@ -262,7 +337,7 @@ export class DictionaryDataUtil {
for (let i = 0; i < ii; ++i) {
const tag1 = tagList1[i];
const tag2 = tagList2[i];
- if (tag1.name !== tag2.name || tag1.dictionary !== tag2.dictionary) {
+ if (tag1.name !== tag2.name || !this._areArraysEqual(tag1.dictionaries, tag2.dictionaries)) {
return false;
}
}
@@ -270,6 +345,12 @@ export class DictionaryDataUtil {
return true;
}
+ /**
+ * @template [T=unknown]
+ * @param {Set<T>} set1
+ * @param {Set<T>} set2
+ * @returns {boolean}
+ */
static _areSetsEqual(set1, set2) {
if (set1.size !== set2.size) {
return false;
@@ -284,6 +365,12 @@ export class DictionaryDataUtil {
return true;
}
+ /**
+ * @template [T=unknown]
+ * @param {Set<T>} set1
+ * @param {Set<T>} set2
+ * @returns {T[]}
+ */
static _getSetIntersection(set1, set2) {
const result = [];
for (const value of set1) {
@@ -294,6 +381,10 @@ export class DictionaryDataUtil {
return result;
}
+ /**
+ * @param {unknown[]} array
+ * @returns {string}
+ */
static _createMapKey(array) {
return JSON.stringify(array);
}
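As a side note on the helper above: _createMapKey relies on JSON.stringify producing identical strings for structurally equal arrays of primitives (including nested arrays), which is what lets the Map lookups in groupTermTags and the frequency grouping treat equal-by-value tuples as the same key. A standalone illustration, not part of the class:

const keyA = JSON.stringify(['news', 'frequent', 'tag content', ['Dictionary A']]);
const keyB = JSON.stringify(['news', 'frequent', 'tag content', ['Dictionary A']]);
console.log(keyA === keyB); // true: structurally equal inputs produce the same map key
console.log(new Map([[keyA, 1]]).has(keyB)); // true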
diff --git a/ext/js/language/sandbox/japanese-util.js b/ext/js/language/sandbox/japanese-util.js
index 316b1c2e..f7f20b3b 100644
--- a/ext/js/language/sandbox/japanese-util.js
+++ b/ext/js/language/sandbox/japanese-util.js
@@ -16,710 +16,877 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-export const JapaneseUtil = (() => {
- const HIRAGANA_SMALL_TSU_CODE_POINT = 0x3063;
- const KATAKANA_SMALL_TSU_CODE_POINT = 0x30c3;
- const KATAKANA_SMALL_KA_CODE_POINT = 0x30f5;
- const KATAKANA_SMALL_KE_CODE_POINT = 0x30f6;
- const KANA_PROLONGED_SOUND_MARK_CODE_POINT = 0x30fc;
-
- const HIRAGANA_RANGE = [0x3040, 0x309f];
- const KATAKANA_RANGE = [0x30a0, 0x30ff];
-
- const HIRAGANA_CONVERSION_RANGE = [0x3041, 0x3096];
- const KATAKANA_CONVERSION_RANGE = [0x30a1, 0x30f6];
-
- const KANA_RANGES = [HIRAGANA_RANGE, KATAKANA_RANGE];
-
- const CJK_UNIFIED_IDEOGRAPHS_RANGE = [0x4e00, 0x9fff];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A_RANGE = [0x3400, 0x4dbf];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B_RANGE = [0x20000, 0x2a6df];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C_RANGE = [0x2a700, 0x2b73f];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D_RANGE = [0x2b740, 0x2b81f];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E_RANGE = [0x2b820, 0x2ceaf];
- const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_F_RANGE = [0x2ceb0, 0x2ebef];
- const CJK_COMPATIBILITY_IDEOGRAPHS_RANGE = [0xf900, 0xfaff];
- const CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT_RANGE = [0x2f800, 0x2fa1f];
- const CJK_IDEOGRAPH_RANGES = [
- CJK_UNIFIED_IDEOGRAPHS_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E_RANGE,
- CJK_UNIFIED_IDEOGRAPHS_EXTENSION_F_RANGE,
- CJK_COMPATIBILITY_IDEOGRAPHS_RANGE,
- CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT_RANGE
- ];
-
- // Japanese character ranges, roughly ordered in order of expected frequency
- const JAPANESE_RANGES = [
- HIRAGANA_RANGE,
- KATAKANA_RANGE,
-
- ...CJK_IDEOGRAPH_RANGES,
-
- [0xff66, 0xff9f], // Halfwidth katakana
-
- [0x30fb, 0x30fc], // Katakana punctuation
- [0xff61, 0xff65], // Kana punctuation
- [0x3000, 0x303f], // CJK punctuation
-
- [0xff10, 0xff19], // Fullwidth numbers
- [0xff21, 0xff3a], // Fullwidth upper case Latin letters
- [0xff41, 0xff5a], // Fullwidth lower case Latin letters
-
- [0xff01, 0xff0f], // Fullwidth punctuation 1
- [0xff1a, 0xff1f], // Fullwidth punctuation 2
- [0xff3b, 0xff3f], // Fullwidth punctuation 3
- [0xff5b, 0xff60], // Fullwidth punctuation 4
- [0xffe0, 0xffee] // Currency markers
- ];
-
- const SMALL_KANA_SET = new Set(Array.from('ぁぃぅぇぉゃゅょゎァィゥェォャュョヮ'));
-
- const HALFWIDTH_KATAKANA_MAPPING = new Map([
- ['ヲ', 'ヲヺ-'],
- ['ァ', 'ァ--'],
- ['ィ', 'ィ--'],
- ['ゥ', 'ゥ--'],
- ['ェ', 'ェ--'],
- ['ォ', 'ォ--'],
- ['ャ', 'ャ--'],
- ['ュ', 'ュ--'],
- ['ョ', 'ョ--'],
- ['ッ', 'ッ--'],
- ['ー', 'ー--'],
- ['ア', 'ア--'],
- ['イ', 'イ--'],
- ['ウ', 'ウヴ-'],
- ['エ', 'エ--'],
- ['オ', 'オ--'],
- ['カ', 'カガ-'],
- ['キ', 'キギ-'],
- ['ク', 'クグ-'],
- ['ケ', 'ケゲ-'],
- ['コ', 'コゴ-'],
- ['サ', 'サザ-'],
- ['シ', 'シジ-'],
- ['ス', 'スズ-'],
- ['セ', 'セゼ-'],
- ['ソ', 'ソゾ-'],
- ['タ', 'タダ-'],
- ['チ', 'チヂ-'],
- ['ツ', 'ツヅ-'],
- ['テ', 'テデ-'],
- ['ト', 'トド-'],
- ['ナ', 'ナ--'],
- ['ニ', 'ニ--'],
- ['ヌ', 'ヌ--'],
- ['ネ', 'ネ--'],
- ['ノ', 'ノ--'],
- ['ハ', 'ハバパ'],
- ['ヒ', 'ヒビピ'],
- ['フ', 'フブプ'],
- ['ヘ', 'ヘベペ'],
- ['ホ', 'ホボポ'],
- ['マ', 'マ--'],
- ['ミ', 'ミ--'],
- ['ム', 'ム--'],
- ['メ', 'メ--'],
- ['モ', 'モ--'],
- ['ヤ', 'ヤ--'],
- ['ユ', 'ユ--'],
- ['ヨ', 'ヨ--'],
- ['ラ', 'ラ--'],
- ['リ', 'リ--'],
- ['ル', 'ル--'],
- ['レ', 'レ--'],
- ['ロ', 'ロ--'],
- ['ワ', 'ワ--'],
- ['ン', 'ン--']
- ]);
-
- const VOWEL_TO_KANA_MAPPING = new Map([
- ['a', 'ぁあかがさざただなはばぱまゃやらゎわヵァアカガサザタダナハバパマャヤラヮワヵヷ'],
- ['i', 'ぃいきぎしじちぢにひびぴみりゐィイキギシジチヂニヒビピミリヰヸ'],
- ['u', 'ぅうくぐすずっつづぬふぶぷむゅゆるゥウクグスズッツヅヌフブプムュユルヴ'],
- ['e', 'ぇえけげせぜてでねへべぺめれゑヶェエケゲセゼテデネヘベペメレヱヶヹ'],
- ['o', 'ぉおこごそぞとどのほぼぽもょよろをォオコゴソゾトドノホボポモョヨロヲヺ'],
- ['', 'のノ']
- ]);
-
- const KANA_TO_VOWEL_MAPPING = (() => {
- const map = new Map();
- for (const [vowel, characters] of VOWEL_TO_KANA_MAPPING) {
- for (const character of characters) {
- map.set(character, vowel);
- }
- }
- return map;
- })();
-
- const DIACRITIC_MAPPING = (() => {
- const kana = 'うゔ-かが-きぎ-くぐ-けげ-こご-さざ-しじ-すず-せぜ-そぞ-ただ-ちぢ-つづ-てで-とど-はばぱひびぴふぶぷへべぺほぼぽワヷ-ヰヸ-ウヴ-ヱヹ-ヲヺ-カガ-キギ-クグ-ケゲ-コゴ-サザ-シジ-スズ-セゼ-ソゾ-タダ-チヂ-ツヅ-テデ-トド-ハバパヒビピフブプヘベペホボポ';
- const map = new Map();
- for (let i = 0, ii = kana.length; i < ii; i += 3) {
- const character = kana[i];
- const dakuten = kana[i + 1];
- const handakuten = kana[i + 2];
- map.set(dakuten, {character, type: 'dakuten'});
- if (handakuten !== '-') {
- map.set(handakuten, {character, type: 'handakuten'});
- }
+const HIRAGANA_SMALL_TSU_CODE_POINT = 0x3063;
+const KATAKANA_SMALL_TSU_CODE_POINT = 0x30c3;
+const KATAKANA_SMALL_KA_CODE_POINT = 0x30f5;
+const KATAKANA_SMALL_KE_CODE_POINT = 0x30f6;
+const KANA_PROLONGED_SOUND_MARK_CODE_POINT = 0x30fc;
+
+/** @type {import('japanese-util').CodepointRange} */
+const HIRAGANA_RANGE = [0x3040, 0x309f];
+/** @type {import('japanese-util').CodepointRange} */
+const KATAKANA_RANGE = [0x30a0, 0x30ff];
+
+/** @type {import('japanese-util').CodepointRange} */
+const HIRAGANA_CONVERSION_RANGE = [0x3041, 0x3096];
+/** @type {import('japanese-util').CodepointRange} */
+const KATAKANA_CONVERSION_RANGE = [0x30a1, 0x30f6];
+
+/** @type {import('japanese-util').CodepointRange[]} */
+const KANA_RANGES = [HIRAGANA_RANGE, KATAKANA_RANGE];
+
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_RANGE = [0x4e00, 0x9fff];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A_RANGE = [0x3400, 0x4dbf];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B_RANGE = [0x20000, 0x2a6df];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C_RANGE = [0x2a700, 0x2b73f];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D_RANGE = [0x2b740, 0x2b81f];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E_RANGE = [0x2b820, 0x2ceaf];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_UNIFIED_IDEOGRAPHS_EXTENSION_F_RANGE = [0x2ceb0, 0x2ebef];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_COMPATIBILITY_IDEOGRAPHS_RANGE = [0xf900, 0xfaff];
+/** @type {import('japanese-util').CodepointRange} */
+const CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT_RANGE = [0x2f800, 0x2fa1f];
+/** @type {import('japanese-util').CodepointRange[]} */
+const CJK_IDEOGRAPH_RANGES = [
+ CJK_UNIFIED_IDEOGRAPHS_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E_RANGE,
+ CJK_UNIFIED_IDEOGRAPHS_EXTENSION_F_RANGE,
+ CJK_COMPATIBILITY_IDEOGRAPHS_RANGE,
+ CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT_RANGE
+];
+
+// Japanese character ranges, roughly ordered in order of expected frequency
+/** @type {import('japanese-util').CodepointRange[]} */
+const JAPANESE_RANGES = [
+ HIRAGANA_RANGE,
+ KATAKANA_RANGE,
+
+ ...CJK_IDEOGRAPH_RANGES,
+
+ [0xff66, 0xff9f], // Halfwidth katakana
+
+ [0x30fb, 0x30fc], // Katakana punctuation
+ [0xff61, 0xff65], // Kana punctuation
+ [0x3000, 0x303f], // CJK punctuation
+
+ [0xff10, 0xff19], // Fullwidth numbers
+ [0xff21, 0xff3a], // Fullwidth upper case Latin letters
+ [0xff41, 0xff5a], // Fullwidth lower case Latin letters
+
+ [0xff01, 0xff0f], // Fullwidth punctuation 1
+ [0xff1a, 0xff1f], // Fullwidth punctuation 2
+ [0xff3b, 0xff3f], // Fullwidth punctuation 3
+ [0xff5b, 0xff60], // Fullwidth punctuation 4
+ [0xffe0, 0xffee] // Currency markers
+];
+
+const SMALL_KANA_SET = new Set(Array.from('ぁぃぅぇぉゃゅょゎァィゥェォャュョヮ'));
+
+const HALFWIDTH_KATAKANA_MAPPING = new Map([
+ ['ヲ', 'ヲヺ-'],
+ ['ァ', 'ァ--'],
+ ['ィ', 'ィ--'],
+ ['ゥ', 'ゥ--'],
+ ['ェ', 'ェ--'],
+ ['ォ', 'ォ--'],
+ ['ャ', 'ャ--'],
+ ['ュ', 'ュ--'],
+ ['ョ', 'ョ--'],
+ ['ッ', 'ッ--'],
+ ['ー', 'ー--'],
+ ['ア', 'ア--'],
+ ['イ', 'イ--'],
+ ['ウ', 'ウヴ-'],
+ ['エ', 'エ--'],
+ ['オ', 'オ--'],
+ ['カ', 'カガ-'],
+ ['キ', 'キギ-'],
+ ['ク', 'クグ-'],
+ ['ケ', 'ケゲ-'],
+ ['コ', 'コゴ-'],
+ ['サ', 'サザ-'],
+ ['シ', 'シジ-'],
+ ['ス', 'スズ-'],
+ ['セ', 'セゼ-'],
+ ['ソ', 'ソゾ-'],
+ ['タ', 'タダ-'],
+ ['チ', 'チヂ-'],
+ ['ツ', 'ツヅ-'],
+ ['テ', 'テデ-'],
+ ['ト', 'トド-'],
+ ['ナ', 'ナ--'],
+ ['ニ', 'ニ--'],
+ ['ヌ', 'ヌ--'],
+ ['ネ', 'ネ--'],
+ ['ノ', 'ノ--'],
+ ['ハ', 'ハバパ'],
+ ['ヒ', 'ヒビピ'],
+ ['フ', 'フブプ'],
+ ['ヘ', 'ヘベペ'],
+ ['ホ', 'ホボポ'],
+ ['マ', 'マ--'],
+ ['ミ', 'ミ--'],
+ ['ム', 'ム--'],
+ ['メ', 'メ--'],
+ ['モ', 'モ--'],
+ ['ヤ', 'ヤ--'],
+ ['ユ', 'ユ--'],
+ ['ヨ', 'ヨ--'],
+ ['ラ', 'ラ--'],
+ ['リ', 'リ--'],
+ ['ル', 'ル--'],
+ ['レ', 'レ--'],
+ ['ロ', 'ロ--'],
+ ['ワ', 'ワ--'],
+ ['ン', 'ン--']
+]);
+
+const VOWEL_TO_KANA_MAPPING = new Map([
+ ['a', 'ぁあかがさざただなはばぱまゃやらゎわヵァアカガサザタダナハバパマャヤラヮワヵヷ'],
+ ['i', 'ぃいきぎしじちぢにひびぴみりゐィイキギシジチヂニヒビピミリヰヸ'],
+ ['u', 'ぅうくぐすずっつづぬふぶぷむゅゆるゥウクグスズッツヅヌフブプムュユルヴ'],
+ ['e', 'ぇえけげせぜてでねへべぺめれゑヶェエケゲセゼテデネヘベペメレヱヶヹ'],
+ ['o', 'ぉおこごそぞとどのほぼぽもょよろをォオコゴソゾトドノホボポモョヨロヲヺ'],
+ ['', 'のノ']
+]);
+
+const KANA_TO_VOWEL_MAPPING = (() => {
+ /** @type {Map<string, string>} */
+ const map = new Map();
+ for (const [vowel, characters] of VOWEL_TO_KANA_MAPPING) {
+ for (const character of characters) {
+ map.set(character, vowel);
}
- return map;
- })();
-
-
- function isCodePointInRange(codePoint, [min, max]) {
- return (codePoint >= min && codePoint <= max);
}
+ return map;
+})();
- function isCodePointInRanges(codePoint, ranges) {
- for (const [min, max] of ranges) {
- if (codePoint >= min && codePoint <= max) {
- return true;
- }
+const DIACRITIC_MAPPING = (() => {
+ const kana = 'うゔ-かが-きぎ-くぐ-けげ-こご-さざ-しじ-すず-せぜ-そぞ-ただ-ちぢ-つづ-てで-とど-はばぱひびぴふぶぷへべぺほぼぽワヷ-ヰヸ-ウヴ-ヱヹ-ヲヺ-カガ-キギ-クグ-ケゲ-コゴ-サザ-シジ-スズ-セゼ-ソゾ-タダ-チヂ-ツヅ-テデ-トド-ハバパヒビピフブプヘベペホボポ';
+ /** @type {Map<string, {character: string, type: import('japanese-util').DiacriticType}>} */
+ const map = new Map();
+ for (let i = 0, ii = kana.length; i < ii; i += 3) {
+ const character = kana[i];
+ const dakuten = kana[i + 1];
+ const handakuten = kana[i + 2];
+ map.set(dakuten, {character, type: 'dakuten'});
+ if (handakuten !== '-') {
+ map.set(handakuten, {character, type: 'handakuten'});
}
- return false;
}
+ return map;
+})();
+
- function getProlongedHiragana(previousCharacter) {
- switch (KANA_TO_VOWEL_MAPPING.get(previousCharacter)) {
- case 'a': return 'あ';
- case 'i': return 'い';
- case 'u': return 'う';
- case 'e': return 'え';
- case 'o': return 'う';
- default: return null;
+/**
+ * @param {number} codePoint
+ * @param {import('japanese-util').CodepointRange} range
+ * @returns {boolean}
+ */
+// eslint-disable-next-line no-implicit-globals
+function isCodePointInRange(codePoint, [min, max]) {
+ return (codePoint >= min && codePoint <= max);
+}
+
+/**
+ * @param {number} codePoint
+ * @param {import('japanese-util').CodepointRange[]} ranges
+ * @returns {boolean}
+ */
+// eslint-disable-next-line no-implicit-globals
+function isCodePointInRanges(codePoint, ranges) {
+ for (const [min, max] of ranges) {
+ if (codePoint >= min && codePoint <= max) {
+ return true;
}
}
+ return false;
+}
+/**
+ * @param {string} previousCharacter
+ * @returns {?string}
+ */
+// eslint-disable-next-line no-implicit-globals
+function getProlongedHiragana(previousCharacter) {
+ switch (KANA_TO_VOWEL_MAPPING.get(previousCharacter)) {
+ case 'a': return 'あ';
+ case 'i': return 'い';
+ case 'u': return 'う';
+ case 'e': return 'え';
+ case 'o': return 'う';
+ default: return null;
+ }
+}
- // eslint-disable-next-line no-shadow
- class JapaneseUtil {
- constructor(wanakana=null) {
- this._wanakana = wanakana;
- }
- // Character code testing functions
+export class JapaneseUtil {
+ /**
+ * @param {?import('wanakana')} wanakana
+ */
+ constructor(wanakana=null) {
+ /** @type {?import('wanakana')} */
+ this._wanakana = wanakana;
+ }
- isCodePointKanji(codePoint) {
- return isCodePointInRanges(codePoint, CJK_IDEOGRAPH_RANGES);
- }
+ // Character code testing functions
- isCodePointKana(codePoint) {
- return isCodePointInRanges(codePoint, KANA_RANGES);
- }
+ /**
+ * @param {number} codePoint
+ * @returns {boolean}
+ */
+ isCodePointKanji(codePoint) {
+ return isCodePointInRanges(codePoint, CJK_IDEOGRAPH_RANGES);
+ }
- isCodePointJapanese(codePoint) {
- return isCodePointInRanges(codePoint, JAPANESE_RANGES);
- }
+ /**
+ * @param {number} codePoint
+ * @returns {boolean}
+ */
+ isCodePointKana(codePoint) {
+ return isCodePointInRanges(codePoint, KANA_RANGES);
+ }
- // String testing functions
+ /**
+ * @param {number} codePoint
+ * @returns {boolean}
+ */
+ isCodePointJapanese(codePoint) {
+ return isCodePointInRanges(codePoint, JAPANESE_RANGES);
+ }
- isStringEntirelyKana(str) {
- if (str.length === 0) { return false; }
- for (const c of str) {
- if (!isCodePointInRanges(c.codePointAt(0), KANA_RANGES)) {
- return false;
- }
+ // String testing functions
+
+ /**
+ * @param {string} str
+ * @returns {boolean}
+ */
+ isStringEntirelyKana(str) {
+ if (str.length === 0) { return false; }
+ for (const c of str) {
+ if (!isCodePointInRanges(/** @type {number} */ (c.codePointAt(0)), KANA_RANGES)) {
+ return false;
}
- return true;
}
+ return true;
+ }
- isStringPartiallyJapanese(str) {
- if (str.length === 0) { return false; }
- for (const c of str) {
- if (isCodePointInRanges(c.codePointAt(0), JAPANESE_RANGES)) {
- return true;
- }
+ /**
+ * @param {string} str
+ * @returns {boolean}
+ */
+ isStringPartiallyJapanese(str) {
+ if (str.length === 0) { return false; }
+ for (const c of str) {
+ if (isCodePointInRanges(/** @type {number} */ (c.codePointAt(0)), JAPANESE_RANGES)) {
+ return true;
}
- return false;
}
+ return false;
+ }
- // Mora functions
+ // Mora functions
- isMoraPitchHigh(moraIndex, pitchAccentDownstepPosition) {
- switch (pitchAccentDownstepPosition) {
- case 0: return (moraIndex > 0);
- case 1: return (moraIndex < 1);
- default: return (moraIndex > 0 && moraIndex < pitchAccentDownstepPosition);
- }
+ /**
+ * @param {number} moraIndex
+ * @param {number} pitchAccentDownstepPosition
+ * @returns {boolean}
+ */
+ isMoraPitchHigh(moraIndex, pitchAccentDownstepPosition) {
+ switch (pitchAccentDownstepPosition) {
+ case 0: return (moraIndex > 0);
+ case 1: return (moraIndex < 1);
+ default: return (moraIndex > 0 && moraIndex < pitchAccentDownstepPosition);
}
+ }
- getPitchCategory(text, pitchAccentDownstepPosition, isVerbOrAdjective) {
- if (pitchAccentDownstepPosition === 0) {
- return 'heiban';
- }
- if (isVerbOrAdjective) {
- return pitchAccentDownstepPosition > 0 ? 'kifuku' : null;
- }
- if (pitchAccentDownstepPosition === 1) {
- return 'atamadaka';
- }
- if (pitchAccentDownstepPosition > 1) {
- return pitchAccentDownstepPosition >= this.getKanaMoraCount(text) ? 'odaka' : 'nakadaka';
- }
- return null;
+ /**
+ * @param {string} text
+ * @param {number} pitchAccentDownstepPosition
+ * @param {boolean} isVerbOrAdjective
+ * @returns {?import('japanese-util').PitchCategory}
+ */
+ getPitchCategory(text, pitchAccentDownstepPosition, isVerbOrAdjective) {
+ if (pitchAccentDownstepPosition === 0) {
+ return 'heiban';
+ }
+ if (isVerbOrAdjective) {
+ return pitchAccentDownstepPosition > 0 ? 'kifuku' : null;
+ }
+ if (pitchAccentDownstepPosition === 1) {
+ return 'atamadaka';
}
+ if (pitchAccentDownstepPosition > 1) {
+ return pitchAccentDownstepPosition >= this.getKanaMoraCount(text) ? 'odaka' : 'nakadaka';
+ }
+ return null;
+ }
- getKanaMorae(text) {
- const morae = [];
- let i;
- for (const c of text) {
- if (SMALL_KANA_SET.has(c) && (i = morae.length) > 0) {
- morae[i - 1] += c;
- } else {
- morae.push(c);
- }
+ /**
+ * @param {string} text
+ * @returns {string[]}
+ */
+ getKanaMorae(text) {
+ const morae = [];
+ let i;
+ for (const c of text) {
+ if (SMALL_KANA_SET.has(c) && (i = morae.length) > 0) {
+ morae[i - 1] += c;
+ } else {
+ morae.push(c);
}
- return morae;
}
+ return morae;
+ }
- getKanaMoraCount(text) {
- let moraCount = 0;
- for (const c of text) {
- if (!(SMALL_KANA_SET.has(c) && moraCount > 0)) {
- ++moraCount;
- }
+ /**
+ * @param {string} text
+ * @returns {number}
+ */
+ getKanaMoraCount(text) {
+ let moraCount = 0;
+ for (const c of text) {
+ if (!(SMALL_KANA_SET.has(c) && moraCount > 0)) {
+ ++moraCount;
}
- return moraCount;
}
+ return moraCount;
+ }
- // Conversion functions
+ // Conversion functions
- convertToKana(text) {
- return this._getWanakana().toKana(text);
- }
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
+ convertToKana(text) {
+ return this._getWanakana().toKana(text);
+ }
- convertToKanaSupported() {
- return this._wanakana !== null;
- }
+ /**
+ * @returns {boolean}
+ */
+ convertToKanaSupported() {
+ return this._wanakana !== null;
+ }
- convertKatakanaToHiragana(text, keepProlongedSoundMarks=false) {
- let result = '';
- const offset = (HIRAGANA_CONVERSION_RANGE[0] - KATAKANA_CONVERSION_RANGE[0]);
- for (let char of text) {
- const codePoint = char.codePointAt(0);
- switch (codePoint) {
- case KATAKANA_SMALL_KA_CODE_POINT:
- case KATAKANA_SMALL_KE_CODE_POINT:
- // No change
- break;
- case KANA_PROLONGED_SOUND_MARK_CODE_POINT:
- if (!keepProlongedSoundMarks && result.length > 0) {
- const char2 = getProlongedHiragana(result[result.length - 1]);
- if (char2 !== null) { char = char2; }
- }
- break;
- default:
- if (isCodePointInRange(codePoint, KATAKANA_CONVERSION_RANGE)) {
- char = String.fromCodePoint(codePoint + offset);
- }
- break;
- }
- result += char;
+ /**
+ * @param {string} text
+ * @param {boolean} [keepProlongedSoundMarks]
+ * @returns {string}
+ */
+ convertKatakanaToHiragana(text, keepProlongedSoundMarks=false) {
+ let result = '';
+ const offset = (HIRAGANA_CONVERSION_RANGE[0] - KATAKANA_CONVERSION_RANGE[0]);
+ for (let char of text) {
+ const codePoint = /** @type {number} */ (char.codePointAt(0));
+ switch (codePoint) {
+ case KATAKANA_SMALL_KA_CODE_POINT:
+ case KATAKANA_SMALL_KE_CODE_POINT:
+ // No change
+ break;
+ case KANA_PROLONGED_SOUND_MARK_CODE_POINT:
+ if (!keepProlongedSoundMarks && result.length > 0) {
+ const char2 = getProlongedHiragana(result[result.length - 1]);
+ if (char2 !== null) { char = char2; }
+ }
+ break;
+ default:
+ if (isCodePointInRange(codePoint, KATAKANA_CONVERSION_RANGE)) {
+ char = String.fromCodePoint(codePoint + offset);
+ }
+ break;
}
- return result;
+ result += char;
}
+ return result;
+ }
- convertHiraganaToKatakana(text) {
- let result = '';
- const offset = (KATAKANA_CONVERSION_RANGE[0] - HIRAGANA_CONVERSION_RANGE[0]);
- for (let char of text) {
- const codePoint = char.codePointAt(0);
- if (isCodePointInRange(codePoint, HIRAGANA_CONVERSION_RANGE)) {
- char = String.fromCodePoint(codePoint + offset);
- }
- result += char;
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
+ convertHiraganaToKatakana(text) {
+ let result = '';
+ const offset = (KATAKANA_CONVERSION_RANGE[0] - HIRAGANA_CONVERSION_RANGE[0]);
+ for (let char of text) {
+ const codePoint = /** @type {number} */ (char.codePointAt(0));
+ if (isCodePointInRange(codePoint, HIRAGANA_CONVERSION_RANGE)) {
+ char = String.fromCodePoint(codePoint + offset);
}
- return result;
+ result += char;
}
+ return result;
+ }
- convertToRomaji(text) {
- const wanakana = this._getWanakana();
- return wanakana.toRomaji(text);
- }
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
+ convertToRomaji(text) {
+ const wanakana = this._getWanakana();
+ return wanakana.toRomaji(text);
+ }
- convertToRomajiSupported() {
- return this._wanakana !== null;
- }
+ /**
+ * @returns {boolean}
+ */
+ convertToRomajiSupported() {
+ return this._wanakana !== null;
+ }
- convertNumericToFullWidth(text) {
- let result = '';
- for (const char of text) {
- let c = char.codePointAt(0);
- if (c >= 0x30 && c <= 0x39) { // ['0', '9']
- c += 0xff10 - 0x30; // 0xff10 = '0' full width
- result += String.fromCodePoint(c);
- } else {
- result += char;
- }
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
+ convertNumericToFullWidth(text) {
+ let result = '';
+ for (const char of text) {
+ let c = /** @type {number} */ (char.codePointAt(0));
+ if (c >= 0x30 && c <= 0x39) { // ['0', '9']
+ c += 0xff10 - 0x30; // 0xff10 = '0' full width
+ result += String.fromCodePoint(c);
+ } else {
+ result += char;
}
- return result;
}
+ return result;
+ }
- convertHalfWidthKanaToFullWidth(text, sourceMap=null) {
- let result = '';
-
- // This function is safe to use charCodeAt instead of codePointAt, since all
- // the relevant characters are represented with a single UTF-16 character code.
- for (let i = 0, ii = text.length; i < ii; ++i) {
- const c = text[i];
- const mapping = HALFWIDTH_KATAKANA_MAPPING.get(c);
- if (typeof mapping !== 'string') {
- result += c;
- continue;
- }
-
- let index = 0;
- switch (text.charCodeAt(i + 1)) {
- case 0xff9e: // dakuten
- index = 1;
- break;
- case 0xff9f: // handakuten
- index = 2;
- break;
- }
-
- let c2 = mapping[index];
- if (index > 0) {
- if (c2 === '-') { // invalid
- index = 0;
- c2 = mapping[0];
- } else {
- ++i;
- }
- }
-
- if (sourceMap !== null && index > 0) {
- sourceMap.combine(result.length, 1);
- }
- result += c2;
+ /**
+ * @param {string} text
+ * @param {?TextSourceMap} [sourceMap]
+ * @returns {string}
+ */
+ convertHalfWidthKanaToFullWidth(text, sourceMap=null) {
+ let result = '';
+
+ // This function is safe to use charCodeAt instead of codePointAt, since all
+ // the relevant characters are represented with a single UTF-16 character code.
+ for (let i = 0, ii = text.length; i < ii; ++i) {
+ const c = text[i];
+ const mapping = HALFWIDTH_KATAKANA_MAPPING.get(c);
+ if (typeof mapping !== 'string') {
+ result += c;
+ continue;
}
- return result;
- }
+ let index = 0;
+ switch (text.charCodeAt(i + 1)) {
+ case 0xff9e: // dakuten
+ index = 1;
+ break;
+ case 0xff9f: // handakuten
+ index = 2;
+ break;
+ }
- convertAlphabeticToKana(text, sourceMap=null) {
- let part = '';
- let result = '';
-
- for (const char of text) {
- // Note: 0x61 is the character code for 'a'
- let c = char.codePointAt(0);
- if (c >= 0x41 && c <= 0x5a) { // ['A', 'Z']
- c += (0x61 - 0x41);
- } else if (c >= 0x61 && c <= 0x7a) { // ['a', 'z']
- // NOP; c += (0x61 - 0x61);
- } else if (c >= 0xff21 && c <= 0xff3a) { // ['A', 'Z'] fullwidth
- c += (0x61 - 0xff21);
- } else if (c >= 0xff41 && c <= 0xff5a) { // ['a', 'z'] fullwidth
- c += (0x61 - 0xff41);
- } else if (c === 0x2d || c === 0xff0d) { // '-' or fullwidth dash
- c = 0x2d; // '-'
+ let c2 = mapping[index];
+ if (index > 0) {
+ if (c2 === '-') { // invalid
+ index = 0;
+ c2 = mapping[0];
} else {
- if (part.length > 0) {
- result += this._convertAlphabeticPartToKana(part, sourceMap, result.length);
- part = '';
- }
- result += char;
- continue;
+ ++i;
}
- part += String.fromCodePoint(c);
}
- if (part.length > 0) {
- result += this._convertAlphabeticPartToKana(part, sourceMap, result.length);
+ if (sourceMap !== null && index > 0) {
+ sourceMap.combine(result.length, 1);
}
- return result;
+ result += c2;
}
- convertAlphabeticToKanaSupported() {
- return this._wanakana !== null;
+ return result;
+ }
+
+ /**
+ * @param {string} text
+ * @param {?TextSourceMap} sourceMap
+ * @returns {string}
+ */
+ convertAlphabeticToKana(text, sourceMap=null) {
+ let part = '';
+ let result = '';
+
+ for (const char of text) {
+ // Note: 0x61 is the character code for 'a'
+ let c = /** @type {number} */ (char.codePointAt(0));
+ if (c >= 0x41 && c <= 0x5a) { // ['A', 'Z']
+ c += (0x61 - 0x41);
+ } else if (c >= 0x61 && c <= 0x7a) { // ['a', 'z']
+ // NOP; c += (0x61 - 0x61);
+ } else if (c >= 0xff21 && c <= 0xff3a) { // ['A', 'Z'] fullwidth
+ c += (0x61 - 0xff21);
+ } else if (c >= 0xff41 && c <= 0xff5a) { // ['a', 'z'] fullwidth
+ c += (0x61 - 0xff41);
+ } else if (c === 0x2d || c === 0xff0d) { // '-' or fullwidth dash
+ c = 0x2d; // '-'
+ } else {
+ if (part.length > 0) {
+ result += this._convertAlphabeticPartToKana(part, sourceMap, result.length);
+ part = '';
+ }
+ result += char;
+ continue;
+ }
+ part += String.fromCodePoint(c);
}
- getKanaDiacriticInfo(character) {
- const info = DIACRITIC_MAPPING.get(character);
- return typeof info !== 'undefined' ? {character: info.character, type: info.type} : null;
+ if (part.length > 0) {
+ result += this._convertAlphabeticPartToKana(part, sourceMap, result.length);
}
+ return result;
+ }
- // Furigana distribution
+ /**
+ * @returns {boolean}
+ */
+ convertAlphabeticToKanaSupported() {
+ return this._wanakana !== null;
+ }
- distributeFurigana(term, reading) {
- if (reading === term) {
- // Same
- return [this._createFuriganaSegment(term, '')];
- }
+ /**
+ * @param {string} character
+ * @returns {?{character: string, type: import('japanese-util').DiacriticType}}
+ */
+ getKanaDiacriticInfo(character) {
+ const info = DIACRITIC_MAPPING.get(character);
+ return typeof info !== 'undefined' ? {character: info.character, type: info.type} : null;
+ }
- const groups = [];
- let groupPre = null;
- let isKanaPre = null;
- for (const c of term) {
- const codePoint = c.codePointAt(0);
- const isKana = this.isCodePointKana(codePoint);
- if (isKana === isKanaPre) {
- groupPre.text += c;
- } else {
- groupPre = {isKana, text: c, textNormalized: null};
- groups.push(groupPre);
- isKanaPre = isKana;
- }
- }
- for (const group of groups) {
- if (group.isKana) {
- group.textNormalized = this.convertKatakanaToHiragana(group.text);
- }
+ // Furigana distribution
+
+ /**
+ * @param {string} term
+ * @param {string} reading
+ * @returns {import('japanese-util').FuriganaSegment[]}
+ */
+ distributeFurigana(term, reading) {
+ if (reading === term) {
+ // Same
+ return [this._createFuriganaSegment(term, '')];
+ }
+
+ /** @type {import('japanese-util').FuriganaGroup[]} */
+ const groups = [];
+ /** @type {?import('japanese-util').FuriganaGroup} */
+ let groupPre = null;
+ let isKanaPre = null;
+ for (const c of term) {
+ const codePoint = /** @type {number} */ (c.codePointAt(0));
+ const isKana = this.isCodePointKana(codePoint);
+ if (isKana === isKanaPre) {
+ /** @type {import('japanese-util').FuriganaGroup} */ (groupPre).text += c;
+ } else {
+ groupPre = {isKana, text: c, textNormalized: null};
+ groups.push(groupPre);
+ isKanaPre = isKana;
}
-
- const readingNormalized = this.convertKatakanaToHiragana(reading);
- const segments = this._segmentizeFurigana(reading, readingNormalized, groups, 0);
- if (segments !== null) {
- return segments;
+ }
+ for (const group of groups) {
+ if (group.isKana) {
+ group.textNormalized = this.convertKatakanaToHiragana(group.text);
}
-
- // Fallback
- return [this._createFuriganaSegment(term, reading)];
}
- distributeFuriganaInflected(term, reading, source) {
- const termNormalized = this.convertKatakanaToHiragana(term);
- const readingNormalized = this.convertKatakanaToHiragana(reading);
- const sourceNormalized = this.convertKatakanaToHiragana(source);
-
- let mainText = term;
- let stemLength = this._getStemLength(termNormalized, sourceNormalized);
+ const readingNormalized = this.convertKatakanaToHiragana(reading);
+ const segments = this._segmentizeFurigana(reading, readingNormalized, groups, 0);
+ if (segments !== null) {
+ return segments;
+ }
- // Check if source is derived from the reading instead of the term
- const readingStemLength = this._getStemLength(readingNormalized, sourceNormalized);
- if (readingStemLength > 0 && readingStemLength >= stemLength) {
- mainText = reading;
- stemLength = readingStemLength;
- reading = `${source.substring(0, stemLength)}${reading.substring(stemLength)}`;
- }
+ // Fallback
+ return [this._createFuriganaSegment(term, reading)];
+ }
- const segments = [];
- if (stemLength > 0) {
- mainText = `${source.substring(0, stemLength)}${mainText.substring(stemLength)}`;
- const segments2 = this.distributeFurigana(mainText, reading);
- let consumed = 0;
- for (const segment of segments2) {
- const {text} = segment;
- const start = consumed;
- consumed += text.length;
- if (consumed < stemLength) {
- segments.push(segment);
- } else if (consumed === stemLength) {
- segments.push(segment);
- break;
- } else {
- if (start < stemLength) {
- segments.push(this._createFuriganaSegment(mainText.substring(start, stemLength), ''));
- }
- break;
+ /**
+ * @param {string} term
+ * @param {string} reading
+ * @param {string} source
+ * @returns {import('japanese-util').FuriganaSegment[]}
+ */
+ distributeFuriganaInflected(term, reading, source) {
+ const termNormalized = this.convertKatakanaToHiragana(term);
+ const readingNormalized = this.convertKatakanaToHiragana(reading);
+ const sourceNormalized = this.convertKatakanaToHiragana(source);
+
+ let mainText = term;
+ let stemLength = this._getStemLength(termNormalized, sourceNormalized);
+
+ // Check if source is derived from the reading instead of the term
+ const readingStemLength = this._getStemLength(readingNormalized, sourceNormalized);
+ if (readingStemLength > 0 && readingStemLength >= stemLength) {
+ mainText = reading;
+ stemLength = readingStemLength;
+ reading = `${source.substring(0, stemLength)}${reading.substring(stemLength)}`;
+ }
+
+ const segments = [];
+ if (stemLength > 0) {
+ mainText = `${source.substring(0, stemLength)}${mainText.substring(stemLength)}`;
+ const segments2 = this.distributeFurigana(mainText, reading);
+ let consumed = 0;
+ for (const segment of segments2) {
+ const {text} = segment;
+ const start = consumed;
+ consumed += text.length;
+ if (consumed < stemLength) {
+ segments.push(segment);
+ } else if (consumed === stemLength) {
+ segments.push(segment);
+ break;
+ } else {
+ if (start < stemLength) {
+ segments.push(this._createFuriganaSegment(mainText.substring(start, stemLength), ''));
}
+ break;
}
}
+ }
- if (stemLength < source.length) {
- const remainder = source.substring(stemLength);
- const segmentCount = segments.length;
- if (segmentCount > 0 && segments[segmentCount - 1].reading.length === 0) {
- // Append to the last segment if it has an empty reading
- segments[segmentCount - 1].text += remainder;
- } else {
- // Otherwise, create a new segment
- segments.push(this._createFuriganaSegment(remainder, ''));
- }
+ if (stemLength < source.length) {
+ const remainder = source.substring(stemLength);
+ const segmentCount = segments.length;
+ if (segmentCount > 0 && segments[segmentCount - 1].reading.length === 0) {
+ // Append to the last segment if it has an empty reading
+ segments[segmentCount - 1].text += remainder;
+ } else {
+ // Otherwise, create a new segment
+ segments.push(this._createFuriganaSegment(remainder, ''));
}
-
- return segments;
}
- // Miscellaneous
-
- collapseEmphaticSequences(text, fullCollapse, sourceMap=null) {
- let result = '';
- let collapseCodePoint = -1;
- const hasSourceMap = (sourceMap !== null);
- for (const char of text) {
- const c = char.codePointAt(0);
- if (
- c === HIRAGANA_SMALL_TSU_CODE_POINT ||
- c === KATAKANA_SMALL_TSU_CODE_POINT ||
- c === KANA_PROLONGED_SOUND_MARK_CODE_POINT
- ) {
- if (collapseCodePoint !== c) {
- collapseCodePoint = c;
- if (!fullCollapse) {
- result += char;
- continue;
- }
+ return segments;
+ }
+
+ // Miscellaneous
+
+ /**
+ * @param {string} text
+ * @param {boolean} fullCollapse
+ * @param {?TextSourceMap} [sourceMap]
+ * @returns {string}
+ */
+ collapseEmphaticSequences(text, fullCollapse, sourceMap=null) {
+ let result = '';
+ let collapseCodePoint = -1;
+ const hasSourceMap = (sourceMap !== null);
+ for (const char of text) {
+ const c = char.codePointAt(0);
+ if (
+ c === HIRAGANA_SMALL_TSU_CODE_POINT ||
+ c === KATAKANA_SMALL_TSU_CODE_POINT ||
+ c === KANA_PROLONGED_SOUND_MARK_CODE_POINT
+ ) {
+ if (collapseCodePoint !== c) {
+ collapseCodePoint = c;
+ if (!fullCollapse) {
+ result += char;
+ continue;
}
- } else {
- collapseCodePoint = -1;
- result += char;
- continue;
}
+ } else {
+ collapseCodePoint = -1;
+ result += char;
+ continue;
+ }
- if (hasSourceMap) {
- sourceMap.combine(Math.max(0, result.length - 1), 1);
- }
+ if (hasSourceMap) {
+ sourceMap.combine(Math.max(0, result.length - 1), 1);
}
- return result;
}
+ return result;
+ }
- // Private
-
- _createFuriganaSegment(text, reading) {
- return {text, reading};
- }
+ // Private
- _segmentizeFurigana(reading, readingNormalized, groups, groupsStart) {
- const groupCount = groups.length - groupsStart;
- if (groupCount <= 0) {
- return reading.length === 0 ? [] : null;
- }
+ /**
+ * @param {string} text
+ * @param {string} reading
+ * @returns {import('japanese-util').FuriganaSegment}
+ */
+ _createFuriganaSegment(text, reading) {
+ return {text, reading};
+ }
- const group = groups[groupsStart];
- const {isKana, text} = group;
- const textLength = text.length;
- if (isKana) {
- const {textNormalized} = group;
- if (readingNormalized.startsWith(textNormalized)) {
- const segments = this._segmentizeFurigana(
- reading.substring(textLength),
- readingNormalized.substring(textLength),
- groups,
- groupsStart + 1
- );
- if (segments !== null) {
- if (reading.startsWith(text)) {
- segments.unshift(this._createFuriganaSegment(text, ''));
- } else {
- segments.unshift(...this._getFuriganaKanaSegments(text, reading));
- }
- return segments;
+ /**
+ * @param {string} reading
+ * @param {string} readingNormalized
+ * @param {import('japanese-util').FuriganaGroup[]} groups
+ * @param {number} groupsStart
+ * @returns {?(import('japanese-util').FuriganaSegment[])}
+ */
+ _segmentizeFurigana(reading, readingNormalized, groups, groupsStart) {
+ const groupCount = groups.length - groupsStart;
+ if (groupCount <= 0) {
+ return reading.length === 0 ? [] : null;
+ }
+
+ const group = groups[groupsStart];
+ const {isKana, text} = group;
+ const textLength = text.length;
+ if (isKana) {
+ const {textNormalized} = group;
+ if (textNormalized !== null && readingNormalized.startsWith(textNormalized)) {
+ const segments = this._segmentizeFurigana(
+ reading.substring(textLength),
+ readingNormalized.substring(textLength),
+ groups,
+ groupsStart + 1
+ );
+ if (segments !== null) {
+ if (reading.startsWith(text)) {
+ segments.unshift(this._createFuriganaSegment(text, ''));
+ } else {
+ segments.unshift(...this._getFuriganaKanaSegments(text, reading));
}
+ return segments;
}
- return null;
- } else {
- let result = null;
- for (let i = reading.length; i >= textLength; --i) {
- const segments = this._segmentizeFurigana(
- reading.substring(i),
- readingNormalized.substring(i),
- groups,
- groupsStart + 1
- );
- if (segments !== null) {
- if (result !== null) {
- // More than one way to segmentize the tail; mark as ambiguous
- return null;
- }
- const segmentReading = reading.substring(0, i);
- segments.unshift(this._createFuriganaSegment(text, segmentReading));
- result = segments;
- }
- // There is only one way to segmentize the last non-kana group
- if (groupCount === 1) {
- break;
+ }
+ return null;
+ } else {
+ let result = null;
+ for (let i = reading.length; i >= textLength; --i) {
+ const segments = this._segmentizeFurigana(
+ reading.substring(i),
+ readingNormalized.substring(i),
+ groups,
+ groupsStart + 1
+ );
+ if (segments !== null) {
+ if (result !== null) {
+ // More than one way to segmentize the tail; mark as ambiguous
+ return null;
}
+ const segmentReading = reading.substring(0, i);
+ segments.unshift(this._createFuriganaSegment(text, segmentReading));
+ result = segments;
+ }
+ // There is only one way to segmentize the last non-kana group
+ if (groupCount === 1) {
+ break;
}
- return result;
- }
- }
-
- _getFuriganaKanaSegments(text, reading) {
- const textLength = text.length;
- const newSegments = [];
- let start = 0;
- let state = (reading[0] === text[0]);
- for (let i = 1; i < textLength; ++i) {
- const newState = (reading[i] === text[i]);
- if (state === newState) { continue; }
- newSegments.push(this._createFuriganaSegment(text.substring(start, i), state ? '' : reading.substring(start, i)));
- state = newState;
- start = i;
}
- newSegments.push(this._createFuriganaSegment(text.substring(start, textLength), state ? '' : reading.substring(start, textLength)));
- return newSegments;
+ return result;
}
+ }
- _getWanakana() {
- const wanakana = this._wanakana;
- if (wanakana === null) { throw new Error('Functions which use WanaKana are not supported in this context'); }
- return wanakana;
- }
+ /**
+ * @param {string} text
+ * @param {string} reading
+ * @returns {import('japanese-util').FuriganaSegment[]}
+ */
+ _getFuriganaKanaSegments(text, reading) {
+ const textLength = text.length;
+ const newSegments = [];
+ let start = 0;
+ let state = (reading[0] === text[0]);
+ for (let i = 1; i < textLength; ++i) {
+ const newState = (reading[i] === text[i]);
+ if (state === newState) { continue; }
+ newSegments.push(this._createFuriganaSegment(text.substring(start, i), state ? '' : reading.substring(start, i)));
+ state = newState;
+ start = i;
+ }
+ newSegments.push(this._createFuriganaSegment(text.substring(start, textLength), state ? '' : reading.substring(start, textLength)));
+ return newSegments;
+ }
- _convertAlphabeticPartToKana(text, sourceMap, sourceMapStart) {
- const wanakana = this._getWanakana();
- const result = wanakana.toHiragana(text);
-
- // Generate source mapping
- if (sourceMap !== null) {
- let i = 0;
- let resultPos = 0;
- const ii = text.length;
- while (i < ii) {
- // Find smallest matching substring
- let iNext = i + 1;
- let resultPosNext = result.length;
- while (iNext < ii) {
- const t = wanakana.toHiragana(text.substring(0, iNext));
- if (t === result.substring(0, t.length)) {
- resultPosNext = t.length;
- break;
- }
- ++iNext;
- }
+ /**
+ * @returns {import('wanakana')}
+ * @throws {Error}
+ */
+ _getWanakana() {
+ const wanakana = this._wanakana;
+ if (wanakana === null) { throw new Error('Functions which use WanaKana are not supported in this context'); }
+ return wanakana;
+ }
- // Merge characters
- const removals = iNext - i - 1;
- if (removals > 0) {
- sourceMap.combine(sourceMapStart, removals);
+ /**
+ * @param {string} text
+ * @param {?TextSourceMap} sourceMap
+ * @param {number} sourceMapStart
+ * @returns {string}
+ */
+ _convertAlphabeticPartToKana(text, sourceMap, sourceMapStart) {
+ const wanakana = this._getWanakana();
+ const result = wanakana.toHiragana(text);
+
+ // Generate source mapping
+ if (sourceMap !== null) {
+ let i = 0;
+ let resultPos = 0;
+ const ii = text.length;
+ while (i < ii) {
+ // Find smallest matching substring
+ let iNext = i + 1;
+ let resultPosNext = result.length;
+ while (iNext < ii) {
+ const t = wanakana.toHiragana(text.substring(0, iNext));
+ if (t === result.substring(0, t.length)) {
+ resultPosNext = t.length;
+ break;
}
- ++sourceMapStart;
+ ++iNext;
+ }
- // Empty elements
- const additions = resultPosNext - resultPos - 1;
- for (let j = 0; j < additions; ++j) {
- sourceMap.insert(sourceMapStart, 0);
- ++sourceMapStart;
- }
+ // Merge characters
+ const removals = iNext - i - 1;
+ if (removals > 0) {
+ sourceMap.combine(sourceMapStart, removals);
+ }
+ ++sourceMapStart;
- i = iNext;
- resultPos = resultPosNext;
+ // Empty elements
+ const additions = resultPosNext - resultPos - 1;
+ for (let j = 0; j < additions; ++j) {
+ sourceMap.insert(sourceMapStart, 0);
+ ++sourceMapStart;
}
- }
- return result;
+ i = iNext;
+ resultPos = resultPosNext;
+ }
}
- _getStemLength(text1, text2) {
- const minLength = Math.min(text1.length, text2.length);
- if (minLength === 0) { return 0; }
+ return result;
+ }
- let i = 0;
- while (true) {
- const char1 = text1.codePointAt(i);
- const char2 = text2.codePointAt(i);
- if (char1 !== char2) { break; }
- const charLength = String.fromCodePoint(char1).length;
- i += charLength;
- if (i >= minLength) {
- if (i > minLength) {
- i -= charLength; // Don't consume partial UTF16 surrogate characters
- }
- break;
+ /**
+ * @param {string} text1
+ * @param {string} text2
+ * @returns {number}
+ */
+ _getStemLength(text1, text2) {
+ const minLength = Math.min(text1.length, text2.length);
+ if (minLength === 0) { return 0; }
+
+ let i = 0;
+ while (true) {
+ const char1 = /** @type {number} */ (text1.codePointAt(i));
+ const char2 = /** @type {number} */ (text2.codePointAt(i));
+ if (char1 !== char2) { break; }
+ const charLength = String.fromCodePoint(char1).length;
+ i += charLength;
+ if (i >= minLength) {
+ if (i > minLength) {
+ i -= charLength; // Don't consume partial UTF16 surrogate characters
}
+ break;
}
- return i;
}
+ return i;
}
-
-
- return JapaneseUtil;
-})();
+}
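For reference, a minimal usage sketch of the now directly exported JapaneseUtil class; the import path and logged values are assumed for illustration, and wanakana-backed conversions such as convertToRomaji would additionally require passing the wanakana module to the constructor:

import {JapaneseUtil} from '/js/language/sandbox/japanese-util.js';

const jp = new JapaneseUtil(null); // without wanakana, kana/kanji tests and kana conversions still work
console.log(jp.isStringEntirelyKana('ひらがな'));      // true
console.log(jp.convertKatakanaToHiragana('カタカナ'));  // 'かたかな'
console.log(jp.getKanaMoraCount('きょう'));             // 2
console.log(jp.distributeFurigana('読む', 'よむ'));      // [{text: '読', reading: 'よ'}, {text: 'む', reading: ''}]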
diff --git a/ext/js/language/text-scanner.js b/ext/js/language/text-scanner.js
index ac7ef3d9..b4d9a642 100644
--- a/ext/js/language/text-scanner.js
+++ b/ext/js/language/text-scanner.js
@@ -16,11 +16,17 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-import {EventDispatcher, EventListenerCollection, clone, isObject, log, promiseTimeout} from '../core.js';
+import {EventDispatcher, EventListenerCollection, clone, log} from '../core.js';
import {DocumentUtil} from '../dom/document-util.js';
import {yomitan} from '../yomitan.js';
+/**
+ * @augments EventDispatcher<import('text-scanner').EventType>
+ */
export class TextScanner extends EventDispatcher {
+ /**
+ * @param {import('text-scanner').ConstructorDetails} details
+ */
constructor({
node,
getSearchContext,
@@ -32,67 +38,121 @@ export class TextScanner extends EventDispatcher {
searchOnClickOnly=false
}) {
super();
+ /** @type {HTMLElement|Window} */
this._node = node;
+ /** @type {import('text-scanner').GetSearchContextCallback} */
this._getSearchContext = getSearchContext;
+ /** @type {?(() => Element[])} */
this._ignoreElements = ignoreElements;
+ /** @type {?((x: number, y: number) => Promise<boolean>)} */
this._ignorePoint = ignorePoint;
+ /** @type {boolean} */
this._searchTerms = searchTerms;
+ /** @type {boolean} */
this._searchKanji = searchKanji;
+ /** @type {boolean} */
this._searchOnClick = searchOnClick;
+ /** @type {boolean} */
this._searchOnClickOnly = searchOnClickOnly;
+ /** @type {boolean} */
this._isPrepared = false;
+ /** @type {?string} */
this._includeSelector = null;
+ /** @type {?string} */
this._excludeSelector = null;
+ /** @type {?import('text-scanner').InputInfo} */
this._inputInfoCurrent = null;
+ /** @type {?Promise<boolean>} */
this._scanTimerPromise = null;
+ /** @type {?(value: boolean) => void} */
+ this._scanTimerPromiseResolve = null;
+ /** @type {?import('text-source').TextSource} */
this._textSourceCurrent = null;
+ /** @type {boolean} */
this._textSourceCurrentSelected = false;
+ /** @type {boolean} */
this._pendingLookup = false;
+ /** @type {?import('text-scanner').SelectionRestoreInfo} */
this._selectionRestoreInfo = null;
+ /** @type {boolean} */
this._deepContentScan = false;
+ /** @type {boolean} */
this._normalizeCssZoom = true;
+ /** @type {boolean} */
this._selectText = false;
+ /** @type {number} */
this._delay = 0;
+ /** @type {boolean} */
this._touchInputEnabled = false;
+ /** @type {boolean} */
this._pointerEventsEnabled = false;
+ /** @type {number} */
this._scanLength = 1;
+ /** @type {boolean} */
this._layoutAwareScan = false;
+ /** @type {boolean} */
this._preventMiddleMouse = false;
+ /** @type {boolean} */
this._matchTypePrefix = false;
+ /** @type {number} */
this._sentenceScanExtent = 0;
+ /** @type {boolean} */
this._sentenceTerminateAtNewlines = true;
+ /** @type {Map<string, [includeCharacterAtStart: boolean, includeCharacterAtEnd: boolean]>} */
this._sentenceTerminatorMap = new Map();
+ /** @type {Map<string, [character: string, includeCharacterAtStart: boolean]>} */
this._sentenceForwardQuoteMap = new Map();
+ /** @type {Map<string, [character: string, includeCharacterAtEnd: boolean]>} */
this._sentenceBackwardQuoteMap = new Map();
+ /** @type {import('text-scanner').InputConfig[]} */
this._inputs = [];
+ /** @type {boolean} */
this._enabled = false;
+ /** @type {boolean} */
this._enabledValue = false;
+ /** @type {EventListenerCollection} */
this._eventListeners = new EventListenerCollection();
+ /** @type {boolean} */
this._preventNextClickScan = false;
+ /** @type {?number} */
this._preventNextClickScanTimer = null;
+ /** @type {number} */
this._preventNextClickScanTimerDuration = 50;
+ /** @type {() => void} */
this._preventNextClickScanTimerCallback = this._onPreventNextClickScanTimeout.bind(this);
+ /** @type {?number} */
this._primaryTouchIdentifier = null;
+ /** @type {boolean} */
this._preventNextContextMenu = false;
+ /** @type {boolean} */
this._preventNextMouseDown = false;
+ /** @type {boolean} */
this._preventNextClick = false;
+ /** @type {boolean} */
this._preventScroll = false;
+ /** @type {0|1|2|3} */
this._penPointerState = 0; // 0 = not active; 1 = hovering; 2 = touching; 3 = hovering after touching
+ /** @type {Map<number, string>} */
this._pointerIdTypeMap = new Map();
+ /** @type {boolean} */
this._canClearSelection = true;
+ /** @type {?number} */
this._textSelectionTimer = null;
+ /** @type {boolean} */
this._yomitanIsChangingTextSelectionNow = false;
+ /** @type {boolean} */
this._userHasNotSelectedAnythingManually = true;
}
+ /** @type {boolean} */
get canClearSelection() {
return this._canClearSelection;
}
@@ -101,6 +161,7 @@ export class TextScanner extends EventDispatcher {
this._canClearSelection = value;
}
+ /** @type {?string} */
get includeSelector() {
return this._includeSelector;
}
@@ -109,6 +170,7 @@ export class TextScanner extends EventDispatcher {
this._includeSelector = value;
}
+ /** @type {?string} */
get excludeSelector() {
return this._excludeSelector;
}
@@ -117,15 +179,22 @@ export class TextScanner extends EventDispatcher {
this._excludeSelector = value;
}
+ /** */
prepare() {
this._isPrepared = true;
this.setEnabled(this._enabled);
}
+ /**
+ * @returns {boolean}
+ */
isEnabled() {
return this._enabled;
}
+ /**
+ * @param {boolean} enabled
+ */
setEnabled(enabled) {
this._enabled = enabled;
@@ -145,11 +214,13 @@ export class TextScanner extends EventDispatcher {
if (value) {
this._hookEvents();
- const selection = window.getSelection();
- this._userHasNotSelectedAnythingManually = (selection === null) ? true : selection.isCollapsed;
+ this._userHasNotSelectedAnythingManually = this._computeUserHasNotSelectedAnythingManually();
}
}
+ /**
+ * @param {import('text-scanner').Options} options
+ */
setOptions({
inputs,
deepContentScan,
@@ -200,7 +271,7 @@ export class TextScanner extends EventDispatcher {
if (typeof sentenceParsingOptions === 'object' && sentenceParsingOptions !== null) {
const {scanExtent, terminationCharacterMode, terminationCharacters} = sentenceParsingOptions;
if (typeof scanExtent === 'number') {
- this._sentenceScanExtent = sentenceParsingOptions.scanExtent;
+ this._sentenceScanExtent = scanExtent;
}
if (typeof terminationCharacterMode === 'string') {
this._sentenceTerminateAtNewlines = (terminationCharacterMode === 'custom' || terminationCharacterMode === 'newlines');
@@ -229,6 +300,12 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {number} length
+ * @param {boolean} layoutAwareScan
+ * @returns {string}
+ */
getTextSourceContent(textSource, length, layoutAwareScan) {
const clonedTextSource = textSource.clone();
@@ -243,10 +320,14 @@ export class TextScanner extends EventDispatcher {
return clonedTextSource.text();
}
+ /**
+ * @returns {boolean}
+ */
hasSelection() {
return (this._textSourceCurrent !== null);
}
+ /** */
clearSelection() {
if (!this._canClearSelection) { return; }
if (this._textSourceCurrent !== null) {
@@ -263,15 +344,21 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @returns {?import('text-source').TextSource}
+ */
getCurrentTextSource() {
return this._textSourceCurrent;
}
+ /**
+ * @param {?import('text-source').TextSource} textSource
+ */
setCurrentTextSource(textSource) {
this._textSourceCurrent = textSource;
- if (this._selectText && this._userHasNotSelectedAnythingManually) {
+ if (this._selectText && this._userHasNotSelectedAnythingManually && textSource !== null) {
this._yomitanIsChangingTextSelectionNow = true;
- this._textSourceCurrent.select();
+ textSource.select();
if (this._textSelectionTimer !== null) { clearTimeout(this._textSelectionTimer); }
// This timeout uses a 50ms delay to ensure that the selectionchange event has time to occur.
// If the delay is 0ms, the timeout will sometimes complete before the event.
@@ -285,6 +372,9 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @returns {Promise<boolean>}
+ */
async searchLast() {
if (this._textSourceCurrent !== null && this._inputInfoCurrent !== null) {
await this._search(this._textSourceCurrent, this._searchTerms, this._searchKanji, this._inputInfoCurrent);
@@ -293,6 +383,11 @@ export class TextScanner extends EventDispatcher {
return false;
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {import('text-scanner').InputInfoDetail} [inputDetail]
+ * @returns {Promise<?import('text-scanner').SearchedEventDetails>}
+ */
async search(textSource, inputDetail) {
const inputInfo = this._createInputInfo(null, 'script', 'script', true, [], [], inputDetail);
return await this._search(textSource, this._searchTerms, this._searchKanji, inputInfo);
@@ -300,6 +395,11 @@ export class TextScanner extends EventDispatcher {
// Private
+ /**
+ * @param {import('settings').OptionsContext} baseOptionsContext
+ * @param {import('text-scanner').InputInfo} inputInfo
+ * @returns {import('settings').OptionsContext}
+ */
_createOptionsContextForInput(baseOptionsContext, inputInfo) {
const optionsContext = clone(baseOptionsContext);
const {modifiers, modifierKeys} = inputInfo;
@@ -308,20 +408,33 @@ export class TextScanner extends EventDispatcher {
return optionsContext;
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {boolean} searchTerms
+ * @param {boolean} searchKanji
+ * @param {import('text-scanner').InputInfo} inputInfo
+ * @returns {Promise<?import('text-scanner').SearchedEventDetails>}
+ */
async _search(textSource, searchTerms, searchKanji, inputInfo) {
+ /** @type {?import('dictionary').DictionaryEntry[]} */
let dictionaryEntries = null;
+ /** @type {?import('display').HistoryStateSentence} */
let sentence = null;
+ /** @type {?import('display').PageType} */
let type = null;
+ /** @type {?Error} */
let error = null;
let searched = false;
+ /** @type {?import('settings').OptionsContext} */
let optionsContext = null;
+ /** @type {?import('text-scanner').SearchResultDetail} */
let detail = null;
try {
const inputInfoDetail = inputInfo.detail;
const selectionRestoreInfo = (
- (isObject(inputInfoDetail) && inputInfoDetail.restoreSelection) ?
- (this._inputInfoCurrent === null ? this._createSelectionRestoreInfo() : void 0) :
+ (typeof inputInfoDetail === 'object' && inputInfoDetail !== null && inputInfoDetail.restoreSelection) ?
+ (this._inputInfoCurrent === null ? this._createSelectionRestoreInfo() : null) :
null
);
@@ -329,8 +442,11 @@ export class TextScanner extends EventDispatcher {
return null;
}
- ({optionsContext, detail} = await this._getSearchContext());
- optionsContext = this._createOptionsContextForInput(optionsContext, inputInfo);
+ const getSearchContextPromise = this._getSearchContext();
+ const getSearchContextResult = getSearchContextPromise instanceof Promise ? await getSearchContextPromise : getSearchContextPromise;
+ const {detail: detail2} = getSearchContextResult;
+ if (typeof detail2 !== 'undefined') { detail = detail2; }
+ optionsContext = this._createOptionsContextForInput(getSearchContextResult.optionsContext, inputInfo);
searched = true;
@@ -339,9 +455,9 @@ export class TextScanner extends EventDispatcher {
if (result !== null) {
({dictionaryEntries, sentence, type} = result);
valid = true;
- } else if (textSource !== null && textSource.type === 'element' && await this._hasJapanese(textSource.fullContent)) {
+ } else if (textSource !== null && textSource instanceof TextSourceElement && await this._hasJapanese(textSource.fullContent)) {
dictionaryEntries = [];
- sentence = {sentence: '', offset: 0};
+ sentence = {text: '', offset: 0};
type = 'terms';
valid = true;
}
@@ -354,11 +470,12 @@ export class TextScanner extends EventDispatcher {
}
}
} catch (e) {
- error = e;
+ error = e instanceof Error ? e : new Error(`A search error occurred: ${e}`);
}
if (!searched) { return null; }
+ /** @type {import('text-scanner').SearchedEventDetails} */
const results = {
textScanner: this,
type,
@@ -374,41 +491,55 @@ export class TextScanner extends EventDispatcher {
return results;
}
+ /** */
_resetPreventNextClickScan() {
this._preventNextClickScan = false;
if (this._preventNextClickScanTimer !== null) { clearTimeout(this._preventNextClickScanTimer); }
this._preventNextClickScanTimer = setTimeout(this._preventNextClickScanTimerCallback, this._preventNextClickScanTimerDuration);
}
+ /** */
_onPreventNextClickScanTimeout() {
this._preventNextClickScanTimer = null;
}
+ /** */
_onSelectionChange() {
if (this._preventNextClickScanTimer !== null) { return; } // Ignore deselection that occurs at the start of the click
this._preventNextClickScan = true;
}
+ /** */
_onSelectionChangeCheckUserSelection() {
if (this._yomitanIsChangingTextSelectionNow) { return; }
- this._userHasNotSelectedAnythingManually = window.getSelection().isCollapsed;
+ this._userHasNotSelectedAnythingManually = this._computeUserHasNotSelectedAnythingManually();
}
+ /**
+ * @param {MouseEvent} e
+ */
_onSearchClickMouseDown(e) {
if (e.button !== 0) { return; }
this._resetPreventNextClickScan();
}
+ /** */
_onSearchClickTouchStart() {
this._resetPreventNextClickScan();
}
+ /**
+ * @param {MouseEvent} e
+ */
_onMouseOver(e) {
- if (this._ignoreElements !== null && this._ignoreElements().includes(e.target)) {
+ if (this._ignoreElements !== null && this._ignoreElements().includes(/** @type {Element} */ (e.target))) {
this._scanTimerClear();
}
}
+ /**
+ * @param {MouseEvent} e
+ */
_onMouseMove(e) {
this._scanTimerClear();
@@ -418,6 +549,10 @@ export class TextScanner extends EventDispatcher {
this._searchAtFromMouseMove(e.clientX, e.clientY, inputInfo);
}
+ /**
+ * @param {MouseEvent} e
+ * @returns {boolean|void}
+ */
_onMouseDown(e) {
if (this._preventNextMouseDown) {
this._preventNextMouseDown = false;
@@ -443,10 +578,15 @@ export class TextScanner extends EventDispatcher {
}
}
+ /** */
_onMouseOut() {
this._scanTimerClear();
}
+ /**
+ * @param {MouseEvent} e
+ * @returns {boolean|void}
+ */
_onClick(e) {
if (this._preventNextClick) {
this._preventNextClick = false;
@@ -460,6 +600,9 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {MouseEvent} e
+ */
_onSearchClick(e) {
const preventNextClickScan = this._preventNextClickScan;
this._preventNextClickScan = false;
@@ -476,10 +619,15 @@ export class TextScanner extends EventDispatcher {
this._searchAt(e.clientX, e.clientY, inputInfo);
}
+ /** */
_onAuxClick() {
this._preventNextContextMenu = false;
}
+ /**
+ * @param {MouseEvent} e
+ * @returns {boolean|void}
+ */
_onContextMenu(e) {
if (this._preventNextContextMenu) {
this._preventNextContextMenu = false;
@@ -489,6 +637,9 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {TouchEvent} e
+ */
_onTouchStart(e) {
if (this._primaryTouchIdentifier !== null || e.changedTouches.length === 0) {
return;
@@ -498,13 +649,20 @@ export class TextScanner extends EventDispatcher {
this._onPrimaryTouchStart(e, clientX, clientY, identifier);
}
+ /**
+ * @param {TouchEvent|PointerEvent} e
+ * @param {number} x
+ * @param {number} y
+ * @param {number} identifier
+ */
_onPrimaryTouchStart(e, x, y, identifier) {
this._preventScroll = false;
this._preventNextContextMenu = false;
this._preventNextMouseDown = false;
this._preventNextClick = false;
- if (DocumentUtil.isPointInSelection(x, y, window.getSelection())) {
+ const selection = window.getSelection();
+ if (selection !== null && DocumentUtil.isPointInSelection(x, y, selection)) {
return;
}
@@ -513,11 +671,14 @@ export class TextScanner extends EventDispatcher {
if (this._pendingLookup) { return; }
const inputInfo = this._getMatchingInputGroupFromEvent('touch', 'touchStart', e);
- if (inputInfo === null || !inputInfo.input.scanOnTouchPress) { return; }
+ if (inputInfo === null || !(inputInfo.input !== null && inputInfo.input.scanOnTouchPress)) { return; }
this._searchAtFromTouchStart(x, y, inputInfo);
}
+ /**
+ * @param {TouchEvent} e
+ */
_onTouchEnd(e) {
if (this._primaryTouchIdentifier === null) { return; }
@@ -528,6 +689,12 @@ export class TextScanner extends EventDispatcher {
this._onPrimaryTouchEnd(e, clientX, clientY, true);
}
+ /**
+ * @param {TouchEvent|PointerEvent} e
+ * @param {number} x
+ * @param {number} y
+ * @param {boolean} allowSearch
+ */
_onPrimaryTouchEnd(e, x, y, allowSearch) {
this._primaryTouchIdentifier = null;
this._preventScroll = false;
@@ -538,11 +705,14 @@ export class TextScanner extends EventDispatcher {
if (!allowSearch) { return; }
const inputInfo = this._getMatchingInputGroupFromEvent('touch', 'touchEnd', e);
- if (inputInfo === null || !inputInfo.input.scanOnTouchRelease) { return; }
+ if (inputInfo === null || !(inputInfo.input !== null && inputInfo.input.scanOnTouchRelease)) { return; }
this._searchAtFromTouchEnd(x, y, inputInfo);
}
+ /**
+ * @param {TouchEvent} e
+ */
_onTouchCancel(e) {
if (this._primaryTouchIdentifier === null) { return; }
@@ -552,6 +722,9 @@ export class TextScanner extends EventDispatcher {
this._onPrimaryTouchEnd(e, 0, 0, false);
}
+ /**
+ * @param {TouchEvent} e
+ */
_onTouchMove(e) {
if (this._primaryTouchIdentifier === null) { return; }
@@ -568,13 +741,18 @@ export class TextScanner extends EventDispatcher {
const inputInfo = this._getMatchingInputGroupFromEvent('touch', 'touchMove', e);
if (inputInfo === null) { return; }
- if (inputInfo.input.scanOnTouchMove) {
+ const {input} = inputInfo;
+ if (input !== null && input.scanOnTouchMove) {
this._searchAt(primaryTouch.clientX, primaryTouch.clientY, inputInfo);
}
e.preventDefault(); // Disable scroll
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerOver(e) {
const {pointerType, pointerId, isPrimary} = e;
if (pointerType === 'pen') {
@@ -584,11 +762,15 @@ export class TextScanner extends EventDispatcher {
if (!isPrimary) { return; }
switch (pointerType) {
case 'mouse': return this._onMousePointerOver(e);
- case 'touch': return this._onTouchPointerOver(e);
+ case 'touch': return this._onTouchPointerOver();
case 'pen': return this._onPenPointerOver(e);
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerDown(e) {
if (!e.isPrimary) { return; }
switch (this._getPointerEventType(e)) {
@@ -598,6 +780,10 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerMove(e) {
if (!e.isPrimary) { return; }
switch (this._getPointerEventType(e)) {
@@ -607,92 +793,144 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerUp(e) {
if (!e.isPrimary) { return; }
switch (this._getPointerEventType(e)) {
- case 'mouse': return this._onMousePointerUp(e);
+ case 'mouse': return this._onMousePointerUp();
case 'touch': return this._onTouchPointerUp(e);
case 'pen': return this._onPenPointerUp(e);
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerCancel(e) {
this._pointerIdTypeMap.delete(e.pointerId);
if (!e.isPrimary) { return; }
switch (e.pointerType) {
- case 'mouse': return this._onMousePointerCancel(e);
+ case 'mouse': return this._onMousePointerCancel();
case 'touch': return this._onTouchPointerCancel(e);
- case 'pen': return this._onPenPointerCancel(e);
+ case 'pen': return this._onPenPointerCancel();
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onPointerOut(e) {
this._pointerIdTypeMap.delete(e.pointerId);
if (!e.isPrimary) { return; }
switch (e.pointerType) {
- case 'mouse': return this._onMousePointerOut(e);
- case 'touch': return this._onTouchPointerOut(e);
- case 'pen': return this._onPenPointerOut(e);
+ case 'mouse': return this._onMousePointerOut();
+ case 'touch': return this._onTouchPointerOut();
+ case 'pen': return this._onPenPointerOut();
}
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onMousePointerOver(e) {
return this._onMouseOver(e);
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onMousePointerDown(e) {
return this._onMouseDown(e);
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onMousePointerMove(e) {
return this._onMouseMove(e);
}
+ /** */
_onMousePointerUp() {
// NOP
}
- _onMousePointerCancel(e) {
- return this._onMouseOut(e);
+ /**
+ * @returns {boolean|void}
+ */
+ _onMousePointerCancel() {
+ return this._onMouseOut();
}
- _onMousePointerOut(e) {
- return this._onMouseOut(e);
+ /**
+ * @returns {boolean|void}
+ */
+ _onMousePointerOut() {
+ return this._onMouseOut();
}
+ /** */
_onTouchPointerOver() {
// NOP
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onTouchPointerDown(e) {
const {clientX, clientY, pointerId} = e;
this._onPrimaryTouchStart(e, clientX, clientY, pointerId);
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onTouchPointerMove(e) {
if (!this._preventScroll || !e.cancelable) {
return;
}
const inputInfo = this._getMatchingInputGroupFromEvent('touch', 'touchMove', e);
- if (inputInfo === null || !inputInfo.input.scanOnTouchMove) { return; }
+ if (inputInfo === null || !(inputInfo.input !== null && inputInfo.input.scanOnTouchMove)) { return; }
this._searchAt(e.clientX, e.clientY, inputInfo);
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onTouchPointerUp(e) {
const {clientX, clientY} = e;
return this._onPrimaryTouchEnd(e, clientX, clientY, true);
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {boolean|void}
+ */
_onTouchPointerCancel(e) {
return this._onPrimaryTouchEnd(e, 0, 0, false);
}
+ /** */
_onTouchPointerOut() {
// NOP
}
+ /**
+ * @param {TouchEvent} e
+ */
_onTouchMovePreventScroll(e) {
if (!this._preventScroll) { return; }
@@ -703,31 +941,45 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {PointerEvent} e
+ */
_onPenPointerOver(e) {
this._penPointerState = 1;
this._searchAtFromPen(e, 'pointerOver', false);
}
+ /**
+ * @param {PointerEvent} e
+ */
_onPenPointerDown(e) {
this._penPointerState = 2;
this._searchAtFromPen(e, 'pointerDown', true);
}
+ /**
+ * @param {PointerEvent} e
+ */
_onPenPointerMove(e) {
if (this._penPointerState === 2 && (!this._preventScroll || !e.cancelable)) { return; }
this._searchAtFromPen(e, 'pointerMove', true);
}
+ /**
+ * @param {PointerEvent} e
+ */
_onPenPointerUp(e) {
this._penPointerState = 3;
this._preventScroll = false;
this._searchAtFromPen(e, 'pointerUp', false);
}
- _onPenPointerCancel(e) {
- this._onPenPointerOut(e);
+ /** */
+ _onPenPointerCancel() {
+ this._onPenPointerOut();
}
+ /** */
_onPenPointerOut() {
this._penPointerState = 0;
this._preventScroll = false;
@@ -736,32 +988,54 @@ export class TextScanner extends EventDispatcher {
this._preventNextClick = false;
}
+ /**
+ * @returns {Promise<boolean>}
+ */
async _scanTimerWait() {
const delay = this._delay;
- const promise = promiseTimeout(delay, true);
+ const promise = /** @type {Promise<boolean>} */ (new Promise((resolve) => {
+ /** @type {?number} */
+ let timeout = setTimeout(() => {
+ timeout = null;
+ resolve(true);
+ }, delay);
+ this._scanTimerPromiseResolve = (value) => {
+ if (timeout === null) { return; }
+ clearTimeout(timeout);
+ timeout = null;
+ resolve(value);
+ };
+ }));
this._scanTimerPromise = promise;
try {
return await promise;
} finally {
if (this._scanTimerPromise === promise) {
this._scanTimerPromise = null;
+ this._scanTimerPromiseResolve = null;
}
}
}
+ /** */
_scanTimerClear() {
- if (this._scanTimerPromise !== null) {
- this._scanTimerPromise.resolve(false);
- this._scanTimerPromise = null;
- }
+ if (this._scanTimerPromiseResolve === null) { return; }
+ this._scanTimerPromiseResolve(false);
+ this._scanTimerPromiseResolve = null;
+ this._scanTimerPromise = null;
}
+ /**
+ * @returns {boolean}
+ */
_arePointerEventsSupported() {
return (this._pointerEventsEnabled && typeof PointerEvent !== 'undefined');
}
+ /** */
_hookEvents() {
const capture = true;
+ /** @type {import('event-listener-collection').AddEventListenerArgs[]} */
let eventListenerInfos;
if (this._searchOnClickOnly) {
eventListenerInfos = this._getMouseClickOnlyEventListeners(capture);
@@ -779,11 +1053,15 @@ export class TextScanner extends EventDispatcher {
eventListenerInfos.push(this._getSelectionChangeCheckUserSelectionListener());
- for (const args of eventListenerInfos) {
+ for (const [...args] of eventListenerInfos) {
this._eventListeners.addEventListener(...args);
}
}
+ /**
+ * @param {boolean} capture
+ * @returns {import('event-listener-collection').AddEventListenerArgs[]}
+ */
_getPointerEventListeners(capture) {
return [
[this._node, 'pointerover', this._onPointerOver.bind(this), capture],
@@ -799,6 +1077,10 @@ export class TextScanner extends EventDispatcher {
];
}
+ /**
+ * @param {boolean} capture
+ * @returns {import('event-listener-collection').AddEventListenerArgs[]}
+ */
_getMouseEventListeners(capture) {
return [
[this._node, 'mousedown', this._onMouseDown.bind(this), capture],
@@ -809,6 +1091,10 @@ export class TextScanner extends EventDispatcher {
];
}
+ /**
+ * @param {boolean} capture
+ * @returns {import('event-listener-collection').AddEventListenerArgs[]}
+ */
_getTouchEventListeners(capture) {
return [
[this._node, 'auxclick', this._onAuxClick.bind(this), capture],
@@ -820,14 +1106,23 @@ export class TextScanner extends EventDispatcher {
];
}
+ /**
+ * @param {boolean} capture
+ * @returns {import('event-listener-collection').AddEventListenerArgs[]}
+ */
_getMouseClickOnlyEventListeners(capture) {
return [
[this._node, 'click', this._onClick.bind(this), capture]
];
}
+ /**
+ * @param {boolean} capture
+ * @returns {import('event-listener-collection').AddEventListenerArgs[]}
+ */
_getMouseClickOnlyEventListeners2(capture) {
const {documentElement} = document;
+ /** @type {import('event-listener-collection').AddEventListenerArgs[]} */
const entries = [
[document, 'selectionchange', this._onSelectionChange.bind(this)]
];
@@ -840,10 +1135,18 @@ export class TextScanner extends EventDispatcher {
return entries;
}
+ /**
+ * @returns {import('event-listener-collection').AddEventListenerArgs}
+ */
_getSelectionChangeCheckUserSelectionListener() {
return [document, 'selectionchange', this._onSelectionChangeCheckUserSelection.bind(this)];
}
+ /**
+ * @param {TouchList} touchList
+ * @param {number} identifier
+ * @returns {?Touch}
+ */
_getTouch(touchList, identifier) {
for (const touch of touchList) {
if (touch.identifier === identifier) {
@@ -853,6 +1156,13 @@ export class TextScanner extends EventDispatcher {
return null;
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {boolean} searchTerms
+ * @param {boolean} searchKanji
+ * @param {import('settings').OptionsContext} optionsContext
+ * @returns {Promise<?import('text-scanner').SearchResults>}
+ */
async _findDictionaryEntries(textSource, searchTerms, searchKanji, optionsContext) {
if (textSource === null) {
return null;
@@ -868,6 +1178,11 @@ export class TextScanner extends EventDispatcher {
return null;
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {import('settings').OptionsContext} optionsContext
+ * @returns {Promise<?import('text-scanner').TermSearchResults>}
+ */
async _findTermDictionaryEntries(textSource, optionsContext) {
const scanLength = this._scanLength;
const sentenceScanExtent = this._sentenceScanExtent;
@@ -879,6 +1194,7 @@ export class TextScanner extends EventDispatcher {
const searchText = this.getTextSourceContent(textSource, scanLength, layoutAwareScan);
if (searchText.length === 0) { return null; }
+ /** @type {import('api').FindTermsDetails} */
const details = {};
if (this._matchTypePrefix) { details.matchType = 'prefix'; }
const {dictionaryEntries, originalTextLength} = await yomitan.api.termsFind(searchText, details, optionsContext);
@@ -898,6 +1214,11 @@ export class TextScanner extends EventDispatcher {
return {dictionaryEntries, sentence, type: 'terms'};
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {import('settings').OptionsContext} optionsContext
+ * @returns {Promise<?import('text-scanner').KanjiSearchResults>}
+ */
async _findKanjiDictionaryEntries(textSource, optionsContext) {
const sentenceScanExtent = this._sentenceScanExtent;
const sentenceTerminateAtNewlines = this._sentenceTerminateAtNewlines;
@@ -925,6 +1246,11 @@ export class TextScanner extends EventDispatcher {
return {dictionaryEntries, sentence, type: 'kanji'};
}
+ /**
+ * @param {number} x
+ * @param {number} y
+ * @param {import('text-scanner').InputInfo} inputInfo
+ */
async _searchAt(x, y, inputInfo) {
if (this._pendingLookup) { return; }
@@ -948,11 +1274,13 @@ export class TextScanner extends EventDispatcher {
deepContentScan: this._deepContentScan,
normalizeCssZoom: this._normalizeCssZoom
});
- try {
- await this._search(textSource, searchTerms, searchKanji, inputInfo);
- } finally {
- if (textSource !== null) {
- textSource.cleanup();
+ if (textSource !== null) {
+ try {
+ await this._search(textSource, searchTerms, searchKanji, inputInfo);
+ } finally {
+ if (textSource !== null) {
+ textSource.cleanup();
+ }
}
}
} catch (e) {
@@ -962,6 +1290,11 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {number} x
+ * @param {number} y
+ * @param {import('text-scanner').InputInfo} inputInfo
+ */
async _searchAtFromMouseMove(x, y, inputInfo) {
if (this._pendingLookup) { return; }
@@ -975,15 +1308,21 @@ export class TextScanner extends EventDispatcher {
await this._searchAt(x, y, inputInfo);
}
+ /**
+ * @param {number} x
+ * @param {number} y
+ * @param {import('text-scanner').InputInfo} inputInfo
+ */
async _searchAtFromTouchStart(x, y, inputInfo) {
const textSourceCurrentPrevious = this._textSourceCurrent !== null ? this._textSourceCurrent.clone() : null;
- const preventScroll = inputInfo.input.preventTouchScrolling;
+ const {input} = inputInfo;
+ const preventScroll = input !== null && input.preventTouchScrolling;
await this._searchAt(x, y, inputInfo);
if (
this._textSourceCurrent !== null &&
- !this._textSourceCurrent.hasSameStart(textSourceCurrentPrevious)
+ !(textSourceCurrentPrevious !== null && this._textSourceCurrent.hasSameStart(textSourceCurrentPrevious))
) {
this._preventScroll = preventScroll;
this._preventNextContextMenu = true;
@@ -991,10 +1330,20 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {number} x
+ * @param {number} y
+ * @param {import('text-scanner').InputInfo} inputInfo
+ */
async _searchAtFromTouchEnd(x, y, inputInfo) {
await this._searchAt(x, y, inputInfo);
}
+ /**
+ * @param {PointerEvent} e
+ * @param {import('text-scanner').PointerEventType} eventType
+ * @param {boolean} prevent
+ */
async _searchAtFromPen(e, eventType, prevent) {
if (this._pendingLookup) { return; }
@@ -1002,9 +1351,9 @@ export class TextScanner extends EventDispatcher {
if (inputInfo === null) { return; }
const {input} = inputInfo;
- if (!this._isPenEventSupported(eventType, input)) { return; }
+ if (input === null || !this._isPenEventSupported(eventType, input)) { return; }
- const preventScroll = input.preventPenScrolling;
+ const preventScroll = input !== null && input.preventPenScrolling;
await this._searchAt(e.clientX, e.clientY, inputInfo);
@@ -1019,6 +1368,11 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {import('text-scanner').PointerEventType} eventType
+ * @param {import('text-scanner').InputConfig} input
+ * @returns {boolean}
+ */
_isPenEventSupported(eventType, input) {
switch (eventType) {
case 'pointerDown':
@@ -1038,12 +1392,25 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {import('text-scanner').PointerType} pointerType
+ * @param {import('text-scanner').PointerEventType} eventType
+ * @param {MouseEvent|TouchEvent} event
+ * @returns {?import('text-scanner').InputInfo}
+ */
_getMatchingInputGroupFromEvent(pointerType, eventType, event) {
const modifiers = DocumentUtil.getActiveModifiersAndButtons(event);
const modifierKeys = DocumentUtil.getActiveModifiers(event);
return this._getMatchingInputGroup(pointerType, eventType, modifiers, modifierKeys);
}
+ /**
+ * @param {import('text-scanner').PointerType} pointerType
+ * @param {import('text-scanner').PointerEventType} eventType
+ * @param {import('input').Modifier[]} modifiers
+ * @param {import('input').ModifierKey[]} modifierKeys
+ * @returns {?import('text-scanner').InputInfo}
+ */
_getMatchingInputGroup(pointerType, eventType, modifiers, modifierKeys) {
let fallbackIndex = -1;
const modifiersSet = new Set(modifiers);
@@ -1067,10 +1434,25 @@ export class TextScanner extends EventDispatcher {
);
}
+ /**
+ * @param {?import('text-scanner').InputConfig} input
+ * @param {import('text-scanner').PointerType} pointerType
+ * @param {import('text-scanner').PointerEventType} eventType
+ * @param {boolean} passive
+ * @param {import('input').Modifier[]} modifiers
+ * @param {import('input').ModifierKey[]} modifierKeys
+ * @param {import('text-scanner').InputInfoDetail} [detail]
+ * @returns {import('text-scanner').InputInfo}
+ */
_createInputInfo(input, pointerType, eventType, passive, modifiers, modifierKeys, detail) {
return {input, pointerType, eventType, passive, modifiers, modifierKeys, detail};
}
+ /**
+ * @param {Set<string>} set
+ * @param {string[]} values
+ * @returns {boolean}
+ */
_setHasAll(set, values) {
for (const value of values) {
if (!set.has(value)) {
@@ -1080,6 +1462,10 @@ export class TextScanner extends EventDispatcher {
return true;
}
+ /**
+ * @param {import('text-scanner').InputOptionsOuter} input
+ * @returns {import('text-scanner').InputConfig}
+ */
_convertInput(input) {
const {options} = input;
return {
@@ -1101,6 +1487,10 @@ export class TextScanner extends EventDispatcher {
};
}
+ /**
+ * @param {string} value
+ * @returns {string[]}
+ */
_getInputArray(value) {
return (
typeof value === 'string' ?
@@ -1109,6 +1499,10 @@ export class TextScanner extends EventDispatcher {
);
}
+ /**
+ * @param {{mouse: boolean, touch: boolean, pen: boolean}} details
+ * @returns {Set<'mouse'|'touch'|'pen'>}
+ */
_getInputTypeSet({mouse, touch, pen}) {
const set = new Set();
if (mouse) { set.add('mouse'); }
@@ -1117,16 +1511,30 @@ export class TextScanner extends EventDispatcher {
return set;
}
+ /**
+ * @param {unknown} value
+ * @returns {boolean}
+ */
_getInputBoolean(value) {
return typeof value === 'boolean' && value;
}
+ /**
+ * @param {PointerEvent} e
+ * @returns {string}
+ */
_getPointerEventType(e) {
// Workaround for Firefox bug not detecting certain 'touch' events as 'pen' events.
const cachedPointerType = this._pointerIdTypeMap.get(e.pointerId);
return (typeof cachedPointerType !== 'undefined' ? cachedPointerType : e.pointerType);
}
+ /**
+ * @param {import('text-source').TextSource} textSource
+ * @param {?string} includeSelector
+ * @param {?string} excludeSelector
+ * @param {boolean} layoutAwareScan
+ */
_constrainTextSource(textSource, includeSelector, excludeSelector, layoutAwareScan) {
let length = textSource.text().length;
while (length > 0) {
@@ -1143,6 +1551,10 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {string} text
+ * @returns {Promise<boolean>}
+ */
async _hasJapanese(text) {
try {
return await yomitan.api.textHasJapaneseCharacters(text);
@@ -1151,19 +1563,28 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @returns {import('text-scanner').SelectionRestoreInfo}
+ */
_createSelectionRestoreInfo() {
const ranges = [];
const selection = window.getSelection();
- for (let i = 0, ii = selection.rangeCount; i < ii; ++i) {
- const range = selection.getRangeAt(i);
- ranges.push(range.cloneRange());
+ if (selection !== null) {
+ for (let i = 0, ii = selection.rangeCount; i < ii; ++i) {
+ const range = selection.getRangeAt(i);
+ ranges.push(range.cloneRange());
+ }
}
return {ranges};
}
+ /**
+ * @param {import('text-scanner').SelectionRestoreInfo} selectionRestoreInfo
+ */
_restoreSelection(selectionRestoreInfo) {
const {ranges} = selectionRestoreInfo;
const selection = window.getSelection();
+ if (selection === null) { return; }
selection.removeAllRanges();
for (const range of ranges) {
try {
@@ -1174,7 +1595,18 @@ export class TextScanner extends EventDispatcher {
}
}
+ /**
+ * @param {string} reason
+ */
_triggerClear(reason) {
this.trigger('clear', {reason});
}
+
+ /**
+ * @returns {boolean}
+ */
+ _computeUserHasNotSelectedAnythingManually() {
+ const selection = window.getSelection();
+ return selection === null || selection.isCollapsed;
+ }
}
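
The _scanTimerWait/_scanTimerClear hunks above drop the shared promiseTimeout helper in favor of an inline promise whose resolver is stored on the instance, so a later clear can cancel the pending delay without a custom promise subclass. A minimal standalone sketch of that pattern, assuming a single pending wait at a time (the class and method names here are illustrative, not part of the extension):

class CancellableDelay {
    constructor() {
        /** @type {?((value: boolean) => void)} */
        this._resolve = null;
    }

    /**
     * Resolves with true after `delay` milliseconds, or with false if cancel() runs first.
     * @param {number} delay
     * @returns {Promise<boolean>}
     */
    wait(delay) {
        return new Promise((resolve) => {
            const timeout = setTimeout(() => {
                this._resolve = null;
                resolve(true);
            }, delay);
            this._resolve = (value) => {
                clearTimeout(timeout);
                this._resolve = null;
                resolve(value);
            };
        });
    }

    /** Cancels the pending wait, if any; the awaited promise then resolves with false. */
    cancel() {
        if (this._resolve !== null) { this._resolve(false); }
    }
}

Keeping the resolver rather than the promise is what lets the caller cancel; the removed code relied on a promise object that exposed its own resolve method.
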
diff --git a/ext/js/language/translator.js b/ext/js/language/translator.js
index 4044f379..9b01c1ff 100644
--- a/ext/js/language/translator.js
+++ b/ext/js/language/translator.js
@@ -26,34 +26,28 @@ import {DictionaryDatabase} from './dictionary-database.js';
*/
export class Translator {
/**
- * Information about how popup content should be shown, specifically related to the outer popup frame.
- * @typedef {object} TermFrequency
- * @property {string} term The term.
- * @property {string} reading The reading of the term.
- * @property {string} dictionary The name of the dictionary that the term frequency originates from.
- * @property {boolean} hasReading Whether or not a reading was specified.
- * @property {number|string} frequency The frequency value for the term.
- */
-
- /**
* Creates a new Translator instance.
- * @param {object} details The details for the class.
- * @param {JapaneseUtil} details.japaneseUtil An instance of JapaneseUtil.
- * @param {DictionaryDatabase} details.database An instance of DictionaryDatabase.
+ * @param {import('translator').ConstructorDetails} details The details for the class.
*/
constructor({japaneseUtil, database}) {
+ /** @type {JapaneseUtil} */
this._japaneseUtil = japaneseUtil;
+ /** @type {DictionaryDatabase} */
this._database = database;
+ /** @type {?Deinflector} */
this._deinflector = null;
+ /** @type {import('translator').DictionaryTagCache} */
this._tagCache = new Map();
+ /** @type {Intl.Collator} */
this._stringComparer = new Intl.Collator('en-US'); // Invariant locale
+ /** @type {RegExp} */
this._numberRegex = /[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?/;
}
/**
* Initializes the instance for use. The public API should not be used until
* this function has been called.
- * @param {object} deinflectionReasons The raw deinflections reasons data that the Deinflector uses.
+ * @param {import('deinflector').ReasonsRaw} deinflectionReasons The raw deinflection reasons data that the Deinflector uses.
*/
prepare(deinflectionReasons) {
this._deinflector = new Deinflector(deinflectionReasons);
@@ -68,22 +62,23 @@ export class Translator {
/**
* Finds term definitions for the given text.
- * @param {string} mode The mode to use for finding terms, which determines the format of the resulting array.
+ * @param {import('translator').FindTermsMode} mode The mode to use for finding terms, which determines the format of the resulting array.
* One of: 'group', 'merge', 'split', 'simple'
* @param {string} text The text to find terms for.
- * @param {Translation.FindTermsOptions} options A object describing settings about the lookup.
- * @returns {{dictionaryEntries: Translation.TermDictionaryEntry[], originalTextLength: number}} An object containing dictionary entries and the length of the original source text.
+ * @param {import('translation').FindTermsOptions} options An object describing settings about the lookup.
+ * @returns {Promise<{dictionaryEntries: import('dictionary').TermDictionaryEntry[], originalTextLength: number}>} An object containing dictionary entries and the length of the original source text.
*/
async findTerms(mode, text, options) {
const {enabledDictionaryMap, excludeDictionaryDefinitions, sortFrequencyDictionary, sortFrequencyDictionaryOrder} = options;
- let {dictionaryEntries, originalTextLength} = await this._findTermsInternal(text, enabledDictionaryMap, options);
+ const tagAggregator = new TranslatorTagAggregator();
+ let {dictionaryEntries, originalTextLength} = await this._findTermsInternal(text, enabledDictionaryMap, options, tagAggregator);
switch (mode) {
case 'group':
- dictionaryEntries = this._groupDictionaryEntriesByHeadword(dictionaryEntries);
+ dictionaryEntries = this._groupDictionaryEntriesByHeadword(dictionaryEntries, tagAggregator);
break;
case 'merge':
- dictionaryEntries = await this._getRelatedDictionaryEntries(dictionaryEntries, options.mainDictionary, enabledDictionaryMap);
+ dictionaryEntries = await this._getRelatedDictionaryEntries(dictionaryEntries, options.mainDictionary, enabledDictionaryMap, tagAggregator);
break;
}
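
Since the hunk above changes the main entry point for term lookups, a hedged usage sketch may help; findTerms' signature and the entry fields read below are taken from this diff, while `options` is assumed to be a fully populated import('translation').FindTermsOptions supplied by the caller:

// Illustrative only: `translator` is a prepared Translator instance and
// `options` a caller-built FindTermsOptions object (its full shape is not
// shown in this diff).
async function lookupGrouped(translator, text, options) {
    // mode is one of 'group', 'merge', 'split', 'simple'; 'simple' skips
    // tag expansion and most term metadata, per the surrounding hunks.
    const {dictionaryEntries, originalTextLength} = await translator.findTerms('group', text, options);
    for (const entry of dictionaryEntries) {
        const [{term, reading}] = entry.headwords;
        console.log(`${term} [${reading}]: ${entry.definitions.length} definition(s)`);
    }
    return originalTextLength;
}
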
@@ -91,17 +86,19 @@ export class Translator {
this._removeExcludedDefinitions(dictionaryEntries, excludeDictionaryDefinitions);
}
- if (mode === 'simple') {
+ if (mode !== 'simple') {
+ await this._addTermMeta(dictionaryEntries, enabledDictionaryMap, tagAggregator);
+ await this._expandTagGroupsAndGroup(tagAggregator.getTagExpansionTargets());
+ } else {
if (sortFrequencyDictionary !== null) {
- const sortDictionaryMap = [sortFrequencyDictionary]
- .filter((key) => enabledDictionaryMap.has(key))
- .reduce((subMap, key) => subMap.set(key, enabledDictionaryMap.get(key)), new Map());
- await this._addTermMeta(dictionaryEntries, sortDictionaryMap);
+ /** @type {import('translation').TermEnabledDictionaryMap} */
+ const sortDictionaryMap = new Map();
+ const value = enabledDictionaryMap.get(sortFrequencyDictionary);
+ if (typeof value !== 'undefined') {
+ sortDictionaryMap.set(sortFrequencyDictionary, value);
+ }
+ await this._addTermMeta(dictionaryEntries, sortDictionaryMap, tagAggregator);
}
- this._clearTermTags(dictionaryEntries);
- } else {
- await this._addTermMeta(dictionaryEntries, enabledDictionaryMap);
- await this._expandTermTags(dictionaryEntries);
}
if (sortFrequencyDictionary !== null) {
@@ -125,8 +122,8 @@ export class Translator {
* @param {string} text The text to find kanji definitions for. This string can be of any length,
* but is typically just one character, which is a single kanji. If the string is multiple
* characters long, each character will be searched in the database.
- * @param {Translation.FindKanjiOptions} options A object describing settings about the lookup.
- * @returns {Translation.KanjiDictionaryEntry[]} An array of definitions. See the _createKanjiDefinition() function for structure details.
+ * @param {import('translation').FindKanjiOptions} options An object describing settings about the lookup.
+ * @returns {Promise<import('dictionary').KanjiDictionaryEntry[]>} An array of definitions. See the _createKanjiDictionaryEntry() function for structure details.
*/
async findKanji(text, options) {
const {enabledDictionaryMap} = options;
@@ -140,19 +137,18 @@ export class Translator {
this._sortDatabaseEntriesByIndex(databaseEntries);
+ /** @type {import('dictionary').KanjiDictionaryEntry[]} */
const dictionaryEntries = [];
+ const tagAggregator = new TranslatorTagAggregator();
for (const {character, onyomi, kunyomi, tags, definitions, stats, dictionary} of databaseEntries) {
const expandedStats = await this._expandKanjiStats(stats, dictionary);
-
- const tagGroups = [];
- if (tags.length > 0) { tagGroups.push(this._createTagGroup(dictionary, tags)); }
-
- const dictionaryEntry = this._createKanjiDictionaryEntry(character, dictionary, onyomi, kunyomi, tagGroups, expandedStats, definitions);
+ const dictionaryEntry = this._createKanjiDictionaryEntry(character, dictionary, onyomi, kunyomi, expandedStats, definitions);
dictionaryEntries.push(dictionaryEntry);
+ tagAggregator.addTags(dictionaryEntry.tags, dictionary, tags);
}
await this._addKanjiMeta(dictionaryEntries, enabledDictionaryMap);
- await this._expandKanjiTags(dictionaryEntries);
+ await this._expandTagGroupsAndGroup(tagAggregator.getTagExpansionTargets());
this._sortKanjiDictionaryEntryData(dictionaryEntries);
@@ -164,8 +160,8 @@ export class Translator {
* and a list of dictionaries.
* @param {{term: string, reading: string|null}[]} termReadingList An array of `{term, reading}` pairs. If reading is null,
* the reading won't be compared.
- * @param {Iterable<string>} dictionaries An array of dictionary names.
- * @returns {TermFrequency[]} An array of term frequencies.
+ * @param {string[]} dictionaries An array of dictionary names.
+ * @returns {Promise<import('translator').TermFrequencySimple[]>} An array of term frequencies.
*/
async getTermFrequencies(termReadingList, dictionaries) {
const dictionarySet = new Set();
@@ -176,25 +172,26 @@ export class Translator {
const termList = termReadingList.map(({term}) => term);
const metas = await this._database.findTermMetaBulk(termList, dictionarySet);
+ /** @type {import('translator').TermFrequencySimple[]} */
const results = [];
for (const {mode, data, dictionary, index} of metas) {
if (mode !== 'freq') { continue; }
let {term, reading} = termReadingList[index];
- let frequency = data;
- const hasReading = (data !== null && typeof data === 'object');
- if (hasReading) {
- if (data.reading !== reading) {
- if (reading !== null) { continue; }
- reading = data.reading;
- }
- frequency = data.frequency;
+ const hasReading = (data !== null && typeof data === 'object' && typeof data.reading === 'string');
+ if (hasReading && data.reading !== reading) {
+ if (reading !== null) { continue; }
+ reading = data.reading;
}
+ const frequency = hasReading ? data.frequency : /** @type {import('dictionary-data').GenericFrequencyData} */ (data);
+ const {frequency: frequencyValue, displayValue, displayValueParsed} = this._getFrequencyInfo(frequency);
results.push({
term,
reading,
dictionary,
hasReading,
- frequency
+ frequency: frequencyValue,
+ displayValue,
+ displayValueParsed
});
}
return results;
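
The getTermFrequencies hunk above has to cope with the two shapes a 'freq' meta record can take: a bare frequency value, or an object that also names the reading it applies to. A standalone sketch of that normalization, with the diff's _getFrequencyInfo reduced to a simple numeric conversion for illustration (the displayValue handling here is an approximation, not the extension's exact behavior):

/**
 * @param {string} term
 * @param {?string} reading
 * @param {number|string|{reading: string, frequency: number|string}} data
 * @returns {?{term: string, reading: ?string, hasReading: boolean, frequency: number, displayValue: ?string}}
 */
function normalizeFrequencyMeta(term, reading, data) {
    const hasReading = (data !== null && typeof data === 'object' && typeof data.reading === 'string');
    if (hasReading && data.reading !== reading) {
        if (reading !== null) { return null; } // reading mismatch: this record doesn't apply
        reading = data.reading;
    }
    const raw = hasReading ? data.frequency : data;
    const parsed = typeof raw === 'number' ? raw : Number.parseFloat(raw);
    return {
        term,
        reading,
        hasReading,
        frequency: Number.isFinite(parsed) ? parsed : 0,
        displayValue: typeof raw === 'string' ? raw : null
    };
}
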
@@ -202,7 +199,14 @@ export class Translator {
// Find terms internal implementation
- async _findTermsInternal(text, enabledDictionaryMap, options) {
+ /**
+ * @param {string} text
+ * @param {Map<string, import('translation').FindTermDictionary>} enabledDictionaryMap
+ * @param {import('translation').FindTermsOptions} options
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {Promise<{dictionaryEntries: import('dictionary').TermDictionaryEntry[], originalTextLength: number}>}
+ */
+ async _findTermsInternal(text, enabledDictionaryMap, options, tagAggregator) {
if (options.removeNonJapaneseCharacters) {
text = this._getJapaneseOnlyText(text);
}
@@ -221,7 +225,7 @@ export class Translator {
for (const databaseEntry of databaseEntries) {
const {id} = databaseEntry;
if (ids.has(id)) { continue; }
- const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, originalText, transformedText, deinflectedText, reasons, true, enabledDictionaryMap);
+ const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, originalText, transformedText, deinflectedText, reasons, true, enabledDictionaryMap, tagAggregator);
dictionaryEntries.push(dictionaryEntry);
ids.add(id);
}
@@ -230,11 +234,17 @@ export class Translator {
return {dictionaryEntries, originalTextLength};
}
+ /**
+ * @param {string} text
+ * @param {Map<string, import('translation').FindTermDictionary>} enabledDictionaryMap
+ * @param {import('translation').FindTermsOptions} options
+ * @returns {Promise<import('translation-internal').DatabaseDeinflection[]>}
+ */
async _findTermsInternal2(text, enabledDictionaryMap, options) {
const deinflections = (
options.deinflect ?
this._getAllDeinflections(text, options) :
- [this._createDeinflection(text, text, text, 0, [], [])]
+ [this._createDeinflection(text, text, text, 0, [])]
);
if (deinflections.length === 0) { return []; }
@@ -271,7 +281,13 @@ export class Translator {
// Deinflections and text transformations
+ /**
+ * @param {string} text
+ * @param {import('translation').FindTermsOptions} options
+ * @returns {import('translation-internal').DatabaseDeinflection[]}
+ */
_getAllDeinflections(text, options) {
+ /** @type {import('translation-internal').TextDeinflectionOptionsArrays} */
const textOptionVariantArray = [
this._getTextReplacementsVariants(options),
this._getTextOptionEntryVariants(options.convertHalfWidthCharacters),
@@ -283,9 +299,10 @@ export class Translator {
];
const jp = this._japaneseUtil;
+ /** @type {import('translation-internal').DatabaseDeinflection[]} */
const deinflections = [];
const used = new Set();
- for (const [textReplacements, halfWidth, numeric, alphabetic, katakana, hiragana, [collapseEmphatic, collapseEmphaticFull]] of this._getArrayVariants(textOptionVariantArray)) {
+ for (const [textReplacements, halfWidth, numeric, alphabetic, katakana, hiragana, [collapseEmphatic, collapseEmphaticFull]] of /** @type {Generator<import('translation-internal').TextDeinflectionOptions, void, unknown>} */ (this._getArrayVariants(textOptionVariantArray))) {
let text2 = text;
const sourceMap = new TextSourceMap(text2);
if (textReplacements !== null) {
@@ -315,14 +332,20 @@ export class Translator {
if (used.has(source)) { break; }
used.add(source);
const rawSource = sourceMap.source.substring(0, sourceMap.getSourceLength(i));
- for (const {term, rules, reasons} of this._deinflector.deinflect(source)) {
- deinflections.push(this._createDeinflection(rawSource, source, term, rules, reasons, []));
+ for (const {term, rules, reasons} of /** @type {Deinflector} */ (this._deinflector).deinflect(source)) {
+ deinflections.push(this._createDeinflection(rawSource, source, term, rules, reasons));
}
}
}
return deinflections;
}
+ /**
+ * @param {string} text
+ * @param {TextSourceMap} sourceMap
+ * @param {import('translation').FindTermsTextReplacement[]} replacements
+ * @returns {string}
+ */
_applyTextReplacements(text, sourceMap, replacements) {
for (const {pattern, replacement} of replacements) {
text = RegexUtil.applyTextReplacement(text, sourceMap, pattern, replacement);
@@ -330,11 +353,15 @@ export class Translator {
return text;
}
+ /**
+ * @param {string} text
+ * @returns {string}
+ */
_getJapaneseOnlyText(text) {
const jp = this._japaneseUtil;
let length = 0;
for (const c of text) {
- if (!jp.isCodePointJapanese(c.codePointAt(0))) {
+ if (!jp.isCodePointJapanese(/** @type {number} */ (c.codePointAt(0)))) {
return text.substring(0, length);
}
length += c.length;
@@ -342,6 +369,10 @@ export class Translator {
return text;
}
+ /**
+ * @param {import('translation').FindTermsVariantMode} value
+ * @returns {boolean[]}
+ */
_getTextOptionEntryVariants(value) {
switch (value) {
case 'true': return [true];
@@ -350,7 +381,12 @@ export class Translator {
}
}
+ /**
+ * @param {import('translation').FindTermsOptions} options
+ * @returns {[collapseEmphatic: boolean, collapseEmphaticFull: boolean][]}
+ */
_getCollapseEmphaticOptions(options) {
+ /** @type {[collapseEmphatic: boolean, collapseEmphaticFull: boolean][]} */
const collapseEmphaticOptions = [[false, false]];
switch (options.collapseEmphaticSequences) {
case 'true':
@@ -363,20 +399,43 @@ export class Translator {
return collapseEmphaticOptions;
}
+ /**
+ * @param {import('translation').FindTermsOptions} options
+ * @returns {(import('translation').FindTermsTextReplacement[] | null)[]}
+ */
_getTextReplacementsVariants(options) {
return options.textReplacements;
}
- _createDeinflection(originalText, transformedText, deinflectedText, rules, reasons, databaseEntries) {
- return {originalText, transformedText, deinflectedText, rules, reasons, databaseEntries};
+ /**
+ * @param {string} originalText
+ * @param {string} transformedText
+ * @param {string} deinflectedText
+ * @param {import('translation-internal').DeinflectionRuleFlags} rules
+ * @param {string[]} reasons
+ * @returns {import('translation-internal').DatabaseDeinflection}
+ */
+ _createDeinflection(originalText, transformedText, deinflectedText, rules, reasons) {
+ return {originalText, transformedText, deinflectedText, rules, reasons, databaseEntries: []};
}
// Term dictionary entry grouping
- async _getRelatedDictionaryEntries(dictionaryEntries, mainDictionary, enabledDictionaryMap) {
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ * @param {string} mainDictionary
+ * @param {import('translation').TermEnabledDictionaryMap} enabledDictionaryMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {Promise<import('dictionary').TermDictionaryEntry[]>}
+ */
+ async _getRelatedDictionaryEntries(dictionaryEntries, mainDictionary, enabledDictionaryMap, tagAggregator) {
+ /** @type {import('translator').SequenceQuery[]} */
const sequenceList = [];
+ /** @type {import('translator').DictionaryEntryGroup[]} */
const groupedDictionaryEntries = [];
+ /** @type {Map<number, import('translator').DictionaryEntryGroup>} */
const groupedDictionaryEntriesMap = new Map();
+ /** @type {Map<number, import('dictionary').TermDictionaryEntry>} */
const ungroupedDictionaryEntriesMap = new Map();
for (const dictionaryEntry of dictionaryEntries) {
const {definitions: [{id, dictionary, sequences: [sequence]}]} = dictionaryEntry;
@@ -400,24 +459,31 @@ export class Translator {
if (sequenceList.length > 0) {
const secondarySearchDictionaryMap = this._getSecondarySearchDictionaryMap(enabledDictionaryMap);
- await this._addRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, sequenceList, enabledDictionaryMap);
+ await this._addRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, sequenceList, enabledDictionaryMap, tagAggregator);
for (const group of groupedDictionaryEntries) {
this._sortTermDictionaryEntriesById(group.dictionaryEntries);
}
if (ungroupedDictionaryEntriesMap.size !== 0 || secondarySearchDictionaryMap.size !== 0) {
- await this._addSecondaryRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, enabledDictionaryMap, secondarySearchDictionaryMap);
+ await this._addSecondaryRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, enabledDictionaryMap, secondarySearchDictionaryMap, tagAggregator);
}
}
const newDictionaryEntries = [];
for (const group of groupedDictionaryEntries) {
- newDictionaryEntries.push(this._createGroupedDictionaryEntry(group.dictionaryEntries, true));
+ newDictionaryEntries.push(this._createGroupedDictionaryEntry(group.dictionaryEntries, true, tagAggregator));
}
- newDictionaryEntries.push(...this._groupDictionaryEntriesByHeadword(ungroupedDictionaryEntriesMap.values()));
+ newDictionaryEntries.push(...this._groupDictionaryEntriesByHeadword(ungroupedDictionaryEntriesMap.values(), tagAggregator));
return newDictionaryEntries;
}
- async _addRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, sequenceList, enabledDictionaryMap) {
+ /**
+ * @param {import('translator').DictionaryEntryGroup[]} groupedDictionaryEntries
+ * @param {Map<number, import('dictionary').TermDictionaryEntry>} ungroupedDictionaryEntriesMap
+ * @param {import('translator').SequenceQuery[]} sequenceList
+ * @param {import('translation').TermEnabledDictionaryMap} enabledDictionaryMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ */
+ async _addRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, sequenceList, enabledDictionaryMap, tagAggregator) {
const databaseEntries = await this._database.findTermsBySequenceBulk(sequenceList);
for (const databaseEntry of databaseEntries) {
const {dictionaryEntries, ids} = groupedDictionaryEntries[databaseEntry.index];
@@ -425,15 +491,23 @@ export class Translator {
if (ids.has(id)) { continue; }
const {term} = databaseEntry;
- const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, term, term, term, [], false, enabledDictionaryMap);
+ const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, term, term, term, [], false, enabledDictionaryMap, tagAggregator);
dictionaryEntries.push(dictionaryEntry);
ids.add(id);
ungroupedDictionaryEntriesMap.delete(id);
}
}
- async _addSecondaryRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, enabledDictionaryMap, secondarySearchDictionaryMap) {
+ /**
+ * @param {import('translator').DictionaryEntryGroup[]} groupedDictionaryEntries
+ * @param {Map<number, import('dictionary').TermDictionaryEntry>} ungroupedDictionaryEntriesMap
+ * @param {import('translation').TermEnabledDictionaryMap} enabledDictionaryMap
+ * @param {import('translation').TermEnabledDictionaryMap} secondarySearchDictionaryMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ */
+ async _addSecondaryRelatedDictionaryEntries(groupedDictionaryEntries, ungroupedDictionaryEntriesMap, enabledDictionaryMap, secondarySearchDictionaryMap, tagAggregator) {
// Prepare grouping info
+ /** @type {import('dictionary-database').TermExactRequest[]} */
const termList = [];
const targetList = [];
const targetMap = new Map();
@@ -484,7 +558,7 @@ export class Translator {
for (const {ids, dictionaryEntries} of target.groups) {
if (ids.has(id)) { continue; }
- const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, sourceText, sourceText, sourceText, [], false, enabledDictionaryMap);
+ const dictionaryEntry = this._createTermDictionaryEntryFromDatabaseEntry(databaseEntry, sourceText, sourceText, sourceText, [], false, enabledDictionaryMap, tagAggregator);
dictionaryEntries.push(dictionaryEntry);
ids.add(id);
ungroupedDictionaryEntriesMap.delete(id);
@@ -492,7 +566,12 @@ export class Translator {
}
}
- _groupDictionaryEntriesByHeadword(dictionaryEntries) {
+ /**
+ * @param {Iterable<import('dictionary').TermDictionaryEntry>} dictionaryEntries
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {import('dictionary').TermDictionaryEntry[]}
+ */
+ _groupDictionaryEntriesByHeadword(dictionaryEntries, tagAggregator) {
const groups = new Map();
for (const dictionaryEntry of dictionaryEntries) {
const {inflections, headwords: [{term, reading}]} = dictionaryEntry;
@@ -507,13 +586,17 @@ export class Translator {
const newDictionaryEntries = [];
for (const groupDictionaryEntries of groups.values()) {
- newDictionaryEntries.push(this._createGroupedDictionaryEntry(groupDictionaryEntries, false));
+ newDictionaryEntries.push(this._createGroupedDictionaryEntry(groupDictionaryEntries, false, tagAggregator));
}
return newDictionaryEntries;
}
// Removing data
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ * @param {Set<string>} excludeDictionaryDefinitions
+ */
_removeExcludedDefinitions(dictionaryEntries, excludeDictionaryDefinitions) {
for (let i = dictionaryEntries.length - 1; i >= 0; --i) {
const dictionaryEntry = dictionaryEntries[i];
@@ -534,6 +617,9 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry} dictionaryEntry
+ */
_removeUnusedHeadwords(dictionaryEntry) {
const {definitions, pronunciations, frequencies, headwords} = dictionaryEntry;
const removeHeadwordIndices = new Set();
@@ -548,6 +634,7 @@ export class Translator {
if (removeHeadwordIndices.size === 0) { return; }
+ /** @type {Map<number, number>} */
const indexRemap = new Map();
let oldIndex = 0;
for (let i = 0, ii = headwords.length; i < ii; ++i) {
@@ -566,6 +653,10 @@ export class Translator {
this._updateArrayItemsHeadwordIndex(frequencies, indexRemap);
}
+ /**
+ * @param {import('dictionary').TermDefinition[]} definitions
+ * @param {Map<number, number>} indexRemap
+ */
_updateDefinitionHeadwordIndices(definitions, indexRemap) {
for (const {headwordIndices} of definitions) {
for (let i = headwordIndices.length - 1; i >= 0; --i) {
@@ -579,6 +670,10 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').TermPronunciation[]|import('dictionary').TermFrequency[]} array
+ * @param {Map<number, number>} indexRemap
+ */
_updateArrayItemsHeadwordIndex(array, indexRemap) {
for (let i = array.length - 1; i >= 0; --i) {
const item = array[i];
@@ -592,6 +687,11 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').TermPronunciation[]|import('dictionary').TermFrequency[]|import('dictionary').TermDefinition[]} array
+ * @param {Set<string>} excludeDictionaryDefinitions
+ * @returns {boolean}
+ */
_removeArrayItemsWithDictionary(array, excludeDictionaryDefinitions) {
let changed = false;
for (let j = array.length - 1; j >= 0; --j) {
@@ -603,45 +703,48 @@ export class Translator {
return changed;
}
- _removeTagGroupsWithDictionary(array, excludeDictionaryDefinitions) {
- for (const {tags} of array) {
- this._removeArrayItemsWithDictionary(tags, excludeDictionaryDefinitions);
+ /**
+ * @param {import('dictionary').Tag[]} array
+ * @param {Set<string>} excludeDictionaryDefinitions
+ * @returns {boolean}
+ */
+ _removeArrayItemsWithDictionary2(array, excludeDictionaryDefinitions) {
+ let changed = false;
+ for (let j = array.length - 1; j >= 0; --j) {
+ const {dictionaries} = array[j];
+ if (!this._hasAny(excludeDictionaryDefinitions, dictionaries)) { continue; }
+ array.splice(j, 1);
+ changed = true;
}
+ return changed;
}
- // Tags
-
- _getTermTagTargets(dictionaryEntries) {
- const tagTargets = [];
- for (const {headwords, definitions, pronunciations} of dictionaryEntries) {
- this._addTagExpansionTargets(tagTargets, headwords);
- this._addTagExpansionTargets(tagTargets, definitions);
- for (const {pitches} of pronunciations) {
- this._addTagExpansionTargets(tagTargets, pitches);
- }
+ /**
+ * @param {import('dictionary').TermDefinition[]|import('dictionary').TermHeadword[]} array
+ * @param {Set<string>} excludeDictionaryDefinitions
+ */
+ _removeTagGroupsWithDictionary(array, excludeDictionaryDefinitions) {
+ for (const {tags} of array) {
+ this._removeArrayItemsWithDictionary2(tags, excludeDictionaryDefinitions);
}
- return tagTargets;
- }
-
- _clearTermTags(dictionaryEntries) {
- this._getTermTagTargets(dictionaryEntries);
}
- async _expandTermTags(dictionaryEntries) {
- const tagTargets = this._getTermTagTargets(dictionaryEntries);
- await this._expandTagGroups(tagTargets);
- this._groupTags(tagTargets);
- }
+ // Tags
- async _expandKanjiTags(dictionaryEntries) {
- const tagTargets = [];
- this._addTagExpansionTargets(tagTargets, dictionaryEntries);
- await this._expandTagGroups(tagTargets);
- this._groupTags(tagTargets);
+ /**
+ * @param {import('translator').TagExpansionTarget[]} tagExpansionTargets
+ */
+ async _expandTagGroupsAndGroup(tagExpansionTargets) {
+ await this._expandTagGroups(tagExpansionTargets);
+ this._groupTags(tagExpansionTargets);
}
+ /**
+ * @param {import('translator').TagExpansionTarget[]} tagTargets
+ */
async _expandTagGroups(tagTargets) {
const allItems = [];
+ /** @type {import('translator').TagTargetMap} */
const targetMap = new Map();
for (const {tagGroups, tags} of tagTargets) {
for (const {dictionary, tagNames} of tagGroups) {
@@ -687,10 +790,12 @@ export class Translator {
const databaseTags = await this._database.findTagMetaBulk(nonCachedItems);
for (let i = 0; i < nonCachedItemCount; ++i) {
const item = nonCachedItems[i];
- let databaseTag = databaseTags[i];
- if (typeof databaseTag === 'undefined') { databaseTag = null; }
- item.databaseTag = databaseTag;
- item.cache.set(item.query, databaseTag);
+ const databaseTag = databaseTags[i];
+ const databaseTag2 = typeof databaseTag !== 'undefined' ? databaseTag : null;
+ item.databaseTag = databaseTag2;
+ if (item.cache !== null) {
+ item.cache.set(item.query, databaseTag2);
+ }
}
}
@@ -701,8 +806,16 @@ export class Translator {
}
}
+ /**
+ * @param {import('translator').TagExpansionTarget[]} tagTargets
+ */
_groupTags(tagTargets) {
const stringComparer = this._stringComparer;
+ /**
+ * @param {import('dictionary').Tag} v1
+ * @param {import('dictionary').Tag} v2
+ * @returns {number}
+ */
const compare = (v1, v2) => {
const i = v1.order - v2.order;
return i !== 0 ? i : stringComparer.compare(v1.name, v2.name);
@@ -715,16 +828,9 @@ export class Translator {
}
}
- _addTagExpansionTargets(tagTargets, objects) {
- for (const value of objects) {
- const tagGroups = value.tags;
- if (tagGroups.length === 0) { continue; }
- const tags = [];
- value.tags = tags;
- tagTargets.push({tagGroups, tags});
- }
- }
-
+ /**
+ * @param {import('dictionary').Tag[]} tags
+ */
_mergeSimilarTags(tags) {
let tagCount = tags.length;
for (let i = 0; i < tagCount; ++i) {
@@ -745,6 +851,11 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').Tag[]} tags
+ * @param {string} category
+ * @returns {string[]}
+ */
_getTagNamesWithCategory(tags, category) {
const results = [];
for (const tag of tags) {
@@ -755,6 +866,9 @@ export class Translator {
return results;
}
+ /**
+ * @param {import('dictionary').TermDefinition[]} definitions
+ */
_flagRedundantDefinitionTags(definitions) {
if (definitions.length === 0) { return; }
@@ -789,7 +903,12 @@ export class Translator {
// Metadata
- async _addTermMeta(dictionaryEntries, enabledDictionaryMap) {
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ * @param {import('translation').TermEnabledDictionaryMap} enabledDictionaryMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ */
+ async _addTermMeta(dictionaryEntries, enabledDictionaryMap, tagAggregator) {
const headwordMap = new Map();
const headwordMapKeys = [];
const headwordReadingMaps = [];
@@ -821,16 +940,11 @@ export class Translator {
switch (mode) {
case 'freq':
{
- let frequency = data;
const hasReading = (data !== null && typeof data === 'object' && typeof data.reading === 'string');
- if (hasReading) {
- if (data.reading !== reading) { continue; }
- frequency = data.frequency;
- }
+ if (hasReading && data.reading !== reading) { continue; }
+ const frequency = hasReading ? data.frequency : /** @type {import('dictionary-data').GenericFrequencyData} */ (data);
for (const {frequencies, headwordIndex} of targets) {
- let displayValue;
- let displayValueParsed;
- ({frequency, displayValue, displayValueParsed} = this._getFrequencyInfo(frequency));
+ const {frequency: frequencyValue, displayValue, displayValueParsed} = this._getFrequencyInfo(frequency);
frequencies.push(this._createTermFrequency(
frequencies.length,
headwordIndex,
@@ -838,7 +952,7 @@ export class Translator {
dictionaryIndex,
dictionaryPriority,
hasReading,
- frequency,
+ frequencyValue,
displayValue,
displayValueParsed
));
@@ -848,11 +962,13 @@ export class Translator {
case 'pitch':
{
if (data.reading !== reading) { continue; }
+ /** @type {import('dictionary').TermPitch[]} */
const pitches = [];
for (const {position, tags, nasal, devoice} of data.pitches) {
+ /** @type {import('dictionary').Tag[]} */
const tags2 = [];
- if (Array.isArray(tags) && tags.length > 0) {
- tags2.push(this._createTagGroup(dictionary, tags));
+ if (Array.isArray(tags)) {
+ tagAggregator.addTags(tags2, dictionary, tags);
}
const nasalPositions = this._toNumberArray(nasal);
const devoicePositions = this._toNumberArray(devoice);
@@ -875,6 +991,10 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').KanjiDictionaryEntry[]} dictionaryEntries
+ * @param {import('translation').KanjiEnabledDictionaryMap} enabledDictionaryMap
+ */
async _addKanjiMeta(dictionaryEntries, enabledDictionaryMap) {
const kanjiList = [];
for (const {character} of dictionaryEntries) {
@@ -905,6 +1025,11 @@ export class Translator {
}
}
+ /**
+ * @param {{[key: string]: (string|number)}} stats
+ * @param {string} dictionary
+ * @returns {Promise<import('dictionary').KanjiStatGroups>}
+ */
async _expandKanjiStats(stats, dictionary) {
const statsEntries = Object.entries(stats);
const items = [];
@@ -915,10 +1040,11 @@ export class Translator {
const databaseInfos = await this._database.findTagMetaBulk(items);
+ /** @type {Map<string, import('dictionary').KanjiStat[]>} */
const statsGroups = new Map();
for (let i = 0, ii = statsEntries.length; i < ii; ++i) {
const databaseInfo = databaseInfos[i];
- if (databaseInfo === null) { continue; }
+ if (typeof databaseInfo === 'undefined') { continue; }
const [name, value] = statsEntries[i];
const {category} = databaseInfo;
@@ -931,6 +1057,7 @@ export class Translator {
group.push(this._createKanjiStat(name, value, databaseInfo, dictionary));
}
+ /** @type {import('dictionary').KanjiStatGroups} */
const groupedStats = {};
for (const [category, group] of statsGroups.entries()) {
this._sortKanjiStats(group);
@@ -939,6 +1066,9 @@ export class Translator {
return groupedStats;
}
+ /**
+ * @param {import('dictionary').KanjiStat[]} stats
+ */
_sortKanjiStats(stats) {
if (stats.length <= 1) { return; }
const stringComparer = this._stringComparer;
@@ -948,45 +1078,59 @@ export class Translator {
});
}
+ /**
+ * @param {string} value
+ * @returns {number}
+ */
_convertStringToNumber(value) {
const match = this._numberRegex.exec(value);
if (match === null) { return 0; }
- value = Number.parseFloat(match[0]);
- return Number.isFinite(value) ? value : 0;
+ const result = Number.parseFloat(match[0]);
+ return Number.isFinite(result) ? result : 0;
}
+ /**
+ * @param {import('dictionary-data').GenericFrequencyData} frequency
+ * @returns {{frequency: number, displayValue: ?string, displayValueParsed: boolean}}
+ */
_getFrequencyInfo(frequency) {
+ let frequencyValue = 0;
let displayValue = null;
let displayValueParsed = false;
if (typeof frequency === 'object' && frequency !== null) {
- ({value: frequency, displayValue} = frequency);
- if (typeof frequency !== 'number') { frequency = 0; }
- if (typeof displayValue !== 'string') { displayValue = null; }
+ const {value: frequencyValue2, displayValue: displayValue2} = frequency;
+ if (typeof frequencyValue2 === 'number') { frequencyValue = frequencyValue2; }
+ if (typeof displayValue2 === 'string') { displayValue = displayValue2; }
} else {
switch (typeof frequency) {
case 'number':
- // No change
+ frequencyValue = frequency;
break;
case 'string':
displayValue = frequency;
displayValueParsed = true;
- frequency = this._convertStringToNumber(frequency);
- break;
- default:
- frequency = 0;
+ frequencyValue = this._convertStringToNumber(frequency);
break;
}
}
- return {frequency, displayValue, displayValueParsed};
+ return {frequency: frequencyValue, displayValue, displayValueParsed};
}
// Helpers
+ /**
+ * @param {string} name
+ * @returns {string}
+ */
_getNameBase(name) {
const pos = name.indexOf(':');
return (pos >= 0 ? name.substring(0, pos) : name);
}
+ /**
+ * @param {import('translation').TermEnabledDictionaryMap} enabledDictionaryMap
+ * @returns {import('translation').TermEnabledDictionaryMap}
+ */
_getSecondarySearchDictionaryMap(enabledDictionaryMap) {
const secondarySearchDictionaryMap = new Map();
for (const [dictionary, details] of enabledDictionaryMap.entries()) {
@@ -996,12 +1140,22 @@ export class Translator {
return secondarySearchDictionaryMap;
}
+ /**
+ * @param {string} dictionary
+ * @param {import('translation').TermEnabledDictionaryMap|import('translation').KanjiEnabledDictionaryMap} enabledDictionaryMap
+ * @returns {{index: number, priority: number}}
+ */
_getDictionaryOrder(dictionary, enabledDictionaryMap) {
const info = enabledDictionaryMap.get(dictionary);
const {index, priority} = typeof info !== 'undefined' ? info : {index: enabledDictionaryMap.size, priority: 0};
return {index, priority};
}
+ /**
+ * @param {[...args: unknown[][]]} arrayVariants
+ * @yields {[...args: unknown[]]}
+ * @returns {Generator<unknown[], void, unknown>}
+ */
*_getArrayVariants(arrayVariants) {
const ii = arrayVariants.length;
@@ -1022,16 +1176,31 @@ export class Translator {
}
}
+ /**
+ * @param {unknown[]} array
+ * @returns {string}
+ */
_createMapKey(array) {
return JSON.stringify(array);
}
+ /**
+ * @param {number|number[]|undefined} value
+ * @returns {number[]}
+ */
_toNumberArray(value) {
return Array.isArray(value) ? value : (typeof value === 'number' ? [value] : []);
}
// Kanji data
+ /**
+ * @param {string} name
+ * @param {string|number} value
+ * @param {import('dictionary-database').Tag} databaseInfo
+ * @param {string} dictionary
+ * @returns {import('dictionary').KanjiStat}
+ */
_createKanjiStat(name, value, databaseInfo, dictionary) {
const {category, notes, order, score} = databaseInfo;
return {
@@ -1040,23 +1209,43 @@ export class Translator {
content: (typeof notes === 'string' ? notes : ''),
order: (typeof order === 'number' ? order : 0),
score: (typeof score === 'number' ? score : 0),
- dictionary: (typeof dictionary === 'string' ? dictionary : null),
+ dictionary,
value
};
}
+ /**
+ * @param {number} index
+ * @param {string} dictionary
+ * @param {number} dictionaryIndex
+ * @param {number} dictionaryPriority
+ * @param {string} character
+ * @param {number} frequency
+ * @param {?string} displayValue
+ * @param {boolean} displayValueParsed
+ * @returns {import('dictionary').KanjiFrequency}
+ */
_createKanjiFrequency(index, dictionary, dictionaryIndex, dictionaryPriority, character, frequency, displayValue, displayValueParsed) {
return {index, dictionary, dictionaryIndex, dictionaryPriority, character, frequency, displayValue, displayValueParsed};
}
- _createKanjiDictionaryEntry(character, dictionary, onyomi, kunyomi, tags, stats, definitions) {
+ /**
+ * @param {string} character
+ * @param {string} dictionary
+ * @param {string[]} onyomi
+ * @param {string[]} kunyomi
+ * @param {import('dictionary').KanjiStatGroups} stats
+ * @param {string[]} definitions
+ * @returns {import('dictionary').KanjiDictionaryEntry}
+ */
+ _createKanjiDictionaryEntry(character, dictionary, onyomi, kunyomi, stats, definitions) {
return {
type: 'kanji',
character,
dictionary,
onyomi,
kunyomi,
- tags,
+ tags: [],
stats,
definitions,
frequencies: []
@@ -1065,8 +1254,17 @@ export class Translator {
// Term data
+ /**
+ * @param {?import('dictionary-database').Tag} databaseTag
+ * @param {string} name
+ * @param {string} dictionary
+ * @returns {import('dictionary').Tag}
+ */
_createTag(databaseTag, name, dictionary) {
- const {category, notes, order, score} = (databaseTag !== null ? databaseTag : {});
+ let category, notes, order, score;
+ if (typeof databaseTag === 'object' && databaseTag !== null) {
+ ({category, notes, order, score} = databaseTag);
+ }
return {
name,
category: (typeof category === 'string' && category.length > 0 ? category : 'default'),
@@ -1078,18 +1276,46 @@ export class Translator {
};
}
- _createTagGroup(dictionary, tagNames) {
- return {dictionary, tagNames};
- }
-
+ /**
+ * @param {string} originalText
+ * @param {string} transformedText
+ * @param {string} deinflectedText
+ * @param {import('dictionary').TermSourceMatchType} matchType
+ * @param {import('dictionary').TermSourceMatchSource} matchSource
+ * @param {boolean} isPrimary
+ * @returns {import('dictionary').TermSource}
+ */
_createSource(originalText, transformedText, deinflectedText, matchType, matchSource, isPrimary) {
return {originalText, transformedText, deinflectedText, matchType, matchSource, isPrimary};
}
+ /**
+ * @param {number} index
+ * @param {string} term
+ * @param {string} reading
+ * @param {import('dictionary').TermSource[]} sources
+ * @param {import('dictionary').Tag[]} tags
+ * @param {string[]} wordClasses
+ * @returns {import('dictionary').TermHeadword}
+ */
_createTermHeadword(index, term, reading, sources, tags, wordClasses) {
return {index, term, reading, sources, tags, wordClasses};
}
+ /**
+ * @param {number} index
+ * @param {number[]} headwordIndices
+ * @param {string} dictionary
+ * @param {number} dictionaryIndex
+ * @param {number} dictionaryPriority
+ * @param {number} id
+ * @param {number} score
+ * @param {number[]} sequences
+ * @param {boolean} isPrimary
+ * @param {import('dictionary').Tag[]} tags
+ * @param {import('dictionary-data').TermGlossary[]} entries
+ * @returns {import('dictionary').TermDefinition}
+ */
_createTermDefinition(index, headwordIndices, dictionary, dictionaryIndex, dictionaryPriority, id, score, sequences, isPrimary, tags, entries) {
return {
index,
@@ -1107,14 +1333,47 @@ export class Translator {
};
}
+ /**
+ * @param {number} index
+ * @param {number} headwordIndex
+ * @param {string} dictionary
+ * @param {number} dictionaryIndex
+ * @param {number} dictionaryPriority
+ * @param {import('dictionary').TermPitch[]} pitches
+ * @returns {import('dictionary').TermPronunciation}
+ */
_createTermPronunciation(index, headwordIndex, dictionary, dictionaryIndex, dictionaryPriority, pitches) {
return {index, headwordIndex, dictionary, dictionaryIndex, dictionaryPriority, pitches};
}
+ /**
+ * @param {number} index
+ * @param {number} headwordIndex
+ * @param {string} dictionary
+ * @param {number} dictionaryIndex
+ * @param {number} dictionaryPriority
+ * @param {boolean} hasReading
+ * @param {number} frequency
+ * @param {?string} displayValue
+ * @param {boolean} displayValueParsed
+ * @returns {import('dictionary').TermFrequency}
+ */
_createTermFrequency(index, headwordIndex, dictionary, dictionaryIndex, dictionaryPriority, hasReading, frequency, displayValue, displayValueParsed) {
return {index, headwordIndex, dictionary, dictionaryIndex, dictionaryPriority, hasReading, frequency, displayValue, displayValueParsed};
}
+ /**
+ * @param {boolean} isPrimary
+ * @param {string[]} inflections
+ * @param {number} score
+ * @param {number} dictionaryIndex
+ * @param {number} dictionaryPriority
+ * @param {number} sourceTermExactMatchCount
+ * @param {number} maxTransformedTextLength
+ * @param {import('dictionary').TermHeadword[]} headwords
+ * @param {import('dictionary').TermDefinition[]} definitions
+ * @returns {import('dictionary').TermDictionaryEntry}
+ */
_createTermDictionaryEntry(isPrimary, inflections, score, dictionaryIndex, dictionaryPriority, sourceTermExactMatchCount, maxTransformedTextLength, headwords, definitions) {
return {
type: 'term',
@@ -1133,7 +1392,18 @@ export class Translator {
};
}
- _createTermDictionaryEntryFromDatabaseEntry(databaseEntry, originalText, transformedText, deinflectedText, reasons, isPrimary, enabledDictionaryMap) {
+ /**
+ * @param {import('dictionary-database').TermEntry} databaseEntry
+ * @param {string} originalText
+ * @param {string} transformedText
+ * @param {string} deinflectedText
+ * @param {string[]} reasons
+ * @param {boolean} isPrimary
+ * @param {Map<string, import('translation').FindTermDictionary>} enabledDictionaryMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {import('dictionary').TermDictionaryEntry}
+ */
+ _createTermDictionaryEntryFromDatabaseEntry(databaseEntry, originalText, transformedText, deinflectedText, reasons, isPrimary, enabledDictionaryMap, tagAggregator) {
const {matchType, matchSource, term, reading: rawReading, definitionTags, termTags, definitions, score, dictionary, id, sequence: rawSequence, rules} = databaseEntry;
const reading = (rawReading.length > 0 ? rawReading : term);
const {index: dictionaryIndex, priority: dictionaryPriority} = this._getDictionaryOrder(dictionary, enabledDictionaryMap);
@@ -1143,10 +1413,12 @@ export class Translator {
const hasSequence = (rawSequence >= 0);
const sequence = hasSequence ? rawSequence : -1;
+ /** @type {import('dictionary').Tag[]} */
const headwordTagGroups = [];
+ /** @type {import('dictionary').Tag[]} */
const definitionTagGroups = [];
- if (termTags.length > 0) { headwordTagGroups.push(this._createTagGroup(dictionary, termTags)); }
- if (definitionTags.length > 0) { definitionTagGroups.push(this._createTagGroup(dictionary, definitionTags)); }
+ tagAggregator.addTags(headwordTagGroups, dictionary, termTags);
+ tagAggregator.addTags(definitionTagGroups, dictionary, definitionTags);
return this._createTermDictionaryEntry(
isPrimary,
@@ -1161,12 +1433,19 @@ export class Translator {
);
}
- _createGroupedDictionaryEntry(dictionaryEntries, checkDuplicateDefinitions) {
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ * @param {boolean} checkDuplicateDefinitions
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {import('dictionary').TermDictionaryEntry}
+ */
+ _createGroupedDictionaryEntry(dictionaryEntries, checkDuplicateDefinitions, tagAggregator) {
// Headwords are generated before sorting, so that the order of dictionaryEntries can be maintained
const definitionEntries = [];
+ /** @type {Map<string, import('dictionary').TermHeadword>} */
const headwords = new Map();
for (const dictionaryEntry of dictionaryEntries) {
- const headwordIndexMap = this._addTermHeadwords(headwords, dictionaryEntry.headwords);
+ const headwordIndexMap = this._addTermHeadwords(headwords, dictionaryEntry.headwords, tagAggregator);
definitionEntries.push({index: definitionEntries.length, dictionaryEntry, headwordIndexMap});
}
@@ -1181,7 +1460,9 @@ export class Translator {
let dictionaryPriority = Number.MIN_SAFE_INTEGER;
let maxTransformedTextLength = 0;
let isPrimary = false;
+ /** @type {import('dictionary').TermDefinition[]} */
const definitions = [];
+ /** @type {?Map<string, import('dictionary').TermDefinition>} */
const definitionsMap = checkDuplicateDefinitions ? new Map() : null;
let inflections = null;
@@ -1197,8 +1478,8 @@ export class Translator {
inflections = dictionaryEntryInflections;
}
}
- if (checkDuplicateDefinitions) {
- this._addTermDefinitions(definitions, definitionsMap, dictionaryEntry.definitions, headwordIndexMap);
+ if (definitionsMap !== null) {
+ this._addTermDefinitions(definitions, definitionsMap, dictionaryEntry.definitions, headwordIndexMap, tagAggregator);
} else {
this._addTermDefinitionsFast(definitions, dictionaryEntry.definitions, headwordIndexMap);
}
@@ -1231,6 +1512,11 @@ export class Translator {
// Data collection addition functions
+ /**
+ * @template [T=unknown]
+ * @param {T[]} list
+ * @param {T[]} newItems
+ */
_addUniqueSimple(list, newItems) {
for (const item of newItems) {
if (!list.includes(item)) {
@@ -1239,6 +1525,10 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').TermSource[]} sources
+ * @param {import('dictionary').TermSource[]} newSources
+ */
_addUniqueSources(sources, newSources) {
if (newSources.length === 0) { return; }
if (sources.length === 0) {
@@ -1267,27 +1557,14 @@ export class Translator {
}
}
- _addUniqueTagGroups(tagGroups, newTagGroups) {
- if (newTagGroups.length === 0) { return; }
- for (const newTagGroup of newTagGroups) {
- const {dictionary} = newTagGroup;
- const ii = tagGroups.length;
- if (ii > 0) {
- let i = 0;
- for (; i < ii; ++i) {
- const tagGroup = tagGroups[i];
- if (tagGroup.dictionary === dictionary) {
- this._addUniqueSimple(tagGroup.tagNames, newTagGroup.tagNames);
- break;
- }
- }
- if (i < ii) { continue; }
- }
- tagGroups.push(newTagGroup);
- }
- }
-
- _addTermHeadwords(headwordsMap, headwords) {
+ /**
+ * @param {Map<string, import('dictionary').TermHeadword>} headwordsMap
+ * @param {import('dictionary').TermHeadword[]} headwords
+ * @param {TranslatorTagAggregator} tagAggregator
+ * @returns {number[]}
+ */
+ _addTermHeadwords(headwordsMap, headwords, tagAggregator) {
+ /** @type {number[]} */
const headwordIndexMap = [];
for (const {term, reading, sources, tags, wordClasses} of headwords) {
const key = this._createMapKey([term, reading]);
@@ -1297,13 +1574,17 @@ export class Translator {
headwordsMap.set(key, headword);
}
this._addUniqueSources(headword.sources, sources);
- this._addUniqueTagGroups(headword.tags, tags);
this._addUniqueSimple(headword.wordClasses, wordClasses);
+ tagAggregator.mergeTags(headword.tags, tags);
headwordIndexMap.push(headword.index);
}
return headwordIndexMap;
}
+ /**
+ * @param {number[]} headwordIndices
+ * @param {number} headwordIndex
+ */
_addUniqueTermHeadwordIndex(headwordIndices, headwordIndex) {
let end = headwordIndices.length;
if (end === 0) {
@@ -1327,6 +1608,11 @@ export class Translator {
headwordIndices.splice(start, 0, headwordIndex);
}
+ /**
+ * @param {import('dictionary').TermDefinition[]} definitions
+ * @param {import('dictionary').TermDefinition[]} newDefinitions
+ * @param {number[]} headwordIndexMap
+ */
_addTermDefinitionsFast(definitions, newDefinitions, headwordIndexMap) {
for (const {headwordIndices, dictionary, dictionaryIndex, dictionaryPriority, sequences, id, score, isPrimary, tags, entries} of newDefinitions) {
const headwordIndicesNew = [];
@@ -1337,7 +1623,14 @@ export class Translator {
}
}
- _addTermDefinitions(definitions, definitionsMap, newDefinitions, headwordIndexMap) {
+ /**
+ * @param {import('dictionary').TermDefinition[]} definitions
+ * @param {Map<string, import('dictionary').TermDefinition>} definitionsMap
+ * @param {import('dictionary').TermDefinition[]} newDefinitions
+ * @param {number[]} headwordIndexMap
+ * @param {TranslatorTagAggregator} tagAggregator
+ */
+ _addTermDefinitions(definitions, definitionsMap, newDefinitions, headwordIndexMap, tagAggregator) {
for (const {headwordIndices, dictionary, dictionaryIndex, dictionaryPriority, sequences, id, score, isPrimary, tags, entries} of newDefinitions) {
const key = this._createMapKey([dictionary, ...entries]);
let definition = definitionsMap.get(key);
@@ -1356,19 +1649,36 @@ export class Translator {
for (const headwordIndex of headwordIndices) {
this._addUniqueTermHeadwordIndex(newHeadwordIndices, headwordIndexMap[headwordIndex]);
}
- this._addUniqueTagGroups(definition.tags, tags);
+ tagAggregator.mergeTags(definition.tags, tags);
}
}
// Sorting functions
+ /**
+ * @param {import('dictionary-database').TermEntry[]|import('dictionary-database').KanjiEntry[]} databaseEntries
+ */
_sortDatabaseEntriesByIndex(databaseEntries) {
if (databaseEntries.length <= 1) { return; }
- databaseEntries.sort((a, b) => a.index - b.index);
+ /**
+ * @param {import('dictionary-database').TermEntry|import('dictionary-database').KanjiEntry} v1
+ * @param {import('dictionary-database').TermEntry|import('dictionary-database').KanjiEntry} v2
+ * @returns {number}
+ */
+ const compareFunction = (v1, v2) => v1.index - v2.index;
+ databaseEntries.sort(compareFunction);
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ */
_sortTermDictionaryEntries(dictionaryEntries) {
const stringComparer = this._stringComparer;
+ /**
+ * @param {import('dictionary').TermDictionaryEntry} v1
+ * @param {import('dictionary').TermDictionaryEntry} v2
+ * @returns {number}
+ */
const compareFunction = (v1, v2) => {
// Sort by length of source term
let i = v2.maxTransformedTextLength - v1.maxTransformedTextLength;
@@ -1419,7 +1729,15 @@ export class Translator {
dictionaryEntries.sort(compareFunction);
}
+ /**
+ * @param {import('dictionary').TermDefinition[]} definitions
+ */
_sortTermDictionaryEntryDefinitions(definitions) {
+ /**
+ * @param {import('dictionary').TermDefinition} v1
+ * @param {import('dictionary').TermDefinition} v2
+ * @returns {number}
+ */
const compareFunction = (v1, v2) => {
// Sort by frequency order
let i = v1.frequencyOrder - v2.frequencyOrder;
@@ -1455,12 +1773,23 @@ export class Translator {
definitions.sort(compareFunction);
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ */
_sortTermDictionaryEntriesById(dictionaryEntries) {
if (dictionaryEntries.length <= 1) { return; }
dictionaryEntries.sort((a, b) => a.definitions[0].id - b.definitions[0].id);
}
+ /**
+ * @param {import('dictionary').TermFrequency[]|import('dictionary').TermPronunciation[]} dataList
+ */
_sortTermDictionaryEntrySimpleData(dataList) {
+ /**
+ * @param {import('dictionary').TermFrequency|import('dictionary').TermPronunciation} v1
+ * @param {import('dictionary').TermFrequency|import('dictionary').TermPronunciation} v2
+ * @returns {number}
+ */
const compare = (v1, v2) => {
// Sort by dictionary priority
let i = v2.dictionaryPriority - v1.dictionaryPriority;
@@ -1481,7 +1810,15 @@ export class Translator {
dataList.sort(compare);
}
+ /**
+ * @param {import('dictionary').KanjiDictionaryEntry[]} dictionaryEntries
+ */
_sortKanjiDictionaryEntryData(dictionaryEntries) {
+ /**
+ * @param {import('dictionary').KanjiFrequency} v1
+ * @param {import('dictionary').KanjiFrequency} v2
+ * @returns {number}
+ */
const compare = (v1, v2) => {
// Sort by dictionary priority
let i = v2.dictionaryPriority - v1.dictionaryPriority;
@@ -1501,6 +1838,11 @@ export class Translator {
}
}
+ /**
+ * @param {import('dictionary').TermDictionaryEntry[]} dictionaryEntries
+ * @param {string} dictionary
+ * @param {boolean} ascending
+ */
_updateSortFrequencies(dictionaryEntries, dictionary, ascending) {
const frequencyMap = new Map();
for (const dictionaryEntry of dictionaryEntries) {
@@ -1539,4 +1881,102 @@ export class Translator {
frequencyMap.clear();
}
}
+
+ // Miscellaneous
+
+ /**
+ * @template T
+ * @param {Set<T>} set
+ * @param {T[]} values
+ * @returns {boolean}
+ */
+ _hasAny(set, values) {
+ for (const value of values) {
+ if (set.has(value)) { return true; }
+ }
+ return false;
+ }
+}
+
+class TranslatorTagAggregator {
+ constructor() {
+ /** @type {Map<import('dictionary').Tag[], import('translator').TagGroup[]>} */
+ this._tagExpansionTargetMap = new Map();
+ }
+
+ /**
+ * @param {import('dictionary').Tag[]} tags
+ * @param {string} dictionary
+ * @param {string[]} tagNames
+ */
+ addTags(tags, dictionary, tagNames) {
+ if (tagNames.length === 0) { return; }
+ const tagGroups = this._getOrCreateTagGroups(tags);
+ const tagGroup = this._getOrCreateTagGroup(tagGroups, dictionary);
+ this._addUniqueTags(tagGroup, tagNames);
+ }
+
+ /**
+ * @returns {import('translator').TagExpansionTarget[]}
+ */
+ getTagExpansionTargets() {
+ const results = [];
+ for (const [tags, tagGroups] of this._tagExpansionTargetMap) {
+ results.push({tags, tagGroups});
+ }
+ return results;
+ }
+
+ /**
+ * @param {import('dictionary').Tag[]} tags
+ * @param {import('dictionary').Tag[]} newTags
+ */
+ mergeTags(tags, newTags) {
+ const newTagGroups = this._tagExpansionTargetMap.get(newTags);
+ if (typeof newTagGroups === 'undefined') { return; }
+ const tagGroups = this._getOrCreateTagGroups(tags);
+ for (const {dictionary, tagNames} of newTagGroups) {
+ const tagGroup = this._getOrCreateTagGroup(tagGroups, dictionary);
+ this._addUniqueTags(tagGroup, tagNames);
+ }
+ }
+
+ /**
+ * @param {import('dictionary').Tag[]} tags
+ * @returns {import('translator').TagGroup[]}
+ */
+ _getOrCreateTagGroups(tags) {
+ let tagGroups = this._tagExpansionTargetMap.get(tags);
+ if (typeof tagGroups === 'undefined') {
+ tagGroups = [];
+ this._tagExpansionTargetMap.set(tags, tagGroups);
+ }
+ return tagGroups;
+ }
+
+ /**
+ * @param {import('translator').TagGroup[]} tagGroups
+ * @param {string} dictionary
+ * @returns {import('translator').TagGroup}
+ */
+ _getOrCreateTagGroup(tagGroups, dictionary) {
+ for (const tagGroup of tagGroups) {
+ if (tagGroup.dictionary === dictionary) { return tagGroup; }
+ }
+ const newTagGroup = {dictionary, tagNames: []};
+ tagGroups.push(newTagGroup);
+ return newTagGroup;
+ }
+
+ /**
+ * @param {import('translator').TagGroup} tagGroup
+ * @param {string[]} newTagNames
+ */
+ _addUniqueTags(tagGroup, newTagNames) {
+ const {tagNames} = tagGroup;
+ for (const tagName of newTagNames) {
+ if (tagNames.includes(tagName)) { continue; }
+ tagNames.push(tagName);
+ }
+ }
}
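
The TranslatorTagAggregator introduced above replaces the removed _createTagGroup/_addUniqueTagGroups path: the tags fields stay typed as Tag[] and empty while entries are built, and the aggregator only records the pending {dictionary, tagNames} groups for each array so that _expandTagGroupsAndGroup can later resolve all of them through a single findTagMetaBulk pass. A minimal sketch of that flow follows; it is illustrative only and not part of the commit, the class is module-private rather than exported, and 'JMdict', 'n', 'vs' and 'uk' are made-up example values.

    // Illustrative sketch, not part of the commit.
    const tagAggregator = new TranslatorTagAggregator();

    // While entries are built, tag names are recorded rather than expanded;
    // each target array stays empty and maps to its pending groups.
    const headwordTags = [];
    tagAggregator.addTags(headwordTags, 'JMdict', ['n', 'vs']);
    const definitionTags = [];
    tagAggregator.addTags(definitionTags, 'JMdict', ['uk']);

    // When grouped entries are merged, the pending groups follow the merged arrays:
    tagAggregator.mergeTags(definitionTags, headwordTags);

    // Finally everything is expanded in one pass, e.g. inside Translator:
    //     await this._expandTagGroupsAndGroup(tagAggregator.getTagExpansionTargets());
    for (const {tags, tagGroups} of tagAggregator.getTagExpansionTargets()) {
        // `tags` is the array that will receive the expanded Tag objects;
        // `tagGroups` holds its pending {dictionary, tagNames} pairs.
        console.log(tags, tagGroups);
    }

This is also why the old _addTagExpansionTargets/_getTermTagTargets code could be removed: merging now works on plain name lists held by the aggregator, and Tag objects are only created once, after grouping, by the expansion pass.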
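
Similarly, the reworked _getFrequencyInfo normalizes the three accepted shapes of GenericFrequencyData into a fresh result object instead of reassigning its parameter. The rough expected results below are illustrative only, assuming a Translator instance named translator and that _numberRegex (defined outside this diff) matches the leading digits of a string:

    // Illustrative only; `translator` is an assumed Translator instance.
    translator._getFrequencyInfo(42);
    // -> {frequency: 42, displayValue: null, displayValueParsed: false}
    translator._getFrequencyInfo('42');
    // -> {frequency: 42, displayValue: '42', displayValueParsed: true}
    translator._getFrequencyInfo({value: 42, displayValue: '42nd'});
    // -> {frequency: 42, displayValue: '42nd', displayValueParsed: false}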