author | lonkaars <loek@pipeframe.xyz> | 2023-07-02 20:27:46 +0200
---|---|---
committer | lonkaars <loek@pipeframe.xyz> | 2023-07-02 20:27:46 +0200
commit | fa68eb21bf1d0460303cae3a0233e2d6cefd00ca (patch) |
tree | 2a7026d2ed3710f6b455fd867f8258452100136e /api |
parent | f4963b89ee542592e9ae95ca29d74ddc57841c3f (diff) |
broken furigana adder :(
Diffstat (limited to 'api')
-rw-r--r-- | api/japanese.ts | 45
-rw-r--r-- | api/sentence-word.ts | 11
-rw-r--r-- | api/sentence.ts | 14
-rw-r--r-- | api/word.ts | 27
4 files changed, 57 insertions, 40 deletions
diff --git a/api/japanese.ts b/api/japanese.ts
index 0396821..9319cdd 100644
--- a/api/japanese.ts
+++ b/api/japanese.ts
@@ -4,6 +4,26 @@ import "../util/string.ts";
 import "../util/japanese.ts";
 import "../util/array.ts";
 
+const formatters = {
+	"HTML": tokens => tokens.reduce((out, token) => {
+		if (token.ruby) out += `<ruby>${escape(token.writing)}<rt>${escape(token.reading)}</rt></ruby>`;
+		else out += token.writing;
+		return out;
+	}, ""),
+	"parenthesis": tokens => tokens.reduce((out, token) => {
+		if (token.ruby) out += `${token.writing}(${token.reading}) `;
+		else out += token.writing;
+		return out;
+	}, ""),
+	"refold-tools": tokens => tokens.reduce((out, token) => {
+		if (token.ruby) out += `[${token.writing}](${token.reading})`;
+		else out += token.writing;
+		return out;
+	}, ""),
+} satisfies { [name: string]: (tokens: Array<JapaneseToken>) => string };
+
+export type JapaneseFormatter = keyof typeof formatters;
+
 /** @interface Piece */
 interface JapaneseToken {
 	/** @prop token writing (kanji/katakana/hiragana) */
@@ -22,24 +42,6 @@ export default class Japanese {
 	public reading: string;
 	private normalized: string;
 
-	private formatters = {
-		"HTML": tokens => tokens.reduce((out, token) => {
-			if (token.ruby) out += `<ruby>${escape(token.writing)}<rt>${escape(token.reading)}</rt></ruby>`;
-			else out += token.writing;
-			return out;
-		}, ""),
-		"parenthesis": tokens => tokens.reduce((out, token) => {
-			if (token.ruby) out += `${token.writing}(${token.reading}) `;
-			else out += token.writing;
-			return out;
-		}, ""),
-		"refold-tools": tokens => tokens.reduce((out, token) => {
-			if (token.ruby) out += `[${token.writing}](${token.reading})`;
-			else out += token.writing;
-			return out;
-		}, ""),
-	} satisfies Record<string, (tokens: Array<JapaneseToken>) => string>;
-
 	constructor(writing: string, reading: string) {
 		this.writing = writing;
 		this.reading = reading;
@@ -47,8 +49,8 @@ export default class Japanese {
 	}
 
 	/** @summary format this as text with furigana */
-	public furigana(format: keyof typeof this.formatters = "HTML"): string {
-		return this.formatters[format](this.tokenize());
+	public furigana(format: JapaneseFormatter = "HTML"): string {
+		return formatters[format](this.tokenize());
 	}
 
 	/**
@@ -151,3 +153,6 @@ export default class Japanese {
 	}
 }
 
+export type test = keyof typeof Japanese.formatters;
+
+var gert: test = "HTML";
diff --git a/api/sentence-word.ts b/api/sentence-word.ts
deleted file mode 100644
index 6b2ef22..0000000
--- a/api/sentence-word.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { ParseToken } from "../language/types.ts";
-import Word from "./word.ts";
-
-export default class SentenceWord extends Word {
-	public indexStart: number = 0;
-	public indexEnd: number = 0;
-
-	constructor(source: string | ParseToken) {
-		super();
-	}
-}
diff --git a/api/sentence.ts b/api/sentence.ts
index 276a6c5..6d9fc6d 100644
--- a/api/sentence.ts
+++ b/api/sentence.ts
@@ -1,10 +1,10 @@
 import { ParseResult } from "../language/types.ts";
 import APIBase from "./base.ts";
-import SentenceWord from "./sentence-word.ts";
+import Japanese, { JapaneseFormatter } from "./japanese.ts";
 import Word from "./word.ts";
 
 export default class Sentence extends APIBase {
-	public words: Array<SentenceWord> = [];
+	public words: Array<Word> = [];
 	protected query?: ParseResult;
 	protected original: string = "";
 
@@ -33,7 +33,7 @@ export default class Sentence extends APIBase {
 		let token = 0;
 		let i = 0;
 		while (i < this.original.length) {
-			this.words.push(new SentenceWord(this.query!.tokens[token]).withParent(await this.api));
+			this.words.push(new Word(this.query!.tokens[token]).withParent(await this.api));
 
 			i += this.query!.tokens[token].source.length;
 			if (i == this.original.length) break;
@@ -43,8 +43,14 @@ export default class Sentence extends APIBase {
 			if (this.query!.tokens[token]?.start == i) continue;
 
 			var remainder = this.original.substring(i, this.query!.tokens[token]?.start);
-			this.words.push(new SentenceWord(remainder).withParent(await this.api));
+			this.words.push(new Word(remainder).withParent(await this.api));
 			i += remainder.length;
 		}
 	}
+
+	furigana(format: JapaneseFormatter = "HTML"): string {
+		return this.words.reduce((out, word) => {
+			return out + word.text.furigana(format);
+		}, "");
+	}
 }
diff --git a/api/word.ts b/api/word.ts
index 7eba936..3421f9f 100644
--- a/api/word.ts
+++ b/api/word.ts
@@ -1,14 +1,31 @@
 import Glossary from "./glossary.ts";
 import APIBase from "./base.ts";
 import { ParseToken } from "../language/types.ts";
+import Japanese from "./japanese.ts";
+
+import "../util/string.ts";
 
 export default class Word extends APIBase {
-	public writing = "TODO";
-	public reading = "TODO";
-	public conjugated = "TODO";
+	/** @prop dictionary form of verb if this word is a verb */
+	public base: Japanese;
+	/** @prop word as written in parent sentence */
+	public text: Japanese;
 
-	constructor() {
-		super();
+	constructor(input: string | ParseToken) {
+		super();
+		if (typeof input === "string") {
+			input = input as string;
+			this.text = new Japanese(input, input);
+			this.base = this.text;
+		} else {
+			input = input as ParseToken;
+			var common = Math.max(input.writing.cmpLen(input.source),
+				input.reading.cmpLen(input.source));
+			var conjugation = input.source.substring(common);
+			this.base = new Japanese(input.writing, input.reading);
+			this.text = new Japanese(input.writing.substring(0, common) + conjugation,
+				input.reading.substring(0, common) + conjugation);
+		}
 	}
 
 	async glossary() {
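
The commit moves the `formatters` table from an instance property of `Japanese` to module scope, so its keys can be exported as the `JapaneseFormatter` type now accepted by both `Japanese.furigana()` and the new `Sentence.furigana()`. For orientation, here is a minimal usage sketch of that API; it is not part of the commit, the import path and sample sentence are hypothetical, and the commit message itself flags the furigana adder as broken, so real output may differ.

```ts
// Usage sketch only — assumes the exports introduced in api/japanese.ts above
// (default class Japanese, named type JapaneseFormatter). Import path and
// sample sentence are hypothetical.
import Japanese, { JapaneseFormatter } from "./api/japanese.ts";

// writing/reading pair, matching the Japanese(writing, reading) constructor
const line = new Japanese("日本語が分かります", "にほんごがわかります");

// "HTML" is the default formatter; "parenthesis" and "refold-tools" are the
// other keys of the module-level formatters table
const format: JapaneseFormatter = "refold-tools";

// expected shape (tokenization permitting): [日本語](にほんご)が[分](わ)かります
console.log(line.furigana(format));
```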