Diffstat (limited to 'ext/mixed')
-rw-r--r--  ext/mixed/js/audio.js    113
-rw-r--r--  ext/mixed/js/display.js    1
2 files changed, 111 insertions, 3 deletions
diff --git a/ext/mixed/js/audio.js b/ext/mixed/js/audio.js
index 50bd321f..cf8b8d24 100644
--- a/ext/mixed/js/audio.js
+++ b/ext/mixed/js/audio.js
@@ -17,7 +17,90 @@
*/
-function audioGetFromUrl(url) {
+class TextToSpeechAudio {
+    constructor(text, voice) {
+        this.text = text;
+        this.voice = voice;
+        this._utterance = null;
+        this._volume = 1;
+    }
+
+    get currentTime() {
+        return 0;
+    }
+    set currentTime(value) {
+        // NOP
+    }
+
+    get volume() {
+        return this._volume;
+    }
+    set volume(value) {
+        this._volume = value;
+        if (this._utterance !== null) {
+            this._utterance.volume = value;
+        }
+    }
+
+    play() {
+        try {
+            if (this._utterance === null) {
+                this._utterance = new SpeechSynthesisUtterance(this.text || '');
+                this._utterance.lang = 'ja-JP';
+                this._utterance.volume = this._volume;
+                this._utterance.voice = this.voice;
+            }
+
+            speechSynthesis.cancel();
+            speechSynthesis.speak(this._utterance);
+
+        } catch (e) {
+            // NOP
+        }
+    }
+
+    pause() {
+        try {
+            speechSynthesis.cancel();
+        } catch (e) {
+            // NOP
+        }
+    }
+
+    static createFromUri(ttsUri) {
+        const m = /^tts:[^#\?]*\?([^#]*)/.exec(ttsUri);
+        if (m === null) { return null; }
+
+        const searchParameters = {};
+        for (const group of m[1].split('&')) {
+            const sep = group.indexOf('=');
+            if (sep < 0) { continue; }
+            searchParameters[decodeURIComponent(group.substr(0, sep))] = decodeURIComponent(group.substr(sep + 1));
+        }
+
+        if (!searchParameters.text) { return null; }
+
+        const voice = audioGetTextToSpeechVoice(searchParameters.voice);
+        if (voice === null) { return null; }
+
+        return new TextToSpeechAudio(searchParameters.text, voice);
+    }
+
+}
+
+function audioGetFromUrl(url, download) {
+    const tts = TextToSpeechAudio.createFromUri(url);
+    if (tts !== null) {
+        if (download) {
+            throw new Error('Download not supported for text-to-speech');
+        }
+        return Promise.resolve(tts);
+    }
+
+    if (download) {
+        return Promise.resolve(null);
+    }
+
    return new Promise((resolve, reject) => {
        const audio = new Audio(url);
        audio.addEventListener('loadeddata', () => {
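
The rewritten audioGetFromUrl first tries to interpret the URL as a tts: URI and only then falls back to a plain Audio element. A minimal caller sketch, assuming a query-style URI of the form tts:?text=...&voice=... and a placeholder voiceURI (the real values come from the extension's audio options, not from this commit):

    // Sketch only; 'VoiceName' is a placeholder voiceURI, not defined in this commit.
    const uri = 'tts:?text=' + encodeURIComponent('読む') + '&voice=' + encodeURIComponent('VoiceName');
    audioGetFromUrl(uri, false).then((audio) => {
        // If a matching voice is installed, audio is a TextToSpeechAudio and
        // play() speaks the text through speechSynthesis; otherwise createFromUri
        // returns null and the call falls through to the regular Audio(url) path.
        audio.play();
    });
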
@@ -46,7 +129,7 @@ async function audioGetFromSources(expression, sources, optionsContext, download
        }
        try {
-            const audio = download ? null : await audioGetFromUrl(url);
+            const audio = await audioGetFromUrl(url, download);
            const result = {audio, url, source};
            if (cache !== null) {
                cache[key] = result;
@@ -56,7 +139,7 @@ async function audioGetFromSources(expression, sources, optionsContext, download
            // NOP
        }
    }
-    return {audio: null, source: null};
+    return {audio: null, url: null, source: null};
}
function audioGetTextToSpeechVoice(voiceURI) {
@@ -71,3 +154,27 @@ function audioGetTextToSpeechVoice(voiceURI) {
    }
    return null;
}
+
+function audioPrepareTextToSpeech(options) {
+    if (
+        audioPrepareTextToSpeech.state ||
+        !options.audio.textToSpeechVoice ||
+        !(
+            options.audio.sources.includes('text-to-speech') ||
+            options.audio.sources.includes('text-to-speech-reading')
+        )
+    ) {
+        // Text-to-speech not in use.
+        return;
+    }
+
+    // Chrome needs speechSynthesis.getVoices() to be called once before the
+    // voice list becomes populated; the first call returns an empty list.
+    audioPrepareTextToSpeech.state = true;
+    try {
+        speechSynthesis.getVoices();
+    } catch (e) {
+        // NOP
+    }
+}
+audioPrepareTextToSpeech.state = false;
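
The warm-up in audioPrepareTextToSpeech works around Chrome's lazily populated voice list: the first speechSynthesis.getVoices() call returns an empty array, and only later calls (such as the one in audioGetTextToSpeechVoice) see real voices. A rough sketch of an alternative that waits explicitly for the list, using the standard voiceschanged event rather than the one-shot call this commit makes:

    // Sketch under the assumption of a Web Speech API implementation that
    // fires 'voiceschanged' once the voice list is ready (Chrome does).
    function waitForVoices() {
        return new Promise((resolve) => {
            const voices = speechSynthesis.getVoices();
            if (voices.length > 0) {
                resolve(voices);
                return;
            }
            speechSynthesis.addEventListener('voiceschanged', () => {
                resolve(speechSynthesis.getVoices());
            }, {once: true});
        });
    }
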
diff --git a/ext/mixed/js/display.js b/ext/mixed/js/display.js
index cf38d09d..e0994f8a 100644
--- a/ext/mixed/js/display.js
+++ b/ext/mixed/js/display.js
@@ -197,6 +197,7 @@ class Display {
        this.options = options ? options : await apiOptionsGet(this.getOptionsContext());
        this.updateTheme(this.options.general.popupTheme);
        this.setCustomCss(this.options.general.customPopupCss);
+        audioPrepareTextToSpeech(this.options);
    }

    updateTheme(themeName) {
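
With the display.js hook in place, every options update primes the voice list before any audio lookup happens. On the consumer side, audioGetFromSources now also reports the url it resolved (or null when every source fails), so a call site could look roughly like the sketch below; expression, sources and optionsContext are placeholders for the values the extension already passes:

    // Inside an async function; all argument values are placeholders.
    const {audio, url, source} = await audioGetFromSources(expression, sources, optionsContext, false);
    if (audio !== null) {
        audio.play(); // works for both Audio and TextToSpeechAudio
    }
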