path: root/ext/mixed/js/audio.js
author     Alex Yatskov <alex@foosoft.net>  2019-10-20 11:23:20 -0700
committer  Alex Yatskov <alex@foosoft.net>  2019-10-20 11:23:20 -0700
commit     438498435227cfa59cf9ed3430045b288cd2a7c0 (patch)
tree       6a05520e5d6fa8d26d372673a9ed3e5d2da7e3fd /ext/mixed/js/audio.js
parent     06d7713189be9eb51669d3842b78278371e6cfa4 (diff)
parent     d32fd1381b6cd5141a21c22f9ef639b2fe9774fb (diff)
Merge branch 'master' into testing
Diffstat (limited to 'ext/mixed/js/audio.js')
-rw-r--r--  ext/mixed/js/audio.js  128
1 file changed, 124 insertions, 4 deletions
diff --git a/ext/mixed/js/audio.js b/ext/mixed/js/audio.js
index b905140c..cf8b8d24 100644
--- a/ext/mixed/js/audio.js
+++ b/ext/mixed/js/audio.js
@@ -17,7 +17,90 @@
  */
-function audioGetFromUrl(url) {
+class TextToSpeechAudio {
+    constructor(text, voice) {
+        this.text = text;
+        this.voice = voice;
+        this._utterance = null;
+        this._volume = 1;
+    }
+
+    get currentTime() {
+        return 0;
+    }
+    set currentTime(value) {
+        // NOP
+    }
+
+    get volume() {
+        return this._volume;
+    }
+    set volume(value) {
+        this._volume = value;
+        if (this._utterance !== null) {
+            this._utterance.volume = value;
+        }
+    }
+
+    play() {
+        try {
+            if (this._utterance === null) {
+                this._utterance = new SpeechSynthesisUtterance(this.text || '');
+                this._utterance.lang = 'ja-JP';
+                this._utterance.volume = this._volume;
+                this._utterance.voice = this.voice;
+            }
+
+            speechSynthesis.cancel();
+            speechSynthesis.speak(this._utterance);
+
+        } catch (e) {
+            // NOP
+        }
+    }
+
+    pause() {
+        try {
+            speechSynthesis.cancel();
+        } catch (e) {
+            // NOP
+        }
+    }
+
+    static createFromUri(ttsUri) {
+        const m = /^tts:[^#\?]*\?([^#]*)/.exec(ttsUri);
+        if (m === null) { return null; }
+
+        const searchParameters = {};
+        for (const group of m[1].split('&')) {
+            const sep = group.indexOf('=');
+            if (sep < 0) { continue; }
+            searchParameters[decodeURIComponent(group.substr(0, sep))] = decodeURIComponent(group.substr(sep + 1));
+        }
+
+        if (!searchParameters.text) { return null; }
+
+        const voice = audioGetTextToSpeechVoice(searchParameters.voice);
+        if (voice === null) { return null; }
+
+        return new TextToSpeechAudio(searchParameters.text, voice);
+    }
+
+}
+
+function audioGetFromUrl(url, download) {
+    const tts = TextToSpeechAudio.createFromUri(url);
+    if (tts !== null) {
+        if (download) {
+            throw new Error('Download not supported for text-to-speech');
+        }
+        return Promise.resolve(tts);
+    }
+
+    if (download) {
+        return Promise.resolve(null);
+    }
+
     return new Promise((resolve, reject) => {
         const audio = new Audio(url);
         audio.addEventListener('loadeddata', () => {
@@ -32,7 +115,7 @@ function audioGetFromUrl(url) {
     });
 }
-async function audioGetFromSources(expression, sources, optionsContext, createAudioObject, cache=null) {
+async function audioGetFromSources(expression, sources, optionsContext, download, cache=null) {
     const key = `${expression.expression}:${expression.reading}`;
     if (cache !== null && cache.hasOwnProperty(expression)) {
         return cache[key];
@@ -46,7 +129,7 @@ async function audioGetFromSources(expression, sources, optionsContext, createAu
         }
         try {
-            const audio = createAudioObject ? await audioGetFromUrl(url) : null;
+            const audio = await audioGetFromUrl(url, download);
             const result = {audio, url, source};
             if (cache !== null) {
                 cache[key] = result;
@@ -56,5 +139,42 @@ async function audioGetFromSources(expression, sources, optionsContext, createAu
             // NOP
         }
     }
-    return {audio: null, source: null};
+    return {audio: null, url: null, source: null};
+}
+
+function audioGetTextToSpeechVoice(voiceURI) {
+    try {
+        for (const voice of speechSynthesis.getVoices()) {
+            if (voice.voiceURI === voiceURI) {
+                return voice;
+            }
+        }
+    } catch (e) {
+        // NOP
+    }
+    return null;
+}
+
+function audioPrepareTextToSpeech(options) {
+    if (
+        audioPrepareTextToSpeech.state ||
+        !options.audio.textToSpeechVoice ||
+        !(
+            options.audio.sources.includes('text-to-speech') ||
+            options.audio.sources.includes('text-to-speech-reading')
+        )
+    ) {
+        // Text-to-speech not in use.
+        return;
+    }
+
+    // Chrome needs this value called once before it will become populated.
+    // The first call will return an empty list.
+    audioPrepareTextToSpeech.state = true;
+    try {
+        speechSynthesis.getVoices();
+    } catch (e) {
+        // NOP
+    }
 }
+audioPrepareTextToSpeech.state = false;
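
For context, the new code path can be exercised with a "tts:" pseudo-URL. The following is a minimal illustrative sketch and is not part of the commit: the "text" and "voice" parameter names are the ones read by TextToSpeechAudio.createFromUri above, while the example text and the choice of voice are hypothetical glue for demonstration only.

// Illustrative only: build a tts: URL that createFromUri() can parse.
// Assumes speechSynthesis.getVoices() has already been populated
// (see audioPrepareTextToSpeech above); on the very first call in Chrome
// the list may still be empty.
const voices = speechSynthesis.getVoices();
const voiceURI = voices.length > 0 ? voices[0].voiceURI : '';
const url = `tts:?text=${encodeURIComponent('読む')}&voice=${encodeURIComponent(voiceURI)}`;

// For tts: URLs, audioGetFromUrl resolves to a TextToSpeechAudio instance;
// for ordinary audio URLs it falls back to an HTML5 Audio object.
audioGetFromUrl(url, false).then((audio) => {
    audio.play(); // speaks the text via speechSynthesis with lang 'ja-JP'
});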