author     toasted-nutbread <toasted-nutbread@users.noreply.github.com>    2020-04-18 21:10:48 -0400
committer  GitHub <noreply@github.com>                                     2020-04-18 21:10:48 -0400
commit     03d77cc3a6c4dfb15fa83c6caa60563103ca7776 (patch)
tree       67911632b8614e3c170dee6db731df9a43cbc42a /ext/mixed
parent     7a03ce0194fafb0c2e49994dc6efd33d5fdb6a07 (diff)
parent     320852f2d01d72c1039d098033081e8266d02be7 (diff)
Merge pull request #442 from toasted-nutbread/audio-system-refactoring
Audio system refactoring
Diffstat (limited to 'ext/mixed')
-rw-r--r--  ext/mixed/js/api.js            4
-rw-r--r--  ext/mixed/js/audio-system.js  56
-rw-r--r--  ext/mixed/js/display.js       59

3 files changed, 78 insertions, 41 deletions
diff --git a/ext/mixed/js/api.js b/ext/mixed/js/api.js
index 7080d93a..c97dc687 100644
--- a/ext/mixed/js/api.js
+++ b/ext/mixed/js/api.js
@@ -64,8 +64,8 @@ function apiTemplateRender(template, data) {
     return _apiInvoke('templateRender', {data, template});
 }
 
-function apiAudioGetUri(definition, source, optionsContext) {
-    return _apiInvoke('audioGetUri', {definition, source, optionsContext});
+function apiAudioGetUri(definition, source, details) {
+    return _apiInvoke('audioGetUri', {definition, source, details});
 }
 
 function apiCommandExec(command, params) {
diff --git a/ext/mixed/js/audio-system.js b/ext/mixed/js/audio-system.js
index 45b733fc..3273f982 100644
--- a/ext/mixed/js/audio-system.js
+++ b/ext/mixed/js/audio-system.js
@@ -40,7 +40,7 @@ class TextToSpeechAudio {
         }
     }
 
-    play() {
+    async play() {
         try {
             if (this._utterance === null) {
                 this._utterance = new SpeechSynthesisUtterance(this.text || '');
@@ -66,10 +66,10 @@
 }
 
 class AudioSystem {
-    constructor({getAudioUri}) {
-        this._cache = new Map();
+    constructor({audioUriBuilder, useCache}) {
+        this._cache = useCache ? new Map() : null;
         this._cacheSizeMaximum = 32;
-        this._getAudioUri = getAudioUri;
+        this._audioUriBuilder = audioUriBuilder;
 
         if (typeof speechSynthesis !== 'undefined') {
             // speechSynthesis.getVoices() will not be populated unless some API call is made.
@@ -79,21 +79,31 @@ class AudioSystem {
 
     async getDefinitionAudio(definition, sources, details) {
         const key = `${definition.expression}:${definition.reading}`;
-        const cacheValue = this._cache.get(definition);
-        if (typeof cacheValue !== 'undefined') {
-            const {audio, uri, source} = cacheValue;
-            return {audio, uri, source};
+        const hasCache = (this._cache !== null);
+
+        if (hasCache) {
+            const cacheValue = this._cache.get(key);
+            if (typeof cacheValue !== 'undefined') {
+                const {audio, uri, source} = cacheValue;
+                const index = sources.indexOf(source);
+                if (index >= 0) {
+                    return {audio, uri, index};
+                }
+            }
         }
 
-        for (const source of sources) {
+        for (let i = 0, ii = sources.length; i < ii; ++i) {
+            const source = sources[i];
             const uri = await this._getAudioUri(definition, source, details);
             if (uri === null) { continue; }
 
             try {
-                const audio = await this._createAudio(uri, details);
-                this._cacheCheck();
-                this._cache.set(key, {audio, uri, source});
-                return {audio, uri, source};
+                const audio = await this._createAudio(uri);
+                if (hasCache) {
+                    this._cacheCheck();
+                    this._cache.set(key, {audio, uri, source});
+                }
+                return {audio, uri, index: i};
             } catch (e) {
                 // NOP
             }
@@ -102,7 +112,7 @@ class AudioSystem {
         throw new Error('Could not create audio');
     }
 
-    createTextToSpeechAudio({text, voiceUri}) {
+    createTextToSpeechAudio(text, voiceUri) {
         const voice = this._getTextToSpeechVoiceFromVoiceUri(voiceUri);
         if (voice === null) {
             throw new Error('Invalid text-to-speech voice');
@@ -114,20 +124,24 @@ class AudioSystem {
         // NOP
     }
 
-    async _createAudio(uri, details) {
+    async _createAudio(uri) {
         const ttsParameters = this._getTextToSpeechParameters(uri);
         if (ttsParameters !== null) {
-            if (typeof details === 'object' && details !== null) {
-                if (details.tts === false) {
-                    throw new Error('Text-to-speech not permitted');
-                }
-            }
-            return this.createTextToSpeechAudio(ttsParameters);
+            const {text, voiceUri} = ttsParameters;
+            return this.createTextToSpeechAudio(text, voiceUri);
         }
 
         return await this._createAudioFromUrl(uri);
     }
 
+    _getAudioUri(definition, source, details) {
+        return (
+            this._audioUriBuilder !== null ?
+                this._audioUriBuilder.getUri(definition, source, details) :
+                null
+        );
+    }
+
     _createAudioFromUrl(url) {
         return new Promise((resolve, reject) => {
             const audio = new Audio(url);
diff --git a/ext/mixed/js/display.js b/ext/mixed/js/display.js
index 63687dc2..c2284ffe 100644
--- a/ext/mixed/js/display.js
+++ b/ext/mixed/js/display.js
@@ -45,7 +45,14 @@ class Display {
         this.index = 0;
         this.audioPlaying = null;
         this.audioFallback = null;
-        this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
+        this.audioSystem = new AudioSystem({
+            audioUriBuilder: {
+                getUri: async (definition, source, details) => {
+                    return await apiAudioGetUri(definition, source, details);
+                }
+            },
+            useCache: true
+        });
         this.styleNode = null;
 
         this.eventListeners = new EventListenerCollection();
@@ -784,16 +791,14 @@ class Display {
 
             const expression = expressionIndex === -1 ? definition : definition.expressions[expressionIndex];
 
-            if (this.audioPlaying !== null) {
-                this.audioPlaying.pause();
-                this.audioPlaying = null;
-            }
+            this._stopPlayingAudio();
 
-            const sources = this.options.audio.sources;
-            let audio, source, info;
+            let audio, info;
             try {
-                ({audio, source} = await this.audioSystem.getDefinitionAudio(expression, sources));
-                info = `From source ${1 + sources.indexOf(source)}: ${source}`;
+                const {sources, textToSpeechVoice, customSourceUrl} = this.options.audio;
+                let index;
+                ({audio, index} = await this.audioSystem.getDefinitionAudio(expression, sources, {textToSpeechVoice, customSourceUrl}));
+                info = `From source ${1 + index}: ${sources[index]}`;
             } catch (e) {
                 if (this.audioFallback === null) {
                     this.audioFallback = new Audio('/mixed/mp3/button.mp3');
@@ -802,7 +807,7 @@ class Display {
                 info = 'Could not find audio';
             }
 
-            const button = this.audioButtonFindImage(entryIndex);
+            const button = this.audioButtonFindImage(entryIndex, expressionIndex);
             if (button !== null) {
                 let titleDefault = button.dataset.titleDefault;
                 if (!titleDefault) {
@@ -812,10 +817,19 @@ class Display {
                 button.title = `${titleDefault}\n${info}`;
             }
 
+            this._stopPlayingAudio();
+
             this.audioPlaying = audio;
             audio.currentTime = 0;
             audio.volume = this.options.audio.volume / 100.0;
-            audio.play();
+            const playPromise = audio.play();
+            if (typeof playPromise !== 'undefined') {
+                try {
+                    await playPromise;
+                } catch (e2) {
+                    // NOP
+                }
+            }
         } catch (e) {
             this.onError(e);
         } finally {
@@ -823,6 +837,13 @@ class Display {
         }
     }
 
+    _stopPlayingAudio() {
+        if (this.audioPlaying !== null) {
+            this.audioPlaying.pause();
+            this.audioPlaying = null;
+        }
+    }
+
     noteUsesScreenshot(mode) {
         const optionsAnki = this.options.anki;
         const fields = (mode === 'kanji' ? optionsAnki.kanji : optionsAnki.terms).fields;
@@ -901,9 +922,16 @@ class Display {
         viewerButton.dataset.noteId = noteId;
     }
 
-    audioButtonFindImage(index) {
+    audioButtonFindImage(index, expressionIndex) {
         const entry = this.getEntry(index);
-        return entry !== null ? entry.querySelector('.action-play-audio>img') : null;
+        if (entry === null) { return null; }
+
+        const container = (
+            expressionIndex >= 0 ?
+                entry.querySelector(`.term-expression:nth-of-type(${expressionIndex + 1})`) :
+                entry
+        );
+        return container !== null ? container.querySelector('.action-play-audio>img') : null;
     }
 
     async getDefinitionsAddable(definitions, modes) {
@@ -947,9 +975,4 @@ class Display {
             }
         };
     }
-
-    async _getAudioUri(definition, source) {
-        const optionsContext = this.getOptionsContext();
-        return await apiAudioGetUri(definition, source, optionsContext);
-    }
 }
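For orientation, the sketch below shows how the refactored AudioSystem is intended to be driven, based only on the hunks above: the constructor now takes an `audioUriBuilder` object exposing `getUri(definition, source, details)` plus a `useCache` flag, and `getDefinitionAudio` resolves to `{audio, uri, index}` (an index into the `sources` array) instead of `{audio, uri, source}`. This is a minimal illustration, not code from the commit; `resolveUri` is a hypothetical stand-in for whatever produces a URI (in the extension, display.js wires `getUri` to `apiAudioGetUri`).

```js
// Minimal consumer sketch for the refactored AudioSystem (assumes audio-system.js
// is already loaded, as in the extension; `resolveUri` is a hypothetical helper).
const audioSystem = new AudioSystem({
    audioUriBuilder: {
        // Must provide getUri(definition, source, details); returning null means
        // "no URI available for this source", so the next source is tried.
        async getUri(definition, source, details) {
            return await resolveUri(definition, source, details); // hypothetical
        }
    },
    useCache: true // false disables the internal Map cache (this._cache === null)
});

async function playFirstAvailableAudio(definition, sources, details) {
    // getDefinitionAudio now reports which source succeeded by index.
    const {audio, index} = await audioSystem.getDefinitionAudio(definition, sources, details);
    console.log(`From source ${1 + index}: ${sources[index]}`);
    audio.volume = 1.0;
    // Audio.play() may return a promise in modern browsers; awaiting it (and
    // swallowing rejections) mirrors what display.js does after this change.
    const playPromise = audio.play();
    if (typeof playPromise !== 'undefined') {
        await playPromise.catch(() => {}); // e.g. autoplay policy rejections
    }
}
```

Returning an index rather than the source value keeps the caller's "From source N" message a constant-time lookup and avoids a second `indexOf` call, which is exactly the simplification made in the display.js hunk above.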