chenpangpang / open-webui

Commit bf5a6229, authored Jun 10, 2024 by Timothy J. Baek
Commit message: refac
Parent: 2972774d
Showing 2 changed files with 81 additions and 76 deletions.
src/lib/components/chat/MessageInput/CallOverlay.svelte (+1 / -1)
src/lib/components/chat/Messages/ResponseMessage.svelte (+80 / -75)
src/lib/components/chat/MessageInput/CallOverlay.svelte

@@ -205,7 +205,7 @@
 	if (_responses.at(0)) {
 		const content = _responses[0];
-		if (content) {
+		if ((content ?? '').trim() !== '') {
 			assistantSpeakingHandler(content);
 		}
 	}
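Why the guard changed: a whitespace-only response such as "   " is truthy in JavaScript, so the old check "if (content)" would still invoke assistantSpeakingHandler. The new expression normalizes nullish values to an empty string and trims before comparing. A minimal standalone sketch of the same check (the helper name is illustrative, not part of the codebase):

// Mirrors the guard this commit introduces.
const hasSpeakableContent = (content) => (content ?? '').trim() !== '';

console.log(Boolean('   '));             // true  -> whitespace passes the old truthiness check
console.log(hasSpeakableContent('   ')); // false -> whitespace-only content is rejected
console.log(hasSpeakableContent(null));  // false -> nullish becomes '' via ??
console.log(hasSpeakableContent('Hi.')); // true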
src/lib/components/chat/Messages/ResponseMessage.svelte (whitespace changes hidden; the body below was re-indented one level under the new guard)

@@ -211,93 +211,98 @@
 			speaking = null;
 			speakingIdx = null;
 		} else {
+			if ((message?.content ?? '').trim() !== '') {
 				speaking = true;
 				if ($config.audio.tts.engine === 'openai') {
 					loadingSpeech = true;
 					const sentences = extractSentences(message.content).reduce((mergedTexts, currentText) => {
 						const lastIndex = mergedTexts.length - 1;
 						if (lastIndex >= 0) {
 							const previousText = mergedTexts[lastIndex];
 							const wordCount = previousText.split(/\s+/).length;
 							if (wordCount < 2) {
 								mergedTexts[lastIndex] = previousText + ' ' + currentText;
 							} else {
 								mergedTexts.push(currentText);
 							}
 						} else {
 							mergedTexts.push(currentText);
 						}
 						return mergedTexts;
 					}, []);

 					console.log(sentences);

 					sentencesAudio = sentences.reduce((a, e, i, arr) => {
 						a[i] = null;
 						return a;
 					}, {});

 					let lastPlayedAudioPromise = Promise.resolve(); // Initialize a promise that resolves immediately

 					for (const [idx, sentence] of sentences.entries()) {
 						const res = await synthesizeOpenAISpeech(
 							localStorage.token,
 							$settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
 							sentence
 						).catch((error) => {
 							toast.error(error);

 							speaking = null;
 							loadingSpeech = false;

 							return null;
 						});

 						if (res) {
 							const blob = await res.blob();
 							const blobUrl = URL.createObjectURL(blob);
 							const audio = new Audio(blobUrl);
 							sentencesAudio[idx] = audio;
 							loadingSpeech = false;
 							lastPlayedAudioPromise = lastPlayedAudioPromise.then(() => playAudio(idx));
 						}
 					}
 				} else {
 					let voices = [];
 					const getVoicesLoop = setInterval(async () => {
 						voices = await speechSynthesis.getVoices();
 						if (voices.length > 0) {
 							clearInterval(getVoicesLoop);

 							const voice =
 								voices
 									?.filter(
-									(v) => v.voiceURI === ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
+										(v) =>
+											v.voiceURI === ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
 									)
 									?.at(0) ?? undefined;

 							console.log(voice);

 							const speak = new SpeechSynthesisUtterance(message.content);

 							console.log(speak);

 							speak.onend = () => {
 								speaking = null;
 								if ($settings.conversationMode) {
 									document.getElementById('voice-input-button')?.click();
 								}
 							};

 							if (voice) {
 								speak.voice = voice;
 							}

 							speechSynthesis.speak(speak);
 						}
 					}, 100);
 				}
+			} else {
+				toast.error('No content to speak');
+			}
 		}
 	};
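The OpenAI branch pre-processes the message before synthesis: extractSentences splits the content, and the reduce folds a chunk into its predecessor whenever that predecessor is a single word, so fragments such as a split abbreviation are not sent to the TTS endpoint on their own. The same merge step as a standalone, behavior-preserving sketch (extractSentences is assumed to return an array of sentence strings):

// Condensed version of the merge reduce used in ResponseMessage.svelte.
const mergeShortSentences = (sentences) =>
	sentences.reduce((mergedTexts, currentText) => {
		const lastIndex = mergedTexts.length - 1;
		if (lastIndex >= 0 && mergedTexts[lastIndex].split(/\s+/).length < 2) {
			// Previous chunk is a single "word" (e.g. "Dr.") -- glue this one onto it.
			mergedTexts[lastIndex] = mergedTexts[lastIndex] + ' ' + currentText;
		} else {
			mergedTexts.push(currentText);
		}
		return mergedTexts;
	}, []);

console.log(mergeShortSentences(['Dr.', 'Smith arrived.', 'He sat down.']));
// -> ['Dr. Smith arrived.', 'He sat down.']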
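Playback ordering in that branch relies on promise chaining rather than awaiting each clip: the loop awaits every synthesizeOpenAISpeech call in turn, but playback is decoupled from it, with lastPlayedAudioPromise queuing each clip behind the previous one. The next chunk can therefore be synthesized while an earlier one is still playing, and clips never overlap. The pattern in isolation (playAudio here is a hypothetical stand-in that resolves when its clip ends):

// Hypothetical stand-in for the component's playAudio(idx).
const playAudio = (idx) =>
	new Promise((resolve) => {
		console.log('playing chunk', idx);
		setTimeout(resolve, 500); // pretend each clip lasts 500 ms
	});

let lastPlayedAudioPromise = Promise.resolve(); // resolves immediately, as in the component
for (const idx of [0, 1, 2]) {
	// Queue each chunk behind the previous one instead of playing them concurrently.
	lastPlayedAudioPromise = lastPlayedAudioPromise.then(() => playAudio(idx));
}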
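The Web Speech branch polls speechSynthesis.getVoices() on a 100 ms interval because browsers may return an empty list until the voice set has loaded asynchronously. A self-contained sketch of that pattern (the voiceURI value is illustrative, not from the codebase):

// Retry until the browser has populated its voice list, then speak once.
const getVoicesLoop = setInterval(() => {
	const voices = speechSynthesis.getVoices();
	if (voices.length > 0) {
		clearInterval(getVoicesLoop);
		const voice = voices.find((v) => v.voiceURI === 'Google US English'); // illustrative URI
		const utterance = new SpeechSynthesisUtterance('Hello there.');
		if (voice) {
			utterance.voice = voice;
		}
		speechSynthesis.speak(utterance);
	}
}, 100);

An alternative to polling is listening for the 'voiceschanged' event on speechSynthesis, which fires once the list becomes available.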