Automatically respond with Voice if subscribed user sent Voice message

Debanjum Singh Solanky 2024-06-21 15:53:01 +05:30
parent 5e5fe4b7af
commit fa7b40ab86
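
In outline, voice-submitted queries now carry an isVoice flag through chat(), sendMessageViaWebSocket() and the per-message websocket state; when the streamed reply ends, a new textToSpeech() helper reads it aloud for subscribed users. A minimal sketch of that call chain, using only names that appear in the diff below (the surrounding template markup and server endpoints are assumed to be the existing ones):

// Sketch only: how the pieces added in this diff fit together.
// 1. The voice recorder finishes and submits the transcribed query as voice:
//        chat(true);
// 2. chat() forwards the flag over the websocket path:
//        sendMessageViaWebSocket(isVoice);   // stored as websocketState.isVoice
// 3. When the server signals "end_llm_response", the handler checks the flag
//    and, for subscribed users, speaks the accumulated reply:
//        if (websocketState.isVoice && "{{ is_active }}" == "True")
//            textToSpeech(websocketState.rawResponse);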


@@ -87,6 +87,7 @@ To get started, just start typing below. You can also type / to see a list of co
loadingEllipsis: null,
references: {},
rawResponse: "",
isVoice: false,
}
fetch("https://ipapi.co/json")
@@ -348,6 +349,57 @@ To get started, just start typing below. You can also type / to see a list of co
.then(response => response.json())
}
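// Convert the given message to speech via the server and play it in the browser.
// When no click event is provided (e.g. textToSpeech(websocketState.rawResponse)),
// it falls back to the most recently rendered speech button and icon.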
function textToSpeech(message, event=null) {
// Replace the speaker with a loading icon.
let loader = document.createElement("span");
loader.classList.add("loader");
let speechButton;
let speechIcon;
if (event === null) {
// Pick the last speech button if none is provided
let speechButtons = document.getElementsByClassName("speech-button");
speechButton = speechButtons[speechButtons.length - 1];
let speechIcons = document.getElementsByClassName("speech-icon");
speechIcon = speechIcons[speechIcons.length - 1];
} else {
speechButton = event.currentTarget;
speechIcon = event.target;
}
speechButton.innerHTML = "";
speechButton.appendChild(loader);
speechButton.disabled = true;
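// Fetch synthesized speech for the message from the server and play it back with the Web Audio API.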
const context = new (window.AudioContext || window.webkitAudioContext)();
fetch(`/api/chat/speech?text=${encodeURIComponent(message)}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
})
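// The endpoint returns audio data; read it as an ArrayBuffer so decodeAudioData can turn it into a playable buffer.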
.then(response => response.arrayBuffer())
.then(arrayBuffer => context.decodeAudioData(arrayBuffer))
.then(audioBuffer => {
const source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.start(0);
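// Once playback finishes, put the speaker icon back and re-enable the button.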
source.onended = function() {
speechButton.innerHTML = "";
speechButton.appendChild(speechIcon);
speechButton.disabled = false;
};
})
.catch(err => {
console.error("Error playing speech:", err);
speechButton.innerHTML = "";
speechButton.appendChild(speechIcon);
speechButton.disabled = true;
});
}
function formatHTMLMessage(message, raw=false, willReplace=true, userQuery) {
var md = window.markdownit();
let newHTML = message;
@@ -434,47 +486,9 @@ To get started, just start typing below. You can also type / to see a list of co
speechIcon.src = "/static/assets/icons/speaker.svg";
speechIcon.classList.add("speech-icon");
speechButton.appendChild(speechIcon);
speechButton.addEventListener('click', function() {
// Replace the speaker with a loading icon.
let loader = document.createElement("span");
loader.classList.add("loader");
speechButton.innerHTML = "";
speechButton.appendChild(loader);
speechButton.disabled = true;
const context = new (window.AudioContext || window.webkitAudioContext)();
fetch(`/api/chat/speech?text=${encodeURIComponent(message)}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
})
.then(response => response.arrayBuffer())
.then(arrayBuffer => {
return context.decodeAudioData(arrayBuffer);
})
.then(audioBuffer => {
const source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.start(0);
source.onended = function() {
speechButton.innerHTML = "";
speechButton.appendChild(speechIcon);
speechButton.disabled = false;
};
})
.catch(err => {
console.error("Error playing speech:", err);
speechButton.innerHTML = "";
speechButton.appendChild(speechIcon);
speechButton.disabled = true;
});
});
speechButton.addEventListener('click', (event) => textToSpeech(message, event));
}
// Append buttons to parent element
element.append(copyButton, thumbsDownButton, thumbsUpButton);
@@ -584,9 +598,9 @@ To get started, just start typing below. You can also type / to see a list of co
return referencesDiv;
}
async function chat() {
async function chat(isVoice=false) {
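// isVoice marks queries that arrived as voice recordings; the flag is threaded through so the reply can be spoken back.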
if (websocket) {
sendMessageViaWebSocket();
sendMessageViaWebSocket(isVoice);
return;
}
@@ -1050,7 +1064,7 @@ To get started, just start typing below. You can also type / to see a list of co
window.onload = loadChat;
function setupWebSocket() {
function setupWebSocket(isVoice=false) {
let chatBody = document.getElementById("chat-body");
let wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
let webSocketUrl = `${wsProtocol}//${window.location.host}/api/chat/ws`;
@@ -1067,6 +1081,7 @@ To get started, just start typing below. You can also type / to see a list of co
references: {},
rawResponse: "",
rawQuery: "",
isVoice: isVoice,
}
if (chatBody.dataset.conversationId) {
@@ -1080,8 +1095,13 @@ To get started, just start typing below. You can also type / to see a list of co
let chunk = event.data;
if (chunk == "start_llm_response") {
console.log("Started streaming", new Date());
} else if(chunk == "end_llm_response") {
} else if (chunk == "end_llm_response") {
console.log("Stopped streaming", new Date());
// Automatically respond with voice if the subscribed user has sent voice message
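// "{{ is_active }}" is rendered server-side by the template; it limits the automatic spoken reply to subscribed users.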
if (websocketState.isVoice && "{{ is_active }}" == "True")
textToSpeech(websocketState.rawResponse);
// Append any references after all the data has been streamed
finalizeChatBodyResponse(websocketState.references, websocketState.newResponseTextEl);
@@ -1094,6 +1114,7 @@ To get started, just start typing below. You can also type / to see a list of co
references: {},
rawResponse: "",
rawQuery: liveQuery,
isVoice: false,
}
} else {
try {
@@ -1172,7 +1193,7 @@ To get started, just start typing below. You can also type / to see a list of co
}
}
function sendMessageViaWebSocket() {
function sendMessageViaWebSocket(isVoice=false) {
let chatBody = document.getElementById("chat-body");
var query = document.getElementById("chat-input").value.trim();
@@ -1223,6 +1244,7 @@ To get started, just start typing below. You can also type / to see a list of co
references,
rawResponse,
rawQuery: query,
isVoice: isVoice,
}
}
var userMessages = [];
@@ -1851,7 +1873,7 @@ To get started, just start typing below. You can also type / to see a list of co
document.getElementById('countdown-circle').style.animation = "none";
// Send message
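// Passing true marks this as a voice-initiated message so the streamed reply is spoken back automatically.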
chat();
chat(true);
}, 3000);
})
.catch(err => {