fix(captions): Fix captions mute/unmute and add debug logs (#20345)

* fix(captions): Fixed captions mute/unmute

* TypeScript updates

* more lint adjustments

---------

Co-authored-by: João Victor Nunes <joaovictornunes973@gmail.com>
Co-authored-by: Anton Georgiev <anto.georgiev@gmail.com>
Lucas 2024-05-29 17:48:07 -03:00 committed by GitHub
parent fd3071c28a
commit 4f741b8cc0
2 changed files with 57 additions and 24 deletions
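In short: browser speech recognition for captions is now gated on both the audio connection and the user's mute state, start/stop are made idempotent through a shared flag, and interim transcripts are debounced rather than throttled. A condensed sketch of the intended gating, with stand-in start/stop helpers (not the literal diff code below):

// Sketch: captions transcribe only while audio is connected AND unmuted.
// start/stop stand in for the component's speech-recognition helpers.
const start = (locale: string) => console.log('start captions', locale);
const stop = () => console.log('stop captions');

function syncCaptions(connected: boolean, muted: boolean, locale: string) {
  if (connected && !muted) start(locale);
  else stop();
}

syncCaptions(true, false, 'en-US'); // connected and unmuted -> start
syncCaptions(true, true, 'en-US'); // muted -> stop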


@@ -1,13 +1,12 @@
import React, {
useCallback,
useEffect,
useMemo,
useRef,
} from 'react';
// @ts-ignore - it's has no types
// @ts-ignore - it has no types
import { diff } from '@mconf/bbb-diff';
import { useReactiveVar, useMutation } from '@apollo/client';
import { throttle } from '/imports/utils/throttle';
import { debounce } from '/imports/utils/debounce';
import {
SpeechRecognitionAPI,
generateId,
@@ -40,6 +39,7 @@ type SpeechRecognitionErrorEvent = {
interface AudioCaptionsSpeechProps {
locale: string;
connected: boolean;
muted: boolean;
}
const speechHasStarted = {
started: false,
@@ -47,6 +47,7 @@ const speechHasStarted = {
const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
locale,
connected,
muted,
}) => {
const resultRef = useRef({
id: generateId(),
@@ -54,7 +55,6 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
isFinal: true,
});
const idleRef = useRef(true);
const speechRecognitionRef = useRef<ReturnType<typeof SpeechRecognitionAPI>>(null);
const prevIdRef = useRef('');
const prevTranscriptRef = useRef('');
@@ -129,15 +129,9 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
});
};
const throttledTranscriptUpdate = useMemo(() => throttle(
captionSubmitText, THROTTLE_TIMEOUT, {
leading: false,
trailing: true,
},
), []);
const transcriptUpdate = debounce(captionSubmitText, THROTTLE_TIMEOUT);
const updateFinalTranscript = (id: string, transcript: string, locale: string) => {
throttledTranscriptUpdate.cancel();
captionSubmitText(id, transcript, locale, true);
};
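The interim-update path above swaps a trailing throttle for a debounce: a throttle keeps firing once per THROTTLE_TIMEOUT while recognition results stream in, whereas a debounce fires once, with only the latest result, after results pause for that long (finals still go out immediately via updateFinalTranscript). A self-contained sketch of the debounce semantics assumed here, not the project's actual /imports/utils/debounce implementation:

// Minimal trailing debounce: fires once, `wait` ms after the calls stop.
function debounce<A extends unknown[]>(fn: (...args: A) => void, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A) => {
    clearTimeout(timer);
    timer = setTimeout(() => fn(...args), wait);
  };
}

const submit = debounce((t: string) => console.log('submit', t), 1000);
submit('hel');
submit('hello');
submit('hello world'); // only this value is submitted, ~1s after the last call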
@@ -161,6 +155,8 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
results,
} = event;
logger.debug('Transcription event', event);
const { id } = resultRef.current;
const { transcript } = results[resultIndex][0];
@@ -170,16 +166,20 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
resultRef.current.isFinal = isFinal;
if (isFinal) {
throttledTranscriptUpdate(id, transcript, locale, true);
updateFinalTranscript(id, transcript, locale);
resultRef.current.id = generateId();
} else {
throttledTranscriptUpdate(id, transcript, locale, false);
transcriptUpdate(id, transcript, locale, false);
}
}, [locale]);
const stop = useCallback(() => {
idleRef.current = true;
logger.debug('Stopping browser speech recognition');
if (speechRecognitionRef.current) {
if (!speechHasStarted.started) {
return;
}
const {
isFinal,
transcript,
@@ -191,18 +191,26 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
speechRecognitionRef.current.abort();
} else {
speechRecognitionRef.current.stop();
speechHasStarted.started = false;
}
speechHasStarted.started = false;
}
}, [locale]);
const start = (settedLocale: string) => {
logger.debug('Starting browser speech recognition');
if (speechRecognitionRef.current && isLocaleValid(settedLocale)) {
speechRecognitionRef.current.lang = settedLocale;
if (speechHasStarted.started) {
logger.warn('Already starting return');
return;
}
try {
resultRef.current.id = generateId();
speechRecognitionRef.current.start();
idleRef.current = false;
speechHasStarted.started = true;
} catch (event: unknown) {
onError(event as SpeechRecognitionErrorEvent);
}
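start and stop are now guarded by the module-level speechHasStarted flag, making both idempotent; that matters because the Web Speech API's SpeechRecognition.start() throws an InvalidStateError when recognition is already running. A stripped-down sketch of the guard with hypothetical names:

// Module-level flag shared by start/stop, like speechHasStarted above.
const hasStarted = { started: false };

function guardedStart(recognition: { start: () => void }) {
  if (hasStarted.started) return; // starting twice would throw
  recognition.start();
  hasStarted.started = true;
}

function guardedStop(recognition: { stop: () => void }) {
  if (!hasStarted.started) return; // nothing to stop
  recognition.stop();
  hasStarted.started = false;
}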
@@ -221,18 +229,19 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
}
}, [speechRecognitionRef.current]);
const connectedRef = useRef(connected);
const localeRef = useRef(locale);
const connectedRef = useRef(connected);
const mutedRef = useRef(muted);
useEffect(() => {
// Connected
if (!connectedRef.current && connected) {
if ((!connectedRef.current && connected && !muted)) {
logger.debug('Audio connected');
start(locale);
connectedRef.current = connected;
} else if (connectedRef.current && !connected) {
// Disconnected
stop();
connectedRef.current = connected;
} else if (localeRef.current !== locale) {
logger.debug('Locale changed', locale);
// Locale changed
if (connectedRef.current && connected) {
stop();
@@ -240,7 +249,28 @@ const AudioCaptionsSpeech: React.FC<AudioCaptionsSpeechProps> = ({
localeRef.current = locale;
}
}
}, [connected, locale]);
// Disconnected
if ((connectedRef.current && !connected)) {
logger.debug('Audio disconnected');
stop();
connectedRef.current = connected;
}
// Unmuted and connected
if (mutedRef.current && !muted && connected) {
logger.debug('Audio unmuted and connected');
start(locale);
mutedRef.current = muted;
}
// Muted
if (!mutedRef.current && muted) {
logger.debug('Audio muted');
stop();
mutedRef.current = muted;
}
}, [connected, muted, locale]);
return null;
};
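The rewritten effect reacts to transitions rather than states: each prop (connected, muted, locale) is mirrored in a ref holding its previous value, and a branch fires only when ref and prop disagree. A generic sketch of that previous-value pattern as a hypothetical hook (not code from this diff):

import { useEffect, useRef } from 'react';

// Fires onRise on a false -> true edge and onFall on true -> false,
// mirroring how connectedRef/mutedRef are compared above.
function useEdge(value: boolean, onRise: () => void, onFall: () => void) {
  const prevRef = useRef(value);
  useEffect(() => {
    if (!prevRef.current && value) onRise();
    if (prevRef.current && !value) onFall();
    prevRef.current = value;
  }, [value, onRise, onFall]);
}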
@@ -249,6 +279,8 @@ const AudioCaptionsSpeechContainer: React.FC = () => {
/* eslint no-underscore-dangle: 0 */
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const isConnected = useReactiveVar(AudioManager._isConnected.value) as boolean;
// @ts-ignore
const isMuted = useReactiveVar(AudioManager._isMuted.value) as boolean;
const {
data: currentUser,
@@ -265,6 +297,7 @@ const AudioCaptionsSpeechContainer: React.FC = () => {
<AudioCaptionsSpeech
locale={currentUser.speechLocale ?? ''}
connected={isConnected}
muted={isMuted}
/>
);
};


@@ -63,7 +63,7 @@ class AudioManager {
};
this.defineProperties({
isMuted: false,
isMuted: makeVar(false),
isConnected: makeVar(false),
isConnecting: makeVar(false),
isHangingUp: makeVar(false),
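The second file's one-line change wraps isMuted in an Apollo makeVar, matching the neighboring flags; that is what lets the container above subscribe with useReactiveVar and re-render on mute changes even though AudioManager is a plain class living outside React. A minimal sketch of that bridge, assuming only @apollo/client and React:

import React from 'react';
import { makeVar, useReactiveVar } from '@apollo/client';

// The reactive var lives outside React; imperative code writes to it.
const isMutedVar = makeVar(false);

function setMuted(muted: boolean) {
  isMutedVar(muted); // calling the var with an argument notifies subscribers
}

// Any component reading the var re-renders when its value changes.
const MuteBadge: React.FC = () => {
  const muted = useReactiveVar(isMutedVar);
  return muted ? <span>muted</span> : null;
};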