Merge pull request #20028 from Tainan404/refactor-captions

Refactor: move captions to graphql
Ramón Souza 2024-04-23 17:22:01 -03:00 committed by GitHub
commit 9a8a2fd98b
32 changed files with 192 additions and 815 deletions

View File

@@ -1799,10 +1799,9 @@ SELECT *
FROM "caption"
WHERE "createdAt" > current_timestamp - INTERVAL '5 seconds';
CREATE OR REPLACE VIEW "v_caption_typed_activeLocales" AS
select distinct "meetingId", "locale", "ownerUserId"
from "caption_locale"
where "captionType" = 'TYPED';
CREATE OR REPLACE VIEW "v_caption_activeLocales" AS
select distinct "meetingId", "locale", "ownerUserId", "captionType"
from "caption_locale";
create index "idx_caption_typed_activeLocales" on caption("meetingId","locale","userId") where "captionType" = 'TYPED';
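
This hunk folds the per-type view v_caption_typed_activeLocales into a single v_caption_activeLocales that also exposes captionType, so one Hasura subscription can list every locale with an active caption owner. A minimal consumer sketch (field names are taken from the metadata and queries below; the exported constant is illustrative):

import { gql } from '@apollo/client';

// Sketch: select the two columns the consolidated view now exposes.
export const ACTIVE_CAPTION_LOCALES = gql`
  subscription ActiveCaptionLocales {
    caption_activeLocales {
      locale
      captionType
    }
  }
`;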

View File

@@ -1,10 +1,10 @@
table:
name: v_caption_typed_activeLocales
name: v_caption_activeLocales
schema: public
configuration:
column_config: {}
custom_column_names: {}
custom_name: caption_typed_activeLocales
custom_name: caption_activeLocales
custom_root_fields: {}
object_relationships:
- name: userOwner
@@ -22,6 +22,7 @@ select_permissions:
permission:
columns:
- locale
- captionType
filter:
meetingId:
_eq: X-Hasura-MeetingId

View File

@@ -3,7 +3,7 @@
- "!include public_v_breakoutRoom_participant.yaml"
- "!include public_v_breakoutRoom_user.yaml"
- "!include public_v_caption.yaml"
- "!include public_v_caption_typed_activeLocales.yaml"
- "!include public_v_caption_activeLocales.yaml"
- "!include public_v_chat.yaml"
- "!include public_v_chat_message_private.yaml"
- "!include public_v_chat_message_public.yaml"

View File

@@ -11,6 +11,10 @@ const AUDIO_MICROPHONE_CONSTRAINTS = Meteor.settings.public.app.defaultSettings
.application.microphoneConstraints;
const MEDIA_TAG = Meteor.settings.public.media.mediaTag;
const CONFIG = window.meetingClientSettings.public.app.audioCaptions;
const PROVIDER = CONFIG.provider;
const audioCaptionsEnabled = window.meetingClientSettings.public.app.audioCaptions.enabled;
const getAudioSessionNumber = () => {
let currItem = parseInt(sessionStorage.getItem(AUDIO_SESSION_NUM_KEY), 10);
if (!currItem) {
@@ -115,6 +119,21 @@ const doGUM = async (constraints, retryOnFailure = false) => {
}
};
const isEnabled = () => audioCaptionsEnabled;
const isWebSpeechApi = () => PROVIDER === 'webspeech';
const isVosk = () => PROVIDER === 'vosk';
const isWhispering = () => PROVIDER === 'whisper';
const isDeepSpeech = () => PROVIDER === 'deepSpeech';
const isActive = () => isEnabled()
&& ((isWebSpeechApi()) || isVosk() || isWhispering() || isDeepSpeech());
const stereoUnsupported = () => isActive() && isVosk();
export {
DEFAULT_INPUT_DEVICE_ID,
DEFAULT_OUTPUT_DEVICE_ID,
@@ -131,4 +150,5 @@ export {
getStoredAudioOutputDeviceId,
storeAudioOutputDeviceId,
doGUM,
stereoUnsupported,
};
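
This hunk moves the transcription-provider predicates (webspeech, vosk, whisper, deepSpeech) out of the captions speech service (deleted further down) into the audio bridge service, so the bridge can query transcription capabilities directly. A hedged usage sketch — negotiateStereo is a hypothetical helper, while the import path is the real one used in the next file:

import { stereoUnsupported } from '/imports/api/audio/client/bridge/service';

// Hypothetical helper: request mono capture when the active transcription
// provider (Vosk) cannot ingest stereo audio.
const negotiateStereo = (constraints) => (
  stereoUnsupported() ? { ...constraints, channelCount: 1 } : constraints
);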

View File

@@ -25,8 +25,8 @@ import {
getAudioConstraints,
filterSupportedConstraints,
doGUM,
stereoUnsupported,
} from '/imports/api/audio/client/bridge/service';
import SpeechService from '/imports/ui/components/audio/captions/speech/service';
const MEDIA = Meteor.settings.public.media;
const MEDIA_TAG = MEDIA.mediaTag;
@@ -722,7 +722,7 @@ class SIPSession {
// via SDP munging. Having it disabled on server side FS _does not suffice_
// because the stereo parameter is client-mandated (ie replicated in the
// answer)
if (SpeechService.stereoUnsupported()) {
if (stereoUnsupported()) {
logger.debug({
logCode: 'sipjs_transcription_disable_stereo',
}, 'Transcription provider does not support stereo, forcing stereo=0');
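
For reference, the munging this comment describes boils down to rewriting the client-mandated stereo parameter on the Opus fmtp line of the SDP; one possible shape, purely illustrative and not the code in this file:

// Illustrative only: flip stereo=1 to stereo=0 in an SDP blob.
const forceMonoOpus = (sdp) => sdp.replace(/stereo=1/g, 'stereo=0');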

View File

@@ -25,7 +25,10 @@ export interface Public {
clientLog: ClientLog
virtualBackgrounds: VirtualBackgrounds
}
export interface Locales {
locale: string
name: string
}
export interface App {
instanceId: string
mobileFontSize: string
@@ -453,6 +456,7 @@ export interface Captions {
font: Font
lines: number
time: number
locales: Locales[]
}
export interface Font {
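
The new Locales entries back the caption language picker: a BCP 47 tag reported by the backend is resolved to a display name via window.meetingClientSettings.public.captions.locales. A minimal sketch of that lookup (localeDisplayName is a hypothetical helper):

// Sketch: resolve 'en-US' -> 'English', falling back to the raw tag.
const localeDisplayName = (locales: Locales[], tag: string): string =>
  locales.find((l) => l.locale === tag)?.name ?? tag;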

View File

@@ -1,5 +1,4 @@
import React, { PureComponent } from 'react';
import deviceInfo from '/imports/utils/deviceInfo';
import { ActionsBarItemType, ActionsBarPosition } from 'bigbluebutton-html-plugin-sdk/dist/cjs/extensible-areas/actions-bar-item/enums';
import Styled from './styles';
import ActionsDropdown from './actions-dropdown/container';
@@ -111,6 +110,7 @@ class ActionsBar extends PureComponent {
showPushLayout,
setPushLayout,
setPresentationFitToWidth,
} = this.props;
const { selectedLayout } = Settings.application;
@@ -151,11 +151,8 @@
setPresentationFitToWidth,
}}
/>
{!deviceInfo.isMobile
? (
<AudioCaptionsButtonContainer />
)
: null}
<AudioCaptionsButtonContainer />
</Styled.Left>
<Styled.Center>
{this.renderPluginsActionBarItems(ActionsBarPosition.LEFT)}

View File

@@ -30,6 +30,7 @@ const ActionsBarContainer = (props) => {
const { data: currentMeeting } = useMeeting((m) => ({
externalVideo: m.externalVideo,
componentsFlags: m.componentsFlags,
}));
const isSharingVideo = !!currentMeeting?.externalVideo?.externalVideoUrl;
@@ -56,6 +57,7 @@ const ActionsBarContainer = (props) => {
const amIModerator = currentUserData?.isModerator;
if (actionsBarStyle.display === false) return null;
if (!currentMeeting) return null;
return (
<ActionsBar {
@@ -70,14 +72,17 @@
isThereCurrentPresentation,
isSharingVideo,
stopExternalVideoShare,
isCaptionsAvailable: currentMeeting.componentsFlags.hasCaption,
}
}
/>
);
};
const RAISE_HAND_BUTTON_ENABLED = window.meetingClientSettings.public.app.raiseHandActionButton.enabled;
const RAISE_HAND_BUTTON_CENTERED = window.meetingClientSettings.public.app.raiseHandActionButton.centered;
const RAISE_HAND_BUTTON_ENABLED = window.meetingClientSettings
.public.app.raiseHandActionButton.enabled;
const RAISE_HAND_BUTTON_CENTERED = window.meetingClientSettings
.public.app.raiseHandActionButton.centered;
const isReactionsButtonEnabled = () => {
const USER_REACTIONS_ENABLED = window.meetingClientSettings.public.userReaction.enabled;

View File

@@ -18,7 +18,7 @@ import AudioContainer from '../audio/container';
import BannerBarContainer from '/imports/ui/components/banner-bar/container';
import RaiseHandNotifier from '/imports/ui/components/raisehand-notifier/container';
import ManyWebcamsNotifier from '/imports/ui/components/video-provider/many-users-notify/container';
import AudioCaptionsSpeechContainer from '/imports/ui/components/audio/captions/speech/container';
import AudioCaptionsSpeechContainer from '/imports/ui/components/audio/audio-graphql/audio-captions/speech/component';
import UploaderContainer from '/imports/ui/components/presentation/presentation-uploader/container';
import ScreenReaderAlertContainer from '../screenreader-alert/container';
import ScreenReaderAlertAdapter from '../screenreader-alert/adapter';
@@ -566,6 +566,7 @@ setRandomUserSelectModalIsOpen(value) {
intl,
isModerator,
genericComponentId,
speechLocale,
} = this.props;
const {
@@ -642,7 +643,9 @@ setRandomUserSelectModalIsOpen(value) {
setAudioModalIsOpen: this.setAudioModalIsOpen,
isVideoPreviewModalOpen,
setVideoPreviewModalIsOpen: this.setVideoPreviewModalIsOpen,
}} />
speechLocale,
}}
/>
<ToastContainer rtl />
{(audioAlertEnabled || pushAlertEnabled)
&& (

View File

@@ -102,6 +102,7 @@ const AppContainer = (props) => {
enforceLayout: user.enforceLayout,
isModerator: user.isModerator,
presenter: user.presenter,
speechLocale: user.speechLocale,
}));
const isModerator = currentUserData?.isModerator;
@@ -185,6 +186,9 @@ const AppContainer = (props) => {
const shouldShowPresentation = (!shouldShowScreenshare && !isSharedNotesPinned
&& !shouldShowExternalVideo && !shouldShowGenericComponent
&& (presentationIsOpen || presentationRestoreOnUpdate)) && isPresentationEnabled();
if (!currentUserData) return null;
return currentUserId
? (
<App
@@ -222,6 +226,7 @@ const AppContainer = (props) => {
isPresenter,
numCameras: cameraDockInput.numCameras,
enforceLayout: validateEnforceLayout(currentUserData),
speechLocale: currentUserData?.speechLocale,
isModerator,
shouldShowScreenshare,
isSharedNotesPinned,
@@ -230,6 +235,7 @@ const AppContainer = (props) => {
toggleVoice,
setLocalSettings,
genericComponentId: genericComponent.genericComponentId,
audioCaptions: <AudioCaptionsLiveContainer speechLocale={currentUserData?.speechLocale} />,
}}
{...otherProps}
/>

View File

@@ -4,16 +4,21 @@ import { Layout } from '/imports/ui/components/layout/layoutTypes';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import ButtonEmoji from '/imports/ui/components/common/button/button-emoji/ButtonEmoji';
import BBBMenu from '/imports/ui/components/common/menu/component';
import { defineMessages, useIntl } from 'react-intl';
import { useMutation, useSubscription } from '@apollo/client';
import Styled from './styles';
import {
getSpeechVoices, isAudioTranscriptionEnabled, setAudioCaptions, setSpeechLocale,
setAudioCaptions, setSpeechLocale,
} from '../service';
import { defineMessages, useIntl } from 'react-intl';
import { MenuSeparatorItemType, MenuOptionItemType } from '/imports/ui/components/common/menu/menuTypes';
import useAudioCaptionEnable from '/imports/ui/core/local-states/useAudioCaptionEnable';
import { User } from '/imports/ui/Types/user';
import { useMutation } from '@apollo/client';
import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations';
import useMeeting from '/imports/ui/core/hooks/useMeeting';
import { ActiveCaptionsResponse, getactiveCaptions } from './queries';
const CONFIG = window.meetingClientSettings.public.app.audioCaptions;
const PROVIDER = CONFIG.provider;
const intlMessages = defineMessages({
start: {
@@ -89,7 +94,6 @@ interface AudioCaptionsButtonProps {
availableVoices: string[];
currentSpeechLocale: string;
isSupported: boolean;
isVoiceUser: boolean;
}
const DISABLED = '';
@@ -99,8 +103,8 @@ const AudioCaptionsButton: React.FC<AudioCaptionsButtonProps> = ({
currentSpeechLocale,
availableVoices,
isSupported,
isVoiceUser,
}) => {
const knownLocales = window.meetingClientSettings.public.captions.locales;
const intl = useIntl();
const [active] = useAudioCaptionEnable();
const [setSpeechLocaleMutation] = useMutation(SET_SPEECH_LOCALE);
@@ -127,11 +131,8 @@ const AudioCaptionsButton: React.FC<AudioCaptionsButtonProps> = ({
if (!isTranscriptionDisabled()) selectedLocale.current = getSelectedLocaleValue;
}, [currentSpeechLocale]);
const shouldRenderChevron = isSupported && isVoiceUser;
const toggleTranscription = () => {
setSpeechLocale(isTranscriptionDisabled() ? selectedLocale.current : DISABLED, setUserSpeechLocale);
};
const shouldRenderChevron = isSupported;
const shouldRenderSelector = isSupported && availableVoices.length > 0;
const getAvailableLocales = () => {
let indexToInsertSeparator = -1;
@@ -166,39 +167,60 @@ const AudioCaptionsButton: React.FC<AudioCaptionsButtonProps> = ({
];
};
const getAvailableLocalesList = () => (
[{
const getAvailableCaptions = () => {
return availableVoices.map((caption) => {
const localeName = knownLocales ? knownLocales.find((l) => l.locale === caption)?.name : 'en';
return {
key: caption,
label: localeName,
customStyles: (selectedLocale.current === caption) && Styled.SelectedLabel,
iconRight: selectedLocale.current === caption ? 'check' : null,
onClick: () => {
selectedLocale.current = caption;
setSpeechLocale(selectedLocale.current, setUserSpeechLocale);
},
};
});
};
const getAvailableLocalesList = () => {
// audio captions
if (shouldRenderChevron) {
return [{
key: 'availableLocalesList',
label: intl.formatMessage(intlMessages.language),
customStyles: Styled.TitleLabel,
disabled: true,
},
...getAvailableLocales(),
{
key: 'divider',
label: intl.formatMessage(intlMessages.transcription),
customStyles: Styled.TitleLabel,
disabled: true,
},
{
key: 'separator-02',
isSeparator: true,
}];
}
// typed captions
return [{
key: 'availableLocalesList',
label: intl.formatMessage(intlMessages.language),
customStyles: Styled.TitleLabel,
disabled: true,
},
...getAvailableLocales(),
{
key: 'divider',
label: intl.formatMessage(intlMessages.transcription),
customStyles: Styled.TitleLabel,
disabled: true,
},
{
key: 'separator-02',
isSeparator: true,
},
{
key: 'transcriptionStatus',
label: intl.formatMessage(
isTranscriptionDisabled()
? intlMessages.transcriptionOn
: intlMessages.transcriptionOff,
),
customStyles: isTranscriptionDisabled()
? Styled.EnableTrascription : Styled.DisableTrascription,
disabled: false,
onClick: toggleTranscription,
}]
);
...getAvailableCaptions(),
];
};
const onToggleClick = (e: React.MouseEvent) => {
e.stopPropagation();
if (!currentSpeechLocale && !active) {
setUserSpeechLocale(availableVoices[0], PROVIDER);
}
setAudioCaptions(!active);
};
@@ -216,7 +238,7 @@ const AudioCaptionsButton: React.FC<AudioCaptionsButtonProps> = ({
);
return (
shouldRenderChevron
shouldRenderChevron || shouldRenderSelector
? (
<Styled.SpanButtonWrapper>
<BBBMenu
@@ -261,15 +283,30 @@ const AudioCaptionsButtonContainer: React.FC = () => {
}),
);
if (currentUserLoading) return null;
if (!currentUser) return null;
const {
data: currentMeetingData,
loading: currentMeetingLoading,
} = useMeeting((m) => ({
componentsFlags: m.componentsFlags,
}));
const availableVoices = getSpeechVoices();
const {
data: activeCaptionsData,
loading: activeCaptionsLoading,
} = useSubscription<ActiveCaptionsResponse>(getactiveCaptions);
if (currentUserLoading) return null;
if (currentMeetingLoading) return null;
if (activeCaptionsLoading) return null;
if (!currentUser) return null;
if (!currentMeetingData) return null;
if (!activeCaptionsData) return null;
const availableVoices = activeCaptionsData.caption_activeLocales.map((caption) => caption.locale);
const currentSpeechLocale = currentUser.speechLocale || '';
const isSupported = availableVoices.length > 0;
const isVoiceUser = !!currentUser.voice;
if (!isAudioTranscriptionEnabled()) return null;
if (!currentMeetingData.componentsFlags?.hasCaption) return null;
return (
<AudioCaptionsButton
@@ -277,7 +314,6 @@ const AudioCaptionsButtonContainer: React.FC = () => {
availableVoices={availableVoices}
currentSpeechLocale={currentSpeechLocale}
isSupported={isSupported}
isVoiceUser={isVoiceUser}
/>
);
};

View File

@@ -8,6 +8,20 @@ export interface GetAudioCaptionsCountResponse {
};
}
export interface ActiveCaptionsResponse {
caption_activeLocales: Array<{
locale: string;
}>;
}
export const getactiveCaptions = gql`
subscription activeCaptions {
caption_activeLocales {
locale
}
}
`;
export const GET_AUDIO_CAPTIONS_COUNT = gql`
subscription GetAudioCaptionsCount {
caption_aggregate {
@@ -20,4 +34,5 @@ export const GET_AUDIO_CAPTIONS_COUNT = gql`
export default {
GET_AUDIO_CAPTIONS_COUNT,
getactiveCaptions,
};
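
A sketch of the intended consumption, mirroring what the button container above does with the new subscription (useAvailableVoices is a hypothetical wrapper):

import { useSubscription } from '@apollo/client';
import { ActiveCaptionsResponse, getactiveCaptions } from './queries';

// Sketch: derive the selectable caption voices from the locales that
// currently have an active owner in the meeting.
const useAvailableVoices = (): string[] => {
  const { data } = useSubscription<ActiveCaptionsResponse>(getactiveCaptions);
  return data?.caption_activeLocales.map((c) => c.locale) ?? [];
};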

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, { useEffect } from 'react';
import { defineMessages, useIntl } from 'react-intl';
import { useMutation } from '@apollo/client';
@@ -144,6 +144,14 @@ const AudioCaptionsSelect: React.FC<AudioCaptionsSelectProps> = ({
};
const AudioCaptionsSelectContainer: React.FC = () => {
const [voicesList, setVoicesList] = React.useState<string[]>([]);
const voices = getSpeechVoices();
useEffect(() => {
if (voices && voicesList.length === 0) {
setVoicesList(voices);
}
}, [voices]);
const {
data: currentUser,
} = useCurrentUser(
@@ -153,15 +161,13 @@ const AudioCaptionsSelectContainer: React.FC = () => {
}),
);
const isEnabled = isAudioTranscriptionEnabled();
const voices = getSpeechVoices();
if (!currentUser || !isEnabled || !voices) return null;
return (
<AudioCaptionsSelect
isTranscriptionEnabled={isEnabled}
speechLocale={currentUser.speechLocale ?? ''}
speechVoices={voices}
speechVoices={voices || voicesList}
/>
);
};
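
The caching useEffect above exists because speechSynthesis.getVoices() may legitimately return an empty array until the browser finishes loading its voice set, which it announces with a voiceschanged event. The equivalent direct listener, as a sketch:

// Sketch: repopulate the locale list once the browser reports its voices.
window.speechSynthesis?.addEventListener('voiceschanged', () => {
  const langs = window.speechSynthesis.getVoices().map((v) => v.lang);
  // ...push `langs` into component state here
});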

View File

@@ -5,6 +5,7 @@ import logger from '/imports/startup/client/logger';
import Styled from './styles';
import useAudioCaptionEnable from '/imports/ui/core/local-states/useAudioCaptionEnable';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
interface AudioCaptionsLiveProps {
captions: Caption[];
@@ -54,11 +55,19 @@ const AudioCaptionsLive: React.FC<AudioCaptionsLiveProps> = ({
};
const AudioCaptionsLiveContainer: React.FC = () => {
const {
data: currentUser,
} = useCurrentUser((u) => ({
speechLocale: u.speechLocale,
}));
const {
data: AudioCaptionsLiveData,
loading: AudioCaptionsLiveLoading,
error: AudioCaptionsLiveError,
} = useSubscription<getCaptions>(GET_CAPTIONS);
} = useSubscription<getCaptions>(GET_CAPTIONS, {
variables: { locale: currentUser?.speechLocale ?? 'en-US' },
});
const [audioCaptionsEnable] = useAudioCaptionEnable();

View File

@@ -26,8 +26,8 @@ export interface GetAudioCaptions {
}
export const GET_CAPTIONS = gql`
subscription getCaptions {
caption {
subscription getCaptions($locale: String!) {
caption(where: {locale: {_eq: $locale}}) {
user {
avatar
color

View File

@@ -1,5 +1,4 @@
import { unique } from 'radash';
import logger from '/imports/startup/client/logger';
import { setAudioCaptionEnable } from '/imports/ui/core/local-states/useAudioCaptionEnable';
import { isLiveTranscriptionEnabled } from '/imports/ui/services/features';
@@ -30,15 +29,7 @@ export const setAudioCaptions = (value: boolean) => {
};
export const setSpeechLocale = (value: string, setUserSpeechLocale: (a: string, b: string) => void) => {
const voices = getSpeechVoices();
if (voices.includes(value) || value === '') {
setUserSpeechLocale(value, CONFIG.provider);
} else {
logger.error({
logCode: 'captions_speech_locale',
}, 'Captions speech set locale error');
}
setUserSpeechLocale(value, CONFIG.provider);
};
export const useFixedLocale = () => isAudioTranscriptionEnabled() && CONFIG.language.forceLocale;
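
With selectable locales now sourced from the caption_activeLocales subscription rather than the browser voice list, the old client-side validation is dropped and the setter forwards unconditionally. The setUserSpeechLocale it expects is wired to the SET_SPEECH_LOCALE mutation, as in the deleted speech container further down; a sketch:

import { useMutation } from '@apollo/client';
import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations';

// Sketch: build a setUserSpeechLocale suitable for passing to setSpeechLocale.
const useSetUserSpeechLocale = () => {
  const [setSpeechLocaleMutation] = useMutation(SET_SPEECH_LOCALE);
  return (locale: string, provider: string) => setSpeechLocaleMutation({
    variables: { locale, provider },
  });
};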

View File

@@ -12,8 +12,8 @@ import Help from '../help/component';
import AudioDial from '../audio-dial/component';
import AudioAutoplayPrompt from '../autoplay/component';
import Settings from '/imports/ui/services/settings';
import CaptionsSelectContainer from '/imports/ui/components/audio/captions/select/container';
import usePreviousValue from '/imports/ui/hooks/usePreviousValue';
import AudioCaptionsSelectContainer from '../audio-graphql/audio-captions/captions/component';
const propTypes = {
intl: PropTypes.shape({
@@ -351,7 +351,7 @@ const AudioModal = (props) => {
}}
/>
) : null}
<CaptionsSelectContainer />
<AudioCaptionsSelectContainer />
</div>
);
};

View File

@@ -1,104 +0,0 @@
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import UserContainer from './user/container';
const CAPTIONS_CONFIG = window.meetingClientSettings.public.captions;
class LiveCaptions extends PureComponent {
constructor(props) {
super(props);
this.state = { clear: true };
this.timer = null;
}
componentDidUpdate(prevProps) {
const { clear } = this.state;
if (clear) {
const { transcript } = this.props;
if (prevProps.transcript !== transcript) {
// eslint-disable-next-line react/no-did-update-set-state
this.setState({ clear: false });
}
} else {
this.resetTimer();
this.timer = setTimeout(() => this.setState({ clear: true }), CAPTIONS_CONFIG.time);
}
}
componentWillUnmount() {
this.resetTimer();
}
resetTimer() {
if (this.timer) {
clearTimeout(this.timer);
this.timer = null;
}
}
render() {
const {
transcript,
transcriptId,
} = this.props;
const { clear } = this.state;
const hasContent = transcript.length > 0 && !clear;
const wrapperStyles = {
display: 'flex',
};
const captionStyles = {
whiteSpace: 'pre-line',
wordWrap: 'break-word',
fontFamily: 'Verdana, Arial, Helvetica, sans-serif',
fontSize: '1.5rem',
background: '#000000a0',
color: 'white',
padding: hasContent ? '.5rem' : undefined,
};
const visuallyHidden = {
position: 'absolute',
overflow: 'hidden',
clip: 'rect(0 0 0 0)',
height: '1px',
width: '1px',
margin: '-1px',
padding: '0',
border: '0',
};
return (
<div style={wrapperStyles}>
{clear ? null : (
<UserContainer
background="#000000a0"
transcriptId={transcriptId}
/>
)}
<div style={captionStyles}>
{clear ? '' : transcript}
</div>
<div
style={visuallyHidden}
aria-atomic
aria-live="polite"
>
{clear ? '' : transcript}
</div>
</div>
);
}
}
LiveCaptions.propTypes = {
transcript: PropTypes.string.isRequired,
transcriptId: PropTypes.string.isRequired,
};
export default LiveCaptions;

View File

@@ -1,44 +0,0 @@
import React from 'react';
import { withTracker } from 'meteor/react-meteor-data';
import Users from '/imports/api/users';
import User from './component';
const MODERATOR = window.meetingClientSettings.public.user.role_moderator;
const Container = (props) => <User {...props} />;
const getUser = (userId) => {
const user = Users.findOne(
{ userId },
{
fields: {
avatar: 1,
color: 1,
role: 1,
name: 1,
},
},
);
if (user) {
return {
avatar: user.avatar,
color: user.color,
moderator: user.role === MODERATOR,
name: user.name,
};
}
return {
avatar: '',
color: '',
moderator: false,
name: '',
};
};
export default withTracker(({ transcriptId }) => {
const userId = transcriptId.split('-')[0];
return getUser(userId);
})(Container);

View File

@@ -1,142 +0,0 @@
import React from 'react';
import PropTypes from 'prop-types';
import { defineMessages, injectIntl } from 'react-intl';
import SpeechService from '/imports/ui/components/audio/captions/speech/service';
import { useMutation } from '@apollo/client';
import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations';
const intlMessages = defineMessages({
title: {
id: 'app.audio.captions.speech.title',
description: 'Audio speech recognition title',
},
disabled: {
id: 'app.audio.captions.speech.disabled',
description: 'Audio speech recognition disabled',
},
unsupported: {
id: 'app.audio.captions.speech.unsupported',
description: 'Audio speech recognition unsupported',
},
'de-DE': {
id: 'app.audio.captions.select.de-DE',
description: 'Audio speech recognition german language',
},
'en-US': {
id: 'app.audio.captions.select.en-US',
description: 'Audio speech recognition english language',
},
'es-ES': {
id: 'app.audio.captions.select.es-ES',
description: 'Audio speech recognition spanish language',
},
'fr-FR': {
id: 'app.audio.captions.select.fr-FR',
description: 'Audio speech recognition french language',
},
'hi-ID': {
id: 'app.audio.captions.select.hi-ID',
description: 'Audio speech recognition indian language',
},
'it-IT': {
id: 'app.audio.captions.select.it-IT',
description: 'Audio speech recognition italian language',
},
'ja-JP': {
id: 'app.audio.captions.select.ja-JP',
description: 'Audio speech recognition japanese language',
},
'pt-BR': {
id: 'app.audio.captions.select.pt-BR',
description: 'Audio speech recognition portuguese language',
},
'ru-RU': {
id: 'app.audio.captions.select.ru-RU',
description: 'Audio speech recognition russian language',
},
'zh-CN': {
id: 'app.audio.captions.select.zh-CN',
description: 'Audio speech recognition chinese language',
},
});
const Select = ({
intl,
enabled,
locale,
voices,
}) => {
const useLocaleHook = SpeechService.useFixedLocale();
const [setSpeechLocale] = useMutation(SET_SPEECH_LOCALE);
const setUserSpeechLocale = (speechLocale, provider) => {
setSpeechLocale({
variables: {
locale: speechLocale,
provider,
},
});
};
if (!enabled || useLocaleHook) return null;
if (voices.length === 0) {
return (
<div data-test="speechRecognitionUnsupported"
style={{
fontSize: '.75rem',
padding: '1rem 0',
}}
>
{`*${intl.formatMessage(intlMessages.unsupported)}`}
</div>
);
}
const onChange = (e) => {
const { value } = e.target;
SpeechService.setSpeechLocale(value, setUserSpeechLocale);
};
return (
<div style={{ padding: '1rem 0' }}>
<label
htmlFor="speechSelect"
style={{ padding: '0 .5rem' }}
>
{intl.formatMessage(intlMessages.title)}
</label>
<select
id="speechSelect"
onChange={onChange}
value={locale}
>
<option
key="disabled"
value=""
>
{intl.formatMessage(intlMessages.disabled)}
</option>
{voices.map((v) => (
<option
key={v}
value={v}
>
{intl.formatMessage(intlMessages[v])}
</option>
))}
</select>
</div>
);
};
Select.propTypes = {
enabled: PropTypes.bool.isRequired,
locale: PropTypes.string.isRequired,
voices: PropTypes.arrayOf(PropTypes.string).isRequired,
intl: PropTypes.shape({
formatMessage: PropTypes.func.isRequired,
}).isRequired,
};
export default injectIntl(Select);

View File

@@ -1,15 +0,0 @@
import React from 'react';
import { withTracker } from 'meteor/react-meteor-data';
import Service from '/imports/ui/components/audio/captions/speech/service';
import Select from './component';
import AudioCaptionsSelectContainer from '../../audio-graphql/audio-captions/captions/component';
const Container = (props) => <Select {...props} />;
withTracker(() => ({
enabled: Service.isEnabled(),
locale: Service.getSpeechLocale(),
voices: Service.getSpeechVoices(),
}))(Container);
export default AudioCaptionsSelectContainer;

View File

@@ -1,8 +0,0 @@
const getAudioCaptions = () => Session.get('audioCaptions') || false;
const setAudioCaptions = (value) => Session.set('audioCaptions', value);
export default {
getAudioCaptions,
setAudioCaptions,
};

View File

@@ -1,160 +0,0 @@
import { PureComponent } from 'react';
import PropTypes from 'prop-types';
import logger from '/imports/startup/client/logger';
import { throttle } from 'radash';
import Service from './service';
const THROTTLE_TIMEOUT = 200;
class Speech extends PureComponent {
constructor(props) {
super(props);
this.onEnd = this.onEnd.bind(this);
this.onError = this.onError.bind(this);
this.onResult = this.onResult.bind(this);
this.result = {
id: Service.generateId(),
transcript: '',
isFinal: true,
};
this.idle = true;
this.speechRecognition = Service.initSpeechRecognition(props.setUserSpeechLocale);
if (this.speechRecognition) {
this.speechRecognition.onend = () => this.onEnd();
this.speechRecognition.onerror = (event) => this.onError(event);
this.speechRecognition.onresult = (event) => this.onResult(event);
}
this.throttledTranscriptUpdate = throttle(
{ interval: THROTTLE_TIMEOUT },
props.captionSubmitText
);
}
componentDidUpdate(prevProps) {
const {
locale,
connected,
talking,
} = this.props;
// Connected
if (!prevProps.connected && connected) {
this.start(locale);
}
// Disconnected
if (prevProps.connected && !connected) {
this.stop();
}
// Switch locale
if (prevProps.locale !== locale) {
if (prevProps.connected && connected) {
this.stop();
this.start(locale);
}
}
// Recovery from idle
if (!prevProps.talking && talking) {
if (prevProps.connected && connected) {
if (this.idle) {
this.start(locale);
}
}
}
}
componentWillUnmount() {
this.stop();
}
onEnd() {
this.stop();
}
onError(event) {
this.stop();
logger.error({
logCode: 'captions_speech_recognition',
extraInfo: {
error: event.error,
message: event.message,
},
}, 'Captions speech recognition error');
}
onResult(event) {
const {
resultIndex,
results,
} = event;
const { id } = this.result;
const { transcript } = results[resultIndex][0];
const { isFinal } = results[resultIndex];
this.result.transcript = transcript;
this.result.isFinal = isFinal;
const { locale, captionSubmitText } = this.props;
if (isFinal) {
captionSubmitText(id, transcript, locale, true);
this.result.id = Service.generateId();
} else {
this.throttledTranscriptUpdate(id, transcript, locale, false);
}
}
start(locale) {
if (this.speechRecognition && Service.isLocaleValid(locale)) {
this.speechRecognition.lang = locale;
try {
this.result.id = Service.generateId();
this.speechRecognition.start();
this.idle = false;
} catch (event) {
this.onError(event);
}
}
}
stop() {
this.idle = true;
if (this.speechRecognition) {
const {
isFinal,
transcript,
} = this.result;
if (!isFinal) {
const { locale } = this.props;
const { id } = this.result;
Service.updateFinalTranscript(id, transcript, locale);
this.speechRecognition.abort();
} else {
this.speechRecognition.stop();
}
}
}
render() {
return null;
}
}
Speech.propTypes = {
locale: PropTypes.string.isRequired,
connected: PropTypes.bool.isRequired,
talking: PropTypes.bool.isRequired,
setUserSpeechLocale: PropTypes.func.isRequired,
};
export default Speech;

View File

@@ -1,84 +0,0 @@
import React from 'react';
import { withTracker } from 'meteor/react-meteor-data';
import { useMutation } from '@apollo/client';
import { diff } from '@mconf/bbb-diff';
import Service from './service';
import Speech from './component';
import AudioCaptionsSpeechContainer from '../../audio-graphql/audio-captions/speech/component';
import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations';
import { SUBMIT_TEXT } from './mutations';
let prevId = '';
let prevTranscript = '';
const Container = (props) => {
const [setSpeechLocale] = useMutation(SET_SPEECH_LOCALE);
const [submitText] = useMutation(SUBMIT_TEXT);
const setUserSpeechLocale = (locale, provider) => {
setSpeechLocale({
variables: {
locale,
provider,
},
});
};
const captionSubmitText = (id, transcript, locale, isFinal) => {
// If it's a new sentence
if (id !== prevId) {
prevId = id;
prevTranscript = '';
}
const transcriptDiff = diff(prevTranscript, transcript);
let start = 0;
let end = 0;
let text = '';
if (transcriptDiff) {
start = transcriptDiff.start;
end = transcriptDiff.end;
text = transcriptDiff.text;
}
// Stores current transcript as previous
prevTranscript = transcript;
submitText({
variables: {
transcriptId: id,
start,
end,
text,
transcript,
locale,
isFinal,
},
});
};
return (
<Speech
setUserSpeechLocale={setUserSpeechLocale}
captionSubmitText={captionSubmitText}
{...props}
/>
);
};
withTracker(() => {
const {
locale,
connected,
talking,
} = Service.getStatus();
return {
locale,
connected,
talking,
};
})(Container);
export default AudioCaptionsSpeechContainer;

View File

@@ -1,27 +0,0 @@
import { gql } from '@apollo/client';
export const SUBMIT_TEXT = gql`
mutation SubmitText(
$transcriptId: String!
$start: Int!
$end: Int!
$text: String!
$transcript: String!
$locale: String!
$isFinal: Boolean!
) {
captionSubmitText(
transcriptId: $transcriptId,
start: $start,
end: $end,
text: $text,
transcript: $transcript,
locale: $locale,
isFinal: $isFinal,
)
}
`;
export default {
SUBMIT_TEXT,
};

View File

@@ -1,143 +0,0 @@
import { Session } from 'meteor/session';
import Auth from '/imports/ui/services/auth';
import logger from '/imports/startup/client/logger';
import Users from '/imports/api/users';
import AudioService from '/imports/ui/components/audio/service';
import deviceInfo from '/imports/utils/deviceInfo';
import { isLiveTranscriptionEnabled } from '/imports/ui/services/features';
import { unique } from 'radash';
const CONFIG = window.meetingClientSettings.public.app.audioCaptions;
const ENABLED = CONFIG.enabled;
const PROVIDER = CONFIG.provider;
const LANGUAGES = CONFIG.language.available;
const VALID_ENVIRONMENT = !deviceInfo.isMobile || CONFIG.mobile;
const SpeechRecognitionAPI = window.SpeechRecognition || window.webkitSpeechRecognition;
const hasSpeechRecognitionSupport = () => typeof SpeechRecognitionAPI !== 'undefined'
&& typeof window.speechSynthesis !== 'undefined'
&& VALID_ENVIRONMENT;
const setSpeechVoices = () => {
if (!hasSpeechRecognitionSupport()) return;
Session.set('speechVoices', unique(window.speechSynthesis.getVoices().map((v) => v.lang)));
};
// Trigger getVoices
setSpeechVoices();
const getSpeechVoices = () => {
if (!isWebSpeechApi()) return LANGUAGES;
const voices = Session.get('speechVoices') || [];
return voices.filter((v) => LANGUAGES.includes(v));
};
const setSpeechLocale = (value, setUserSpeechLocale) => {
const voices = getSpeechVoices();
if (voices.includes(value) || value === '') {
setUserSpeechLocale(value, CONFIG.provider);
} else {
logger.error({
logCode: 'captions_speech_locale',
}, 'Captions speech set locale error');
}
};
const useFixedLocale = () => isEnabled() && CONFIG.language.forceLocale;
const initSpeechRecognition = (setUserSpeechLocale) => {
if (!isEnabled() || !isWebSpeechApi()) return null;
if (hasSpeechRecognitionSupport()) {
// Effectivate getVoices
setSpeechVoices();
const speechRecognition = new SpeechRecognitionAPI();
speechRecognition.continuous = true;
speechRecognition.interimResults = true;
if (useFixedLocale() || localeAsDefaultSelected()) {
setSpeechLocale(getLocale(), setUserSpeechLocale);
} else {
setSpeechLocale(navigator.language, setUserSpeechLocale);
}
return speechRecognition;
}
logger.warn({
logCode: 'captions_speech_unsupported',
}, 'Captions speech unsupported');
return null;
};
const getSpeechLocale = (userId = Auth.userID) => {
const user = Users.findOne({ userId }, { fields: { speechLocale: 1 } });
if (user) return user.speechLocale;
return '';
};
const hasSpeechLocale = (userId = Auth.userID) => getSpeechLocale(userId) !== '';
const isLocaleValid = (locale) => LANGUAGES.includes(locale);
const isEnabled = () => isLiveTranscriptionEnabled();
const isWebSpeechApi = () => PROVIDER === 'webspeech';
const isVosk = () => PROVIDER === 'vosk';
const isWhispering = () => PROVIDER === 'whisper';
const isDeepSpeech = () => PROVIDER === 'deepSpeech'
const isActive = () => isEnabled() && ((isWebSpeechApi() && hasSpeechLocale()) || isVosk() || isWhispering() || isDeepSpeech());
const getStatus = () => {
const active = isActive();
const locale = getSpeechLocale();
const audio = AudioService.isConnected() && !AudioService.isEchoTest() && !AudioService.isMuted();
const connected = Meteor.status().connected && active && audio;
const talking = AudioService.isTalking();
return {
locale,
connected,
talking,
};
};
const generateId = () => `${Auth.userID}-${Date.now()}`;
const localeAsDefaultSelected = () => CONFIG.language.defaultSelectLocale;
const getLocale = () => {
const { locale } = CONFIG.language;
if (locale === 'browserLanguage') return navigator.language;
if (locale === 'disabled') return '';
return locale;
};
const stereoUnsupported = () => isActive() && isVosk() && !!getSpeechLocale();
export default {
LANGUAGES,
hasSpeechRecognitionSupport,
initSpeechRecognition,
getSpeechVoices,
getSpeechLocale,
setSpeechLocale,
hasSpeechLocale,
isLocaleValid,
isEnabled,
isActive,
getStatus,
generateId,
useFixedLocale,
stereoUnsupported,
};

View File

@@ -185,6 +185,7 @@ const messages = {
export default lockContextContainer(injectIntl(withTracker(({
intl, userLocks, isAudioModalOpen, setAudioModalIsOpen, setVideoPreviewModalIsOpen,
speechLocale,
}) => {
const { microphoneConstraints } = Settings.application;
const autoJoin = getFromUserSettings('bbb_auto_join_audio', APP_CONFIG.autoJoin);
@@ -239,7 +240,7 @@ export default lockContextContainer(injectIntl(withTracker(({
setAudioModalIsOpen,
microphoneConstraints,
init: async (toggleVoice) => {
await Service.init(messages, intl, toggleVoice);
await Service.init(messages, intl, toggleVoice, speechLocale);
if ((!autoJoin || didMountAutoJoin)) {
if (enableVideo && autoShareWebcam) {
openVideoPreviewModal();

View File

@@ -45,7 +45,7 @@ const audioEventHandler = (toggleVoice) => (event) => {
}
};
const init = (messages, intl, toggleVoice) => {
const init = (messages, intl, toggleVoice, speechLocale) => {
AudioManager.setAudioMessages(messages, intl);
if (AudioManager.initialized) return Promise.resolve(false);
const meetingId = Auth.meetingID;
@@ -66,6 +66,7 @@ const init = (messages, intl, toggleVoice) => {
username,
voiceBridge,
microphoneLockEnforced,
speechLocale,
};
return AudioManager.init(userData, audioEventHandler(toggleVoice));

View File

@@ -4,7 +4,6 @@ import { withTracker } from 'meteor/react-meteor-data';
import deviceInfo from '/imports/utils/deviceInfo';
import browserInfo from '/imports/utils/browserInfo';
import OptionsDropdown from './component';
import audioCaptionsService from '/imports/ui/components/audio/captions/service';
import FullscreenService from '/imports/ui/components/common/fullscreen-button/service';
import { meetingIsBreakout } from '/imports/ui/components/app/service';
import { layoutSelectInput, layoutSelect } from '../../layout/context';
@@ -18,6 +17,9 @@ const { isIphone } = deviceInfo;
const { isSafari, isValidSafariVersion } = browserInfo;
const noIOSFullscreen = !!(((isSafari && !isValidSafariVersion) || isIphone));
const getAudioCaptions = () => Session.get('audioCaptions') || false;
const setAudioCaptions = (value) => Session.set('audioCaptions', value);
const OptionsDropdownContainer = (props) => {
const { width: browserWidth } = layoutSelectInput((i) => i.browser);
@@ -56,8 +58,8 @@ export default withTracker((props) => {
const handleToggleFullscreen = () => FullscreenService.toggleFullScreen();
return {
amIModerator: props.amIModerator,
audioCaptionsActive: audioCaptionsService.getAudioCaptions(),
audioCaptionsSet: (value) => audioCaptionsService.setAudioCaptions(value),
audioCaptionsActive: getAudioCaptions(),
audioCaptionsSet: (value) => setAudioCaptions(value),
isMobile: deviceInfo.isMobile,
handleToggleFullscreen,
noIOSFullscreen,

View File

@@ -2,7 +2,7 @@ import { useContext } from 'react';
import { User } from '../../Types/user';
import { CurrentUserContext } from '../providers/current-user';
const useCurrentUser = (fn: (c: Partial<User>) => Partial<User>) => {
const useCurrentUser = (fn: (c: Partial<User>) => Partial<User> = (u) => u) => {
const response = useContext(CurrentUserContext);
const returnObject = {
...response,
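
Defaulting the selector to the identity function makes the projection optional, so call sites that want the whole cached user can now write:

// Equivalent to useCurrentUser((u) => u); returns the full Partial<User>.
const { data: currentUser } = useCurrentUser();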

View File

@@ -482,6 +482,12 @@ export const meetingClientSettingsInitialValues: MeetingClientSettings = {
family: 'Calibri',
size: '24px',
},
locales: [
{
locale: 'en-US',
name: 'English',
},
],
lines: 2,
time: 5000,
},

View File

@@ -587,6 +587,8 @@ public:
name: "Ελληνικά"
- locale: "en"
name: "English"
- locale: "en-US"
name: "English"
- locale: "eo"
name: "Esperanto"
- locale: "es"