Revert "refactor(core-html5): custom hook for voice data"

Tiago Jacobs 2024-06-19 14:18:58 -03:00 committed by GitHub
parent dd3dbcb0cc
commit ba02a327e1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
21 changed files with 126 additions and 167 deletions
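The refactor being reverted had components read voice state out of a shared useWhoIsTalking() hook (a map of user IDs to voice data built from a dedicated subscription); this revert returns them to reading the voice field projected by useCurrentUser / useVoiceUsers, as the hunks below show. A minimal sketch of the two patterns, assuming only the hook paths and field names that appear in this diff; the two wrapper hooks themselves are illustrative, not part of the commit:

// Sketch only, not part of the commit. Import paths and fields are taken from the hunks below.
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';

// Pattern removed by this revert: look the current user up in the voices map
// returned by the shared useWhoIsTalking() subscription hook.
const useMutedViaWhoIsTalking = (): boolean => {
  const { data: currentUser } = useCurrentUser((u) => ({ userId: u.userId }));
  const { voices: talkingUsers } = useWhoIsTalking();
  return Boolean(currentUser?.userId && talkingUsers[currentUser.userId]?.muted);
};

// Pattern restored by this revert: read the voice object that useCurrentUser
// already projects from the current-user subscription.
const useMutedViaUserVoice = (): boolean => {
  const { data: currentUser } = useCurrentUser((u) => ({ voice: { muted: u.voice?.muted } }));
  return currentUser?.voice?.muted ?? false;
};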

View File

@@ -7,7 +7,6 @@ import Auth from '/imports/ui/services/auth';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import useSettings from '/imports/ui/services/settings/hooks/useSettings';
import { SETTINGS } from '/imports/ui/services/settings/enums';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const ReactionsButtonContainer = ({ ...props }) => {
const layoutContextDispatch = layoutDispatch();
@@ -21,17 +20,16 @@ const ReactionsButtonContainer = ({ ...props }) => {
emoji: user.emoji,
raiseHand: user.raiseHand,
away: user.away,
voice: user.voice,
reactionEmoji: user.reactionEmoji,
userId: user.userId,
}));
const { voices: talkingUsers } = useWhoIsTalking();
const currentUser = {
userId: Auth.userID,
emoji: currentUserData?.emoji,
raiseHand: currentUserData?.raiseHand,
away: currentUserData?.away,
muted: Boolean(currentUserData?.userId && talkingUsers[currentUserData.userId]?.muted),
muted: currentUserData?.voice?.muted || false,
};
const { autoCloseReactionsBar } = useSettings(SETTINGS.APPLICATION);

View File

@@ -302,6 +302,7 @@ const AudioCaptionsButtonContainer: React.FC = () => {
} = useCurrentUser(
(user: Partial<User>) => ({
captionLocale: user.captionLocale,
voice: user.voice,
speechLocale: user.speechLocale,
}),
);

View File

@@ -174,6 +174,7 @@ const AudioCaptionsSelectContainer: React.FC = () => {
} = useCurrentUser(
(user) => ({
speechLocale: user.speechLocale,
voice: user.voice,
}),
);
const isEnabled = useIsAudioTranscriptionEnabled();

View File

@@ -309,6 +309,7 @@ const AudioCaptionsSpeechContainer: React.FC = () => {
} = useCurrentUser(
(user) => ({
speechLocale: user.speechLocale,
voice: user.voice,
}),
);

View File

@@ -15,7 +15,6 @@ import { joinListenOnly } from './service';
import Styled from './styles';
import InputStreamLiveSelectorContainer from './input-stream-live-selector/component';
import { UPDATE_ECHO_TEST_RUNNING } from './queries';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const intlMessages = defineMessages({
joinAudio: {
@@ -124,17 +123,13 @@ const AudioControls: React.FC<AudioControlsProps> = ({
export const AudioControlsContainer: React.FC = () => {
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
return {
userId: u.userId,
presenter: u.presenter,
isModerator: u.isModerator,
locked: u?.locked ?? false,
voice: u.voice,
away: u.away,
};
});
const { voices: talkingUsers } = useWhoIsTalking();
const currentUserVoice = currentUser?.userId
? talkingUsers[currentUser.userId]
: null;
const { data: currentMeeting } = useMeeting((m: Partial<Meeting>) => {
return {
@@ -159,7 +154,7 @@ export const AudioControlsContainer: React.FC = () => {
if (!currentUser || !currentMeeting) return null;
return (
<AudioControls
inAudio={Boolean(currentUserVoice)}
inAudio={!!currentUser.voice ?? false}
isConnected={isConnected}
disabled={isConnecting || isHangingUp}
isEchoTest={isEchoTest}

View File

@@ -18,7 +18,6 @@ import MutedAlert from '/imports/ui/components/muted-alert/component';
import MuteToggle from './buttons/muteToggle';
import ListenOnly from './buttons/listenOnly';
import LiveSelection from './buttons/LiveSelection';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const AUDIO_INPUT = 'audioinput';
const AUDIO_OUTPUT = 'audiooutput';
@@ -216,35 +215,27 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
};
const InputStreamLiveSelectorContainer: React.FC = () => {
const { voices: talkingUsers } = useWhoIsTalking();
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
if (!u.voice) {
return {
presenter: u.presenter,
isModerator: u.isModerator,
};
}
const { data: currentUserData } = useCurrentUser((u: Partial<User>) => {
return {
userId: u.userId,
presenter: u.presenter,
isModerator: u.isModerator,
locked: u?.locked ?? false,
away: u?.away,
voice: {
muted: u?.voice?.muted ?? false,
listenOnly: u?.voice?.listenOnly ?? false,
talking: u?.voice?.talking ?? false,
},
};
});
const currentUserVoice = currentUserData?.userId
? talkingUsers[currentUserData.userId]
: null;
const currentUser = currentUserVoice ? {
userId: currentUserData?.userId,
presenter: currentUserData?.presenter,
isModerator: currentUserData?.isModerator,
locked: currentUserData?.locked,
away: currentUserData?.away,
voice: {
muted: currentUserVoice.muted,
listenOnly: currentUserVoice.listenOnly,
talking: currentUserVoice.talking,
},
} : {
presenter: currentUserData?.presenter,
isModerator: currentUserData?.isModerator,
};
const { data: currentMeeting } = useMeeting((m: Partial<Meeting>) => {
return {

View File

@@ -3,20 +3,17 @@ import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import logger from '/imports/startup/client/logger';
import AudioManager from '/imports/ui/services/audio-manager';
import useToggleVoice from './useToggleVoice';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const useMuteMicrophone = () => {
const { data: currentUser } = useCurrentUser((u) => ({
userId: u.userId,
voice: {
muted: u.voice?.muted,
},
}));
const { voices: talkingUsers } = useWhoIsTalking();
const toggleVoice = useToggleVoice();
const currentUserVoice = currentUser?.userId
? talkingUsers[currentUser.userId]
: null;
const muted = Boolean(currentUserVoice?.muted);
const userId = currentUser?.userId;
const muted = !!currentUser?.voice?.muted;
const userId = currentUser?.userId ?? '';
return useCallback(() => {
if (!muted) {

View File

@@ -1,9 +1,16 @@
import { useCallback } from 'react';
import { useMutation } from '@apollo/client';
import { USER_SET_MUTED } from '../mutations';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import logger from '/imports/startup/client/logger';
const useToggleVoice = () => {
const [userSetMuted] = useMutation(USER_SET_MUTED);
const { data: currentUserData } = useCurrentUser((u) => ({
voice: {
muted: u.voice?.muted,
},
}));
const toggleVoice = async (userId: string, muted: boolean) => {
try {
@@ -13,7 +20,7 @@ const useToggleVoice = () => {
}
};
return toggleVoice;
return useCallback(toggleVoice, [currentUserData?.voice?.muted]);
};
export default useToggleVoice;

View File

@@ -0,0 +1,17 @@
import { VOICE_USERS_SUBSCRIPTION, VoiceUsersResponse } from '../queries';
import createUseSubscription from '/imports/ui/core/hooks/createUseSubscription';
type Voice = VoiceUsersResponse['user_voice'][number];
const useVoiceUsersSubscription = createUseSubscription<Voice>(
VOICE_USERS_SUBSCRIPTION,
{},
true,
);
const useVoiceUsers = (projection: (v: Partial<Voice>) => Partial<Voice>) => {
const response = useVoiceUsersSubscription(projection);
return response;
};
export default useVoiceUsers;
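For reference, a hedged usage sketch of the restored useVoiceUsers hook above, mirroring how the video-list-item hunk further down consumes it; the TalkingBadge component is hypothetical:

// Sketch only. useVoiceUsers, its projection fields, and the .data?.find lookup
// come from this diff; TalkingBadge is a made-up consumer for illustration.
import React from 'react';
import useVoiceUsers from '/imports/ui/components/audio/audio-graphql/hooks/useVoiceUsers';

const TalkingBadge: React.FC<{ userId: string }> = ({ userId }) => {
  const voiceUsers = useVoiceUsers((v) => ({
    userId: v.userId,
    muted: v.muted,
    talking: v.talking,
  }));
  // data holds the projected voice entries; pick the one for this user.
  const voiceUser = voiceUsers.data?.find((v) => v.userId === userId);
  if (!voiceUser) return null;
  return <span>{voiceUser.talking ? 'talking' : voiceUser.muted ? 'muted' : 'silent'}</span>;
};

export default TalkingBadge;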

View File

@@ -25,7 +25,6 @@ import { SETTINGS } from '../../services/settings/enums';
import { useStorageKey } from '../../services/storage/hooks';
import { BREAKOUT_COUNT } from './queries';
import useMeeting from '../../core/hooks/useMeeting';
import useWhoIsTalking from '../../core/hooks/useWhoIsTalking';
const intlMessages = defineMessages({
joinedAudio: {
@@ -174,9 +173,7 @@ const AudioContainer = (props) => {
const { hasBreakoutRooms: hadBreakoutRooms } = prevProps || {};
const userIsReturningFromBreakoutRoom = hadBreakoutRooms && !hasBreakoutRooms;
const { data: currentUser } = useCurrentUser((u) => ({ userId: u?.userId }));
const { voices: talkingUsers } = useWhoIsTalking();
const currentUserMuted = Boolean(currentUser?.userId && talkingUsers[currentUser.userId]?.muted);
const { data: currentUserMuted } = useCurrentUser((u) => u?.voice?.muted ?? false);
const joinAudio = () => {
if (Service.isConnected()) return;

View File

@@ -25,7 +25,6 @@ import {
} from './service';
import { useExitVideo, useStreams } from '/imports/ui/components/video-provider/hooks';
import useDeduplicatedSubscription from '/imports/ui/core/hooks/useDeduplicatedSubscription';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
interface BreakoutRoomProps {
breakouts: BreakoutRoom[];
@@ -318,13 +317,9 @@ const BreakoutRoomContainer: React.FC = () => {
} = useCurrentUser((u) => ({
isModerator: u.isModerator,
presenter: u.presenter,
voice: u.voice,
userId: u.userId,
}));
const { voices: talkingUsers } = useWhoIsTalking();
const currentUserVoice = currentUserData?.userId
? talkingUsers[currentUserData.userId]
: null;
const currentUserJoinedAudio = Boolean(currentUserVoice?.joined);
const {
data: breakoutData,
@@ -353,7 +348,7 @@ const BreakoutRoomContainer: React.FC = () => {
isModerator={currentUserData.isModerator ?? false}
presenter={currentUserData.presenter ?? false}
durationInSeconds={meetingData.durationInSeconds ?? 0}
userJoinedAudio={currentUserJoinedAudio}
userJoinedAudio={currentUserData?.voice?.joined ?? false}
userId={currentUserData.userId ?? ''}
meetingId={meetingData.meetingId ?? ''}
/>

View File

@@ -25,7 +25,6 @@ import useTimeSync from '/imports/ui/core/local-states/useTimeSync';
import RecordingNotify from './notify/component';
import RecordingContainer from '/imports/ui/components/recording/container';
import useDeduplicatedSubscription from '/imports/ui/core/hooks/useDeduplicatedSubscription';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const intlMessages = defineMessages({
notificationRecordingStart: {
@@ -298,11 +297,13 @@ const RecordingIndicatorContainer: React.FC = () => {
const { data: currentUser } = useCurrentUser((user: Partial<User>) => ({
userId: user.userId,
isModerator: user.isModerator,
voice: user.voice
? {
joined: user.voice.joined,
listenOnly: user.voice.listenOnly,
}
: null,
} as Partial<User>));
const { voices: talkingUsers } = useWhoIsTalking();
const currentUserVoice = currentUser?.userId
? talkingUsers[currentUser.userId]
: null;
const { data: currentMeeting } = useMeeting((meeting) => ({
meetingId: meeting.meetingId,
@@ -345,7 +346,7 @@ const RecordingIndicatorContainer: React.FC = () => {
autoStartRecording={meetingRecordingPolicies?.autoStartRecording ?? false}
record={meetingRecordingPolicies?.record ?? false}
recording={meetingRecording?.isRecording ?? false}
micUser={!currentUserVoice?.listenOnly}
micUser={(currentUser?.voice && !currentUser?.voice.listenOnly) ?? false}
isPhone={isMobile}
recordingNotificationEnabled={
(meetingRecording?.startedBy !== currentUser?.userId

View File

@@ -11,9 +11,13 @@ import { User } from '/imports/ui/Types/user';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import { muteUser } from './service';
import useToggleVoice from '../../../audio/audio-graphql/hooks/useToggleVoice';
import TALKING_INDICATOR_SUBSCRIPTION from '/imports/ui/core/graphql/queries/userVoiceSubscription';
import { setTalkingIndicatorList } from '/imports/ui/core/hooks/useTalkingIndicator';
import useDeduplicatedSubscription from '/imports/ui/core/hooks/useDeduplicatedSubscription';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
interface TalkingIndicatorSubscriptionData {
user_voice: Array<Partial<UserVoice>>;
}
const TALKING_INDICATORS_MAX = 8;
@@ -179,13 +183,17 @@ const TalkingIndicatorContainer: React.FC = (() => {
}));
const {
voices: talkingUsersData,
data: talkingIndicatorData,
loading: talkingIndicatorLoading,
error: talkingIndicatorError,
} = useWhoIsTalking({
showTalkingIndicator: true,
limit: TALKING_INDICATORS_MAX,
});
} = useDeduplicatedSubscription<TalkingIndicatorSubscriptionData>(
TALKING_INDICATOR_SUBSCRIPTION,
{
variables: {
limit: TALKING_INDICATORS_MAX,
},
},
);
const {
data: isBreakoutData,
@@ -206,7 +214,7 @@ const TalkingIndicatorContainer: React.FC = (() => {
);
}
const talkingUsers = Object.values(talkingUsersData);
const talkingUsers = talkingIndicatorData?.user_voice ?? [];
const isBreakout = isBreakoutData?.meeting[0]?.isBreakout ?? false;
setTalkingIndicatorList(talkingUsers);
return (

View File

@@ -17,7 +17,6 @@ import normalizeEmojiName from './service';
import { convertRemToPixels } from '/imports/utils/dom-utils';
import { PluginsContext } from '/imports/ui/components/components-data/plugin-context/context';
import { useIsReactionsEnabled } from '/imports/ui/services/features';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
const messages = defineMessages({
moderator: {
@@ -101,8 +100,7 @@ const UserListItem: React.FC<UserListItemProps> = ({ user, lockSettings }) => {
}
const intl = useIntl();
const { voices: talkingUsers } = useWhoIsTalking();
const voiceUser = talkingUsers[user.userId];
const voiceUser = user.voice;
const subs = [];
const LABEL = window.meetingClientSettings.public.user.label;

View File

@@ -8,7 +8,7 @@ import { Layout } from '/imports/ui/components/layout/layoutTypes';
import useSettings from '/imports/ui/services/settings/hooks/useSettings';
import { SETTINGS } from '/imports/ui/services/settings/enums';
import { useStorageKey } from '/imports/ui/services/storage/hooks';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
import useVoiceUsers from '/imports/ui/components/audio/audio-graphql/hooks/useVoiceUsers';
interface VideoListItemContainerProps {
numOfStreams: number;
@@ -54,8 +54,14 @@ const VideoListItemContainer: React.FC<VideoListItemContainerProps> = (props) =>
const amIModerator = currentUserData?.isModerator;
const disabledCams = useStorageKey('disabledCams') || [];
const { voices: talkingUsers } = useWhoIsTalking();
const voiceUser = talkingUsers[userId];
const voiceUsers = useVoiceUsers((v) => ({
muted: v.muted,
listenOnly: v.listenOnly,
talking: v.talking,
joined: v.joined,
userId: v.userId,
}));
const voiceUser = voiceUsers.data?.find((v) => v.userId === userId);
return (
<VideoListItem

View File

@@ -66,6 +66,13 @@ subscription userCurrentSubscription {
cameras {
streamId
}
voice {
joined
muted
spoke
listenOnly
talking
}
}
}
`;

View File

@@ -0,0 +1,26 @@
import { gql } from '@apollo/client';
const TALKING_INDICATOR_SUBSCRIPTION = gql`
subscription talkingIndicatorSubscription($limit: Int!) {
user_voice(
where: { showTalkingIndicator: { _eq: true } }
order_by: [{ startTime: asc_nulls_last }]
limit: $limit
) {
callerName
spoke
talking
floor
startTime
muted
userId
user {
color
name
speechLocale
}
}
}
`;
export default TALKING_INDICATOR_SUBSCRIPTION;

View File

@@ -36,6 +36,13 @@ subscription UserListSubscription($offset: Int!, $limit: Int!) {
clientType
disconnected
loggedOut
voice {
joined
listenOnly
talking
muted
voiceUserId
}
cameras {
streamId
}

View File

@@ -1,33 +0,0 @@
import { gql } from '@apollo/client';
import { UserVoice } from '/imports/ui/Types/userVoice';
export interface WhoIsTalkingResponse {
user_voice: Array<Partial<UserVoice>>;
}
const WHO_IS_TALKING = gql`
subscription WhoIsTalkingSubscription {
user_voice(
order_by: [{ startTime: asc_nulls_last }]
) {
callerName
floor
joined
listenOnly
muted
showTalkingIndicator
spoke
startTime
talking
userId
voiceUserId
user {
color
name
speechLocale
}
}
}
`;
export default WHO_IS_TALKING;

View File

@@ -1,61 +0,0 @@
import WHO_IS_TALKING, { WhoIsTalkingResponse } from '/imports/ui/core/graphql/queries/whoIsTalking';
import logger from '/imports/startup/client/logger';
import { UserVoice } from '/imports/ui/Types/userVoice';
import useDeduplicatedSubscription from './useDeduplicatedSubscription';
import { useMemo } from 'react';
import { makePatchedQuery } from './createUseSubscription';
type UserId = string;
const PATCHED_WHO_IS_TALKING = makePatchedQuery(WHO_IS_TALKING);
const useWhoIsTalking = (filters?: {
limit?: number;
showTalkingIndicator?: boolean;
}) => {
const {
data,
loading,
error,
} = useDeduplicatedSubscription<WhoIsTalkingResponse>(
PATCHED_WHO_IS_TALKING,
);
if (error) {
logger.error({
logCode: 'who_is_talking_sub_error',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
},
}, 'useWhoIsTalking hook failed.');
}
const filteredVoices = useMemo(() => {
const { user_voice = [] } = data || {};
if (filters?.showTalkingIndicator) {
return user_voice.filter((voice) => voice.showTalkingIndicator);
}
return user_voice;
}, [data, filters?.showTalkingIndicator]);
const limitedVoices = useMemo(() => {
if (filters?.limit) {
return filteredVoices.slice(0, filters.limit);
}
return filteredVoices;
}, [filteredVoices, filters?.limit]);
const voices: Record<UserId, Partial<UserVoice>> = {};
limitedVoices.forEach((voice) => {
voices[voice.userId!] = voice;
});
return {
voices,
loading,
error,
};
};
export default useWhoIsTalking;

View File

@@ -23,7 +23,7 @@ import AudioErrors from '/imports/ui/services/audio-manager/error-codes';
import Session from '/imports/ui/services/storage/in-memory';
import GrahqlSubscriptionStore, { stringToHash } from '/imports/ui/core/singletons/subscriptionStore';
import { makePatchedQuery } from '../../core/hooks/createUseSubscription';
import WHO_IS_TALKING from '../../core/graphql/queries/whoIsTalking';
import { VOICE_USERS_SUBSCRIPTION } from '../../components/audio/audio-graphql/queries';
const DEFAULT_AUDIO_BRIDGES_PATH = '/imports/api/audio/client/';
const CALL_STATES = {
@@ -473,7 +473,7 @@ class AudioManager {
// listen to the VoiceUsers changes and update the flag
if (!this.muteHandle) {
const patchedSub = makePatchedQuery(WHO_IS_TALKING);
const patchedSub = makePatchedQuery(VOICE_USERS_SUBSCRIPTION);
const subHash = stringToHash(JSON.stringify({
subscription: patchedSub,
variables: {},