diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/LockSettingsUtil.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/LockSettingsUtil.scala index 69ddbf8e74..a419519b4f 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/LockSettingsUtil.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/LockSettingsUtil.scala @@ -1,9 +1,12 @@ package org.bigbluebutton -import org.bigbluebutton.common2.msgs.{ BbbCommonEnvCoreMsg, BbbCoreEnvelope, BbbCoreHeaderWithMeetingId, MessageTypes, MuteUserInVoiceConfSysMsg, MuteUserInVoiceConfSysMsgBody, Routing } +import org.apache.pekko.actor.ActorContext + +import org.bigbluebutton.common2.msgs.{ BbbCommonEnvCoreMsg, BbbCoreEnvelope, BbbCoreHeaderWithMeetingId, MessageTypes, Routing } import org.bigbluebutton.core.running.{ LiveMeeting, OutMsgRouter } import org.bigbluebutton.core2.{ MeetingStatus2x } import org.bigbluebutton.core.apps.webcam.CameraHdlrHelpers +import org.bigbluebutton.core.apps.voice.VoiceApp import org.bigbluebutton.core.models.{ Roles, Users2x, @@ -16,19 +19,19 @@ import org.bigbluebutton.core.models.{ object LockSettingsUtil { - private def muteUserInVoiceConf(liveMeeting: LiveMeeting, outGW: OutMsgRouter, vu: VoiceUserState, mute: Boolean): Unit = { - val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, liveMeeting.props.meetingProp.intId, vu.intId) - val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing) - val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, liveMeeting.props.meetingProp.intId) - - val body = MuteUserInVoiceConfSysMsgBody(liveMeeting.props.voiceProp.voiceConf, vu.voiceUserId, mute) - val event = MuteUserInVoiceConfSysMsg(header, body) - val msgEvent = BbbCommonEnvCoreMsg(envelope, event) - - outGW.send(msgEvent) + private def muteUserInVoiceConf( + liveMeeting: LiveMeeting, + outGW: OutMsgRouter, + vu: VoiceUserState, mute: Boolean + )(implicit context: ActorContext): Unit = { + VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, mute) } - private def applyMutingOfUsers(disableMic: Boolean, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = { + private def applyMutingOfUsers( + disableMic: Boolean, + liveMeeting: LiveMeeting, + outGW: OutMsgRouter + )(implicit context: ActorContext): Unit = { VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu => Users2x.findWithIntId(liveMeeting.users2x, vu.intId).foreach { user => if (user.role == Roles.VIEWER_ROLE && !vu.listenOnly && user.locked) { @@ -44,12 +47,20 @@ object LockSettingsUtil { } } - def enforceLockSettingsForAllVoiceUsers(liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = { + def enforceLockSettingsForAllVoiceUsers( + liveMeeting: LiveMeeting, + outGW: OutMsgRouter + )(implicit context: ActorContext): Unit = { val permissions = MeetingStatus2x.getPermissions(liveMeeting.status) applyMutingOfUsers(permissions.disableMic, liveMeeting, outGW) } - def enforceLockSettingsForVoiceUser(voiceUser: VoiceUserState, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = { + def enforceLockSettingsForVoiceUser( + voiceUser: VoiceUserState, + liveMeeting: LiveMeeting, + outGW: OutMsgRouter + )(implicit context: ActorContext): Unit = { + val permissions = MeetingStatus2x.getPermissions(liveMeeting.status) if (permissions.disableMic) { Users2x.findWithIntId(liveMeeting.users2x, voiceUser.intId).foreach { user => @@ -65,7 +76,11 @@ object LockSettingsUtil { } } - private def enforceListenOnlyUserIsMuted(intUserId: String, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = 
{ + private def enforceListenOnlyUserIsMuted( + intUserId: String, + liveMeeting: LiveMeeting, + outGW: OutMsgRouter + )(implicit context: ActorContext): Unit = { val voiceUser = VoiceUsers.findWithIntId(liveMeeting.voiceUsers, intUserId) voiceUser.foreach { vu => // Make sure that listen only user is muted. (ralam dec 6, 2019 diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/SystemConfiguration.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/SystemConfiguration.scala index dc6e77927d..4e3fb58437 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/SystemConfiguration.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/SystemConfiguration.scala @@ -46,6 +46,8 @@ trait SystemConfiguration { lazy val ejectRogueVoiceUsers = Try(config.getBoolean("voiceConf.ejectRogueVoiceUsers")).getOrElse(true) lazy val dialInApprovalAudioPath = Try(config.getString("voiceConf.dialInApprovalAudioPath")).getOrElse("ivr/ivr-please_hold_while_party_contacted.wav") lazy val toggleListenOnlyAfterMuteTimer = Try(config.getInt("voiceConf.toggleListenOnlyAfterMuteTimer")).getOrElse(4) + lazy val transparentListenOnlyThreshold = Try(config.getInt("voiceConf.transparentListenOnlyThreshold")).getOrElse(0) + lazy val muteOnStartThreshold = Try(config.getInt("voiceConf.muteOnStartThreshold")).getOrElse(0) lazy val recordingChapterBreakLengthInMinutes = Try(config.getInt("recording.chapterBreakLengthInMinutes")).getOrElse(0) diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/users/MuteUserCmdMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/users/MuteUserCmdMsgHdlr.scala index d29f40d113..e383ce3ba5 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/users/MuteUserCmdMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/users/MuteUserCmdMsgHdlr.scala @@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.users import org.bigbluebutton.common2.msgs.MuteUserCmdMsg import org.bigbluebutton.core.apps.{ PermissionCheck, RightsManagementTrait } +import org.bigbluebutton.core.apps.voice.VoiceApp import org.bigbluebutton.core.models.{ Roles, Users2x, VoiceUsers } import org.bigbluebutton.core.running.{ LiveMeeting, OutMsgRouter } import org.bigbluebutton.core2.MeetingStatus2x @@ -51,13 +52,12 @@ trait MuteUserCmdMsgHdlr extends RightsManagementTrait { } else { if (u.muted != msg.body.mute) { log.info("Send mute user request. 
meetingId=" + meetingId + " userId=" + u.intId + " user=" + u) - val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg( - meetingId, - voiceConf, - u.voiceUserId, + VoiceApp.muteUserInVoiceConf( + liveMeeting, + outGW, + u.intId, msg.body.mute ) - outGW.send(event) } } } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/GetMicrophonePermissionReqMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/GetMicrophonePermissionReqMsgHdlr.scala index ff78e1859f..f1bd35cd8c 100644 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/GetMicrophonePermissionReqMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/GetMicrophonePermissionReqMsgHdlr.scala @@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.voice import org.bigbluebutton.common2.msgs._ import org.bigbluebutton.core.running.{ LiveMeeting, MeetingActor, OutMsgRouter } +import org.bigbluebutton.core2.MeetingStatus2x trait GetMicrophonePermissionReqMsgHdlr { this: MeetingActor => @@ -16,7 +17,8 @@ trait GetMicrophonePermissionReqMsgHdlr { voiceConf: String, userId: String, sfuSessionId: String, - allowed: Boolean + allowed: Boolean, + muteOnStart: Boolean ): Unit = { val routing = Routing.addMsgToClientRouting(MessageTypes.DIRECT, meetingId, userId) val envelope = BbbCoreEnvelope(GetMicrophonePermissionRespMsg.NAME, routing) @@ -26,7 +28,8 @@ trait GetMicrophonePermissionReqMsgHdlr { voiceConf, userId, sfuSessionId, - allowed + allowed, + muteOnStart ) val event = GetMicrophonePermissionRespMsg(header, body) val eventMsg = BbbCommonEnvCoreMsg(envelope, event) @@ -41,13 +44,18 @@ trait GetMicrophonePermissionReqMsgHdlr { msg.body.voiceConf, msg.body.callerIdNum ) + // Lock settings should only define whether the user starts muted or not. + // It must not prevent users from joining audio. + val locked = VoiceHdlrHelpers.isMicrophoneSharingLocked(liveMeeting, msg.body.userId) + val muteOnStart = MeetingStatus2x.isMeetingMuted(liveMeeting.status) || locked broadcastEvent( liveMeeting.props.meetingProp.intId, liveMeeting.props.voiceProp.voiceConf, msg.body.userId, msg.body.sfuSessionId, - allowed + allowed, + muteOnStart ) } } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/ListenOnlyModeToggledInSfuEvtMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/ListenOnlyModeToggledInSfuEvtMsgHdlr.scala index 0b16a39a61..b2e137c1e6 100644 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/ListenOnlyModeToggledInSfuEvtMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/ListenOnlyModeToggledInSfuEvtMsgHdlr.scala @@ -12,11 +12,34 @@ trait ListenOnlyModeToggledInSfuEvtMsgHdlr { def handleListenOnlyModeToggledInSfuEvtMsg(msg: ListenOnlyModeToggledInSfuEvtMsg): Unit = { for { - vu <- VoiceUsers.findWithIntId(liveMeeting.voiceUsers, msg.body.userId) + vu <- VoiceUsers.findWithIntIdAndCallerNum( + liveMeeting.voiceUsers, + msg.body.userId, + msg.body.callerNum + ) } yield { - VoiceApp.holdChannelInVoiceConf( + // Do not execute if the command is asking for the channel to be HELD + // and the channel is already HELD. This is an edge case with the uuid_hold + // command being used through FSESL or fsapi where holding only works via + // the uuid_hold subcommand, which may cause the channel to be the + // opposite of what we want. + // The unhold (uuid_hold off) command is not affected by this, but we don't + // want to send it if the channel is already unheld. 
+ if ((msg.body.enabled && !vu.hold) || !msg.body.enabled) { + VoiceApp.holdChannelInVoiceConf( + liveMeeting, + outGW, + vu.uuid, + msg.body.enabled + ) + } + + // If the channel is already in the desired state, just make sure + // any pending mute or unmute commands are sent. + VoiceApp.handleChannelHoldChanged( liveMeeting, outGW, + msg.body.userId, vu.uuid, msg.body.enabled ) diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/UserLeftVoiceConfEvtMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/UserLeftVoiceConfEvtMsgHdlr.scala index 5bc7ed7818..8875aa7963 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/UserLeftVoiceConfEvtMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/UserLeftVoiceConfEvtMsgHdlr.scala @@ -49,6 +49,10 @@ trait UserLeftVoiceConfEvtMsgHdlr { } yield { VoiceUsers.removeWithIntId(liveMeeting.voiceUsers, liveMeeting.props.meetingProp.intId, user.intId) broadcastEvent(user) + + if (!user.listenOnly) { + VoiceApp.enforceMuteOnStartThreshold(liveMeeting, outGW) + } } if (liveMeeting.props.meetingProp.isBreakout) { diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceApp.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceApp.scala index 780c2968d8..2303c8443c 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceApp.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceApp.scala @@ -133,13 +133,14 @@ object VoiceApp extends SystemConfiguration { liveMeeting, outGW, mutedUser.intId, + mutedUser.callerNum, muted, toggleListenOnlyAfterMuteTimer ) // If the user is muted or unmuted with an unheld channel, broadcast // the event right away. - // If the user is unmuted, but channel is held, we need to wait for the + // If the user is unmuted, but channel is held, we need to wait for the // channel to be active again to broadcast the event. See // VoiceApp.handleChannelHoldChanged for this second case. if (muted || (!muted && !mutedUser.hold)) { @@ -150,7 +151,6 @@ object VoiceApp extends SystemConfiguration { outGW ) } - } } @@ -261,7 +261,7 @@ object VoiceApp extends SystemConfiguration { callingInto: String, hold: Boolean, uuid: String = "unused" - ): Unit = { + )(implicit context: ActorContext): Unit = { def broadcastEvent(voiceUserState: VoiceUserState): Unit = { val routing = Routing.addMsgToClientRouting( @@ -324,10 +324,30 @@ object VoiceApp extends SystemConfiguration { hold, uuid ) + + val prevTransparentLOStatus = VoiceHdlrHelpers.transparentListenOnlyAllowed( + liveMeeting + ) + VoiceUsers.add(liveMeeting.voiceUsers, voiceUserState) UserVoiceDAO.update(voiceUserState) UserDAO.updateVoiceUserJoined(voiceUserState) + val newTransparentLOStatus = VoiceHdlrHelpers.transparentListenOnlyAllowed( + liveMeeting + ) + + if (prevTransparentLOStatus != newTransparentLOStatus) { + // If the transparent listen only mode was activated or deactivated, + // we need to update the listen only mode for all muted duplex users + // in the meeting.
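+ // When the mode is activated their channels are held (treated as listen
+ // only); when it is deactivated the held channels are released again.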
+ handleTransparentLOModeChange( + liveMeeting, + outGW, + newTransparentLOStatus + ) + } + broadcastEvent(voiceUserState) if (liveMeeting.props.meetingProp.isBreakout) { @@ -337,16 +357,19 @@ object VoiceApp extends SystemConfiguration { ) } - // if the meeting is muted tell freeswitch to mute the new person - if (!isListenOnly - && MeetingStatus2x.isMeetingMuted(liveMeeting.status)) { - val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg( - liveMeeting.props.meetingProp.intId, - voiceConf, - voiceUserId, - true - ) - outGW.send(event) + if (!isListenOnly) { + enforceMuteOnStartThreshold(liveMeeting, outGW) + + // if the meeting is muted tell freeswitch to mute the new person + if (MeetingStatus2x.isMeetingMuted(liveMeeting.status)) { + val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg( + liveMeeting.props.meetingProp.intId, + voiceConf, + voiceUserId, + true + ) + outGW.send(event) + } } // Make sure lock settings are in effect. (ralam dec 6, 2019) @@ -395,6 +418,10 @@ object VoiceApp extends SystemConfiguration { } yield { VoiceUsers.removeWithIntId(liveMeeting.voiceUsers, user.meetingId, user.intId) broadcastEvent(user) + + if (!user.listenOnly) { + enforceMuteOnStartThreshold(liveMeeting, outGW) + } } if (liveMeeting.props.meetingProp.isBreakout) { @@ -405,6 +432,43 @@ object VoiceApp extends SystemConfiguration { } } + // Once #muteOnStartThreshold number of voice users is hit, we force + // meetingMute on MeetingStatus2x and broadcast MeetingMutedEvtMsg to clients. + // Otherwise, we broadcast MeetingMutedEvtMsg with the original muteOnStart + // muteOnStartThreshold = 0 means no threshold (disabled). + def enforceMuteOnStartThreshold( + liveMeeting: LiveMeeting, + outGW: OutMsgRouter + ): Unit = { + val originalMuteOnStart = liveMeeting.props.voiceProp.muteOnStart + + if (muteOnStartThreshold == 0) { + return + } + + if (VoiceHdlrHelpers.muteOnStartThresholdReached(liveMeeting)) { + if (!MeetingStatus2x.isMeetingMuted(liveMeeting.status)) { + MeetingStatus2x.muteMeeting(liveMeeting.status) + val event = MsgBuilder.buildMeetingMutedEvtMsg( + liveMeeting.props.meetingProp.intId, + SystemUser.ID, + true, + SystemUser.ID + ) + outGW.send(event) + } + } else if (MeetingStatus2x.isMeetingMuted(liveMeeting.status) != originalMuteOnStart) { + MeetingStatus2x.setMeetingMuted(liveMeeting.status, originalMuteOnStart) + val event = MsgBuilder.buildMeetingMutedEvtMsg( + liveMeeting.props.meetingProp.intId, + SystemUser.ID, + originalMuteOnStart, + SystemUser.ID + ) + outGW.send(event) + } + } + /** Toggle audio for the given user in voice conference. 
* * We first stop the current audio being played, preventing the playback @@ -476,27 +540,62 @@ object VoiceApp extends SystemConfiguration { } } + def handleTransparentLOModeChange( + liveMeeting: LiveMeeting, + outGW: OutMsgRouter, + allowed: Boolean, + )(implicit context: ActorContext): Unit = { + VoiceUsers.findAllMutedVoiceUsers(liveMeeting.voiceUsers) foreach { vu => + if (allowed) { + toggleListenOnlyMode( + liveMeeting, + outGW, + vu.intId, + vu.callerNum, + vu.muted + ) + } else { + toggleListenOnlyMode( + liveMeeting, + outGW, + vu.intId, + vu.callerNum, + false + ) + } + } + } + def toggleListenOnlyMode( liveMeeting: LiveMeeting, outGW: OutMsgRouter, userId: String, + callerNum: String, enabled: Boolean, delay: Int = 0 )(implicit context: ActorContext): Unit = { implicit def executionContext = context.system.dispatcher + val allowed = VoiceHdlrHelpers.transparentListenOnlyAllowed(liveMeeting) + // Guarantee there are no other tasks for this channel + removeToggleListenOnlyTask(userId) + + // If the meeting has not yet hit the minimum number of duplex channels + // for transparent listen only to be enabled, we don't need to do anything + if (!allowed && enabled) { + return + } + def broacastEvent(): Unit = { val event = MsgBuilder.buildToggleListenOnlyModeSysMsg( liveMeeting.props.meetingProp.intId, liveMeeting.props.voiceProp.voiceConf, userId, + callerNum, enabled ) outGW.send(event) } - // Guarantee there are no other tasks for this channel - removeToggleListenOnlyTask(userId) - if (enabled && delay > 0) { // If we are enabling listen only mode, we wait a bit before actually // dispatching the command - the idea is that recently muted users @@ -547,13 +646,15 @@ object VoiceApp extends SystemConfiguration { hold ) match { case Some(vu) => - // Mute vs hold state mismatch, enforce hold state again. - // Mute state is the predominant one here. + // Mute vs hold state mismatch. Enforce it if the user is unmuted, + // but hold is active, to avoid the user being unable to talk when + // the channel is active again.
+ if (!vu.muted && vu.hold) { toggleListenOnlyMode( liveMeeting, outGW, intId, + vu.callerNum, vu.muted ) } @@ -570,4 +671,48 @@ object VoiceApp extends SystemConfiguration { case _ => } } + + def muteUserInVoiceConf( + liveMeeting: LiveMeeting, + outGW: OutMsgRouter, + userId: String, + muted: Boolean + )(implicit context: ActorContext): Unit = { + for { + u <- VoiceUsers.findWithIntId( + liveMeeting.voiceUsers, + userId + ) + } yield { + if (u.muted != muted) { + val muteEvent = MsgBuilder.buildMuteUserInVoiceConfSysMsg( + liveMeeting.props.meetingProp.intId, + liveMeeting.props.voiceProp.voiceConf, + u.voiceUserId, + muted + ) + + // If we're unmuting, trigger a channel unhold -> toggle listen only + // mode -> unmute + if (!muted) { + holdChannelInVoiceConf( + liveMeeting, + outGW, + u.uuid, + muted + ) + toggleListenOnlyMode( + liveMeeting, + outGW, + u.intId, + u.callerNum, + muted, + 0 + ) + } + + outGW.send(muteEvent) + } + } + } } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceHdlrHelpers.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceHdlrHelpers.scala index cc9e59310f..f79fa1ac29 100644 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceHdlrHelpers.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/apps/voice/VoiceHdlrHelpers.scala @@ -32,10 +32,6 @@ object VoiceHdlrHelpers extends SystemConfiguration { ): Boolean = { Users2x.findWithIntId(liveMeeting.users2x, userId) match { case Some(user) => { - val microphoneSharingLocked = LockSettingsUtil.isMicrophoneSharingLocked( - user, - liveMeeting - ) val isCallerBanned = VoiceUsers.isCallerBanned( callerIdNum, liveMeeting.voiceUsers @@ -43,11 +39,42 @@ object VoiceHdlrHelpers extends SystemConfiguration { (applyPermissionCheck && !isCallerBanned && - !microphoneSharingLocked && liveMeeting.props.meetingProp.intId == meetingId && liveMeeting.props.voiceProp.voiceConf == voiceConf) } case _ => false } } + + def isMicrophoneSharingLocked( + liveMeeting: LiveMeeting, + userId: String + ): Boolean = { + Users2x.findWithIntId(liveMeeting.users2x, userId) match { + case Some(user) => LockSettingsUtil.isMicrophoneSharingLocked( + user, + liveMeeting + ) && applyPermissionCheck + case _ => false + } + } + + def transparentListenOnlyAllowed(liveMeeting: LiveMeeting): Boolean = { + // Transparent listen only meeting-wide activation threshold. + // Threshold is the number of muted duplex audio channels in a meeting. + // 0 means no threshold, all users are subject to it + val mutedDuplexChannels = VoiceUsers.findAllMutedVoiceUsers(liveMeeting.voiceUsers).length + val threshold = transparentListenOnlyThreshold + + (threshold == 0) || (mutedDuplexChannels >= threshold) + } + + def muteOnStartThresholdReached(liveMeeting: LiveMeeting): Boolean = { + // Mute on start meeting-wide activation threshold. + // Threshold is the number of users in voice. + // muteOnStartThreshold = 0 means no threshold (disabled). 
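+ // Example (assuming muteOnStartThreshold = 10): when the 10th user joins
+ // voice the meeting is forcibly muted; once the count drops below 10,
+ // VoiceApp.enforceMuteOnStartThreshold restores the original muteOnStart value.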
+ val usersInVoiceConf = VoiceUsers.usersInVoiceConf(liveMeeting.voiceUsers) + + muteOnStartThreshold > 0 && usersInVoiceConf >= muteOnStartThreshold + } } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/models/VoiceUsers.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/models/VoiceUsers.scala index 848a597aea..6202b0ea98 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/models/VoiceUsers.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core/models/VoiceUsers.scala @@ -16,15 +16,30 @@ object VoiceUsers { users.toVector.find(u => u.uuid == uuid && u.intId == intId) } + def findWithIntIdAndCallerNum(users: VoiceUsers, intId: String, callerNum: String): Option[VoiceUserState] = { + // prlanzarin: This is a hack to allow for partial matching of callerNums. + // This is needed because the callerNums are incorrectly generated by + // FREESWITCH's ESL events when special characters are in place. + // e.g.: w_etc_0-bbbID-User;Semi (notice the semicolon) will be generated by + // FS as w_etc_0-bbbID-User (everything after the semicolon is ignored). + // We should review callerNum generation in the future as well as stop + // relying on it for session matching (use UUIDs or client session numbers instead). + users.toVector.find(u => u.intId == intId && + (u.callerNum.startsWith(callerNum) || callerNum.startsWith(u.callerNum))) + } + def findAll(users: VoiceUsers): Vector[VoiceUserState] = users.toVector def findAllNonListenOnlyVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.listenOnly == false) def findAllListenOnlyVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.listenOnly == true) def findAllFreeswitchCallers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.calledInto == "freeswitch") def findAllKurentoCallers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.calledInto == "kms") + def findAllMutedVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.muted == true && u.listenOnly == false) def findAllBannedCallers(users: VoiceUsers): Vector[VoiceUserState] = users.bannedUsers.values.toVector + def usersInVoiceConf(users: VoiceUsers): Int = users.size + def isCallerBanned(callerIdNum: String, users: VoiceUsers): Boolean = { users.bannedUsers.contains(callerIdNum) } @@ -135,6 +150,8 @@ class VoiceUsers { private def toVector: Vector[VoiceUserState] = users.values.toVector + private def size: Int = users.size + private def ban(user: VoiceUserState): VoiceUserState = { bannedUsers += user.callerNum -> user user diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/AnalyticsActor.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/AnalyticsActor.scala index 36fe7c2e68..c8a3fdd6f9 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/AnalyticsActor.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/AnalyticsActor.scala @@ -100,6 +100,7 @@ class AnalyticsActor(val includeChat: Boolean) extends Actor with ActorLogging { case m: ChannelHoldChangedVoiceConfEvtMsg => logMessage(msg) case m: ToggleListenOnlyModeSysMsg => logMessage(msg) case m: ListenOnlyModeToggledInSfuEvtMsg => logMessage(msg) + case m: MeetingMutedEvtMsg => logMessage(msg) // Breakout case m: BreakoutRoomEndedEvtMsg => logMessage(msg) diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/MeetingStatus2x.scala 
b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/MeetingStatus2x.scala index 8b59f47e63..03ff76edea 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/MeetingStatus2x.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/MeetingStatus2x.scala @@ -37,6 +37,7 @@ object MeetingStatus2x { def getMeetingExtensionProp(status: MeetingStatus2x): MeetingExtensionProp = status.extension def muteMeeting(status: MeetingStatus2x) = status.meetingMuted = true def unmuteMeeting(status: MeetingStatus2x) = status.meetingMuted = false + def setMeetingMuted(status: MeetingStatus2x, value: Boolean) = status.meetingMuted = value def isMeetingMuted(status: MeetingStatus2x): Boolean = status.meetingMuted def recordingStarted(status: MeetingStatus2x) = status.recording = true def recordingStopped(status: MeetingStatus2x) = status.recording = false diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteAllExceptPresentersCmdMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteAllExceptPresentersCmdMsgHdlr.scala index 79a25310ca..95652eb428 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteAllExceptPresentersCmdMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteAllExceptPresentersCmdMsgHdlr.scala @@ -7,6 +7,7 @@ import org.bigbluebutton.core2.MeetingStatus2x import org.bigbluebutton.core.apps.{ PermissionCheck, RightsManagementTrait } import org.bigbluebutton.core.db.NotificationDAO import org.bigbluebutton.core2.message.senders.MsgBuilder +import org.bigbluebutton.core.apps.voice.VoiceApp trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait { this: MeetingActor => @@ -49,7 +50,7 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait { } val muted = MeetingStatus2x.isMeetingMuted(liveMeeting.status) - val event = build(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy) + val event = MsgBuilder.buildMeetingMutedEvtMsg(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy) outGW.send(event) @@ -60,8 +61,8 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait { VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu => if (!vu.listenOnly) { Users2x.findWithIntId(liveMeeting.users2x, vu.intId) match { - case Some(u) => if (!u.presenter) muteUserInVoiceConf(vu, muted) - case None => muteUserInVoiceConf(vu, muted) + case Some(u) => if (!u.presenter) VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted) + case None => VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted) } } } @@ -74,28 +75,4 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait { Users2x.findNotPresenters(liveMeeting.users2x) } - def build(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = { - val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId) - val envelope = BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, routing) - val header = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId) - - val body = MeetingMutedEvtMsgBody(muted, mutedBy) - val event = MeetingMutedEvtMsg(header, body) - - BbbCommonEnvCoreMsg(envelope, event) - } - - def muteUserInVoiceConf(vu: VoiceUserState, mute: Boolean): Unit = { - val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, props.meetingProp.intId, vu.intId) - val envelope = 
BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing) - val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, props.meetingProp.intId) - - val body = MuteUserInVoiceConfSysMsgBody(props.voiceProp.voiceConf, vu.voiceUserId, mute) - val event = MuteUserInVoiceConfSysMsg(header, body) - val msgEvent = BbbCommonEnvCoreMsg(envelope, event) - - outGW.send(msgEvent) - - } - } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteMeetingCmdMsgHdlr.scala b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteMeetingCmdMsgHdlr.scala index 2becc88c59..4ca7e31e20 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteMeetingCmdMsgHdlr.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/handlers/MuteMeetingCmdMsgHdlr.scala @@ -7,6 +7,7 @@ import org.bigbluebutton.core.models.{ VoiceUserState, VoiceUsers } import org.bigbluebutton.core.running.{ MeetingActor, OutMsgRouter } import org.bigbluebutton.core2.MeetingStatus2x import org.bigbluebutton.core2.message.senders.MsgBuilder +import org.bigbluebutton.core.apps.voice.VoiceApp trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait { this: MeetingActor => @@ -20,30 +21,6 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait { val reason = "No permission to mute meeting." PermissionCheck.ejectUserForFailedPermission(meetingId, msg.header.userId, reason, outGW, liveMeeting) } else { - def build(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = { - val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId) - val envelope = BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, routing) - val header = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId) - - val body = MeetingMutedEvtMsgBody(muted, mutedBy) - val event = MeetingMutedEvtMsg(header, body) - - BbbCommonEnvCoreMsg(envelope, event) - } - - def muteUserInVoiceConf(vu: VoiceUserState, mute: Boolean): Unit = { - val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, props.meetingProp.intId, vu.intId) - val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing) - val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, props.meetingProp.intId) - - val body = MuteUserInVoiceConfSysMsgBody(props.voiceProp.voiceConf, vu.voiceUserId, mute) - val event = MuteUserInVoiceConfSysMsg(header, body) - val msgEvent = BbbCommonEnvCoreMsg(envelope, event) - - outGW.send(msgEvent) - - } - if (msg.body.mute != MeetingStatus2x.isMeetingMuted(liveMeeting.status)) { if (msg.body.mute) { val notifyEvent = MsgBuilder.buildNotifyAllInMeetingEvtMsg( @@ -74,7 +51,12 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait { } val muted = MeetingStatus2x.isMeetingMuted(liveMeeting.status) - val meetingMutedEvent = build(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy) + val meetingMutedEvent = MsgBuilder.buildMeetingMutedEvtMsg( + props.meetingProp.intId, + msg.body.mutedBy, + muted, + msg.body.mutedBy + ) outGW.send(meetingMutedEvent) @@ -82,7 +64,7 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait { if (muted) { VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu => if (!vu.listenOnly) { - muteUserInVoiceConf(vu, muted) + VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted) } } } diff --git a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/senders/MsgBuilder.scala 
b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/senders/MsgBuilder.scala index dcb5176c43..1dc092ea89 100755 --- a/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/senders/MsgBuilder.scala +++ b/akka-bbb-apps/src/main/scala/org/bigbluebutton/core2/message/senders/MsgBuilder.scala @@ -282,6 +282,17 @@ object MsgBuilder { BbbCommonEnvCoreMsg(envelope, event) } + def buildMeetingMutedEvtMsg(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = { + val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId) + val envelope = BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, routing) + val header = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId) + + val body = MeetingMutedEvtMsgBody(muted, mutedBy) + val event = MeetingMutedEvtMsg(header, body) + + BbbCommonEnvCoreMsg(envelope, event) + } + def buildMuteUserInVoiceConfSysMsg(meetingId: String, voiceConf: String, voiceUserId: String, mute: Boolean): BbbCommonEnvCoreMsg = { val routing = collection.immutable.HashMap("sender" -> "bbb-apps-akka") val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing) @@ -574,11 +585,12 @@ object MsgBuilder { meetingId: String, voiceConf: String, userId: String, + callerNum: String, enabled: Boolean ): BbbCommonEnvCoreMsg = { val routing = collection.immutable.HashMap("sender" -> "bbb-apps-akka") val envelope = BbbCoreEnvelope(ToggleListenOnlyModeSysMsg.NAME, routing) - val body = ToggleListenOnlyModeSysMsgBody(voiceConf, userId, enabled) + val body = ToggleListenOnlyModeSysMsgBody(voiceConf, userId, callerNum, enabled) val header = BbbCoreHeaderWithMeetingId(ToggleListenOnlyModeSysMsg.NAME, meetingId) val event = ToggleListenOnlyModeSysMsg(header, body) diff --git a/akka-bbb-apps/src/universal/conf/application.conf b/akka-bbb-apps/src/universal/conf/application.conf index 7bc24d5811..aa56296cec 100755 --- a/akka-bbb-apps/src/universal/conf/application.conf +++ b/akka-bbb-apps/src/universal/conf/application.conf @@ -118,6 +118,17 @@ voiceConf { # Time (seconds) to wait before requesting an audio channel hold after # muting a user. Used in the experimental, transparent listen only mode. toggleListenOnlyAfterMuteTimer = 4 + + # Transparent listen only meeting-wide activation threshold. + # Threshold is the number of muted duplex audio channels in a meeting. + # 0 = disabled + transparentListenOnlyThreshold = 0 + + # muteOnStartThreshold: forces muteOnStart=true for a meeting when the number + # of audio participants reaches the specified threshold. + # Overrides any existing muteOnStart directive (bbb-web, API and the client). + # 0 = disabled. 
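+ # Example: muteOnStartThreshold = 30 forcibly mutes the meeting once 30
+ # participants are in audio, and restores the original muteOnStart value
+ # when the participant count drops below 30 again.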
+ muteOnStartThreshold = 0 } recording { diff --git a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/FreeswitchConferenceEventListener.java b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/FreeswitchConferenceEventListener.java index 076d82cc54..b136b0fd57 100755 --- a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/FreeswitchConferenceEventListener.java +++ b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/FreeswitchConferenceEventListener.java @@ -109,6 +109,7 @@ public class FreeswitchConferenceEventListener implements ConferenceEventListene evt.callSession, evt.clientSession, evt.userId, + evt.getVoiceUserId(), evt.callerName, evt.callState, evt.origCallerIdName, diff --git a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/IVoiceConferenceService.java b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/IVoiceConferenceService.java index 5ca5f7b4c6..72080f8732 100755 --- a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/IVoiceConferenceService.java +++ b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/IVoiceConferenceService.java @@ -59,6 +59,7 @@ public interface IVoiceConferenceService { String callSession, String clientSession, String userId, + String voiceUserId, String callerName, String callState, String origCallerIdName, diff --git a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/events/VoiceCallStateEvent.java b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/events/VoiceCallStateEvent.java index 9d0a9277e6..f48b3576fd 100755 --- a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/events/VoiceCallStateEvent.java +++ b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/events/VoiceCallStateEvent.java @@ -4,6 +4,8 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent { public final String callSession; public final String clientSession; public final String userId; + // AKA mod_conference memberId + public final String voiceUserId; public final String callerName; public final String callState; public final String origCallerIdName; @@ -14,6 +16,7 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent { String callSession, String clientSession, String userId, + String voiceUserId, String callerName, String callState, String origCallerIdName, @@ -22,9 +25,14 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent { this.callSession = callSession; this.clientSession = clientSession; this.userId = userId; + this.voiceUserId = voiceUserId; this.callerName = callerName; this.callState = callState; this.origCallerIdName = origCallerIdName; this.origCalledDest = origCalledDest; } + + public String getVoiceUserId() { + return voiceUserId; + } } diff --git a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/freeswitch/ESLEventListener.java b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/freeswitch/ESLEventListener.java index a67e6e5768..0549d6babf 100755 --- a/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/freeswitch/ESLEventListener.java +++ b/akka-bbb-fsesl/src/main/java/org/bigbluebutton/freeswitch/voice/freeswitch/ESLEventListener.java @@ -84,6 +84,7 @@ public class ESLEventListener implements IEslEventListener { String origCallerIdName = headers.get("Caller-Caller-ID-Name"); String origCallerDestNumber = headers.get("Caller-Destination-Number"); String clientSession = "0"; + String memberIdStr = memberId != null ? 
memberId.toString() : ""; Matcher matcher = CALLERNAME_PATTERN.matcher(callerIdName); Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(callerIdName); @@ -106,6 +107,7 @@ public class ESLEventListener implements IEslEventListener { coreuuid, clientSession, voiceUserId, + memberIdStr, callerIdName, callState, origCallerIdName, @@ -281,6 +283,7 @@ public class ESLEventListener implements IEslEventListener { String varvBridge = (eventHeaders.get("variable_vbridge") == null) ? "" : eventHeaders.get("variable_vbridge"); if ("echo".equalsIgnoreCase(application) && !varvBridge.isEmpty()) { + Integer memberId = this.getMemberId(eventHeaders); String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name"); String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number"); String coreuuid = eventHeaders.get("Core-UUID"); @@ -291,6 +294,7 @@ public class ESLEventListener implements IEslEventListener { String callerName = origCallerIdName; String clientSession = "0"; String callState = "IN_ECHO_TEST"; + String memberIdStr = memberId != null ? memberId.toString() : ""; Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName); Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName); @@ -314,6 +318,7 @@ public class ESLEventListener implements IEslEventListener { coreuuid, clientSession, voiceUserId, + memberIdStr, callerName, callState, origCallerIdName, @@ -321,6 +326,7 @@ public class ESLEventListener implements IEslEventListener { conferenceEventListener.handleConferenceEvent(csEvent); } else if ("RINGING".equalsIgnoreCase(channelCallState) && !varvBridge.isEmpty()) { + Integer memberId = this.getMemberId(eventHeaders); String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name"); String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number"); String coreuuid = eventHeaders.get("Core-UUID"); @@ -330,6 +336,7 @@ public class ESLEventListener implements IEslEventListener { String callerName = origCallerIdName; String clientSession = "0"; String callState = "CALL_STARTED"; + String memberIdStr = memberId != null ? memberId.toString() : ""; Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName); Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName); @@ -353,6 +360,7 @@ public class ESLEventListener implements IEslEventListener { coreuuid, clientSession, voiceUserId, + memberIdStr, callerName, callState, origCallerIdName, @@ -365,6 +373,7 @@ public class ESLEventListener implements IEslEventListener { String channelState = (eventHeaders.get("Channel-State") == null) ? "" : eventHeaders.get("Channel-State"); if ("HANGUP".equalsIgnoreCase(channelCallState) && "CS_DESTROY".equalsIgnoreCase(channelState)) { + Integer memberId = this.getMemberId(eventHeaders); String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name"); String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number"); String coreuuid = eventHeaders.get("Core-UUID"); @@ -374,6 +383,7 @@ public class ESLEventListener implements IEslEventListener { String callerName = origCallerIdName; String clientSession = "0"; String callState = "CALL_ENDED"; + String memberIdStr = memberId != null ? 
memberId.toString() : ""; Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName); Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName); @@ -397,6 +407,7 @@ public class ESLEventListener implements IEslEventListener { coreuuid, clientSession, voiceUserId, + memberIdStr, callerName, callState, origCallerIdName, @@ -405,6 +416,7 @@ public class ESLEventListener implements IEslEventListener { conferenceEventListener.handleConferenceEvent(csEvent); } else if ("RINGING".equalsIgnoreCase(channelCallState) && "CS_EXECUTE".equalsIgnoreCase(channelState)) { + Integer memberId = this.getMemberId(eventHeaders); String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name"); String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number"); String coreuuid = eventHeaders.get("Core-UUID"); @@ -414,6 +426,7 @@ public class ESLEventListener implements IEslEventListener { String callerName = origCallerIdName; String clientSession = "0"; String callState = "CALL_STARTED"; + String memberIdStr = memberId != null ? memberId.toString() : ""; Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName); Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName); @@ -437,6 +450,7 @@ public class ESLEventListener implements IEslEventListener { coreuuid, clientSession, voiceUserId, + memberIdStr, callerName, callState, origCallerIdName, diff --git a/akka-bbb-fsesl/src/main/scala/org/bigbluebutton/freeswitch/VoiceConferenceService.scala b/akka-bbb-fsesl/src/main/scala/org/bigbluebutton/freeswitch/VoiceConferenceService.scala index 4e520a82dc..4c73bedcf5 100755 --- a/akka-bbb-fsesl/src/main/scala/org/bigbluebutton/freeswitch/VoiceConferenceService.scala +++ b/akka-bbb-fsesl/src/main/scala/org/bigbluebutton/freeswitch/VoiceConferenceService.scala @@ -229,6 +229,7 @@ class VoiceConferenceService(healthz: HealthzService, callSession: String, clientSession: String, userId: String, + voiceUserId: String, callerName: String, callState: String, origCallerIdName: String, @@ -240,6 +241,7 @@ class VoiceConferenceService(healthz: HealthzService, callSession = callSession, clientSession = clientSession, userId = userId, + voiceUserId = voiceUserId, callerName = callerName, callState = callState, origCallerIdName = origCallerIdName, diff --git a/bbb-common-message/src/main/scala/org/bigbluebutton/common2/msgs/VoiceConfMsgs.scala b/bbb-common-message/src/main/scala/org/bigbluebutton/common2/msgs/VoiceConfMsgs.scala index 1100597274..7ae1c2cc44 100755 --- a/bbb-common-message/src/main/scala/org/bigbluebutton/common2/msgs/VoiceConfMsgs.scala +++ b/bbb-common-message/src/main/scala/org/bigbluebutton/common2/msgs/VoiceConfMsgs.scala @@ -528,6 +528,7 @@ case class VoiceConfCallStateEvtMsgBody( callSession: String, clientSession: String, userId: String, + voiceUserId: String, callerName: String, callState: String, origCallerIdName: String, @@ -613,7 +614,8 @@ case class GetMicrophonePermissionRespMsgBody( voiceConf: String, userId: String, sfuSessionId: String, - allowed: Boolean + allowed: Boolean, + muteOnStart: Boolean ) /** @@ -658,6 +660,7 @@ case class ToggleListenOnlyModeSysMsg( case class ToggleListenOnlyModeSysMsgBody( voiceConf: String, userId: String, + callerNum: String, enabled: Boolean ) @@ -674,5 +677,6 @@ case class ListenOnlyModeToggledInSfuEvtMsgBody( meetingId: String, voiceConf: String, userId: String, + callerNum: String, enabled: Boolean ) diff --git 
a/bbb-voice-conference/config/freeswitch/conf/dialplan/default/bbb_conference.xml b/bbb-voice-conference/config/freeswitch/conf/dialplan/default/bbb_conference.xml index bf223c33d7..428a4a85dc 100644 --- a/bbb-voice-conference/config/freeswitch/conf/dialplan/default/bbb_conference.xml +++ b/bbb-voice-conference/config/freeswitch/conf/dialplan/default/bbb_conference.xml @@ -8,7 +8,15 @@ - + + + + + + + + + diff --git a/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu.xml b/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu.xml index 7f6a6e08f4..00bd7ff61b 100644 --- a/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu.xml +++ b/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu.xml @@ -1,6 +1,6 @@ - + diff --git a/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu_muted.xml b/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu_muted.xml new file mode 100644 index 0000000000..c3586353f0 --- /dev/null +++ b/bbb-voice-conference/config/freeswitch/conf/dialplan/public/bbb_sfu_muted.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/bbb-webrtc-sfu.placeholder.sh b/bbb-webrtc-sfu.placeholder.sh index c73b069e4f..ed899aacbe 100755 --- a/bbb-webrtc-sfu.placeholder.sh +++ b/bbb-webrtc-sfu.placeholder.sh @@ -1 +1 @@ -git clone --branch v2.14.0-beta.3 --depth 1 https://github.com/bigbluebutton/bbb-webrtc-sfu bbb-webrtc-sfu +git clone --branch v2.14.0 --depth 1 https://github.com/bigbluebutton/bbb-webrtc-sfu bbb-webrtc-sfu diff --git a/bigbluebutton-html5/client/main.html b/bigbluebutton-html5/client/main.html index 9e707aa5d2..125313de36 100755 --- a/bigbluebutton-html5/client/main.html +++ b/bigbluebutton-html5/client/main.html @@ -171,7 +171,7 @@ with BigBlueButton; if not, see . - + +
diff --git a/bigbluebutton-html5/imports/api/audio/client/bridge/base.js b/bigbluebutton-html5/imports/api/audio/client/bridge/base.js index 97008cac87..0b40e5d88e 100755 --- a/bigbluebutton-html5/imports/api/audio/client/bridge/base.js +++ b/bigbluebutton-html5/imports/api/audio/client/bridge/base.js @@ -61,7 +61,11 @@ export default class BaseAudioBridge { get inputDeviceId () { return this._inputDeviceId; + } + /* eslint-disable class-methods-use-this */ + supportsTransparentListenOnly() { + return false; } /** @@ -78,6 +82,20 @@ export default class BaseAudioBridge { let backupStream; try { + // Remove all input audio tracks from the stream + // This will effectively mute the microphone + // and keep the audio output working + if (deviceId === 'listen-only') { + const stream = this.inputStream; + if (stream) { + stream.getAudioTracks().forEach((track) => { + track.stop(); + stream.removeTrack(track); + }); + } + return stream; + } + const constraints = { audio: getAudioConstraints({ deviceId }), }; diff --git a/bigbluebutton-html5/imports/api/audio/client/bridge/service.js b/bigbluebutton-html5/imports/api/audio/client/bridge/service.js index cddc3f7d97..980005722d 100644 --- a/bigbluebutton-html5/imports/api/audio/client/bridge/service.js +++ b/bigbluebutton-html5/imports/api/audio/client/bridge/service.js @@ -36,10 +36,25 @@ const getCurrentAudioSinkId = () => { return audioElement?.sinkId || DEFAULT_OUTPUT_DEVICE_ID; }; -const getStoredAudioInputDeviceId = () => getStorageSingletonInstance().getItem(INPUT_DEVICE_ID_KEY); -const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance().getItem(OUTPUT_DEVICE_ID_KEY); -const storeAudioInputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId); -const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(OUTPUT_DEVICE_ID_KEY, deviceId); +const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance() + .getItem(OUTPUT_DEVICE_ID_KEY); +const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance() + .setItem(OUTPUT_DEVICE_ID_KEY, deviceId); +const getStoredAudioInputDeviceId = () => getStorageSingletonInstance() + .getItem(INPUT_DEVICE_ID_KEY); +const storeAudioInputDeviceId = (deviceId) => { + if (deviceId === 'listen-only') { + // Do not store listen-only "devices" and remove any stored device + // So it starts from scratch next time. 
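+ // ('listen-only' is the sentinel deviceId used when audio is joined without
+ // a microphone, so it must not be restored as a real input device later.)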
+ getStorageSingletonInstance().removeItem(INPUT_DEVICE_ID_KEY); + + return false; + } + + getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId); + + return true; +}; /** * Filter constraints set in audioDeviceConstraints, based on diff --git a/bigbluebutton-html5/imports/api/audio/client/bridge/sfu-audio-bridge.js b/bigbluebutton-html5/imports/api/audio/client/bridge/sfu-audio-bridge.js index 6b213131fb..82ac15ce58 100755 --- a/bigbluebutton-html5/imports/api/audio/client/bridge/sfu-audio-bridge.js +++ b/bigbluebutton-html5/imports/api/audio/client/bridge/sfu-audio-bridge.js @@ -20,6 +20,7 @@ import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils'; const SENDRECV_ROLE = 'sendrecv'; const RECV_ROLE = 'recv'; +const PASSIVE_SENDRECV_ROLE = 'passive-sendrecv'; const BRIDGE_NAME = 'fullaudio'; const IS_CHROME = browserInfo.isChrome; @@ -81,7 +82,7 @@ export default class SFUAudioBridge extends BaseAudioBridge { const MEDIA = SETTINGS.public.media; const LISTEN_ONLY_OFFERING = MEDIA.listenOnlyOffering; const FULLAUDIO_OFFERING = MEDIA.fullAudioOffering; - return isListenOnly + return isListenOnly && !isTransparentListenOnlyEnabled() ? LISTEN_ONLY_OFFERING : (!isTransparentListenOnlyEnabled() && FULLAUDIO_OFFERING); } @@ -95,12 +96,17 @@ export default class SFUAudioBridge extends BaseAudioBridge { this.reconnecting = false; this.iceServers = []; this.bridgeName = BRIDGE_NAME; + this.isListenOnly = false; + this.bypassGUM = false; + this.supportsTransparentListenOnly = isTransparentListenOnlyEnabled; this.handleTermination = this.handleTermination.bind(this); } get inputStream() { - if (this.broker) { + // Only return the stream if the broker is active and the role isn't recvonly + // Input stream == actual input-capturing stream, not the one that's being played + if (this.broker && this.role !== RECV_ROLE) { return this.broker.getLocalStream(); } @@ -111,6 +117,18 @@ export default class SFUAudioBridge extends BaseAudioBridge { return this.broker?.role; } + getBrokerRole({ hasInputStream }) { + if (this.isListenOnly) { + return isTransparentListenOnlyEnabled() + ? PASSIVE_SENDRECV_ROLE + : RECV_ROLE; + } + + if (this.bypassGUM && !hasInputStream) return PASSIVE_SENDRECV_ROLE; + + return SENDRECV_ROLE; + } + setInputStream(stream) { if (this.broker == null) return null; @@ -326,6 +344,7 @@ export default class SFUAudioBridge extends BaseAudioBridge { extension, inputStream, forceRelay: _forceRelay = false, + bypassGUM = false, } = options; const SETTINGS = window.meetingClientSettings; @@ -349,6 +368,10 @@ export default class SFUAudioBridge extends BaseAudioBridge { try { this.inEchoTest = !!extension; this.isListenOnly = isListenOnly; + this.bypassGUM = bypassGUM; + const role = this.getBrokerRole({ + hasInputStream: !!inputStream, + }); const brokerOptions = { clientSessionNumber: getAudioSessionNumber(), @@ -365,11 +388,12 @@ export default class SFUAudioBridge extends BaseAudioBridge { mediaStreamFactory: this.mediaStreamFactory, gatheringTimeout: GATHERING_TIMEOUT, transparentListenOnly: isTransparentListenOnlyEnabled(), + bypassGUM, }; this.broker = new AudioBroker( Auth.authenticateURL(SFU_URL), - isListenOnly ? 
RECV_ROLE : SENDRECV_ROLE, + role, brokerOptions, ); diff --git a/bigbluebutton-html5/imports/ui/Types/meetingClientSettings.ts b/bigbluebutton-html5/imports/ui/Types/meetingClientSettings.ts index 85b932acbb..1200a4f40a 100644 --- a/bigbluebutton-html5/imports/ui/Types/meetingClientSettings.ts +++ b/bigbluebutton-html5/imports/ui/Types/meetingClientSettings.ts @@ -610,7 +610,6 @@ export interface Media { traceSip: boolean sdpSemantics: string localEchoTest: LocalEchoTest - showVolumeMeter: boolean muteAudioOutputWhenAway: boolean } diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/component.tsx b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/component.tsx index ae65baefeb..eec2ae6f72 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/component.tsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/component.tsx @@ -11,6 +11,7 @@ import { } from '../service'; import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser'; import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations'; +import Styled from './styles'; const intlMessages = defineMessages({ title: { @@ -71,16 +72,22 @@ const intlMessages = defineMessages({ }, }); +interface AudioCaptionsContainerProps { + showTitleLabel?: boolean; +} + interface AudioCaptionsSelectProps { isTranscriptionEnabled: boolean; speechLocale: string; speechVoices: string[]; + showTitleLabel?: boolean; } const AudioCaptionsSelect: React.FC = ({ isTranscriptionEnabled, speechLocale, speechVoices, + showTitleLabel = true, }) => { const useLocaleHook = useFixedLocale(); const intl = useIntl(); @@ -118,49 +125,54 @@ const AudioCaptionsSelect: React.FC = ({ setUserLocaleProperty(value, setUserSpeechLocale); }; - return ( -
-
+ {renderSelect()} + + ) : ( + renderSelect() ); }; -const AudioCaptionsSelectContainer: React.FC = () => { +const AudioCaptionsSelectContainer: React.FC = ({ + showTitleLabel = true, +}) => { const [voicesList, setVoicesList] = React.useState([]); const voices = getSpeechVoices(); @@ -185,6 +197,7 @@ const AudioCaptionsSelectContainer: React.FC = () => { isTranscriptionEnabled={isEnabled} speechLocale={currentUser.speechLocale ?? ''} speechVoices={voices || voicesList} + showTitleLabel={showTitleLabel} /> ); }; diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/styles.ts b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/styles.ts new file mode 100644 index 0000000000..9d4c48b213 --- /dev/null +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-captions/captions/styles.ts @@ -0,0 +1,45 @@ +import styled from 'styled-components'; +import { + borderSize, +} from '/imports/ui/stylesheets/styled-components/general'; +import { + colorGrayLabel, + colorWhite, + colorGrayLighter, + colorPrimary, +} from '/imports/ui/stylesheets/styled-components/palette'; + +const CaptionsSelector = styled.div` + display: grid; + grid-auto-flow: column; + padding: 1rem 0px; + align-items: center; +`; + +const Select = styled.select` + background-color: ${colorWhite}; + border: 0.1rem solid ${colorGrayLighter}; + border-radius: ${borderSize}; + color: ${colorGrayLabel}; + width: 100%; + height: 2rem; + padding: 1px; + + &:focus { + outline: none; + border-radius: ${borderSize}; + box-shadow: 0 0 0 ${borderSize} ${colorPrimary}, inset 0 0 0 1px ${colorPrimary}; + } + + &:hover, + &:focus { + outline: transparent; + outline-style: dotted; + outline-width: ${borderSize}; + } +`; + +export default { + CaptionsSelector, + Select, +}; diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/component.tsx b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/component.tsx index 255b14066c..3c6f2b4219 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/component.tsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/component.tsx @@ -63,6 +63,8 @@ const AudioControls: React.FC = ({ const echoTestIntervalRef = React.useRef>(); const [isAudioModalOpen, setIsAudioModalOpen] = React.useState(false); + const [audioModalContent, setAudioModalContent] = React.useState(null); + const [audioModalProps, setAudioModalProps] = React.useState<{ unmuteOnExit?: boolean } | null>(null); const handleJoinAudio = useCallback((connected: boolean) => { if (connected) { @@ -72,6 +74,12 @@ const AudioControls: React.FC = ({ } }, []); + const openAudioSettings = (props: { unmuteOnExit?: boolean } = {}) => { + setAudioModalContent('settings'); + setAudioModalProps(props); + setIsAudioModalOpen(true); + }; + const joinButton = useMemo(() => { const joinAudioLabel = away ? intlMessages.joinAudioAndSetActive : intlMessages.joinAudio; @@ -107,12 +115,18 @@ const AudioControls: React.FC = ({ return ( - {!inAudio ? joinButton : } + {!inAudio ? 
joinButton : } {isAudioModalOpen && ( setIsAudioModalOpen(false)} + setIsOpen={() => { + setIsAudioModalOpen(false); + setAudioModalContent(null); + setAudioModalProps(null); + }} isOpen={isAudioModalOpen} + content={audioModalContent} + unmuteOnExit={audioModalProps?.unmuteOnExit} /> )} diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/LiveSelection.tsx b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/LiveSelection.tsx index 0faa7e9574..279aaecb9d 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/LiveSelection.tsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/LiveSelection.tsx @@ -56,6 +56,26 @@ const intlMessages = defineMessages({ id: 'app.audioNotification.deviceChangeFailed', description: 'Device change failed', }, + fallbackInputLabel: { + id: 'app.audio.audioSettings.fallbackInputLabel', + description: 'Audio input device label', + }, + fallbackOutputLabel: { + id: 'app.audio.audioSettings.fallbackOutputLabel', + description: 'Audio output device label', + }, + fallbackNoPermissionLabel: { + id: 'app.audio.audioSettings.fallbackNoPermission', + description: 'No permission to access audio devices label', + }, + audioSettingsTitle: { + id: 'app.audio.audioSettings.titleLabel', + description: 'Audio settings button label', + }, + noMicListenOnlyLabel: { + id: 'app.audio.audioSettings.noMicListenOnly', + description: 'No microphone (listen only) label', + }, }); interface MuteToggleProps { @@ -75,6 +95,8 @@ interface LiveSelectionProps extends MuteToggleProps { outputDeviceId: string; meetingIsBreakout: boolean; away: boolean; + openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void; + supportsTransparentListenOnly: boolean; } export const LiveSelection: React.FC = ({ @@ -90,6 +112,8 @@ export const LiveSelection: React.FC = ({ isAudioLocked, toggleMuteMicrophone, away, + openAudioSettings, + supportsTransparentListenOnly, }) => { const intl = useIntl(); @@ -105,6 +129,21 @@ export const LiveSelection: React.FC = ({ ]; } + const getFallbackLabel = (device: MediaDeviceInfo, index: number) => { + const baseLabel = device?.kind === AUDIO_OUTPUT + ? intlMessages.fallbackOutputLabel + : intlMessages.fallbackInputLabel; + let label = intl.formatMessage(baseLabel, { 0: index }); + + if (!device?.deviceId) { + label = `${label} ${intl.formatMessage(intlMessages.fallbackNoPermissionLabel)}`; + } + + return label; + }; + + const shouldTreatAsMicrophone = () => !listenOnly || supportsTransparentListenOnly; + const renderDeviceList = useCallback(( deviceKind: string, list: MediaDeviceInfo[], @@ -134,7 +173,7 @@ export const LiveSelection: React.FC = ({ { key: `${device.deviceId}-${deviceKind}`, dataTest: `${deviceKind}-${index + 1}`, - label: truncateDeviceName(device.label), + label: truncateDeviceName(device.label || getFallbackLabel(device, index + 1)), customStyles: (device.deviceId === currentDeviceId) ? Styled.SelectedLabel : null, iconRight: (device.deviceId === currentDeviceId) ? 
'check' : null, onClick: () => onDeviceListClick(device.deviceId, deviceKind, callback), @@ -163,10 +202,37 @@ export const LiveSelection: React.FC = ({ ]; } + if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) { + // "None" option for audio input devices - aka listen-only + const listenOnly = deviceKind === AUDIO_INPUT + && currentDeviceId === 'listen-only'; + + deviceList.push({ + key: `listenOnly-${deviceKind}`, + dataTest: `${deviceKind}-listenOnly`, + label: intl.formatMessage(intlMessages.noMicListenOnlyLabel), + customStyles: listenOnly && Styled.SelectedLabel, + iconRight: listenOnly ? 'check' : null, + onClick: () => onDeviceListClick('listen-only', deviceKind, callback), + } as MenuOptionItemType); + } + return listTitle.concat(deviceList); }, []); const onDeviceListClick = useCallback((deviceId: string, deviceKind: string, callback: Function) => { + if (!deviceId) { + // If there's no deviceId in an audio input device, it means + // the user doesn't have permission to access it. If we support + // transparent listen-only, fire the mount AudioSettings modal to + // acquire permission and let the user configure their stuff. + if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) { + openAudioSettings({ unmuteOnExit: true }); + } + + return; + } + if (!deviceId) return; if (deviceKind === AUDIO_INPUT) { callback(deviceId).catch(() => { @@ -179,7 +245,7 @@ export const LiveSelection: React.FC = ({ } }, []); - const inputDeviceList = !listenOnly + const inputDeviceList = shouldTreatAsMicrophone() ? renderDeviceList( AUDIO_INPUT, inputDevices, @@ -196,6 +262,16 @@ export const LiveSelection: React.FC = ({ outputDeviceId, ); + const audioSettingsOption = { + icon: 'settings', + label: intl.formatMessage(intlMessages.audioSettingsTitle), + key: 'audioSettingsOption', + dataTest: 'input-selector-audio-settings', + customStyles: Styled.AudioSettingsOption, + dividerTop: true, + onClick: () => openAudioSettings(), + } as MenuOptionItemType; + const leaveAudioOption = { icon: 'logout', label: intl.formatMessage(intlMessages.leaveAudio), @@ -204,12 +280,14 @@ export const LiveSelection: React.FC = ({ customStyles: Styled.DangerColor, onClick: () => handleLeaveAudio(meetingIsBreakout), }; - const dropdownListComplete = inputDeviceList.concat(outputDeviceList) + const dropdownListComplete = inputDeviceList + .concat(outputDeviceList) .concat({ key: 'separator-02', isSeparator: true, - }) - .concat(leaveAudioOption); + }); + if (shouldTreatAsMicrophone()) dropdownListComplete.push(audioSettingsOption); + dropdownListComplete.push(leaveAudioOption); audioSettingsDropdownItems.forEach((audioSettingsDropdownItem: PluginSdk.AudioSettingsDropdownInterface) => { @@ -239,9 +317,11 @@ export const LiveSelection: React.FC = ({ const customStyles = { top: '-1rem' }; const { isMobile } = deviceInfo; + const noInputDevice = inputDeviceId === 'listen-only'; + return ( <> - {!listenOnly ? ( + {shouldTreatAsMicrophone() ? ( // eslint-disable-next-line jsx-a11y/no-access-key = ({ aria-hidden="true" /> ) : null} - {(!listenOnly && isMobile) && ( + {(shouldTreatAsMicrophone() && isMobile) && ( = ({ isAudioLocked={isAudioLocked} toggleMuteMicrophone={toggleMuteMicrophone} away={away} + noInputDevice={noInputDevice} + openAudioSettings={openAudioSettings} /> )} - {!listenOnly && !isMobile + {shouldTreatAsMicrophone() && !isMobile ? 
( = ({ isAudioLocked={isAudioLocked} toggleMuteMicrophone={toggleMuteMicrophone} away={away} + noInputDevice={noInputDevice} + openAudioSettings={openAudioSettings} /> ) : ( diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/muteToggle.tsx b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/muteToggle.tsx index d5367ae653..217f40325c 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/muteToggle.tsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/buttons/muteToggle.tsx @@ -33,6 +33,8 @@ interface MuteToggleProps { isAudioLocked: boolean; toggleMuteMicrophone: (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => void; away: boolean; + noInputDevice?: boolean; + openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void; } export const MuteToggle: React.FC = ({ @@ -42,6 +44,8 @@ export const MuteToggle: React.FC = ({ isAudioLocked, toggleMuteMicrophone, away, + noInputDevice = false, + openAudioSettings, }) => { const intl = useIntl(); const toggleMuteShourtcut = useShortcut('toggleMute'); @@ -57,15 +61,22 @@ export const MuteToggle: React.FC = ({ const onClickCallback = (e: React.MouseEvent) => { e.stopPropagation(); - if (muted && away) { - muteAway(muted, true, toggleVoice); - VideoService.setTrackEnabled(true); - setAway({ - variables: { - away: false, - }, - }); + if (muted) { + if (away) { + if (!noInputDevice) muteAway(muted, true, toggleVoice); + VideoService.setTrackEnabled(true); + setAway({ + variables: { + away: false, + }, + }); + } else if (noInputDevice) { + // User is in duplex audio, passive-sendrecv, but has no input device set + // Open the audio settings modal to allow them to select an input device + openAudioSettings({ unmuteOnExit: true }); + } } + toggleMuteMicrophone(muted, toggleVoice); }; return ( diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/component.tsx b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/component.tsx index db45cd9e38..376ea5d03c 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/component.tsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/component.tsx @@ -8,18 +8,23 @@ import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser'; import { User } from '/imports/ui/Types/user'; import { defineMessages, useIntl } from 'react-intl'; import { - handleLeaveAudio, liveChangeInputDevice, liveChangeOutputDevice, notify, toggleMuteMicrophone, + handleLeaveAudio, + liveChangeInputDevice, + liveChangeOutputDevice, + notify, + toggleMuteMicrophone, + toggleMuteMicrophoneSystem, } from './service'; import useMeeting from '/imports/ui/core/hooks/useMeeting'; import { Meeting } from '/imports/ui/Types/meeting'; import logger from '/imports/startup/client/logger'; -import Auth from '/imports/ui/services/auth'; import MutedAlert from '/imports/ui/components/muted-alert/component'; import MuteToggle from './buttons/muteToggle'; import ListenOnly from './buttons/listenOnly'; import LiveSelection from './buttons/LiveSelection'; import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking'; import 
useWhoIsUnmuted from '/imports/ui/core/hooks/useWhoIsUnmuted'; +import useToggleVoice from '/imports/ui/components/audio/audio-graphql/hooks/useToggleVoice'; const AUDIO_INPUT = 'audioinput'; const AUDIO_OUTPUT = 'audiooutput'; @@ -52,7 +57,11 @@ const intlMessages = defineMessages({ }, }); -interface InputStreamLiveSelectorProps { +interface InputStreamLiveSelectorContainerProps { + openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void; +} + +interface InputStreamLiveSelectorProps extends InputStreamLiveSelectorContainerProps { isConnected: boolean; isPresenter: boolean; isModerator: boolean; @@ -68,6 +77,8 @@ interface InputStreamLiveSelectorProps { inputStream: string; meetingIsBreakout: boolean; away: boolean; + permissionStatus: string; + supportsTransparentListenOnly: boolean; } const InputStreamLiveSelector: React.FC = ({ @@ -86,8 +97,12 @@ const InputStreamLiveSelector: React.FC = ({ inputStream, meetingIsBreakout, away, + permissionStatus, + supportsTransparentListenOnly, + openAudioSettings, }) => { const intl = useIntl(); + const toggleVoice = useToggleVoice(); // eslint-disable-next-line no-undef const [inputDevices, setInputDevices] = React.useState([]); const [outputDevices, setOutputDevices] = React.useState([]); @@ -106,6 +121,15 @@ const InputStreamLiveSelector: React.FC = ({ const audioOutputDevices = devices.filter((i) => i.kind === AUDIO_OUTPUT); setInputDevices(audioInputDevices as InputDeviceInfo[]); setOutputDevices(audioOutputDevices); + }) + .catch((error) => { + logger.warn({ + logCode: 'audio_device_enumeration_error', + extraInfo: { + errorMessage: error.message, + errorName: error.name, + }, + }, `Error enumerating audio devices: ${error.message}`); }); if (isAudioConnected) { updateRemovedDevices(inputDevices, outputDevices); @@ -115,11 +139,11 @@ const InputStreamLiveSelector: React.FC = ({ const fallbackInputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => { if (!fallbackDevice || !fallbackDevice.deviceId) return; - logger.info({ - logCode: 'audio_device_live_selector', + logger.warn({ + logCode: 'audio_input_live_selector', extraInfo: { - userId: Auth.userID, - meetingId: Auth.meetingID, + fallbackDeviceId: fallbackDevice?.deviceId, + fallbackDeviceLabel: fallbackDevice?.label, }, }, 'Current input device was removed. Fallback to default device'); liveChangeInputDevice(fallbackDevice.deviceId).catch(() => { @@ -129,11 +153,11 @@ const InputStreamLiveSelector: React.FC = ({ const fallbackOutputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => { if (!fallbackDevice || !fallbackDevice.deviceId) return; - logger.info({ - logCode: 'audio_device_live_selector', + logger.warn({ + logCode: 'audio_output_live_selector', extraInfo: { - userId: Auth.userID, - meetingId: Auth.meetingID, + fallbackDeviceId: fallbackDevice?.deviceId, + fallbackDeviceLabel: fallbackDevice?.label, }, }, 'Current output device was removed. Fallback to default device'); liveChangeOutputDevice(fallbackDevice.deviceId, true).catch(() => { @@ -162,7 +186,16 @@ const InputStreamLiveSelector: React.FC = ({ if (enableDynamicAudioDeviceSelection) { updateDevices(inAudio); } - }, [inAudio]); + }, [inAudio, permissionStatus]); + + useEffect(() => { + // If the user has no input device, is connected to audio and unmuted, + // they need to be *muted* by the system. Further attempts to unmute + // will open the audio settings modal instead. 
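The effect that follows enforces the new transparent listen-only invariant: while the selected input device is the 'listen-only' sentinel, a connected user must not stay unmuted. A minimal standalone sketch of that enforcement, assuming the toggleMuteMicrophoneSystem helper and toggleVoice callback shapes used elsewhere in this patch (the hook name and prop interface here are illustrative, not part of the change):

```ts
import { useEffect } from 'react';

// Prop shapes are assumptions for this sketch; the real component wires them
// from AudioManager reactive vars and the useToggleVoice hook.
type ToggleVoice = (userId: string, muted: boolean) => void;

interface MuteEnforcementArgs {
  inputDeviceId: string; // 'listen-only' acts as the "no microphone" sentinel
  isConnected: boolean;  // true while the audio connection is up
  muted: boolean;        // current mute state
  toggleVoice: ToggleVoice;
  toggleMuteMicrophoneSystem: (muted: boolean, toggleVoice: ToggleVoice) => void;
}

// Hypothetical hook name; mirrors the effect introduced in the hunk below.
export const useEnforceListenOnlyMute = ({
  inputDeviceId,
  isConnected,
  muted,
  toggleVoice,
  toggleMuteMicrophoneSystem,
}: MuteEnforcementArgs): void => {
  useEffect(() => {
    // No capture device, connected, and audible: force a system-side mute.
    // Later unmute attempts are redirected to the audio settings modal.
    if (inputDeviceId === 'listen-only' && isConnected && !muted) {
      toggleMuteMicrophoneSystem(muted, toggleVoice);
    }
  }, [inputDeviceId, isConnected, muted]);
};
```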
+ if (inputDeviceId === 'listen-only' && isConnected && !muted) { + toggleMuteMicrophoneSystem(muted, toggleVoice); + } + }, [inputDeviceId, isConnected, muted]); return ( <> @@ -190,6 +223,8 @@ const InputStreamLiveSelector: React.FC = ({ isAudioLocked={isAudioLocked} toggleMuteMicrophone={toggleMuteMicrophone} away={away} + supportsTransparentListenOnly={supportsTransparentListenOnly} + openAudioSettings={openAudioSettings} /> ) : ( <> @@ -201,6 +236,8 @@ const InputStreamLiveSelector: React.FC = ({ isAudioLocked={isAudioLocked} toggleMuteMicrophone={toggleMuteMicrophone} away={away} + openAudioSettings={openAudioSettings} + noInputDevice={inputDeviceId === 'listen-only'} /> )} = ({ ); }; -const InputStreamLiveSelectorContainer: React.FC = () => { +const InputStreamLiveSelectorContainer: React.FC = ({ + openAudioSettings, +}) => { const { data: currentUser } = useCurrentUser((u: Partial) => { if (!u.voice) { return { @@ -261,6 +300,10 @@ const InputStreamLiveSelectorContainer: React.FC = () => { const outputDeviceId = useReactiveVar(AudioManager._outputDeviceId.value) as string; // @ts-ignore - temporary while hybrid (meteor+GraphQl) const inputStream = useReactiveVar(AudioManager._inputStream) as string; + // @ts-ignore - temporary while hybrid (meteor+GraphQl) + const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value) as string; + // @ts-ignore - temporary while hybrid (meteor+GraphQl) + const supportsTransparentListenOnly = useReactiveVar(AudioManager._transparentListenOnlySupported.value) as boolean; return ( { inputStream={inputStream} meetingIsBreakout={currentMeeting?.isBreakout ?? false} away={currentUser?.away ?? false} + openAudioSettings={openAudioSettings} + permissionStatus={permissionStatus} + supportsTransparentListenOnly={supportsTransparentListenOnly} /> ); }; diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service.ts b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service.ts index 804edd57a5..3040121751 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service.ts +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service.ts @@ -40,32 +40,35 @@ export const handleLeaveAudio = (meetingIsBreakout: boolean) => { ); }; -const toggleMuteMicrophoneThrottled = throttle(( +const toggleMute = ( muted: boolean, toggleVoice: (userId: string, muted: boolean) => void, + actionType = 'user_action', ) => { - Storage.setItem(MUTED_KEY, !muted); - if (muted) { - logger.info( - { - logCode: 'audiomanager_unmute_audio', - extraInfo: { logType: 'user_action' }, - }, - 'microphone unmuted by user', - ); + if (AudioManager.inputDeviceId === 'listen-only') { + // User is in duplex audio, passive-sendrecv, but has no input device set + // Unmuting should not be allowed at all + return; + } + + logger.info({ + logCode: 'audiomanager_unmute_audio', + extraInfo: { logType: actionType }, + }, 'microphone unmuted'); + Storage.setItem(MUTED_KEY, false); toggleVoice(Auth.userID as string, false); } else { - logger.info( - { - logCode: 'audiomanager_mute_audio', - extraInfo: { logType: 'user_action' }, - }, - 'microphone muted by user', - ); + logger.info({ + logCode: 'audiomanager_mute_audio', + extraInfo: { logType: actionType }, + }, 'microphone muted'); + Storage.setItem(MUTED_KEY, true); toggleVoice(Auth.userID as string, true); 
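The mute toggle above is exposed in two flavours further down: a rate-limited one for user clicks and an unthrottled one for system-initiated corrections. A rough sketch of that layering, assuming lodash-style throttle/debounce and omitting the listen-only guard shown above; the timing constants and LOCAL_USER_ID placeholder are illustrative:

```ts
import { throttle, debounce } from 'lodash';

type ToggleVoice = (userId: string, muted: boolean) => void;

// Illustrative values; the real timings come from client settings.
const TOGGLE_MUTE_THROTTLE_TIME = 300;
const TOGGLE_MUTE_DEBOUNCE_TIME = 500;
const LOCAL_USER_ID = 'local-user'; // placeholder for the authenticated user id

const toggleMute = (muted: boolean, toggleVoice: ToggleVoice, actionType = 'user_action'): void => {
  // `muted` is the current state, so the toggle requests the opposite one.
  console.info(`microphone ${muted ? 'unmuted' : 'muted'} (${actionType})`);
  toggleVoice(LOCAL_USER_ID, !muted);
};

// User clicks are rate-limited twice: a throttle caps sustained toggling and a
// leading-edge debounce swallows accidental double clicks while still firing
// immediately on the first click.
const toggleMuteThrottled = throttle(toggleMute, TOGGLE_MUTE_THROTTLE_TIME);
export const toggleMuteMicrophone = debounce(toggleMuteThrottled, TOGGLE_MUTE_DEBOUNCE_TIME, {
  leading: true,
  trailing: false,
});

// System-initiated corrections (e.g., enforcing mute when no input device is
// set) skip the rate limiting so they are never dropped.
export const toggleMuteMicrophoneSystem = (muted: boolean, toggleVoice: ToggleVoice): void => {
  toggleMute(muted, toggleVoice, 'system_action');
};
```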
} -}, TOGGLE_MUTE_THROTTLE_TIME); +}; + +const toggleMuteMicrophoneThrottled = throttle(toggleMute, TOGGLE_MUTE_THROTTLE_TIME); const toggleMuteMicrophoneDebounced = debounce(toggleMuteMicrophoneThrottled, TOGGLE_MUTE_DEBOUNCE_TIME, { leading: true, trailing: false }); @@ -74,6 +77,11 @@ export const toggleMuteMicrophone = (muted: boolean, toggleVoice: (userId: strin return toggleMuteMicrophoneDebounced(muted, toggleVoice); }; +// Debounce is not needed here, as this function should only called by the system. +export const toggleMuteMicrophoneSystem = (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => { + return toggleMute(muted, toggleVoice, 'system_action'); +}; + export const truncateDeviceName = (deviceName: string) => { if (deviceName && deviceName.length <= DEVICE_LABEL_MAX_LENGTH) { return deviceName; @@ -141,6 +149,7 @@ export const muteAway = ( export default { handleLeaveAudio, toggleMuteMicrophone, + toggleMuteMicrophoneSystem, truncateDeviceName, notify, liveChangeInputDevice, diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/styles.ts b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/styles.ts index c5b58bc900..7f60f37b54 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/styles.ts +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/styles.ts @@ -56,6 +56,10 @@ export const DisabledLabel = { opacity: 1, }; +export const AudioSettingsOption = { + paddingLeft: 12, +}; + export const SelectedLabel = { color: colorPrimary, backgroundColor: colorOffWhite, @@ -80,6 +84,7 @@ export default { MuteToggleButton, DisabledLabel, SelectedLabel, + AudioSettingsOption, DangerColor, AudioDropdown, }; diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/component.jsx b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/component.jsx index 12821d78c2..c98258ccee 100755 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/component.jsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/component.jsx @@ -1,11 +1,14 @@ -import React, { useEffect, useState } from 'react'; +import React, { + useCallback, + useEffect, + useState, +} from 'react'; import PropTypes from 'prop-types'; import { defineMessages, injectIntl, FormattedMessage, } from 'react-intl'; import { useMutation } from '@apollo/client'; import Styled from './styles'; -import PermissionsOverlay from '../permissions-overlay/component'; import AudioSettings from '../audio-settings/component'; import EchoTest from '../echo-test/component'; import Help from '../help/component'; @@ -21,6 +24,7 @@ import { muteAway, } from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service'; import Session from '/imports/ui/services/storage/in-memory'; +import logger from '/imports/startup/client/logger'; const propTypes = { intl: PropTypes.shape({ @@ -39,10 +43,11 @@ const propTypes = { isConnected: PropTypes.bool.isRequired, isUsingAudio: PropTypes.bool.isRequired, isListenOnly: PropTypes.bool.isRequired, + isMuted: PropTypes.bool.isRequired, + toggleMuteMicrophoneSystem: PropTypes.func.isRequired, inputDeviceId: PropTypes.string, outputDeviceId: PropTypes.string, formattedDialNum: PropTypes.string.isRequired, - showPermissionsOvelay: PropTypes.bool.isRequired, listenOnlyMode: 
PropTypes.bool.isRequired, joinFullAudioImmediately: PropTypes.bool, forceListenOnlyAttendee: PropTypes.bool.isRequired, @@ -55,7 +60,6 @@ const propTypes = { handleAllowAutoplay: PropTypes.func.isRequired, changeInputStream: PropTypes.func.isRequired, localEchoEnabled: PropTypes.bool.isRequired, - showVolumeMeter: PropTypes.bool.isRequired, notify: PropTypes.func.isRequired, isRTL: PropTypes.bool.isRequired, priority: PropTypes.string.isRequired, @@ -72,6 +76,15 @@ const propTypes = { }).isRequired, getTroubleshootingLink: PropTypes.func.isRequired, away: PropTypes.bool, + doGUM: PropTypes.func.isRequired, + hasMicrophonePermission: PropTypes.func.isRequired, + permissionStatus: PropTypes.string, + liveChangeInputDevice: PropTypes.func.isRequired, + content: PropTypes.string, + unmuteOnExit: PropTypes.bool, + supportsTransparentListenOnly: PropTypes.bool.isRequired, + getAudioConstraints: PropTypes.func.isRequired, + isTranscriptionEnabled: PropTypes.bool.isRequired, }; const intlMessages = defineMessages({ @@ -116,7 +129,7 @@ const intlMessages = defineMessages({ description: 'Title for the echo test', }, settingsTitle: { - id: 'app.audioModal.settingsTitle', + id: 'app.audio.audioSettings.titleLabel', description: 'Title for the audio modal', }, helpTitle: { @@ -139,6 +152,10 @@ const intlMessages = defineMessages({ id: 'app.audioModal.autoplayBlockedDesc', description: 'Message for autoplay audio block', }, + findingDevicesTitle: { + id: 'app.audio.audioSettings.findingDevicesTitle', + description: 'Message for finding audio devices', + }, }); const AudioModal = ({ @@ -148,6 +165,8 @@ const AudioModal = ({ audioLocked, isUsingAudio, isListenOnly, + isMuted, + toggleMuteMicrophoneSystem, autoplayBlocked, closeModal, isEchoTest, @@ -170,23 +189,31 @@ const AudioModal = ({ outputDeviceId = null, changeInputDevice, changeOutputDevice, - showVolumeMeter, notify, formattedTelVoice, handleAllowAutoplay, - showPermissionsOvelay, isIE, isOpen, priority, setIsOpen, getTroubleshootingLink, away = false, + doGUM, + getAudioConstraints, + hasMicrophonePermission, + liveChangeInputDevice, + content: initialContent, + supportsTransparentListenOnly, + unmuteOnExit = false, + permissionStatus = null, + isTranscriptionEnabled, }) => { - const [content, setContent] = useState(null); + const [content, setContent] = useState(initialContent); const [hasError, setHasError] = useState(false); const [disableActions, setDisableActions] = useState(false); const [errorInfo, setErrorInfo] = useState(null); const [autoplayChecked, setAutoplayChecked] = useState(false); + const [findingDevices, setFindingDevices] = useState(false); const [setAway] = useMutation(SET_AWAY); const voiceToggle = useToggleVoice(); @@ -257,6 +284,55 @@ const AudioModal = ({ }); }; + const handleGUMFailure = (error) => { + const { MIC_ERROR } = AudioError; + + logger.error({ + logCode: 'audio_gum_failed', + extraInfo: { + errorMessage: error.message, + errorName: error.name, + }, + }, `Audio gUM failed: ${error.name}`); + + setContent('help'); + setDisableActions(false); + setHasError(true); + setErrorInfo({ + errCode: error?.name === 'NotAllowedError' + ? 
MIC_ERROR.NO_PERMISSION + : 0, + errMessage: error?.name || 'NotAllowedError', + }); + }; + + const checkMicrophonePermission = (options) => { + setFindingDevices(true); + + return hasMicrophonePermission(options) + .then((hasPermission) => { + // null means undetermined, so we don't want to show the error modal + // and let downstream components figure it out + if (hasPermission === true || hasPermission === null) { + return hasPermission; + } + + handleGUMFailure(new DOMException( + 'Permissions API says denied', + 'NotAllowedError', + )); + + return false; + }) + .catch((error) => { + handleGUMFailure(error); + return null; + }) + .finally(() => { + setFindingDevices(false); + }); + }; + const handleGoToAudioOptions = () => { setContent(null); setHasError(true); @@ -318,14 +394,19 @@ const AudioModal = ({ }); }; - const handleJoinLocalEcho = (inputStream) => { + const handleAudioSettingsConfirmation = useCallback((inputStream) => { // Reset the modal to a connecting state - this kind of sucks? // prlanzarin Apr 04 2022 setContent(null); if (inputStream) changeInputStream(inputStream); - handleJoinMicrophone(); - disableAwayMode(); - }; + + if (!isConnected) { + handleJoinMicrophone(); + disableAwayMode(); + } else { + closeModal(); + } + }, [changeInputStream, isConnected]); const skipAudioOptions = () => (isConnecting || (forceListenOnlyAttendee && !autoplayChecked)) && !content @@ -333,7 +414,6 @@ const AudioModal = ({ const renderAudioOptions = () => { const hideMicrophone = forceListenOnlyAttendee || audioLocked; - const arrow = isRTL ? '←' : '→'; const dialAudioLabel = `${intl.formatMessage(intlMessages.audioDialTitle)} ${arrow}`; @@ -388,7 +468,7 @@ const AudioModal = ({ }} /> ) : null} - + {joinFullAudioImmediately && } ); }; @@ -400,40 +480,47 @@ const AudioModal = ({ /> ); + const handleBack = useCallback(() => { + if (isConnecting || isConnected || skipAudioOptions()) { + closeModal(); + } else { + handleGoToAudioOptions(); + } + }, [isConnecting, isConnected, skipAudioOptions]); + const renderAudioSettings = () => { + const { animations } = getSettingsSingletonInstance().application; const confirmationCallback = !localEchoEnabled ? handleRetryGoToEchoTest - : handleJoinLocalEcho; - - const handleGUMFailure = (error) => { - const code = error?.name === 'NotAllowedError' - ? AudioError.MIC_ERROR.NO_PERMISSION - : 0; - setContent('help'); - setErrorInfo({ - errCode: code, - errMessage: error?.name || 'NotAllowedError', - }); - setDisableActions(false); - }; + : handleAudioSettingsConfirmation; return ( ); }; @@ -445,9 +532,19 @@ const AudioModal = ({ message: errorInfo?.errMessage, }; + const _joinListenOnly = () => { + // Erase the content state so that the modal transitions to the connecting + // state if the user chooses listen only + setContent(null); + handleJoinListenOnly(); + }; + return ( { const { animations } = getSettingsSingletonInstance().application; + if (findingDevices && content === null) { + return ( + + + {intl.formatMessage(intlMessages.findingDevicesTitle)} + + + + ); + } + if (skipAudioOptions()) { return ( @@ -505,6 +613,7 @@ const AudioModal = ({ ); } + return content ? 
contents[content].component() : renderAudioOptions(); }; @@ -512,16 +621,23 @@ const AudioModal = ({ if (!isUsingAudio) { if (forceListenOnlyAttendee || audioLocked) { handleJoinListenOnly(); - return; - } + } else if (!listenOnlyMode) { + if (joinFullAudioImmediately) { + checkMicrophonePermission({ doGUM: true, permissionStatus }) + .then((hasPermission) => { + // No permission - let the Help screen be shown as it's triggered + // by the checkMicrophonePermission function + if (hasPermission === false) return; - if (joinFullAudioImmediately && !listenOnlyMode) { - handleJoinMicrophone(); - return; - } - - if (!listenOnlyMode) { - handleGoToEchoTest(); + // Permission is granted or undetermined, so we can proceed + handleJoinMicrophone(); + }); + } else { + checkMicrophonePermission({ doGUM: false, permissionStatus }).then((hasPermission) => { + if (hasPermission === false) return; + handleGoToEchoTest(); + }); + } } } }, [ @@ -551,40 +667,39 @@ const AudioModal = ({ let title = content ? intl.formatMessage(contents[content].title) : intl.formatMessage(intlMessages.audioChoiceLabel); - title = !skipAudioOptions() ? title : null; + title = !skipAudioOptions() && (!findingDevices || content) + ? title + : null; return ( - <> - {showPermissionsOvelay ? : null} - - {isIE ? ( - - Chrome, - 1: Firefox, - }} - /> - - ) : null} - - {renderContent()} - - - + + {isIE ? ( + + Chrome, + 1: Firefox, + }} + /> + + ) : null} + + {renderContent()} + + ); }; diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/container.jsx b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/container.jsx index f1a795e643..a66240a38b 100755 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/container.jsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/container.jsx @@ -19,6 +19,7 @@ import { useStorageKey } from '/imports/ui/services/storage/hooks'; import useMeeting from '/imports/ui/core/hooks/useMeeting'; import useLockContext from '/imports/ui/components/lock-viewers/hooks/useLockContext'; import deviceInfo from '/imports/utils/deviceInfo'; +import { useIsAudioTranscriptionEnabled } from '/imports/ui/components/audio/audio-graphql/audio-captions/service'; const invalidDialNumbers = ['0', '613-555-1212', '613-555-1234', '0000']; @@ -62,11 +63,8 @@ const AudioModalContainer = (props) => { combinedDialInNum = `${dialNumber.replace(/\D+/g, '')},,,${telVoice.replace(/\D+/g, '')}`; } } - const { isIe } = browserInfo; - const SHOW_VOLUME_METER = window.meetingClientSettings.public.media.showVolumeMeter; - const { enabled: LOCAL_ECHO_TEST_ENABLED, } = window.meetingClientSettings.public.media.localEchoTest; @@ -81,26 +79,27 @@ const AudioModalContainer = (props) => { const isListenOnly = useReactiveVar(AudioManager._isListenOnly.value); const isEchoTest = useReactiveVar(AudioManager._isEchoTest.value); const autoplayBlocked = useReactiveVar(AudioManager._autoplayBlocked.value); + const isMuted = useReactiveVar(AudioManager._isMuted.value); const meetingIsBreakout = AppService.useMeetingIsBreakout(); + const supportsTransparentListenOnly = useReactiveVar( + AudioManager._transparentListenOnlySupported.value, + ); + const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value); const { userLocks } = useLockContext(); - + const isListenOnlyInputDevice = Service.inputDeviceId() === 'listen-only'; + const devicesAlreadyConfigured = skipEchoTestIfPreviousDevice + && Service.inputDeviceId(); + const joinFullAudioImmediately = !isListenOnlyInputDevice + && 
(skipCheck || (skipCheckOnJoin && !getEchoTest) || devicesAlreadyConfigured); const { setIsOpen } = props; const close = useCallback(() => closeModal(() => setIsOpen(false)), [setIsOpen]); const joinMic = useCallback( - (skipEchoTest) => joinMicrophone(skipEchoTest || skipCheck || skipCheckOnJoin), + (options = {}) => joinMicrophone({ + skipEchoTest: options.skipEchoTest || joinFullAudioImmediately, + }), [skipCheck, skipCheckOnJoin], ); - const joinFullAudioImmediately = ( - autoJoin - && ( - skipCheck - || (skipCheckOnJoin && !getEchoTest) - )) - || ( - skipCheck - || (skipCheckOnJoin && !getEchoTest) - || (skipEchoTestIfPreviousDevice && (inputDeviceId || outputDeviceId)) - ); + const isTranscriptionEnabled = useIsAudioTranscriptionEnabled(); return ( { isConnected={isConnected} isListenOnly={isListenOnly} isEchoTest={isEchoTest} + isMuted={isMuted} + toggleMuteMicrophoneSystem={Service.toggleMuteMicrophoneSystem} autoplayBlocked={autoplayBlocked} getEchoTest={getEchoTest} joinFullAudioImmediately={joinFullAudioImmediately} @@ -123,11 +124,11 @@ const AudioModalContainer = (props) => { joinListenOnly={joinListenOnly} leaveEchoTest={leaveEchoTest} changeInputDevice={Service.changeInputDevice} + liveChangeInputDevice={Service.liveChangeInputDevice} changeInputStream={Service.changeInputStream} changeOutputDevice={Service.changeOutputDevice} joinEchoTest={Service.joinEchoTest} exitAudio={Service.exitAudio} - showVolumeMeter={SHOW_VOLUME_METER} localEchoEnabled={LOCAL_ECHO_TEST_ENABLED} listenOnlyMode={listenOnlyMode} formattedDialNum={formattedDialNum} @@ -144,7 +145,15 @@ const AudioModalContainer = (props) => { isRTL={isRTL} AudioError={AudioError} getTroubleshootingLink={AudioModalService.getTroubleshootingLink} + getMicrophonePermissionStatus={Service.getMicrophonePermissionStatus} + getAudioConstraints={Service.getAudioConstraints} + doGUM={Service.doGUM} + bypassGUM={Service.bypassGUM} + supportsTransparentListenOnly={supportsTransparentListenOnly} setIsOpen={setIsOpen} + hasMicrophonePermission={Service.hasMicrophonePermission} + permissionStatus={permissionStatus} + isTranscriptionEnabled={isTranscriptionEnabled} {...props} /> ); diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/service.js b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/service.js index 67ae971935..0f22573939 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/service.js +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/service.js @@ -20,7 +20,10 @@ export const didUserSelectedListenOnly = () => ( !!Storage.getItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY) ); -export const joinMicrophone = (skipEchoTest = false) => { +export const joinMicrophone = (options = {}) => { + const { skipEchoTest = false } = options; + const shouldSkipEcho = skipEchoTest && Service.inputDeviceId() !== 'listen-only'; + Storage.setItem(CLIENT_DID_USER_SELECTED_MICROPHONE_KEY, true); Storage.setItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY, false); @@ -30,8 +33,8 @@ export const joinMicrophone = (skipEchoTest = false) => { const call = new Promise((resolve, reject) => { try { - if ((skipEchoTest && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) { - return resolve(Service.joinMicrophone()); + if ((shouldSkipEcho && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) { + return resolve(Service.joinMicrophone(options)); } return resolve(Service.transferCall()); diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/styles.js 
b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/styles.js index 382b474fa2..18feb0ad33 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-modal/styles.js +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-modal/styles.js @@ -63,6 +63,7 @@ const Connecting = styled.div` margin-top: auto; margin-bottom: auto; font-size: 2rem; + text-align: center; `; const ellipsis = keyframes` diff --git a/bigbluebutton-html5/imports/ui/components/audio/audio-settings/component.jsx b/bigbluebutton-html5/imports/ui/components/audio/audio-settings/component.jsx index 8946840743..df24cfd864 100644 --- a/bigbluebutton-html5/imports/ui/components/audio/audio-settings/component.jsx +++ b/bigbluebutton-html5/imports/ui/components/audio/audio-settings/component.jsx @@ -8,47 +8,62 @@ import logger from '/imports/startup/client/logger'; import AudioStreamVolume from '/imports/ui/components/audio/audio-stream-volume/component'; import LocalEchoContainer from '/imports/ui/components/audio/local-echo/container'; import DeviceSelector from '/imports/ui/components/audio/device-selector/component'; -import { - getAudioConstraints, - doGUM, -} from '/imports/api/audio/client/bridge/service'; import MediaStreamUtils from '/imports/utils/media-stream-utils'; -import audioManager from '/imports/ui/services/audio-manager'; +import AudioManager from '/imports/ui/services/audio-manager'; import Session from '/imports/ui/services/storage/in-memory'; +import AudioCaptionsSelectContainer from '../audio-graphql/audio-captions/captions/component'; const propTypes = { intl: PropTypes.shape({ formatMessage: PropTypes.func.isRequired, }).isRequired, + animations: PropTypes.bool, changeInputDevice: PropTypes.func.isRequired, + liveChangeInputDevice: PropTypes.func.isRequired, changeOutputDevice: PropTypes.func.isRequired, handleBack: PropTypes.func.isRequired, handleConfirmation: PropTypes.func.isRequired, handleGUMFailure: PropTypes.func.isRequired, isConnecting: PropTypes.bool.isRequired, + isConnected: PropTypes.bool.isRequired, + isMuted: PropTypes.bool.isRequired, + toggleMuteMicrophoneSystem: PropTypes.func.isRequired, inputDeviceId: PropTypes.string.isRequired, outputDeviceId: PropTypes.string.isRequired, produceStreams: PropTypes.bool, withEcho: PropTypes.bool, - withVolumeMeter: PropTypes.bool, notify: PropTypes.func.isRequired, + unmuteOnExit: PropTypes.bool, + doGUM: PropTypes.func.isRequired, + getAudioConstraints: PropTypes.func.isRequired, + checkMicrophonePermission: PropTypes.func.isRequired, + supportsTransparentListenOnly: PropTypes.bool.isRequired, + toggleVoice: PropTypes.func.isRequired, + permissionStatus: PropTypes.string, + isTranscriptionEnabled: PropTypes.bool.isRequired, }; const defaultProps = { + animations: true, produceStreams: false, withEcho: false, - withVolumeMeter: false, + unmuteOnExit: false, + permissionStatus: null, }; const intlMessages = defineMessages({ + testSpeakerLabel: { + id: 'app.audio.audioSettings.testSpeakerLabel', + description: 'Test speaker label', + }, + captionsSelectorLabel: { + id: 'app.audio.captions.speech.title', + description: 'Audio speech recognition title', + }, backLabel: { id: 'app.audio.backLabel', description: 'audio settings back button label', }, - descriptionLabel: { - id: 'app.audio.audioSettings.descriptionLabel', - description: 'audio settings description label', - }, micSourceLabel: { id: 'app.audio.audioSettings.microphoneSourceLabel', description: 'Label for mic source', @@ -69,17 +84,45 @@ const intlMessages = 
defineMessages({ id: 'app.audioNotification.deviceChangeFailed', description: 'Device change failed', }, + confirmLabel: { + id: 'app.audio.audioSettings.confirmLabel', + description: 'Audio settings confirmation button label', + }, + cancelLabel: { + id: 'app.audio.audioSettings.cancelLabel', + description: 'Audio settings cancel button label', + }, + findingDevicesTitle: { + id: 'app.audio.audioSettings.findingDevicesTitle', + description: 'Message for finding audio devices', + }, + noMicSelectedWarning: { + id: 'app.audio.audioSettings.noMicSelectedWarning', + description: 'Warning when no mic is selected', + }, + baseSubtitle: { + id: 'app.audio.audioSettings.baseSubtitle', + description: 'Base subtitle for audio settings', + }, }); class AudioSettings extends React.Component { constructor(props) { super(props); - const { inputDeviceId, outputDeviceId } = props; + const { + inputDeviceId, + outputDeviceId, + unmuteOnExit, + permissionStatus, + } = props; this.handleInputChange = this.handleInputChange.bind(this); this.handleOutputChange = this.handleOutputChange.bind(this); this.handleConfirmationClick = this.handleConfirmationClick.bind(this); + this.handleCancelClick = this.handleCancelClick.bind(this); + this.unmuteOnExit = this.unmuteOnExit.bind(this); + this.updateDeviceList = this.updateDeviceList.bind(this); this.state = { inputDeviceId, @@ -88,32 +131,80 @@ class AudioSettings extends React.Component { // blocked until at least one stream is generated producingStreams: props.produceStreams, stream: null, + unmuteOnExit, + audioInputDevices: [], + audioOutputDevices: [], + findingDevices: permissionStatus === 'prompt' || permissionStatus === 'denied', }; this._isMounted = false; } componentDidMount() { - const { inputDeviceId, outputDeviceId } = this.state; + const { + inputDeviceId, + outputDeviceId, + } = this.state; + const { + isConnected, + isMuted, + toggleMuteMicrophoneSystem, + checkMicrophonePermission, + toggleVoice, + permissionStatus, + } = this.props; Session.setItem('inEchoTest', true); this._isMounted = true; // Guarantee initial in/out devices are initialized on all ends - this.setInputDevice(inputDeviceId); - this.setOutputDevice(outputDeviceId); - audioManager.isEchoTest = true; + AudioManager.isEchoTest = true; + checkMicrophonePermission({ gumOnPrompt: true, permissionStatus }) + .then(this.updateDeviceList) + .then(() => { + if (!this._isMounted) return; + + navigator.mediaDevices.addEventListener( + 'devicechange', + this.updateDeviceList, + ); + this.setState({ findingDevices: false }); + this.setInputDevice(inputDeviceId); + this.setOutputDevice(outputDeviceId); + }); + + // If connected and unmuted, we need to mute the audio and revert it + // back to the original state on exit. 
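The settings view mutes a connected, unmuted user while devices are being probed and restores the previous state when it unmounts. A condensed sketch of that open/close guard, assuming the toggleMuteMicrophoneSystem helper from this patch; the factory name is illustrative, the real component keeps the equivalent flag in its state:

```ts
type ToggleVoice = (userId: string, muted: boolean) => void;
type SystemMuteToggle = (muted: boolean, toggleVoice: ToggleVoice) => void;

export const createSettingsMuteGuard = (
  toggleMuteMicrophoneSystem: SystemMuteToggle,
  toggleVoice: ToggleVoice,
) => {
  let unmuteOnExit = false;

  return {
    // On mount: mute so device probing does not leak live audio into the call.
    onOpen(isConnected: boolean, isMuted: boolean, inputDeviceId: string): void {
      if (isConnected && !isMuted) {
        toggleMuteMicrophoneSystem(isMuted, toggleVoice);
        // Only plan to restore the state for users with a real microphone.
        unmuteOnExit = inputDeviceId !== 'listen-only';
      }
    },
    // On unmount: revert to the pre-settings mute state if flagged.
    onClose(): void {
      if (unmuteOnExit) toggleMuteMicrophoneSystem(true, toggleVoice);
    },
  };
};
```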
+ if (isConnected && !isMuted) { + toggleMuteMicrophoneSystem(isMuted, toggleVoice); + // We only need to revert the mute state if the user is not listen-only + if (inputDeviceId !== 'listen-only') this.setState({ unmuteOnExit: true }); + } + } + + componentDidUpdate(prevProps) { + const { permissionStatus } = this.props; + + if (prevProps.permissionStatus !== permissionStatus) { + this.updateDeviceList(); + } } componentWillUnmount() { const { stream } = this.state; Session.setItem('inEchoTest', false); - this._mounted = false; + this._isMounted = false; if (stream) { MediaStreamUtils.stopMediaStreamTracks(stream); } - audioManager.isEchoTest = false; + + AudioManager.isEchoTest = false; + navigator.mediaDevices.removeEventListener( + 'devicechange', this.updateDeviceList, + ); + + this.unmuteOnExit(); } handleInputChange(deviceId) { @@ -125,63 +216,104 @@ class AudioSettings extends React.Component { } handleConfirmationClick() { - const { stream } = this.state; - const { produceStreams, handleConfirmation } = this.props; + const { stream, inputDeviceId: selectedInputDeviceId } = this.state; + const { + isConnected, + produceStreams, + handleConfirmation, + liveChangeInputDevice, + } = this.props; - // Stream generation disabled or there isn't any stream: just run the provided callback - if (!produceStreams || !stream) return handleConfirmation(); + const confirm = () => { + // Stream generation disabled or there isn't any stream: just run the provided callback + if (!produceStreams || !stream) return handleConfirmation(); - // Stream generation enabled and there is a valid input stream => call - // the confirmation callback with the input stream as arg so it can be used - // in upstream components. The rationale is no surplus gUM calls. - // We're cloning it because the original will be cleaned up on unmount here. - const clonedStream = stream.clone(); - return handleConfirmation(clonedStream); + // Stream generation enabled and there is a valid input stream => call + // the confirmation callback with the input stream as arg so it can be used + // in upstream components. The rationale is no surplus gUM calls. + // We're cloning it because the original will be cleaned up on unmount here. + const clonedStream = stream.clone(); + + return handleConfirmation(clonedStream); + }; + + if (isConnected) { + // If connected, we need to use the in-call device change method so that all + // components pick up the change and the peer is properly updated. + liveChangeInputDevice(selectedInputDeviceId).catch((error) => { + logger.warn({ + logCode: 'audiosettings_live_change_device_failed', + extraInfo: { + errorMessage: error?.message, + errorStack: error?.stack, + errorName: error?.name, + }, + }, `Audio settings live change device failed: ${error.name}`); + }).finally(() => { + confirm(); + }); + } else { + confirm(); + } + } + + handleCancelClick() { + const { handleBack } = this.props; + + handleBack(); } setInputDevice(deviceId) { - const { handleGUMFailure, changeInputDevice, produceStreams, intl, notify } = this.props; + const { + isConnected, + handleGUMFailure, + changeInputDevice, + produceStreams, + intl, + notify, + } = this.props; const { inputDeviceId: currentInputDeviceId } = this.state; - try { - changeInputDevice(deviceId); + if (!isConnected) changeInputDevice(deviceId); + // Only generate input streams if they're going to be used with something // In this case, the volume meter or local echo test. 
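Preview streams are only produced for consumers such as the volume meter or the local echo test, and the device id is re-read from the acquired track so the UI stays consistent with whatever device the browser actually opened. A small sketch of that flow using only standard Web APIs; the function name is illustrative:

```ts
// Uses only standard Web APIs: getUserMedia and MediaStreamTrack.getSettings.
const generatePreviewStream = async (
  inputDeviceId: string,
): Promise<{ stream: MediaStream | null; effectiveDeviceId: string }> => {
  // Listen-only means there is no capture device to preview.
  if (inputDeviceId === 'listen-only') {
    return { stream: null, effectiveDeviceId: inputDeviceId };
  }

  const constraints: MediaStreamConstraints = {
    audio: inputDeviceId ? { deviceId: { ideal: inputDeviceId } } : true,
  };
  const stream = await navigator.mediaDevices.getUserMedia(constraints);

  // Re-read the device id from the acquired track: the browser may have
  // picked a different device when the requested id was empty or stale.
  const [track] = stream.getAudioTracks();
  const effectiveDeviceId = track?.getSettings().deviceId ?? inputDeviceId;

  return { stream, effectiveDeviceId };
};
```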
if (produceStreams) { - this.generateInputStream(deviceId) - .then((stream) => { - // Extract the deviceId again from the stream to guarantee consistency - // between stream DID vs chosen DID. That's necessary in scenarios where, - // eg, there's no default/pre-set deviceId ('') and the browser's - // default device has been altered by the user (browser default != system's - // default). - const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio'); - if (extractedDeviceId && extractedDeviceId !== deviceId) + this.generateInputStream(deviceId).then((stream) => { + // Extract the deviceId again from the stream to guarantee consistency + // between stream DID vs chosen DID. That's necessary in scenarios where, + // eg, there's no default/pre-set deviceId ('') and the browser's + // default device has been altered by the user (browser default != system's + // default). + let extractedDeviceId = deviceId; + + if (stream) { + extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio'); + + if (extractedDeviceId !== deviceId && !isConnected) { changeInputDevice(extractedDeviceId); + } + } - // Component unmounted after gUM resolution -> skip echo rendering - if (!this._isMounted) return; + // Component unmounted after gUM resolution -> skip echo rendering + if (!this._isMounted) return; - this.setState({ - inputDeviceId: extractedDeviceId, - stream, - producingStreams: false, - }); - }) - .catch((error) => { - logger.warn( - { - logCode: 'audiosettings_gum_failed', - extraInfo: { - deviceId, - errorMessage: error.message, - errorName: error.name, - }, - }, - `Audio settings gUM failed: ${error.name}` - ); - handleGUMFailure(error); + this.setState({ + inputDeviceId: extractedDeviceId, + stream, + producingStreams: false, }); + }).catch((error) => { + logger.warn({ + logCode: 'audiosettings_gum_failed', + extraInfo: { + deviceId, + errorMessage: error.message, + errorName: error.name, + }, + }, `Audio settings gUM failed: ${error.name}`); + handleGUMFailure(error); + }); } else { this.setState({ inputDeviceId: deviceId, @@ -198,15 +330,20 @@ class AudioSettings extends React.Component { newDeviceId: deviceId, }, }, - `Audio settings: error changing input device - {${error.name}: ${error.message}}` + `Audio settings: error changing input device - {${error.name}: ${error.message}}`, ); notify(intl.formatMessage(intlMessages.deviceChangeFailed), true); } } setOutputDevice(deviceId) { - const { changeOutputDevice, withEcho, intl, notify } = this.props; const { outputDeviceId: currentOutputDeviceId } = this.state; + const { + changeOutputDevice, + withEcho, + intl, + notify, + } = this.props; // withEcho usage (isLive arg): if local echo is enabled we need the device // change to be performed seamlessly (which is what the isLive parameter guarantees) @@ -217,23 +354,42 @@ class AudioSettings extends React.Component { }); }) .catch((error) => { - logger.debug( - { - logCode: 'audiosettings_output_device_change_failure', - extraInfo: { - errorName: error.name, - errorMessage: error.message, - deviceId: currentOutputDeviceId, - newDeviceId: deviceId, - }, + logger.debug({ + logCode: 'audiosettings_output_device_change_failure', + extraInfo: { + errorName: error.name, + errorMessage: error.message, + deviceId: currentOutputDeviceId, + newDeviceId: deviceId, }, - `Audio settings: error changing output device - {${error.name}: ${error.message}}` - ); + }, `Audio settings: error changing output device - {${error.name}: ${error.message}}`); 
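Output device switches are applied live against the playing element rather than by rebuilding the stream, with failures logged and surfaced as a notification. A sketch of the underlying mechanism using the standard setSinkId API with feature detection; the function name is illustrative:

```ts
// setSinkId is standard but not universally available, so it is typed as
// optional here and feature-detected before use.
type SinkCapableAudioElement = HTMLAudioElement & {
  setSinkId?: (sinkId: string) => Promise<void>;
};

const liveChangeOutputDevice = async (
  audioElement: SinkCapableAudioElement,
  deviceId: string,
): Promise<void> => {
  if (typeof audioElement.setSinkId !== 'function') {
    throw new Error('setSinkId is not supported in this browser');
  }

  try {
    await audioElement.setSinkId(deviceId);
  } catch (error) {
    // The previous sink stays active; callers can notify the user and move on.
    console.debug('output device change failed:', (error as Error).name);
    throw error;
  }
};
```

Switching at the sink level keeps the existing WebRTC stream untouched, which is why the change can be performed seamlessly mid-call.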
notify(intl.formatMessage(intlMessages.deviceChangeFailed), true); }); } + updateDeviceList() { + return navigator.mediaDevices.enumerateDevices() + .then((devices) => { + const audioInputDevices = devices.filter((i) => i.kind === 'audioinput'); + const audioOutputDevices = devices.filter((i) => i.kind === 'audiooutput'); + + this.setState({ + audioInputDevices, + audioOutputDevices, + }); + }); + } + + unmuteOnExit() { + const { toggleMuteMicrophoneSystem, toggleVoice } = this.props; + const { unmuteOnExit } = this.state; + + // Unmutes microphone if flagged to do so + if (unmuteOnExit) toggleMuteMicrophoneSystem(true, toggleVoice); + } + generateInputStream(inputDeviceId) { + const { doGUM, getAudioConstraints } = this.props; const { stream } = this.state; if (inputDeviceId && stream) { @@ -244,6 +400,8 @@ class AudioSettings extends React.Component { MediaStreamUtils.stopMediaStreamTracks(stream); } + if (inputDeviceId === 'listen-only') return Promise.resolve(null); + const constraints = { audio: getAudioConstraints({ deviceId: inputDeviceId }), }; @@ -251,107 +409,143 @@ class AudioSettings extends React.Component { return doGUM(constraints, true); } - renderOutputTest() { - const { withEcho, intl } = this.props; - const { stream } = this.state; + renderAudioCaptionsSelector() { + const { intl, isTranscriptionEnabled } = this.props; + + if (!isTranscriptionEnabled) return null; return ( - - - - {!withEcho ? ( - - ) : ( - - )} - - - + + + {intl.formatMessage(intlMessages.captionsSelectorLabel)} + + + ); } - renderVolumeMeter() { - const { withVolumeMeter, intl } = this.props; + renderDeviceSelectors() { + const { + inputDeviceId, + outputDeviceId, + producingStreams, + audioInputDevices, + audioOutputDevices, + findingDevices, + } = this.state; + const { + intl, + isConnecting, + supportsTransparentListenOnly, + withEcho, + } = this.props; const { stream } = this.state; + const blocked = producingStreams || isConnecting || findingDevices; - return withVolumeMeter ? ( - + return ( + <> + + + {intl.formatMessage(intlMessages.micSourceLabel)} + + + {intl.formatMessage(intlMessages.streamVolumeLabel)} - - ) : null; + + + {intl.formatMessage(intlMessages.speakerSourceLabel)} + + + + + {intl.formatMessage(intlMessages.testSpeakerLabel)} + {!withEcho ? 
( + + ) : ( + + )} + + {this.renderAudioCaptionsSelector()} + + ); } - renderDeviceSelectors() { - const { inputDeviceId, outputDeviceId, producingStreams } = this.state; - const { intl, isConnecting } = this.props; - const blocked = producingStreams || isConnecting; + renderAudioNote() { + const { + animations, + intl, + } = this.props; + const { findingDevices, inputDeviceId: selectedInputDeviceId } = this.state; + let subtitle = intl.formatMessage(intlMessages.baseSubtitle); + + if (findingDevices) { + subtitle = intl.formatMessage(intlMessages.findingDevicesTitle); + } else if (selectedInputDeviceId === 'listen-only') { + subtitle = intl.formatMessage(intlMessages.noMicSelectedWarning); + } return ( - - - - - {intl.formatMessage(intlMessages.micSourceLabel)} - - - - - - - - {intl.formatMessage(intlMessages.speakerSourceLabel)} - - - - - + + {subtitle} + {findingDevices && } + ); } render() { - const { isConnecting, intl, handleBack } = this.props; - const { producingStreams } = this.state; + const { + producingStreams, + } = this.state; + const { + isConnecting, + isConnected, + intl, + } = this.props; return ( + {this.renderAudioNote()} - - {intl.formatMessage(intlMessages.descriptionLabel)} - {this.renderDeviceSelectors()} - {this.renderOutputTest()} - {this.renderVolumeMeter()} - +