Merge pull request #20782 from prlanzarin/u30/feat/tlo-ui-rebased250724

feat(audio): rework audio join UI for transparentListenOnly
This commit is contained in:
Ramón Souza 2024-08-15 16:38:35 -03:00 committed by GitHub
commit 39d68e4156
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
64 changed files with 1772 additions and 911 deletions

View File

@ -1,9 +1,12 @@
package org.bigbluebutton
import org.bigbluebutton.common2.msgs.{ BbbCommonEnvCoreMsg, BbbCoreEnvelope, BbbCoreHeaderWithMeetingId, MessageTypes, MuteUserInVoiceConfSysMsg, MuteUserInVoiceConfSysMsgBody, Routing }
import org.apache.pekko.actor.ActorContext
import org.bigbluebutton.common2.msgs.{ BbbCommonEnvCoreMsg, BbbCoreEnvelope, BbbCoreHeaderWithMeetingId, MessageTypes, Routing }
import org.bigbluebutton.core.running.{ LiveMeeting, OutMsgRouter }
import org.bigbluebutton.core2.{ MeetingStatus2x }
import org.bigbluebutton.core.apps.webcam.CameraHdlrHelpers
import org.bigbluebutton.core.apps.voice.VoiceApp
import org.bigbluebutton.core.models.{
Roles,
Users2x,
@ -16,19 +19,19 @@ import org.bigbluebutton.core.models.{
object LockSettingsUtil {
private def muteUserInVoiceConf(liveMeeting: LiveMeeting, outGW: OutMsgRouter, vu: VoiceUserState, mute: Boolean): Unit = {
val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, liveMeeting.props.meetingProp.intId, vu.intId)
val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing)
val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, liveMeeting.props.meetingProp.intId)
val body = MuteUserInVoiceConfSysMsgBody(liveMeeting.props.voiceProp.voiceConf, vu.voiceUserId, mute)
val event = MuteUserInVoiceConfSysMsg(header, body)
val msgEvent = BbbCommonEnvCoreMsg(envelope, event)
outGW.send(msgEvent)
// Mutes/unmutes a single voice user as part of lock-settings enforcement.
// Delegates to VoiceApp.muteUserInVoiceConf so lock-settings muting goes
// through the same code path (unhold + listen-only toggle) as every other
// mute request.
private def muteUserInVoiceConf(
liveMeeting: LiveMeeting,
outGW: OutMsgRouter,
vu: VoiceUserState, mute: Boolean
)(implicit context: ActorContext): Unit = {
VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, mute)
}
private def applyMutingOfUsers(disableMic: Boolean, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = {
private def applyMutingOfUsers(
disableMic: Boolean,
liveMeeting: LiveMeeting,
outGW: OutMsgRouter
)(implicit context: ActorContext): Unit = {
VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu =>
Users2x.findWithIntId(liveMeeting.users2x, vu.intId).foreach { user =>
if (user.role == Roles.VIEWER_ROLE && !vu.listenOnly && user.locked) {
@ -44,12 +47,20 @@ object LockSettingsUtil {
}
}
def enforceLockSettingsForAllVoiceUsers(liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = {
// Applies the meeting's current disableMic lock setting to every voice
// user in the meeting (locked viewers with duplex audio get muted).
def enforceLockSettingsForAllVoiceUsers(
  liveMeeting: LiveMeeting,
  outGW: OutMsgRouter
)(implicit context: ActorContext): Unit = {
  applyMutingOfUsers(
    MeetingStatus2x.getPermissions(liveMeeting.status).disableMic,
    liveMeeting,
    outGW
  )
}
def enforceLockSettingsForVoiceUser(voiceUser: VoiceUserState, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = {
def enforceLockSettingsForVoiceUser(
voiceUser: VoiceUserState,
liveMeeting: LiveMeeting,
outGW: OutMsgRouter
)(implicit context: ActorContext): Unit = {
val permissions = MeetingStatus2x.getPermissions(liveMeeting.status)
if (permissions.disableMic) {
Users2x.findWithIntId(liveMeeting.users2x, voiceUser.intId).foreach { user =>
@ -65,7 +76,11 @@ object LockSettingsUtil {
}
}
private def enforceListenOnlyUserIsMuted(intUserId: String, liveMeeting: LiveMeeting, outGW: OutMsgRouter): Unit = {
private def enforceListenOnlyUserIsMuted(
intUserId: String,
liveMeeting: LiveMeeting,
outGW: OutMsgRouter
)(implicit context: ActorContext): Unit = {
val voiceUser = VoiceUsers.findWithIntId(liveMeeting.voiceUsers, intUserId)
voiceUser.foreach { vu =>
// Make sure that listen only user is muted. (ralam dec 6, 2019

View File

@ -46,6 +46,8 @@ trait SystemConfiguration {
lazy val ejectRogueVoiceUsers = Try(config.getBoolean("voiceConf.ejectRogueVoiceUsers")).getOrElse(true)
lazy val dialInApprovalAudioPath = Try(config.getString("voiceConf.dialInApprovalAudioPath")).getOrElse("ivr/ivr-please_hold_while_party_contacted.wav")
lazy val toggleListenOnlyAfterMuteTimer = Try(config.getInt("voiceConf.toggleListenOnlyAfterMuteTimer")).getOrElse(4)
lazy val transparentListenOnlyThreshold = Try(config.getInt("voiceConf.transparentListenOnlyThreshold")).getOrElse(0)
lazy val muteOnStartThreshold = Try(config.getInt("voiceConf.muteOnStartThreshold")).getOrElse(0)
lazy val recordingChapterBreakLengthInMinutes = Try(config.getInt("recording.chapterBreakLengthInMinutes")).getOrElse(0)

View File

@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.users
import org.bigbluebutton.common2.msgs.MuteUserCmdMsg
import org.bigbluebutton.core.apps.{ PermissionCheck, RightsManagementTrait }
import org.bigbluebutton.core.apps.voice.VoiceApp
import org.bigbluebutton.core.models.{ Roles, Users2x, VoiceUsers }
import org.bigbluebutton.core.running.{ LiveMeeting, OutMsgRouter }
import org.bigbluebutton.core2.MeetingStatus2x
@ -51,13 +52,12 @@ trait MuteUserCmdMsgHdlr extends RightsManagementTrait {
} else {
if (u.muted != msg.body.mute) {
log.info("Send mute user request. meetingId=" + meetingId + " userId=" + u.intId + " user=" + u)
val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg(
meetingId,
voiceConf,
u.voiceUserId,
VoiceApp.muteUserInVoiceConf(
liveMeeting,
outGW,
u.intId,
msg.body.mute
)
outGW.send(event)
}
}
}

View File

@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.voice
import org.bigbluebutton.common2.msgs._
import org.bigbluebutton.core.running.{ LiveMeeting, MeetingActor, OutMsgRouter }
import org.bigbluebutton.core2.MeetingStatus2x
trait GetMicrophonePermissionReqMsgHdlr {
this: MeetingActor =>
@ -16,7 +17,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
voiceConf: String,
userId: String,
sfuSessionId: String,
allowed: Boolean
allowed: Boolean,
muteOnStart: Boolean
): Unit = {
val routing = Routing.addMsgToClientRouting(MessageTypes.DIRECT, meetingId, userId)
val envelope = BbbCoreEnvelope(GetMicrophonePermissionRespMsg.NAME, routing)
@ -26,7 +28,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
voiceConf,
userId,
sfuSessionId,
allowed
allowed,
muteOnStart
)
val event = GetMicrophonePermissionRespMsg(header, body)
val eventMsg = BbbCommonEnvCoreMsg(envelope, event)
@ -41,13 +44,18 @@ trait GetMicrophonePermissionReqMsgHdlr {
msg.body.voiceConf,
msg.body.callerIdNum
)
// Lock settings should only define whether the user starts muted or not.
// It must not prevent users from joining audio.
val locked = VoiceHdlrHelpers.isMicrophoneSharingLocked(liveMeeting, msg.body.userId)
val muteOnStart = MeetingStatus2x.isMeetingMuted(liveMeeting.status) || locked
broadcastEvent(
liveMeeting.props.meetingProp.intId,
liveMeeting.props.voiceProp.voiceConf,
msg.body.userId,
msg.body.sfuSessionId,
allowed
allowed,
muteOnStart
)
}
}

View File

@ -12,11 +12,34 @@ trait ListenOnlyModeToggledInSfuEvtMsgHdlr {
def handleListenOnlyModeToggledInSfuEvtMsg(msg: ListenOnlyModeToggledInSfuEvtMsg): Unit = {
for {
vu <- VoiceUsers.findWithIntId(liveMeeting.voiceUsers, msg.body.userId)
vu <- VoiceUsers.findWithIntIdAndCallerNum(
liveMeeting.voiceUsers,
msg.body.userId,
msg.body.callerNum
)
} yield {
VoiceApp.holdChannelInVoiceConf(
// Do not execute if the command is asking for the channel to be HELD
// and the channel is already HELD. This is an edge case with the uuid_hold
// command being used through FSESL or fsapi where holding only works via
// the uuid_hold <toggle> subcommand, which may cause the channel to be the
// opposite of what we want.
// The unhold (uuid_hold off) command is not affected by this, but we don't
// want to send it if the channel is already unheld.
if ((msg.body.enabled && !vu.hold) || !msg.body.enabled) {
VoiceApp.holdChannelInVoiceConf(
liveMeeting,
outGW,
vu.uuid,
msg.body.enabled
)
}
// If the channel is already in the desired state, just make sure
// any pending mute or unmute commands are sent.
VoiceApp.handleChannelHoldChanged(
liveMeeting,
outGW,
msg.body.userId,
vu.uuid,
msg.body.enabled
)

View File

@ -49,6 +49,10 @@ trait UserLeftVoiceConfEvtMsgHdlr {
} yield {
VoiceUsers.removeWithIntId(liveMeeting.voiceUsers, liveMeeting.props.meetingProp.intId, user.intId)
broadcastEvent(user)
if (!user.listenOnly) {
VoiceApp.enforceMuteOnStartThreshold(liveMeeting, outGW)
}
}
if (liveMeeting.props.meetingProp.isBreakout) {

View File

@ -133,13 +133,14 @@ object VoiceApp extends SystemConfiguration {
liveMeeting,
outGW,
mutedUser.intId,
mutedUser.callerNum,
muted,
toggleListenOnlyAfterMuteTimer
)
// If the user is muted or unmuted with an unheld channel, broadcast
// the event right away.
// If the user is unmuted, but channel is held, we need to wait for the
// If the user is unmuted, but channel is held, we need to wait for the
// channel to be active again to broadcast the event. See
// VoiceApp.handleChannelHoldChanged for this second case.
if (muted || (!muted && !mutedUser.hold)) {
@ -150,7 +151,6 @@ object VoiceApp extends SystemConfiguration {
outGW
)
}
}
}
@ -261,7 +261,7 @@ object VoiceApp extends SystemConfiguration {
callingInto: String,
hold: Boolean,
uuid: String = "unused"
): Unit = {
)(implicit context: ActorContext): Unit = {
def broadcastEvent(voiceUserState: VoiceUserState): Unit = {
val routing = Routing.addMsgToClientRouting(
@ -324,10 +324,30 @@ object VoiceApp extends SystemConfiguration {
hold,
uuid
)
val prevTransparentLOStatus = VoiceHdlrHelpers.transparentListenOnlyAllowed(
liveMeeting
)
VoiceUsers.add(liveMeeting.voiceUsers, voiceUserState)
UserVoiceDAO.update(voiceUserState)
UserDAO.updateVoiceUserJoined(voiceUserState)
val newTransparentLOStatus = VoiceHdlrHelpers.transparentListenOnlyAllowed(
liveMeeting
)
if (prevTransparentLOStatus != newTransparentLOStatus) {
// If the transparent listen only mode was activated or deactivated
// we need to update the listen only mode for all users in the meeting
// that are not muted.
handleTransparentLOModeChange(
liveMeeting,
outGW,
newTransparentLOStatus
)
}
broadcastEvent(voiceUserState)
if (liveMeeting.props.meetingProp.isBreakout) {
@ -337,16 +357,19 @@ object VoiceApp extends SystemConfiguration {
)
}
// if the meeting is muted tell freeswitch to mute the new person
if (!isListenOnly
&& MeetingStatus2x.isMeetingMuted(liveMeeting.status)) {
val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg(
liveMeeting.props.meetingProp.intId,
voiceConf,
voiceUserId,
true
)
outGW.send(event)
if (!isListenOnly) {
enforceMuteOnStartThreshold(liveMeeting, outGW)
// if the meeting is muted tell freeswitch to mute the new person
if (MeetingStatus2x.isMeetingMuted(liveMeeting.status)) {
val event = MsgBuilder.buildMuteUserInVoiceConfSysMsg(
liveMeeting.props.meetingProp.intId,
voiceConf,
voiceUserId,
true
)
outGW.send(event)
}
}
// Make sure lock settings are in effect. (ralam dec 6, 2019)
@ -395,6 +418,10 @@ object VoiceApp extends SystemConfiguration {
} yield {
VoiceUsers.removeWithIntId(liveMeeting.voiceUsers, user.meetingId, user.intId)
broadcastEvent(user)
if (!user.listenOnly) {
enforceMuteOnStartThreshold(liveMeeting, outGW)
}
}
if (liveMeeting.props.meetingProp.isBreakout) {
@ -405,6 +432,43 @@ object VoiceApp extends SystemConfiguration {
}
}
// Once #muteOnStartThreshold number of voice users is hit, we force
// meetingMute on MeetingStatus2x and broadcast MeetingMutedEvtMsg to clients.
// Otherwise, we restore the meeting's original muteOnStart value and
// broadcast MeetingMutedEvtMsg accordingly.
// muteOnStartThreshold = 0 means no threshold (disabled).
def enforceMuteOnStartThreshold(
  liveMeeting: LiveMeeting,
  outGW: OutMsgRouter
): Unit = {
  // Guard clause instead of a bare `return` (non-idiomatic in Scala).
  if (muteOnStartThreshold != 0) {
    val originalMuteOnStart = liveMeeting.props.voiceProp.muteOnStart

    if (VoiceHdlrHelpers.muteOnStartThresholdReached(liveMeeting)) {
      // Threshold reached: force-mute the meeting if not already muted.
      if (!MeetingStatus2x.isMeetingMuted(liveMeeting.status)) {
        MeetingStatus2x.muteMeeting(liveMeeting.status)
        val event = MsgBuilder.buildMeetingMutedEvtMsg(
          liveMeeting.props.meetingProp.intId,
          SystemUser.ID,
          true,
          SystemUser.ID
        )
        outGW.send(event)
      }
    } else if (MeetingStatus2x.isMeetingMuted(liveMeeting.status) != originalMuteOnStart) {
      // Below threshold: roll the meeting back to its original muteOnStart
      // state and notify clients of the change.
      MeetingStatus2x.setMeetingMuted(liveMeeting.status, originalMuteOnStart)
      val event = MsgBuilder.buildMeetingMutedEvtMsg(
        liveMeeting.props.meetingProp.intId,
        SystemUser.ID,
        originalMuteOnStart,
        SystemUser.ID
      )
      outGW.send(event)
    }
  }
}
/** Toggle audio for the given user in voice conference.
*
* We first stop the current audio being played, preventing the playback
@ -476,27 +540,62 @@ object VoiceApp extends SystemConfiguration {
}
}
// Re-evaluates the listen-only state of every muted duplex user after the
// transparent listen only mode was activated or deactivated meeting-wide.
// When the mode is allowed, listen only follows the user's mute state;
// when it is not allowed, listen only is forced off.
def handleTransparentLOModeChange(
  liveMeeting: LiveMeeting,
  outGW: OutMsgRouter,
  allowed: Boolean
)(implicit context: ActorContext): Unit = {
  VoiceUsers.findAllMutedVoiceUsers(liveMeeting.voiceUsers) foreach { mutedUser =>
    // Equivalent to: if allowed, use the user's mute state; otherwise false.
    val targetState = allowed && mutedUser.muted
    toggleListenOnlyMode(
      liveMeeting,
      outGW,
      mutedUser.intId,
      mutedUser.callerNum,
      targetState
    )
  }
}
def toggleListenOnlyMode(
liveMeeting: LiveMeeting,
outGW: OutMsgRouter,
userId: String,
callerNum: String,
enabled: Boolean,
delay: Int = 0
)(implicit context: ActorContext): Unit = {
implicit def executionContext = context.system.dispatcher
val allowed = VoiceHdlrHelpers.transparentListenOnlyAllowed(liveMeeting)
// Guarantee there are no other tasks for this channel
removeToggleListenOnlyTask(userId)
// If the meeting has not yet hit the minium amount of duplex channels
// for transparent listen only to be enabled, we don't need to do anything
if (!allowed && enabled) {
return
}
def broacastEvent(): Unit = {
val event = MsgBuilder.buildToggleListenOnlyModeSysMsg(
liveMeeting.props.meetingProp.intId,
liveMeeting.props.voiceProp.voiceConf,
userId,
callerNum,
enabled
)
outGW.send(event)
}
// Guarantee there are no other tasks for this channel
removeToggleListenOnlyTask(userId)
if (enabled && delay > 0) {
// If we are enabling listen only mode, we wait a bit before actually
// dispatching the command - the idea is that recently muted users
@ -547,13 +646,15 @@ object VoiceApp extends SystemConfiguration {
hold
) match {
case Some(vu) =>
// Mute vs hold state mismatch, enforce hold state again.
// Mute state is the predominant one here.
if (vu.muted != hold) {
// Mute vs hold state mismatch. Enforce it if the user is unmuted,
// but hold is active, to avoid the user being unable to talk when
// the channel is active again.
if (!vu.muted && vu.hold) {
toggleListenOnlyMode(
liveMeeting,
outGW,
intId,
vu.callerNum,
vu.muted
)
}
@ -570,4 +671,48 @@ object VoiceApp extends SystemConfiguration {
case _ =>
}
}
// Mutes or unmutes a duplex voice user in the voice conference.
// No-op when the user cannot be found or is already in the desired state.
// On unmute, the channel is unheld and listen-only mode is toggled off
// BEFORE the unmute command is sent, so the user is audible once unmuted.
def muteUserInVoiceConf(
liveMeeting: LiveMeeting,
outGW: OutMsgRouter,
userId: String,
muted: Boolean
)(implicit context: ActorContext): Unit = {
for {
u <- VoiceUsers.findWithIntId(
liveMeeting.voiceUsers,
userId
)
} yield {
// Skip if the user is already in the desired mute state.
if (u.muted != muted) {
val muteEvent = MsgBuilder.buildMuteUserInVoiceConfSysMsg(
liveMeeting.props.meetingProp.intId,
liveMeeting.props.voiceProp.voiceConf,
u.voiceUserId,
muted
)
// If we're unmuting, trigger a channel unhold -> toggle listen only
// mode -> unmute
if (!muted) {
holdChannelInVoiceConf(
liveMeeting,
outGW,
u.uuid,
muted
)
toggleListenOnlyMode(
liveMeeting,
outGW,
u.intId,
u.callerNum,
muted,
0
)
}
// The mute command is dispatched last, after the unhold/toggle
// requests above. NOTE(review): ordering appears intentional —
// confirm against VoiceApp.handleChannelHoldChanged.
outGW.send(muteEvent)
}
}
}
}

View File

@ -32,10 +32,6 @@ object VoiceHdlrHelpers extends SystemConfiguration {
): Boolean = {
Users2x.findWithIntId(liveMeeting.users2x, userId) match {
case Some(user) => {
val microphoneSharingLocked = LockSettingsUtil.isMicrophoneSharingLocked(
user,
liveMeeting
)
val isCallerBanned = VoiceUsers.isCallerBanned(
callerIdNum,
liveMeeting.voiceUsers
@ -43,11 +39,42 @@ object VoiceHdlrHelpers extends SystemConfiguration {
(applyPermissionCheck &&
!isCallerBanned &&
!microphoneSharingLocked &&
liveMeeting.props.meetingProp.intId == meetingId &&
liveMeeting.props.voiceProp.voiceConf == voiceConf)
}
case _ => false
}
}
// Whether microphone sharing is locked for the given user via lock
// settings. Unknown users are never considered locked; the permission
// check must also be globally enabled (applyPermissionCheck).
def isMicrophoneSharingLocked(
  liveMeeting: LiveMeeting,
  userId: String
): Boolean = {
  Users2x.findWithIntId(liveMeeting.users2x, userId).exists { user =>
    LockSettingsUtil.isMicrophoneSharingLocked(
      user,
      liveMeeting
    ) && applyPermissionCheck
  }
}
// Transparent listen only meeting-wide activation threshold.
// Threshold is the number of muted duplex audio channels in a meeting;
// a threshold of 0 means no threshold (always allowed).
def transparentListenOnlyAllowed(liveMeeting: LiveMeeting): Boolean = {
  val mutedChannelCount =
    VoiceUsers.findAllMutedVoiceUsers(liveMeeting.voiceUsers).size
  transparentListenOnlyThreshold == 0 ||
    mutedChannelCount >= transparentListenOnlyThreshold
}
// Mute on start meeting-wide activation threshold, counted over the
// number of users in the voice conference.
// muteOnStartThreshold = 0 means no threshold (disabled).
def muteOnStartThresholdReached(liveMeeting: LiveMeeting): Boolean = {
  if (muteOnStartThreshold <= 0) false
  else VoiceUsers.usersInVoiceConf(liveMeeting.voiceUsers) >= muteOnStartThreshold
}
}

View File

@ -16,15 +16,30 @@ object VoiceUsers {
users.toVector.find(u => u.uuid == uuid && u.intId == intId)
}
def findWithIntIdAndCallerNum(users: VoiceUsers, intId: String, callerNum: String): Option[VoiceUserState] = {
  // prlanzarin: This is a hack to allow for partial matching of callerNums.
  // This is needed because the callerNums are incorrectly generated by
  // FREESWITCH's ESL events when special characters are in place.
  // e.g.: w_etc_0-bbbID-User;Semi (notice the semicolon) will be generated by
  // FS as w_etc_0-bbbID-User (everything after the semicolon is ignored).
  // We should review callerNum generation in the future as well as stop
  // relying on it for session matching (use UUIDs or client session numbers instead).
  def callerNumMatches(candidate: VoiceUserState): Boolean =
    candidate.callerNum.startsWith(callerNum) || callerNum.startsWith(candidate.callerNum)

  users.toVector.find { candidate =>
    candidate.intId == intId && callerNumMatches(candidate)
  }
}
def findAll(users: VoiceUsers): Vector[VoiceUserState] = users.toVector

// Duplex (two-way audio) users, i.e. everyone that is not listen only.
// Idiom: use the boolean directly instead of comparing with true/false.
def findAllNonListenOnlyVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => !u.listenOnly)
def findAllListenOnlyVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.listenOnly)
def findAllFreeswitchCallers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.calledInto == "freeswitch")
def findAllKurentoCallers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.calledInto == "kms")

// Muted duplex channels only: listen-only users are excluded.
def findAllMutedVoiceUsers(users: VoiceUsers): Vector[VoiceUserState] = users.toVector.filter(u => u.muted && !u.listenOnly)
def findAllBannedCallers(users: VoiceUsers): Vector[VoiceUserState] = users.bannedUsers.values.toVector

// Total number of users in the voice conference (all call types).
def usersInVoiceConf(users: VoiceUsers): Int = users.size

def isCallerBanned(callerIdNum: String, users: VoiceUsers): Boolean = {
  users.bannedUsers.contains(callerIdNum)
}
@ -135,6 +150,8 @@ class VoiceUsers {
private def toVector: Vector[VoiceUserState] = users.values.toVector
private def size: Int = users.size
private def ban(user: VoiceUserState): VoiceUserState = {
bannedUsers += user.callerNum -> user
user

View File

@ -100,6 +100,7 @@ class AnalyticsActor(val includeChat: Boolean) extends Actor with ActorLogging {
case m: ChannelHoldChangedVoiceConfEvtMsg => logMessage(msg)
case m: ToggleListenOnlyModeSysMsg => logMessage(msg)
case m: ListenOnlyModeToggledInSfuEvtMsg => logMessage(msg)
case m: MeetingMutedEvtMsg => logMessage(msg)
// Breakout
case m: BreakoutRoomEndedEvtMsg => logMessage(msg)

View File

@ -37,6 +37,7 @@ object MeetingStatus2x {
def getMeetingExtensionProp(status: MeetingStatus2x): MeetingExtensionProp = status.extension
def muteMeeting(status: MeetingStatus2x) = status.meetingMuted = true
def unmuteMeeting(status: MeetingStatus2x) = status.meetingMuted = false
def setMeetingMuted(status: MeetingStatus2x, value: Boolean) = status.meetingMuted = value
def isMeetingMuted(status: MeetingStatus2x): Boolean = status.meetingMuted
def recordingStarted(status: MeetingStatus2x) = status.recording = true
def recordingStopped(status: MeetingStatus2x) = status.recording = false

View File

@ -7,6 +7,7 @@ import org.bigbluebutton.core2.MeetingStatus2x
import org.bigbluebutton.core.apps.{ PermissionCheck, RightsManagementTrait }
import org.bigbluebutton.core.db.NotificationDAO
import org.bigbluebutton.core2.message.senders.MsgBuilder
import org.bigbluebutton.core.apps.voice.VoiceApp
trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait {
this: MeetingActor =>
@ -49,7 +50,7 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait {
}
val muted = MeetingStatus2x.isMeetingMuted(liveMeeting.status)
val event = build(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy)
val event = MsgBuilder.buildMeetingMutedEvtMsg(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy)
outGW.send(event)
@ -60,8 +61,8 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait {
VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu =>
if (!vu.listenOnly) {
Users2x.findWithIntId(liveMeeting.users2x, vu.intId) match {
case Some(u) => if (!u.presenter) muteUserInVoiceConf(vu, muted)
case None => muteUserInVoiceConf(vu, muted)
case Some(u) => if (!u.presenter) VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted)
case None => VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted)
}
}
}
@ -74,28 +75,4 @@ trait MuteAllExceptPresentersCmdMsgHdlr extends RightsManagementTrait {
Users2x.findNotPresenters(liveMeeting.users2x)
}
def build(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = {
val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId)
val envelope = BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, routing)
val header = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId)
val body = MeetingMutedEvtMsgBody(muted, mutedBy)
val event = MeetingMutedEvtMsg(header, body)
BbbCommonEnvCoreMsg(envelope, event)
}
def muteUserInVoiceConf(vu: VoiceUserState, mute: Boolean): Unit = {
val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, props.meetingProp.intId, vu.intId)
val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing)
val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, props.meetingProp.intId)
val body = MuteUserInVoiceConfSysMsgBody(props.voiceProp.voiceConf, vu.voiceUserId, mute)
val event = MuteUserInVoiceConfSysMsg(header, body)
val msgEvent = BbbCommonEnvCoreMsg(envelope, event)
outGW.send(msgEvent)
}
}

View File

@ -7,6 +7,7 @@ import org.bigbluebutton.core.models.{ VoiceUserState, VoiceUsers }
import org.bigbluebutton.core.running.{ MeetingActor, OutMsgRouter }
import org.bigbluebutton.core2.MeetingStatus2x
import org.bigbluebutton.core2.message.senders.MsgBuilder
import org.bigbluebutton.core.apps.voice.VoiceApp
trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait {
this: MeetingActor =>
@ -20,30 +21,6 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait {
val reason = "No permission to mute meeting."
PermissionCheck.ejectUserForFailedPermission(meetingId, msg.header.userId, reason, outGW, liveMeeting)
} else {
def build(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = {
val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId)
val envelope = BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, routing)
val header = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId)
val body = MeetingMutedEvtMsgBody(muted, mutedBy)
val event = MeetingMutedEvtMsg(header, body)
BbbCommonEnvCoreMsg(envelope, event)
}
def muteUserInVoiceConf(vu: VoiceUserState, mute: Boolean): Unit = {
val routing = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, props.meetingProp.intId, vu.intId)
val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing)
val header = BbbCoreHeaderWithMeetingId(MuteUserInVoiceConfSysMsg.NAME, props.meetingProp.intId)
val body = MuteUserInVoiceConfSysMsgBody(props.voiceProp.voiceConf, vu.voiceUserId, mute)
val event = MuteUserInVoiceConfSysMsg(header, body)
val msgEvent = BbbCommonEnvCoreMsg(envelope, event)
outGW.send(msgEvent)
}
if (msg.body.mute != MeetingStatus2x.isMeetingMuted(liveMeeting.status)) {
if (msg.body.mute) {
val notifyEvent = MsgBuilder.buildNotifyAllInMeetingEvtMsg(
@ -74,7 +51,12 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait {
}
val muted = MeetingStatus2x.isMeetingMuted(liveMeeting.status)
val meetingMutedEvent = build(props.meetingProp.intId, msg.body.mutedBy, muted, msg.body.mutedBy)
val meetingMutedEvent = MsgBuilder.buildMeetingMutedEvtMsg(
props.meetingProp.intId,
msg.body.mutedBy,
muted,
msg.body.mutedBy
)
outGW.send(meetingMutedEvent)
@ -82,7 +64,7 @@ trait MuteMeetingCmdMsgHdlr extends RightsManagementTrait {
if (muted) {
VoiceUsers.findAll(liveMeeting.voiceUsers) foreach { vu =>
if (!vu.listenOnly) {
muteUserInVoiceConf(vu, muted)
VoiceApp.muteUserInVoiceConf(liveMeeting, outGW, vu.intId, muted)
}
}
}

View File

@ -282,6 +282,17 @@ object MsgBuilder {
BbbCommonEnvCoreMsg(envelope, event)
}
// Builds a MeetingMutedEvtMsg broadcast to every client in the meeting,
// announcing that the meeting-wide mute state changed.
def buildMeetingMutedEvtMsg(meetingId: String, userId: String, muted: Boolean, mutedBy: String): BbbCommonEnvCoreMsg = {
  val clientRouting = Routing.addMsgToClientRouting(MessageTypes.BROADCAST_TO_MEETING, meetingId, userId)
  val msgHeader = BbbClientMsgHeader(MeetingMutedEvtMsg.NAME, meetingId, userId)
  val msgBody = MeetingMutedEvtMsgBody(muted, mutedBy)
  BbbCommonEnvCoreMsg(
    BbbCoreEnvelope(MeetingMutedEvtMsg.NAME, clientRouting),
    MeetingMutedEvtMsg(msgHeader, msgBody)
  )
}
def buildMuteUserInVoiceConfSysMsg(meetingId: String, voiceConf: String, voiceUserId: String, mute: Boolean): BbbCommonEnvCoreMsg = {
val routing = collection.immutable.HashMap("sender" -> "bbb-apps-akka")
val envelope = BbbCoreEnvelope(MuteUserInVoiceConfSysMsg.NAME, routing)
@ -574,11 +585,12 @@ object MsgBuilder {
meetingId: String,
voiceConf: String,
userId: String,
callerNum: String,
enabled: Boolean
): BbbCommonEnvCoreMsg = {
val routing = collection.immutable.HashMap("sender" -> "bbb-apps-akka")
val envelope = BbbCoreEnvelope(ToggleListenOnlyModeSysMsg.NAME, routing)
val body = ToggleListenOnlyModeSysMsgBody(voiceConf, userId, enabled)
val body = ToggleListenOnlyModeSysMsgBody(voiceConf, userId, callerNum, enabled)
val header = BbbCoreHeaderWithMeetingId(ToggleListenOnlyModeSysMsg.NAME, meetingId)
val event = ToggleListenOnlyModeSysMsg(header, body)

View File

@ -118,6 +118,17 @@ voiceConf {
# Time (seconds) to wait before requesting an audio channel hold after
# muting a user. Used in the experimental, transparent listen only mode.
toggleListenOnlyAfterMuteTimer = 4
# Transparent listen only meeting-wide activation threshold.
# Threshold is the number of muted duplex audio channels in a meeting.
# 0 = disabled
transparentListenOnlyThreshold = 0
# muteOnStartThreshold: forces muteOnStart=true for a meeting when the number
# of audio participants reaches the specified threshold.
# Overrides any existing muteOnStart directive (bbb-web, API and the client).
# 0 = disabled.
muteOnStartThreshold = 0
}
recording {

View File

@ -109,6 +109,7 @@ public class FreeswitchConferenceEventListener implements ConferenceEventListene
evt.callSession,
evt.clientSession,
evt.userId,
evt.getVoiceUserId(),
evt.callerName,
evt.callState,
evt.origCallerIdName,

View File

@ -59,6 +59,7 @@ public interface IVoiceConferenceService {
String callSession,
String clientSession,
String userId,
String voiceUserId,
String callerName,
String callState,
String origCallerIdName,

View File

@ -4,6 +4,8 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent {
public final String callSession;
public final String clientSession;
public final String userId;
// AKA mod_conference memberId
public final String voiceUserId;
public final String callerName;
public final String callState;
public final String origCallerIdName;
@ -14,6 +16,7 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent {
String callSession,
String clientSession,
String userId,
String voiceUserId,
String callerName,
String callState,
String origCallerIdName,
@ -22,9 +25,14 @@ public class VoiceCallStateEvent extends VoiceConferenceEvent {
this.callSession = callSession;
this.clientSession = clientSession;
this.userId = userId;
this.voiceUserId = voiceUserId;
this.callerName = callerName;
this.callState = callState;
this.origCallerIdName = origCallerIdName;
this.origCalledDest = origCalledDest;
}
// Accessor for voiceUserId (the FreeSWITCH mod_conference member ID,
// per the field's declaration comment).
public String getVoiceUserId() {
return voiceUserId;
}
}

View File

@ -84,6 +84,7 @@ public class ESLEventListener implements IEslEventListener {
String origCallerIdName = headers.get("Caller-Caller-ID-Name");
String origCallerDestNumber = headers.get("Caller-Destination-Number");
String clientSession = "0";
String memberIdStr = memberId != null ? memberId.toString() : "";
Matcher matcher = CALLERNAME_PATTERN.matcher(callerIdName);
Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(callerIdName);
@ -106,6 +107,7 @@ public class ESLEventListener implements IEslEventListener {
coreuuid,
clientSession,
voiceUserId,
memberIdStr,
callerIdName,
callState,
origCallerIdName,
@ -281,6 +283,7 @@ public class ESLEventListener implements IEslEventListener {
String varvBridge = (eventHeaders.get("variable_vbridge") == null) ? "" : eventHeaders.get("variable_vbridge");
if ("echo".equalsIgnoreCase(application) && !varvBridge.isEmpty()) {
Integer memberId = this.getMemberId(eventHeaders);
String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name");
String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number");
String coreuuid = eventHeaders.get("Core-UUID");
@ -291,6 +294,7 @@ public class ESLEventListener implements IEslEventListener {
String callerName = origCallerIdName;
String clientSession = "0";
String callState = "IN_ECHO_TEST";
String memberIdStr = memberId != null ? memberId.toString() : "";
Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName);
Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName);
@ -314,6 +318,7 @@ public class ESLEventListener implements IEslEventListener {
coreuuid,
clientSession,
voiceUserId,
memberIdStr,
callerName,
callState,
origCallerIdName,
@ -321,6 +326,7 @@ public class ESLEventListener implements IEslEventListener {
conferenceEventListener.handleConferenceEvent(csEvent);
} else if ("RINGING".equalsIgnoreCase(channelCallState) && !varvBridge.isEmpty()) {
Integer memberId = this.getMemberId(eventHeaders);
String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name");
String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number");
String coreuuid = eventHeaders.get("Core-UUID");
@ -330,6 +336,7 @@ public class ESLEventListener implements IEslEventListener {
String callerName = origCallerIdName;
String clientSession = "0";
String callState = "CALL_STARTED";
String memberIdStr = memberId != null ? memberId.toString() : "";
Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName);
Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName);
@ -353,6 +360,7 @@ public class ESLEventListener implements IEslEventListener {
coreuuid,
clientSession,
voiceUserId,
memberIdStr,
callerName,
callState,
origCallerIdName,
@ -365,6 +373,7 @@ public class ESLEventListener implements IEslEventListener {
String channelState = (eventHeaders.get("Channel-State") == null) ? "" : eventHeaders.get("Channel-State");
if ("HANGUP".equalsIgnoreCase(channelCallState) && "CS_DESTROY".equalsIgnoreCase(channelState)) {
Integer memberId = this.getMemberId(eventHeaders);
String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name");
String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number");
String coreuuid = eventHeaders.get("Core-UUID");
@ -374,6 +383,7 @@ public class ESLEventListener implements IEslEventListener {
String callerName = origCallerIdName;
String clientSession = "0";
String callState = "CALL_ENDED";
String memberIdStr = memberId != null ? memberId.toString() : "";
Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName);
Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName);
@ -397,6 +407,7 @@ public class ESLEventListener implements IEslEventListener {
coreuuid,
clientSession,
voiceUserId,
memberIdStr,
callerName,
callState,
origCallerIdName,
@ -405,6 +416,7 @@ public class ESLEventListener implements IEslEventListener {
conferenceEventListener.handleConferenceEvent(csEvent);
} else if ("RINGING".equalsIgnoreCase(channelCallState) && "CS_EXECUTE".equalsIgnoreCase(channelState)) {
Integer memberId = this.getMemberId(eventHeaders);
String origCallerIdName = eventHeaders.get("Caller-Caller-ID-Name");
String origCallerDestNumber = eventHeaders.get("Caller-Destination-Number");
String coreuuid = eventHeaders.get("Core-UUID");
@ -414,6 +426,7 @@ public class ESLEventListener implements IEslEventListener {
String callerName = origCallerIdName;
String clientSession = "0";
String callState = "CALL_STARTED";
String memberIdStr = memberId != null ? memberId.toString() : "";
Matcher callerListenOnly = CALLERNAME_LISTENONLY_PATTERN.matcher(origCallerIdName);
Matcher callWithSess = CALLERNAME_WITH_SESS_INFO_PATTERN.matcher(origCallerIdName);
@ -437,6 +450,7 @@ public class ESLEventListener implements IEslEventListener {
coreuuid,
clientSession,
voiceUserId,
memberIdStr,
callerName,
callState,
origCallerIdName,

View File

@ -229,6 +229,7 @@ class VoiceConferenceService(healthz: HealthzService,
callSession: String,
clientSession: String,
userId: String,
voiceUserId: String,
callerName: String,
callState: String,
origCallerIdName: String,
@ -240,6 +241,7 @@ class VoiceConferenceService(healthz: HealthzService,
callSession = callSession,
clientSession = clientSession,
userId = userId,
voiceUserId = voiceUserId,
callerName = callerName,
callState = callState,
origCallerIdName = origCallerIdName,

View File

@ -528,6 +528,7 @@ case class VoiceConfCallStateEvtMsgBody(
callSession: String,
clientSession: String,
userId: String,
voiceUserId: String,
callerName: String,
callState: String,
origCallerIdName: String,
@ -613,7 +614,8 @@ case class GetMicrophonePermissionRespMsgBody(
voiceConf: String,
userId: String,
sfuSessionId: String,
allowed: Boolean
allowed: Boolean,
muteOnStart: Boolean
)
/**
@ -658,6 +660,7 @@ case class ToggleListenOnlyModeSysMsg(
case class ToggleListenOnlyModeSysMsgBody(
voiceConf: String,
userId: String,
callerNum: String,
enabled: Boolean
)
@ -674,5 +677,6 @@ case class ListenOnlyModeToggledInSfuEvtMsgBody(
meetingId: String,
voiceConf: String,
userId: String,
callerNum: String,
enabled: Boolean
)

View File

@ -8,7 +8,15 @@
<action application="set" data="rtp_jitter_buffer_during_bridge=true" />
<action application="set" data="suppress_cng=true" />
<action application="answer" />
<action application="conference" data="$1@cdquality" />
<!-- Special condition for BBB's "transparent listen only" mechanism - HOLD on creation -->
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-mhos$" break="never">
<action application="log" data="INFO Channel is going to be HELD and MUTED on creation ${uuid}" />
<action application="set" data="api_result=${uuid_hold(${uuid})}" />
</condition>
<!-- Duplicate condition to guarantee line-order (not nested-order) execution of this extension -->
<condition field="destination_number" expression="^(\d{5,11})$" require-nested="false">
<action application="conference" data="$1@cdquality" />
</condition>
</condition>
</extension>
<extension name="bbb_conferences">

View File

@ -1,6 +1,6 @@
<include>
<extension name="bbb_webrtc_call" continue="true">
<condition field="${sip_user_agent}" expression="bbb-webrtc-sfu" break="on-false">
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu$" break="on-false">
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
<action application="set" data="bbb_authorized=true"/>
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>

View File

@ -0,0 +1,12 @@
<include>
<extension name="bbb_webrtc_sfu_call" continue="true">
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-muos|bbb-webrtc-sfu-mhos$" break="on-false">
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
<action application="set" data="bbb_authorized=true"/>
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>
<action application="set" data="jb_use_timestamps=true"/>
<action application="set" data="conference_member_flags=mute"/>
<action application="transfer" data="${destination_number} XML default"/>
</condition>
</extension>
</include>

View File

@ -1 +1 @@
git clone --branch v2.14.0-beta.3 --depth 1 https://github.com/bigbluebutton/bbb-webrtc-sfu bbb-webrtc-sfu
git clone --branch v2.14.0 --depth 1 https://github.com/bigbluebutton/bbb-webrtc-sfu bbb-webrtc-sfu

View File

@ -171,7 +171,7 @@ with BigBlueButton; if not, see <http://www.gnu.org/licenses/>.
</main>
</div>
<span id="destination"></span>
<audio id="remote-media" autoplay>
</audio>
<audio id="remote-media" autoplay></audio>
<audio id="local-media" autoplay></audio>
<div id="modals-container"></div>
</body>

View File

@ -61,7 +61,11 @@ export default class BaseAudioBridge {
get inputDeviceId () {
return this._inputDeviceId;
}
/* eslint-disable class-methods-use-this */
supportsTransparentListenOnly() {
return false;
}
/**
@ -78,6 +82,20 @@ export default class BaseAudioBridge {
let backupStream;
try {
// Remove all input audio tracks from the stream
// This will effectively mute the microphone
// and keep the audio output working
if (deviceId === 'listen-only') {
const stream = this.inputStream;
if (stream) {
stream.getAudioTracks().forEach((track) => {
track.stop();
stream.removeTrack(track);
});
}
return stream;
}
const constraints = {
audio: getAudioConstraints({ deviceId }),
};

View File

@ -36,10 +36,25 @@ const getCurrentAudioSinkId = () => {
return audioElement?.sinkId || DEFAULT_OUTPUT_DEVICE_ID;
};
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance().getItem(INPUT_DEVICE_ID_KEY);
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance().getItem(OUTPUT_DEVICE_ID_KEY);
const storeAudioInputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance()
.getItem(OUTPUT_DEVICE_ID_KEY);
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance()
.setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance()
.getItem(INPUT_DEVICE_ID_KEY);
const storeAudioInputDeviceId = (deviceId) => {
if (deviceId === 'listen-only') {
// Do not store listen-only "devices" and remove any stored device
// So it starts from scratch next time.
getStorageSingletonInstance().removeItem(INPUT_DEVICE_ID_KEY);
return false;
}
getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
return true;
};
/**
* Filter constraints set in audioDeviceConstraints, based on

View File

@ -20,6 +20,7 @@ import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';
const SENDRECV_ROLE = 'sendrecv';
const RECV_ROLE = 'recv';
const PASSIVE_SENDRECV_ROLE = 'passive-sendrecv';
const BRIDGE_NAME = 'fullaudio';
const IS_CHROME = browserInfo.isChrome;
@ -81,7 +82,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
const MEDIA = SETTINGS.public.media;
const LISTEN_ONLY_OFFERING = MEDIA.listenOnlyOffering;
const FULLAUDIO_OFFERING = MEDIA.fullAudioOffering;
return isListenOnly
return isListenOnly && !isTransparentListenOnlyEnabled()
? LISTEN_ONLY_OFFERING
: (!isTransparentListenOnlyEnabled() && FULLAUDIO_OFFERING);
}
@ -95,12 +96,17 @@ export default class SFUAudioBridge extends BaseAudioBridge {
this.reconnecting = false;
this.iceServers = [];
this.bridgeName = BRIDGE_NAME;
this.isListenOnly = false;
this.bypassGUM = false;
this.supportsTransparentListenOnly = isTransparentListenOnlyEnabled;
this.handleTermination = this.handleTermination.bind(this);
}
get inputStream() {
if (this.broker) {
// Only return the stream if the broker is active and the role isn't recvonly
// Input stream == actual input-capturing stream, not the one that's being played
if (this.broker && this.role !== RECV_ROLE) {
return this.broker.getLocalStream();
}
@ -111,6 +117,18 @@ export default class SFUAudioBridge extends BaseAudioBridge {
return this.broker?.role;
}
getBrokerRole({ hasInputStream }) {
if (this.isListenOnly) {
return isTransparentListenOnlyEnabled()
? PASSIVE_SENDRECV_ROLE
: RECV_ROLE;
}
if (this.bypassGUM && !hasInputStream) return PASSIVE_SENDRECV_ROLE;
return SENDRECV_ROLE;
}
setInputStream(stream) {
if (this.broker == null) return null;
@ -326,6 +344,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
extension,
inputStream,
forceRelay: _forceRelay = false,
bypassGUM = false,
} = options;
const SETTINGS = window.meetingClientSettings;
@ -349,6 +368,10 @@ export default class SFUAudioBridge extends BaseAudioBridge {
try {
this.inEchoTest = !!extension;
this.isListenOnly = isListenOnly;
this.bypassGUM = bypassGUM;
const role = this.getBrokerRole({
hasInputStream: !!inputStream,
});
const brokerOptions = {
clientSessionNumber: getAudioSessionNumber(),
@ -365,11 +388,12 @@ export default class SFUAudioBridge extends BaseAudioBridge {
mediaStreamFactory: this.mediaStreamFactory,
gatheringTimeout: GATHERING_TIMEOUT,
transparentListenOnly: isTransparentListenOnlyEnabled(),
bypassGUM,
};
this.broker = new AudioBroker(
Auth.authenticateURL(SFU_URL),
isListenOnly ? RECV_ROLE : SENDRECV_ROLE,
role,
brokerOptions,
);

View File

@ -610,7 +610,6 @@ export interface Media {
traceSip: boolean
sdpSemantics: string
localEchoTest: LocalEchoTest
showVolumeMeter: boolean
muteAudioOutputWhenAway: boolean
}

View File

@ -11,6 +11,7 @@ import {
} from '../service';
import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import { SET_SPEECH_LOCALE } from '/imports/ui/core/graphql/mutations/userMutations';
import Styled from './styles';
const intlMessages = defineMessages({
title: {
@ -71,16 +72,22 @@ const intlMessages = defineMessages({
},
});
interface AudioCaptionsContainerProps {
showTitleLabel?: boolean;
}
interface AudioCaptionsSelectProps {
isTranscriptionEnabled: boolean;
speechLocale: string;
speechVoices: string[];
showTitleLabel?: boolean;
}
const AudioCaptionsSelect: React.FC<AudioCaptionsSelectProps> = ({
isTranscriptionEnabled,
speechLocale,
speechVoices,
showTitleLabel = true,
}) => {
const useLocaleHook = useFixedLocale();
const intl = useIntl();
@ -118,49 +125,54 @@ const AudioCaptionsSelect: React.FC<AudioCaptionsSelectProps> = ({
setUserLocaleProperty(value, setUserSpeechLocale);
};
return (
<div style={{ padding: '1rem 0' }}>
<label
htmlFor="speechSelect"
style={{ padding: '0 .5rem' }}
const renderSelect = () => (
<Styled.Select
id="speechSelect"
onChange={onChange}
value={speechLocale}
>
<option
key="disabled"
value=""
>
{intl.formatMessage(intlMessages.disabled)}
</option>
{isGladia()
? (
<option
key="auto"
value="auto"
>
{intl.formatMessage(intlMessages.auto)}
</option>
)
: null}
{speechVoices.map((v) => (
<option
key={v}
value={v}
>
{intl.formatMessage(intlMessages[v as keyof typeof intlMessages])}
</option>
))}
</Styled.Select>
);
return showTitleLabel ? (
<Styled.CaptionsSelector>
<label htmlFor="speechSelect" style={{ padding: '0 .5rem' }}>
{intl.formatMessage(intlMessages.title)}
</label>
<select
id="speechSelect"
onChange={onChange}
value={speechLocale}
>
<option
key="disabled"
value=""
>
{intl.formatMessage(intlMessages.disabled)}
</option>
{isGladia()
? (
<option
key="auto"
value="auto"
>
{intl.formatMessage(intlMessages.auto)}
</option>
)
: null}
{speechVoices.map((v) => (
<option
key={v}
value={v}
>
{intl.formatMessage(intlMessages[v as keyof typeof intlMessages])}
</option>
))}
</select>
</div>
{renderSelect()}
</Styled.CaptionsSelector>
) : (
renderSelect()
);
};
const AudioCaptionsSelectContainer: React.FC = () => {
const AudioCaptionsSelectContainer: React.FC<AudioCaptionsContainerProps> = ({
showTitleLabel = true,
}) => {
const [voicesList, setVoicesList] = React.useState<string[]>([]);
const voices = getSpeechVoices();
@ -185,6 +197,7 @@ const AudioCaptionsSelectContainer: React.FC = () => {
isTranscriptionEnabled={isEnabled}
speechLocale={currentUser.speechLocale ?? ''}
speechVoices={voices || voicesList}
showTitleLabel={showTitleLabel}
/>
);
};

View File

@ -0,0 +1,45 @@
import styled from 'styled-components';
import {
borderSize,
} from '/imports/ui/stylesheets/styled-components/general';
import {
colorGrayLabel,
colorWhite,
colorGrayLighter,
colorPrimary,
} from '/imports/ui/stylesheets/styled-components/palette';
const CaptionsSelector = styled.div`
display: grid;
grid-auto-flow: column;
padding: 1rem 0px;
align-items: center;
`;
const Select = styled.select`
background-color: ${colorWhite};
border: 0.1rem solid ${colorGrayLighter};
border-radius: ${borderSize};
color: ${colorGrayLabel};
width: 100%;
height: 2rem;
padding: 1px;
&:focus {
outline: none;
border-radius: ${borderSize};
box-shadow: 0 0 0 ${borderSize} ${colorPrimary}, inset 0 0 0 1px ${colorPrimary};
}
&:hover,
&:focus {
outline: transparent;
outline-style: dotted;
outline-width: ${borderSize};
}
`;
export default {
CaptionsSelector,
Select,
};

View File

@ -63,6 +63,8 @@ const AudioControls: React.FC<AudioControlsProps> = ({
const echoTestIntervalRef = React.useRef<ReturnType<typeof setTimeout>>();
const [isAudioModalOpen, setIsAudioModalOpen] = React.useState(false);
const [audioModalContent, setAudioModalContent] = React.useState<string | null>(null);
const [audioModalProps, setAudioModalProps] = React.useState<{ unmuteOnExit?: boolean } | null>(null);
const handleJoinAudio = useCallback((connected: boolean) => {
if (connected) {
@ -72,6 +74,12 @@ const AudioControls: React.FC<AudioControlsProps> = ({
}
}, []);
const openAudioSettings = (props: { unmuteOnExit?: boolean } = {}) => {
setAudioModalContent('settings');
setAudioModalProps(props);
setIsAudioModalOpen(true);
};
const joinButton = useMemo(() => {
const joinAudioLabel = away ? intlMessages.joinAudioAndSetActive : intlMessages.joinAudio;
@ -107,12 +115,18 @@ const AudioControls: React.FC<AudioControlsProps> = ({
return (
<Styled.Container>
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer />}
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer openAudioSettings={openAudioSettings} />}
{isAudioModalOpen && (
<AudioModalContainer
priority="low"
setIsOpen={() => setIsAudioModalOpen(false)}
setIsOpen={() => {
setIsAudioModalOpen(false);
setAudioModalContent(null);
setAudioModalProps(null);
}}
isOpen={isAudioModalOpen}
content={audioModalContent}
unmuteOnExit={audioModalProps?.unmuteOnExit}
/>
)}
</Styled.Container>

View File

@ -56,6 +56,26 @@ const intlMessages = defineMessages({
id: 'app.audioNotification.deviceChangeFailed',
description: 'Device change failed',
},
fallbackInputLabel: {
id: 'app.audio.audioSettings.fallbackInputLabel',
description: 'Audio input device label',
},
fallbackOutputLabel: {
id: 'app.audio.audioSettings.fallbackOutputLabel',
description: 'Audio output device label',
},
fallbackNoPermissionLabel: {
id: 'app.audio.audioSettings.fallbackNoPermission',
description: 'No permission to access audio devices label',
},
audioSettingsTitle: {
id: 'app.audio.audioSettings.titleLabel',
description: 'Audio settings button label',
},
noMicListenOnlyLabel: {
id: 'app.audio.audioSettings.noMicListenOnly',
description: 'No microphone (listen only) label',
},
});
interface MuteToggleProps {
@ -75,6 +95,8 @@ interface LiveSelectionProps extends MuteToggleProps {
outputDeviceId: string;
meetingIsBreakout: boolean;
away: boolean;
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
supportsTransparentListenOnly: boolean;
}
export const LiveSelection: React.FC<LiveSelectionProps> = ({
@ -90,6 +112,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked,
toggleMuteMicrophone,
away,
openAudioSettings,
supportsTransparentListenOnly,
}) => {
const intl = useIntl();
@ -105,6 +129,21 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
];
}
const getFallbackLabel = (device: MediaDeviceInfo, index: number) => {
const baseLabel = device?.kind === AUDIO_OUTPUT
? intlMessages.fallbackOutputLabel
: intlMessages.fallbackInputLabel;
let label = intl.formatMessage(baseLabel, { 0: index });
if (!device?.deviceId) {
label = `${label} ${intl.formatMessage(intlMessages.fallbackNoPermissionLabel)}`;
}
return label;
};
const shouldTreatAsMicrophone = () => !listenOnly || supportsTransparentListenOnly;
const renderDeviceList = useCallback((
deviceKind: string,
list: MediaDeviceInfo[],
@ -134,7 +173,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
{
key: `${device.deviceId}-${deviceKind}`,
dataTest: `${deviceKind}-${index + 1}`,
label: truncateDeviceName(device.label),
label: truncateDeviceName(device.label || getFallbackLabel(device, index + 1)),
customStyles: (device.deviceId === currentDeviceId) ? Styled.SelectedLabel : null,
iconRight: (device.deviceId === currentDeviceId) ? 'check' : null,
onClick: () => onDeviceListClick(device.deviceId, deviceKind, callback),
@ -163,10 +202,37 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
];
}
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
// "None" option for audio input devices - aka listen-only
const listenOnly = deviceKind === AUDIO_INPUT
&& currentDeviceId === 'listen-only';
deviceList.push({
key: `listenOnly-${deviceKind}`,
dataTest: `${deviceKind}-listenOnly`,
label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
customStyles: listenOnly && Styled.SelectedLabel,
iconRight: listenOnly ? 'check' : null,
onClick: () => onDeviceListClick('listen-only', deviceKind, callback),
} as MenuOptionItemType);
}
return listTitle.concat(deviceList);
}, []);
const onDeviceListClick = useCallback((deviceId: string, deviceKind: string, callback: Function) => {
if (!deviceId) {
// If there's no deviceId in an audio input device, it means
// the user doesn't have permission to access it. If we support
// transparent listen-only, fire the mount AudioSettings modal to
// acquire permission and let the user configure their stuff.
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
openAudioSettings({ unmuteOnExit: true });
}
return;
}
if (!deviceId) return;
if (deviceKind === AUDIO_INPUT) {
callback(deviceId).catch(() => {
@ -179,7 +245,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
}
}, []);
const inputDeviceList = !listenOnly
const inputDeviceList = shouldTreatAsMicrophone()
? renderDeviceList(
AUDIO_INPUT,
inputDevices,
@ -196,6 +262,16 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
outputDeviceId,
);
const audioSettingsOption = {
icon: 'settings',
label: intl.formatMessage(intlMessages.audioSettingsTitle),
key: 'audioSettingsOption',
dataTest: 'input-selector-audio-settings',
customStyles: Styled.AudioSettingsOption,
dividerTop: true,
onClick: () => openAudioSettings(),
} as MenuOptionItemType;
const leaveAudioOption = {
icon: 'logout',
label: intl.formatMessage(intlMessages.leaveAudio),
@ -204,12 +280,14 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
customStyles: Styled.DangerColor,
onClick: () => handleLeaveAudio(meetingIsBreakout),
};
const dropdownListComplete = inputDeviceList.concat(outputDeviceList)
const dropdownListComplete = inputDeviceList
.concat(outputDeviceList)
.concat({
key: 'separator-02',
isSeparator: true,
})
.concat(leaveAudioOption);
});
if (shouldTreatAsMicrophone()) dropdownListComplete.push(audioSettingsOption);
dropdownListComplete.push(leaveAudioOption);
audioSettingsDropdownItems.forEach((audioSettingsDropdownItem:
PluginSdk.AudioSettingsDropdownInterface) => {
@ -239,9 +317,11 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
const customStyles = { top: '-1rem' };
const { isMobile } = deviceInfo;
const noInputDevice = inputDeviceId === 'listen-only';
return (
<>
{!listenOnly ? (
{shouldTreatAsMicrophone() ? (
// eslint-disable-next-line jsx-a11y/no-access-key
<span
style={{ display: 'none' }}
@ -250,7 +330,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
aria-hidden="true"
/>
) : null}
{(!listenOnly && isMobile) && (
{(shouldTreatAsMicrophone() && isMobile) && (
<MuteToggle
talking={talking}
muted={muted}
@ -258,13 +338,15 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
noInputDevice={noInputDevice}
openAudioSettings={openAudioSettings}
/>
)}
<BBBMenu
customStyles={!isMobile ? customStyles : null}
trigger={(
<>
{!listenOnly && !isMobile
{shouldTreatAsMicrophone() && !isMobile
? (
<MuteToggle
talking={talking}
@ -273,6 +355,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
noInputDevice={noInputDevice}
openAudioSettings={openAudioSettings}
/>
)
: (

View File

@ -33,6 +33,8 @@ interface MuteToggleProps {
isAudioLocked: boolean;
toggleMuteMicrophone: (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => void;
away: boolean;
noInputDevice?: boolean;
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
}
export const MuteToggle: React.FC<MuteToggleProps> = ({
@ -42,6 +44,8 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
isAudioLocked,
toggleMuteMicrophone,
away,
noInputDevice = false,
openAudioSettings,
}) => {
const intl = useIntl();
const toggleMuteShourtcut = useShortcut('toggleMute');
@ -57,15 +61,22 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
const onClickCallback = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation();
if (muted && away) {
muteAway(muted, true, toggleVoice);
VideoService.setTrackEnabled(true);
setAway({
variables: {
away: false,
},
});
if (muted) {
if (away) {
if (!noInputDevice) muteAway(muted, true, toggleVoice);
VideoService.setTrackEnabled(true);
setAway({
variables: {
away: false,
},
});
} else if (noInputDevice) {
// User is in duplex audio, passive-sendrecv, but has no input device set
// Open the audio settings modal to allow them to select an input device
openAudioSettings({ unmuteOnExit: true });
}
}
toggleMuteMicrophone(muted, toggleVoice);
};
return (

View File

@ -8,18 +8,23 @@ import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import { User } from '/imports/ui/Types/user';
import { defineMessages, useIntl } from 'react-intl';
import {
handleLeaveAudio, liveChangeInputDevice, liveChangeOutputDevice, notify, toggleMuteMicrophone,
handleLeaveAudio,
liveChangeInputDevice,
liveChangeOutputDevice,
notify,
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
} from './service';
import useMeeting from '/imports/ui/core/hooks/useMeeting';
import { Meeting } from '/imports/ui/Types/meeting';
import logger from '/imports/startup/client/logger';
import Auth from '/imports/ui/services/auth';
import MutedAlert from '/imports/ui/components/muted-alert/component';
import MuteToggle from './buttons/muteToggle';
import ListenOnly from './buttons/listenOnly';
import LiveSelection from './buttons/LiveSelection';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
import useWhoIsUnmuted from '/imports/ui/core/hooks/useWhoIsUnmuted';
import useToggleVoice from '/imports/ui/components/audio/audio-graphql/hooks/useToggleVoice';
const AUDIO_INPUT = 'audioinput';
const AUDIO_OUTPUT = 'audiooutput';
@ -52,7 +57,11 @@ const intlMessages = defineMessages({
},
});
interface InputStreamLiveSelectorProps {
interface InputStreamLiveSelectorContainerProps {
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
}
interface InputStreamLiveSelectorProps extends InputStreamLiveSelectorContainerProps {
isConnected: boolean;
isPresenter: boolean;
isModerator: boolean;
@ -68,6 +77,8 @@ interface InputStreamLiveSelectorProps {
inputStream: string;
meetingIsBreakout: boolean;
away: boolean;
permissionStatus: string;
supportsTransparentListenOnly: boolean;
}
const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
@ -86,8 +97,12 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
inputStream,
meetingIsBreakout,
away,
permissionStatus,
supportsTransparentListenOnly,
openAudioSettings,
}) => {
const intl = useIntl();
const toggleVoice = useToggleVoice();
// eslint-disable-next-line no-undef
const [inputDevices, setInputDevices] = React.useState<InputDeviceInfo[]>([]);
const [outputDevices, setOutputDevices] = React.useState<MediaDeviceInfo[]>([]);
@ -106,6 +121,15 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const audioOutputDevices = devices.filter((i) => i.kind === AUDIO_OUTPUT);
setInputDevices(audioInputDevices as InputDeviceInfo[]);
setOutputDevices(audioOutputDevices);
})
.catch((error) => {
logger.warn({
logCode: 'audio_device_enumeration_error',
extraInfo: {
errorMessage: error.message,
errorName: error.name,
},
}, `Error enumerating audio devices: ${error.message}`);
});
if (isAudioConnected) {
updateRemovedDevices(inputDevices, outputDevices);
@ -115,11 +139,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const fallbackInputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
if (!fallbackDevice || !fallbackDevice.deviceId) return;
logger.info({
logCode: 'audio_device_live_selector',
logger.warn({
logCode: 'audio_input_live_selector',
extraInfo: {
userId: Auth.userID,
meetingId: Auth.meetingID,
fallbackDeviceId: fallbackDevice?.deviceId,
fallbackDeviceLabel: fallbackDevice?.label,
},
}, 'Current input device was removed. Fallback to default device');
liveChangeInputDevice(fallbackDevice.deviceId).catch(() => {
@ -129,11 +153,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const fallbackOutputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
if (!fallbackDevice || !fallbackDevice.deviceId) return;
logger.info({
logCode: 'audio_device_live_selector',
logger.warn({
logCode: 'audio_output_live_selector',
extraInfo: {
userId: Auth.userID,
meetingId: Auth.meetingID,
fallbackDeviceId: fallbackDevice?.deviceId,
fallbackDeviceLabel: fallbackDevice?.label,
},
}, 'Current output device was removed. Fallback to default device');
liveChangeOutputDevice(fallbackDevice.deviceId, true).catch(() => {
@ -162,7 +186,16 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
if (enableDynamicAudioDeviceSelection) {
updateDevices(inAudio);
}
}, [inAudio]);
}, [inAudio, permissionStatus]);
useEffect(() => {
// If the user has no input device, is connected to audio and unmuted,
// they need to be *muted* by the system. Further attempts to unmute
// will open the audio settings modal instead.
if (inputDeviceId === 'listen-only' && isConnected && !muted) {
toggleMuteMicrophoneSystem(muted, toggleVoice);
}
}, [inputDeviceId, isConnected, muted]);
return (
<>
@ -190,6 +223,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
supportsTransparentListenOnly={supportsTransparentListenOnly}
openAudioSettings={openAudioSettings}
/>
) : (
<>
@ -201,6 +236,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
openAudioSettings={openAudioSettings}
noInputDevice={inputDeviceId === 'listen-only'}
/>
)}
<ListenOnly
@ -216,7 +253,9 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
);
};
const InputStreamLiveSelectorContainer: React.FC = () => {
const InputStreamLiveSelectorContainer: React.FC<InputStreamLiveSelectorContainerProps> = ({
openAudioSettings,
}) => {
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
if (!u.voice) {
return {
@ -261,6 +300,10 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
const outputDeviceId = useReactiveVar(AudioManager._outputDeviceId.value) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const inputStream = useReactiveVar(AudioManager._inputStream) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const supportsTransparentListenOnly = useReactiveVar(AudioManager._transparentListenOnlySupported.value) as boolean;
return (
<InputStreamLiveSelector
@ -280,6 +323,9 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
inputStream={inputStream}
meetingIsBreakout={currentMeeting?.isBreakout ?? false}
away={currentUser?.away ?? false}
openAudioSettings={openAudioSettings}
permissionStatus={permissionStatus}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
);
};

View File

@ -40,32 +40,35 @@ export const handleLeaveAudio = (meetingIsBreakout: boolean) => {
);
};
const toggleMuteMicrophoneThrottled = throttle((
const toggleMute = (
muted: boolean,
toggleVoice: (userId: string, muted: boolean) => void,
actionType = 'user_action',
) => {
Storage.setItem(MUTED_KEY, !muted);
if (muted) {
logger.info(
{
logCode: 'audiomanager_unmute_audio',
extraInfo: { logType: 'user_action' },
},
'microphone unmuted by user',
);
if (AudioManager.inputDeviceId === 'listen-only') {
// User is in duplex audio, passive-sendrecv, but has no input device set
// Unmuting should not be allowed at all
return;
}
logger.info({
logCode: 'audiomanager_unmute_audio',
extraInfo: { logType: actionType },
}, 'microphone unmuted');
Storage.setItem(MUTED_KEY, false);
toggleVoice(Auth.userID as string, false);
} else {
logger.info(
{
logCode: 'audiomanager_mute_audio',
extraInfo: { logType: 'user_action' },
},
'microphone muted by user',
);
logger.info({
logCode: 'audiomanager_mute_audio',
extraInfo: { logType: actionType },
}, 'microphone muted');
Storage.setItem(MUTED_KEY, true);
toggleVoice(Auth.userID as string, true);
}
}, TOGGLE_MUTE_THROTTLE_TIME);
};
const toggleMuteMicrophoneThrottled = throttle(toggleMute, TOGGLE_MUTE_THROTTLE_TIME);
const toggleMuteMicrophoneDebounced = debounce(toggleMuteMicrophoneThrottled, TOGGLE_MUTE_DEBOUNCE_TIME,
{ leading: true, trailing: false });
@ -74,6 +77,11 @@ export const toggleMuteMicrophone = (muted: boolean, toggleVoice: (userId: strin
return toggleMuteMicrophoneDebounced(muted, toggleVoice);
};
// Debounce is not needed here, as this function should only called by the system.
export const toggleMuteMicrophoneSystem = (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => {
return toggleMute(muted, toggleVoice, 'system_action');
};
export const truncateDeviceName = (deviceName: string) => {
if (deviceName && deviceName.length <= DEVICE_LABEL_MAX_LENGTH) {
return deviceName;
@ -141,6 +149,7 @@ export const muteAway = (
export default {
handleLeaveAudio,
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
truncateDeviceName,
notify,
liveChangeInputDevice,

View File

@ -56,6 +56,10 @@ export const DisabledLabel = {
opacity: 1,
};
export const AudioSettingsOption = {
paddingLeft: 12,
};
export const SelectedLabel = {
color: colorPrimary,
backgroundColor: colorOffWhite,
@ -80,6 +84,7 @@ export default {
MuteToggleButton,
DisabledLabel,
SelectedLabel,
AudioSettingsOption,
DangerColor,
AudioDropdown,
};

View File

@ -1,11 +1,14 @@
import React, { useEffect, useState } from 'react';
import React, {
useCallback,
useEffect,
useState,
} from 'react';
import PropTypes from 'prop-types';
import {
defineMessages, injectIntl, FormattedMessage,
} from 'react-intl';
import { useMutation } from '@apollo/client';
import Styled from './styles';
import PermissionsOverlay from '../permissions-overlay/component';
import AudioSettings from '../audio-settings/component';
import EchoTest from '../echo-test/component';
import Help from '../help/component';
@ -21,6 +24,7 @@ import {
muteAway,
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
import Session from '/imports/ui/services/storage/in-memory';
import logger from '/imports/startup/client/logger';
const propTypes = {
intl: PropTypes.shape({
@ -39,10 +43,11 @@ const propTypes = {
isConnected: PropTypes.bool.isRequired,
isUsingAudio: PropTypes.bool.isRequired,
isListenOnly: PropTypes.bool.isRequired,
isMuted: PropTypes.bool.isRequired,
toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
inputDeviceId: PropTypes.string,
outputDeviceId: PropTypes.string,
formattedDialNum: PropTypes.string.isRequired,
showPermissionsOvelay: PropTypes.bool.isRequired,
listenOnlyMode: PropTypes.bool.isRequired,
joinFullAudioImmediately: PropTypes.bool,
forceListenOnlyAttendee: PropTypes.bool.isRequired,
@ -55,7 +60,6 @@ const propTypes = {
handleAllowAutoplay: PropTypes.func.isRequired,
changeInputStream: PropTypes.func.isRequired,
localEchoEnabled: PropTypes.bool.isRequired,
showVolumeMeter: PropTypes.bool.isRequired,
notify: PropTypes.func.isRequired,
isRTL: PropTypes.bool.isRequired,
priority: PropTypes.string.isRequired,
@ -72,6 +76,15 @@ const propTypes = {
}).isRequired,
getTroubleshootingLink: PropTypes.func.isRequired,
away: PropTypes.bool,
doGUM: PropTypes.func.isRequired,
hasMicrophonePermission: PropTypes.func.isRequired,
permissionStatus: PropTypes.string,
liveChangeInputDevice: PropTypes.func.isRequired,
content: PropTypes.string,
unmuteOnExit: PropTypes.bool,
supportsTransparentListenOnly: PropTypes.bool.isRequired,
getAudioConstraints: PropTypes.func.isRequired,
isTranscriptionEnabled: PropTypes.bool.isRequired,
};
const intlMessages = defineMessages({
@ -116,7 +129,7 @@ const intlMessages = defineMessages({
description: 'Title for the echo test',
},
settingsTitle: {
id: 'app.audioModal.settingsTitle',
id: 'app.audio.audioSettings.titleLabel',
description: 'Title for the audio modal',
},
helpTitle: {
@ -139,6 +152,10 @@ const intlMessages = defineMessages({
id: 'app.audioModal.autoplayBlockedDesc',
description: 'Message for autoplay audio block',
},
findingDevicesTitle: {
id: 'app.audio.audioSettings.findingDevicesTitle',
description: 'Message for finding audio devices',
},
});
const AudioModal = ({
@ -148,6 +165,8 @@ const AudioModal = ({
audioLocked,
isUsingAudio,
isListenOnly,
isMuted,
toggleMuteMicrophoneSystem,
autoplayBlocked,
closeModal,
isEchoTest,
@ -170,23 +189,31 @@ const AudioModal = ({
outputDeviceId = null,
changeInputDevice,
changeOutputDevice,
showVolumeMeter,
notify,
formattedTelVoice,
handleAllowAutoplay,
showPermissionsOvelay,
isIE,
isOpen,
priority,
setIsOpen,
getTroubleshootingLink,
away = false,
doGUM,
getAudioConstraints,
hasMicrophonePermission,
liveChangeInputDevice,
content: initialContent,
supportsTransparentListenOnly,
unmuteOnExit = false,
permissionStatus = null,
isTranscriptionEnabled,
}) => {
const [content, setContent] = useState(null);
const [content, setContent] = useState(initialContent);
const [hasError, setHasError] = useState(false);
const [disableActions, setDisableActions] = useState(false);
const [errorInfo, setErrorInfo] = useState(null);
const [autoplayChecked, setAutoplayChecked] = useState(false);
const [findingDevices, setFindingDevices] = useState(false);
const [setAway] = useMutation(SET_AWAY);
const voiceToggle = useToggleVoice();
@ -257,6 +284,55 @@ const AudioModal = ({
});
};
const handleGUMFailure = (error) => {
const { MIC_ERROR } = AudioError;
logger.error({
logCode: 'audio_gum_failed',
extraInfo: {
errorMessage: error.message,
errorName: error.name,
},
}, `Audio gUM failed: ${error.name}`);
setContent('help');
setDisableActions(false);
setHasError(true);
setErrorInfo({
errCode: error?.name === 'NotAllowedError'
? MIC_ERROR.NO_PERMISSION
: 0,
errMessage: error?.name || 'NotAllowedError',
});
};
const checkMicrophonePermission = (options) => {
setFindingDevices(true);
return hasMicrophonePermission(options)
.then((hasPermission) => {
// null means undetermined, so we don't want to show the error modal
// and let downstream components figure it out
if (hasPermission === true || hasPermission === null) {
return hasPermission;
}
handleGUMFailure(new DOMException(
'Permissions API says denied',
'NotAllowedError',
));
return false;
})
.catch((error) => {
handleGUMFailure(error);
return null;
})
.finally(() => {
setFindingDevices(false);
});
};
const handleGoToAudioOptions = () => {
setContent(null);
setHasError(true);
@ -318,14 +394,19 @@ const AudioModal = ({
});
};
const handleJoinLocalEcho = (inputStream) => {
const handleAudioSettingsConfirmation = useCallback((inputStream) => {
// Reset the modal to a connecting state - this kind of sucks?
// prlanzarin Apr 04 2022
setContent(null);
if (inputStream) changeInputStream(inputStream);
handleJoinMicrophone();
disableAwayMode();
};
if (!isConnected) {
handleJoinMicrophone();
disableAwayMode();
} else {
closeModal();
}
}, [changeInputStream, isConnected]);
const skipAudioOptions = () => (isConnecting || (forceListenOnlyAttendee && !autoplayChecked))
&& !content
@ -333,7 +414,6 @@ const AudioModal = ({
const renderAudioOptions = () => {
const hideMicrophone = forceListenOnlyAttendee || audioLocked;
const arrow = isRTL ? '←' : '→';
const dialAudioLabel = `${intl.formatMessage(intlMessages.audioDialTitle)} ${arrow}`;
@ -388,7 +468,7 @@ const AudioModal = ({
}}
/>
) : null}
<AudioCaptionsSelectContainer />
{joinFullAudioImmediately && <AudioCaptionsSelectContainer />}
</div>
);
};
@ -400,40 +480,47 @@ const AudioModal = ({
/>
);
const handleBack = useCallback(() => {
if (isConnecting || isConnected || skipAudioOptions()) {
closeModal();
} else {
handleGoToAudioOptions();
}
}, [isConnecting, isConnected, skipAudioOptions]);
const renderAudioSettings = () => {
const { animations } = getSettingsSingletonInstance().application;
const confirmationCallback = !localEchoEnabled
? handleRetryGoToEchoTest
: handleJoinLocalEcho;
const handleGUMFailure = (error) => {
const code = error?.name === 'NotAllowedError'
? AudioError.MIC_ERROR.NO_PERMISSION
: 0;
setContent('help');
setErrorInfo({
errCode: code,
errMessage: error?.name || 'NotAllowedError',
});
setDisableActions(false);
};
: handleAudioSettingsConfirmation;
return (
<AudioSettings
handleBack={handleGoToAudioOptions}
animations={animations}
handleBack={handleBack}
handleConfirmation={confirmationCallback}
handleGUMFailure={handleGUMFailure}
joinEchoTest={joinEchoTest}
changeInputDevice={changeInputDevice}
liveChangeInputDevice={liveChangeInputDevice}
changeOutputDevice={changeOutputDevice}
isConnecting={isConnecting}
isConnected={isConnected}
isEchoTest={isEchoTest}
isMuted={isMuted}
toggleMuteMicrophoneSystem={toggleMuteMicrophoneSystem}
inputDeviceId={inputDeviceId}
outputDeviceId={outputDeviceId}
withVolumeMeter={showVolumeMeter}
withEcho={localEchoEnabled}
produceStreams={localEchoEnabled || showVolumeMeter}
produceStreams
notify={notify}
unmuteOnExit={unmuteOnExit}
doGUM={doGUM}
getAudioConstraints={getAudioConstraints}
checkMicrophonePermission={checkMicrophonePermission}
supportsTransparentListenOnly={supportsTransparentListenOnly}
toggleVoice={voiceToggle}
permissionStatus={permissionStatus}
isTranscriptionEnabled={isTranscriptionEnabled}
/>
);
};
@ -445,9 +532,19 @@ const AudioModal = ({
message: errorInfo?.errMessage,
};
const _joinListenOnly = () => {
// Erase the content state so that the modal transitions to the connecting
// state if the user chooses listen only
setContent(null);
handleJoinListenOnly();
};
return (
<Help
handleBack={handleGoToAudioOptions}
isConnected={isConnected}
handleBack={handleBack}
handleJoinListenOnly={_joinListenOnly}
handleRetryMic={handleGoToAudioSettings}
audioErr={audioErr}
isListenOnly={isListenOnly}
troubleshootingLink={getTroubleshootingLink(errorInfo?.errCode)}
@ -495,6 +592,17 @@ const AudioModal = ({
const renderContent = () => {
const { animations } = getSettingsSingletonInstance().application;
if (findingDevices && content === null) {
return (
<Styled.Connecting role="alert">
<span data-test="findingDevicesLabel">
{intl.formatMessage(intlMessages.findingDevicesTitle)}
</span>
<Styled.ConnectingAnimation animations={animations} />
</Styled.Connecting>
);
}
if (skipAudioOptions()) {
return (
<Styled.Connecting role="alert">
@ -505,6 +613,7 @@ const AudioModal = ({
</Styled.Connecting>
);
}
return content ? contents[content].component() : renderAudioOptions();
};
@ -512,16 +621,23 @@ const AudioModal = ({
if (!isUsingAudio) {
if (forceListenOnlyAttendee || audioLocked) {
handleJoinListenOnly();
return;
}
} else if (!listenOnlyMode) {
if (joinFullAudioImmediately) {
checkMicrophonePermission({ doGUM: true, permissionStatus })
.then((hasPermission) => {
// No permission - let the Help screen be shown as it's triggered
// by the checkMicrophonePermission function
if (hasPermission === false) return;
if (joinFullAudioImmediately && !listenOnlyMode) {
handleJoinMicrophone();
return;
}
if (!listenOnlyMode) {
handleGoToEchoTest();
// Permission is granted or undetermined, so we can proceed
handleJoinMicrophone();
});
} else {
checkMicrophonePermission({ doGUM: false, permissionStatus }).then((hasPermission) => {
if (hasPermission === false) return;
handleGoToEchoTest();
});
}
}
}
}, [
@ -551,40 +667,39 @@ const AudioModal = ({
let title = content
? intl.formatMessage(contents[content].title)
: intl.formatMessage(intlMessages.audioChoiceLabel);
title = !skipAudioOptions() ? title : null;
title = !skipAudioOptions() && (!findingDevices || content)
? title
: null;
return (
<>
{showPermissionsOvelay ? <PermissionsOverlay closeModal={closeModal} /> : null}
<Styled.AudioModal
modalName="AUDIO"
onRequestClose={closeModal}
data-test="audioModal"
contentLabel={intl.formatMessage(intlMessages.ariaModalTitle)}
title={title}
{...{
setIsOpen,
isOpen,
priority,
}}
>
{isIE ? (
<Styled.BrowserWarning>
<FormattedMessage
id="app.audioModal.unsupportedBrowserLabel"
description="Warning when someone joins with a browser that isn't supported"
values={{
0: <a href="https://www.google.com/chrome/">Chrome</a>,
1: <a href="https://getfirefox.com">Firefox</a>,
}}
/>
</Styled.BrowserWarning>
) : null}
<Styled.Content>
{renderContent()}
</Styled.Content>
</Styled.AudioModal>
</>
<Styled.AudioModal
modalName="AUDIO"
onRequestClose={closeModal}
data-test="audioModal"
contentLabel={intl.formatMessage(intlMessages.ariaModalTitle)}
title={title}
{...{
setIsOpen,
isOpen,
priority,
}}
>
{isIE ? (
<Styled.BrowserWarning>
<FormattedMessage
id="app.audioModal.unsupportedBrowserLabel"
description="Warning when someone joins with a browser that isn't supported"
values={{
0: <a href="https://www.google.com/chrome/">Chrome</a>,
1: <a href="https://getfirefox.com">Firefox</a>,
}}
/>
</Styled.BrowserWarning>
) : null}
<Styled.Content>
{renderContent()}
</Styled.Content>
</Styled.AudioModal>
);
};

View File

@ -19,6 +19,7 @@ import { useStorageKey } from '/imports/ui/services/storage/hooks';
import useMeeting from '/imports/ui/core/hooks/useMeeting';
import useLockContext from '/imports/ui/components/lock-viewers/hooks/useLockContext';
import deviceInfo from '/imports/utils/deviceInfo';
import { useIsAudioTranscriptionEnabled } from '/imports/ui/components/audio/audio-graphql/audio-captions/service';
const invalidDialNumbers = ['0', '613-555-1212', '613-555-1234', '0000'];
@ -62,11 +63,8 @@ const AudioModalContainer = (props) => {
combinedDialInNum = `${dialNumber.replace(/\D+/g, '')},,,${telVoice.replace(/\D+/g, '')}`;
}
}
const { isIe } = browserInfo;
const SHOW_VOLUME_METER = window.meetingClientSettings.public.media.showVolumeMeter;
const {
enabled: LOCAL_ECHO_TEST_ENABLED,
} = window.meetingClientSettings.public.media.localEchoTest;
@ -81,26 +79,27 @@ const AudioModalContainer = (props) => {
const isListenOnly = useReactiveVar(AudioManager._isListenOnly.value);
const isEchoTest = useReactiveVar(AudioManager._isEchoTest.value);
const autoplayBlocked = useReactiveVar(AudioManager._autoplayBlocked.value);
const isMuted = useReactiveVar(AudioManager._isMuted.value);
const meetingIsBreakout = AppService.useMeetingIsBreakout();
const supportsTransparentListenOnly = useReactiveVar(
AudioManager._transparentListenOnlySupported.value,
);
const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value);
const { userLocks } = useLockContext();
const isListenOnlyInputDevice = Service.inputDeviceId() === 'listen-only';
const devicesAlreadyConfigured = skipEchoTestIfPreviousDevice
&& Service.inputDeviceId();
const joinFullAudioImmediately = !isListenOnlyInputDevice
&& (skipCheck || (skipCheckOnJoin && !getEchoTest) || devicesAlreadyConfigured);
const { setIsOpen } = props;
const close = useCallback(() => closeModal(() => setIsOpen(false)), [setIsOpen]);
const joinMic = useCallback(
(skipEchoTest) => joinMicrophone(skipEchoTest || skipCheck || skipCheckOnJoin),
(options = {}) => joinMicrophone({
skipEchoTest: options.skipEchoTest || joinFullAudioImmediately,
}),
[skipCheck, skipCheckOnJoin],
);
const joinFullAudioImmediately = (
autoJoin
&& (
skipCheck
|| (skipCheckOnJoin && !getEchoTest)
))
|| (
skipCheck
|| (skipCheckOnJoin && !getEchoTest)
|| (skipEchoTestIfPreviousDevice && (inputDeviceId || outputDeviceId))
);
const isTranscriptionEnabled = useIsAudioTranscriptionEnabled();
return (
<AudioModal
@ -114,6 +113,8 @@ const AudioModalContainer = (props) => {
isConnected={isConnected}
isListenOnly={isListenOnly}
isEchoTest={isEchoTest}
isMuted={isMuted}
toggleMuteMicrophoneSystem={Service.toggleMuteMicrophoneSystem}
autoplayBlocked={autoplayBlocked}
getEchoTest={getEchoTest}
joinFullAudioImmediately={joinFullAudioImmediately}
@ -123,11 +124,11 @@ const AudioModalContainer = (props) => {
joinListenOnly={joinListenOnly}
leaveEchoTest={leaveEchoTest}
changeInputDevice={Service.changeInputDevice}
liveChangeInputDevice={Service.liveChangeInputDevice}
changeInputStream={Service.changeInputStream}
changeOutputDevice={Service.changeOutputDevice}
joinEchoTest={Service.joinEchoTest}
exitAudio={Service.exitAudio}
showVolumeMeter={SHOW_VOLUME_METER}
localEchoEnabled={LOCAL_ECHO_TEST_ENABLED}
listenOnlyMode={listenOnlyMode}
formattedDialNum={formattedDialNum}
@ -144,7 +145,15 @@ const AudioModalContainer = (props) => {
isRTL={isRTL}
AudioError={AudioError}
getTroubleshootingLink={AudioModalService.getTroubleshootingLink}
getMicrophonePermissionStatus={Service.getMicrophonePermissionStatus}
getAudioConstraints={Service.getAudioConstraints}
doGUM={Service.doGUM}
bypassGUM={Service.bypassGUM}
supportsTransparentListenOnly={supportsTransparentListenOnly}
setIsOpen={setIsOpen}
hasMicrophonePermission={Service.hasMicrophonePermission}
permissionStatus={permissionStatus}
isTranscriptionEnabled={isTranscriptionEnabled}
{...props}
/>
);

View File

@ -20,7 +20,10 @@ export const didUserSelectedListenOnly = () => (
!!Storage.getItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY)
);
export const joinMicrophone = (skipEchoTest = false) => {
export const joinMicrophone = (options = {}) => {
const { skipEchoTest = false } = options;
const shouldSkipEcho = skipEchoTest && Service.inputDeviceId() !== 'listen-only';
Storage.setItem(CLIENT_DID_USER_SELECTED_MICROPHONE_KEY, true);
Storage.setItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY, false);
@ -30,8 +33,8 @@ export const joinMicrophone = (skipEchoTest = false) => {
const call = new Promise((resolve, reject) => {
try {
if ((skipEchoTest && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
return resolve(Service.joinMicrophone());
if ((shouldSkipEcho && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
return resolve(Service.joinMicrophone(options));
}
return resolve(Service.transferCall());

View File

@ -63,6 +63,7 @@ const Connecting = styled.div`
margin-top: auto;
margin-bottom: auto;
font-size: 2rem;
text-align: center;
`;
const ellipsis = keyframes`

View File

@ -8,47 +8,62 @@ import logger from '/imports/startup/client/logger';
import AudioStreamVolume from '/imports/ui/components/audio/audio-stream-volume/component';
import LocalEchoContainer from '/imports/ui/components/audio/local-echo/container';
import DeviceSelector from '/imports/ui/components/audio/device-selector/component';
import {
getAudioConstraints,
doGUM,
} from '/imports/api/audio/client/bridge/service';
import MediaStreamUtils from '/imports/utils/media-stream-utils';
import audioManager from '/imports/ui/services/audio-manager';
import AudioManager from '/imports/ui/services/audio-manager';
import Session from '/imports/ui/services/storage/in-memory';
import AudioCaptionsSelectContainer from '../audio-graphql/audio-captions/captions/component';
const propTypes = {
intl: PropTypes.shape({
formatMessage: PropTypes.func.isRequired,
}).isRequired,
animations: PropTypes.bool,
changeInputDevice: PropTypes.func.isRequired,
liveChangeInputDevice: PropTypes.func.isRequired,
changeOutputDevice: PropTypes.func.isRequired,
handleBack: PropTypes.func.isRequired,
handleConfirmation: PropTypes.func.isRequired,
handleGUMFailure: PropTypes.func.isRequired,
isConnecting: PropTypes.bool.isRequired,
isConnected: PropTypes.bool.isRequired,
isMuted: PropTypes.bool.isRequired,
toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
inputDeviceId: PropTypes.string.isRequired,
outputDeviceId: PropTypes.string.isRequired,
produceStreams: PropTypes.bool,
withEcho: PropTypes.bool,
withVolumeMeter: PropTypes.bool,
notify: PropTypes.func.isRequired,
unmuteOnExit: PropTypes.bool,
doGUM: PropTypes.func.isRequired,
getAudioConstraints: PropTypes.func.isRequired,
checkMicrophonePermission: PropTypes.func.isRequired,
supportsTransparentListenOnly: PropTypes.bool.isRequired,
toggleVoice: PropTypes.func.isRequired,
permissionStatus: PropTypes.string,
isTranscriptionEnabled: PropTypes.bool.isRequired,
};
const defaultProps = {
animations: true,
produceStreams: false,
withEcho: false,
withVolumeMeter: false,
unmuteOnExit: false,
permissionStatus: null,
};
const intlMessages = defineMessages({
testSpeakerLabel: {
id: 'app.audio.audioSettings.testSpeakerLabel',
description: 'Test speaker label',
},
captionsSelectorLabel: {
id: 'app.audio.captions.speech.title',
description: 'Audio speech recognition title',
},
backLabel: {
id: 'app.audio.backLabel',
description: 'audio settings back button label',
},
descriptionLabel: {
id: 'app.audio.audioSettings.descriptionLabel',
description: 'audio settings description label',
},
micSourceLabel: {
id: 'app.audio.audioSettings.microphoneSourceLabel',
description: 'Label for mic source',
@ -69,17 +84,45 @@ const intlMessages = defineMessages({
id: 'app.audioNotification.deviceChangeFailed',
description: 'Device change failed',
},
confirmLabel: {
id: 'app.audio.audioSettings.confirmLabel',
description: 'Audio settings confirmation button label',
},
cancelLabel: {
id: 'app.audio.audioSettings.cancelLabel',
description: 'Audio settings cancel button label',
},
findingDevicesTitle: {
id: 'app.audio.audioSettings.findingDevicesTitle',
description: 'Message for finding audio devices',
},
noMicSelectedWarning: {
id: 'app.audio.audioSettings.noMicSelectedWarning',
description: 'Warning when no mic is selected',
},
baseSubtitle: {
id: 'app.audio.audioSettings.baseSubtitle',
description: 'Base subtitle for audio settings',
},
});
class AudioSettings extends React.Component {
constructor(props) {
super(props);
const { inputDeviceId, outputDeviceId } = props;
const {
inputDeviceId,
outputDeviceId,
unmuteOnExit,
permissionStatus,
} = props;
this.handleInputChange = this.handleInputChange.bind(this);
this.handleOutputChange = this.handleOutputChange.bind(this);
this.handleConfirmationClick = this.handleConfirmationClick.bind(this);
this.handleCancelClick = this.handleCancelClick.bind(this);
this.unmuteOnExit = this.unmuteOnExit.bind(this);
this.updateDeviceList = this.updateDeviceList.bind(this);
this.state = {
inputDeviceId,
@ -88,32 +131,80 @@ class AudioSettings extends React.Component {
// blocked until at least one stream is generated
producingStreams: props.produceStreams,
stream: null,
unmuteOnExit,
audioInputDevices: [],
audioOutputDevices: [],
findingDevices: permissionStatus === 'prompt' || permissionStatus === 'denied',
};
this._isMounted = false;
}
componentDidMount() {
const { inputDeviceId, outputDeviceId } = this.state;
const {
inputDeviceId,
outputDeviceId,
} = this.state;
const {
isConnected,
isMuted,
toggleMuteMicrophoneSystem,
checkMicrophonePermission,
toggleVoice,
permissionStatus,
} = this.props;
Session.setItem('inEchoTest', true);
this._isMounted = true;
// Guarantee initial in/out devices are initialized on all ends
this.setInputDevice(inputDeviceId);
this.setOutputDevice(outputDeviceId);
audioManager.isEchoTest = true;
AudioManager.isEchoTest = true;
checkMicrophonePermission({ gumOnPrompt: true, permissionStatus })
.then(this.updateDeviceList)
.then(() => {
if (!this._isMounted) return;
navigator.mediaDevices.addEventListener(
'devicechange',
this.updateDeviceList,
);
this.setState({ findingDevices: false });
this.setInputDevice(inputDeviceId);
this.setOutputDevice(outputDeviceId);
});
// If connected and unmuted, we need to mute the audio and revert it
// back to the original state on exit.
if (isConnected && !isMuted) {
toggleMuteMicrophoneSystem(isMuted, toggleVoice);
// We only need to revert the mute state if the user is not listen-only
if (inputDeviceId !== 'listen-only') this.setState({ unmuteOnExit: true });
}
}
componentDidUpdate(prevProps) {
const { permissionStatus } = this.props;
if (prevProps.permissionStatus !== permissionStatus) {
this.updateDeviceList();
}
}
componentWillUnmount() {
const { stream } = this.state;
Session.setItem('inEchoTest', false);
this._mounted = false;
this._isMounted = false;
if (stream) {
MediaStreamUtils.stopMediaStreamTracks(stream);
}
audioManager.isEchoTest = false;
AudioManager.isEchoTest = false;
navigator.mediaDevices.removeEventListener(
'devicechange', this.updateDeviceList,
);
this.unmuteOnExit();
}
handleInputChange(deviceId) {
@ -125,63 +216,104 @@ class AudioSettings extends React.Component {
}
handleConfirmationClick() {
const { stream } = this.state;
const { produceStreams, handleConfirmation } = this.props;
const { stream, inputDeviceId: selectedInputDeviceId } = this.state;
const {
isConnected,
produceStreams,
handleConfirmation,
liveChangeInputDevice,
} = this.props;
// Stream generation disabled or there isn't any stream: just run the provided callback
if (!produceStreams || !stream) return handleConfirmation();
const confirm = () => {
// Stream generation disabled or there isn't any stream: just run the provided callback
if (!produceStreams || !stream) return handleConfirmation();
// Stream generation enabled and there is a valid input stream => call
// the confirmation callback with the input stream as arg so it can be used
// in upstream components. The rationale is no surplus gUM calls.
// We're cloning it because the original will be cleaned up on unmount here.
const clonedStream = stream.clone();
return handleConfirmation(clonedStream);
// Stream generation enabled and there is a valid input stream => call
// the confirmation callback with the input stream as arg so it can be used
// in upstream components. The rationale is no surplus gUM calls.
// We're cloning it because the original will be cleaned up on unmount here.
const clonedStream = stream.clone();
return handleConfirmation(clonedStream);
};
if (isConnected) {
// If connected, we need to use the in-call device change method so that all
// components pick up the change and the peer is properly updated.
liveChangeInputDevice(selectedInputDeviceId).catch((error) => {
logger.warn({
logCode: 'audiosettings_live_change_device_failed',
extraInfo: {
errorMessage: error?.message,
errorStack: error?.stack,
errorName: error?.name,
},
}, `Audio settings live change device failed: ${error.name}`);
}).finally(() => {
confirm();
});
} else {
confirm();
}
}
handleCancelClick() {
const { handleBack } = this.props;
handleBack();
}
setInputDevice(deviceId) {
const { handleGUMFailure, changeInputDevice, produceStreams, intl, notify } = this.props;
const {
isConnected,
handleGUMFailure,
changeInputDevice,
produceStreams,
intl,
notify,
} = this.props;
const { inputDeviceId: currentInputDeviceId } = this.state;
try {
changeInputDevice(deviceId);
if (!isConnected) changeInputDevice(deviceId);
// Only generate input streams if they're going to be used with something
// In this case, the volume meter or local echo test.
if (produceStreams) {
this.generateInputStream(deviceId)
.then((stream) => {
// Extract the deviceId again from the stream to guarantee consistency
// between stream DID vs chosen DID. That's necessary in scenarios where,
// eg, there's no default/pre-set deviceId ('') and the browser's
// default device has been altered by the user (browser default != system's
// default).
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
if (extractedDeviceId && extractedDeviceId !== deviceId)
this.generateInputStream(deviceId).then((stream) => {
// Extract the deviceId again from the stream to guarantee consistency
// between stream DID vs chosen DID. That's necessary in scenarios where,
// eg, there's no default/pre-set deviceId ('') and the browser's
// default device has been altered by the user (browser default != system's
// default).
let extractedDeviceId = deviceId;
if (stream) {
extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
if (extractedDeviceId !== deviceId && !isConnected) {
changeInputDevice(extractedDeviceId);
}
}
// Component unmounted after gUM resolution -> skip echo rendering
if (!this._isMounted) return;
// Component unmounted after gUM resolution -> skip echo rendering
if (!this._isMounted) return;
this.setState({
inputDeviceId: extractedDeviceId,
stream,
producingStreams: false,
});
})
.catch((error) => {
logger.warn(
{
logCode: 'audiosettings_gum_failed',
extraInfo: {
deviceId,
errorMessage: error.message,
errorName: error.name,
},
},
`Audio settings gUM failed: ${error.name}`
);
handleGUMFailure(error);
this.setState({
inputDeviceId: extractedDeviceId,
stream,
producingStreams: false,
});
}).catch((error) => {
logger.warn({
logCode: 'audiosettings_gum_failed',
extraInfo: {
deviceId,
errorMessage: error.message,
errorName: error.name,
},
}, `Audio settings gUM failed: ${error.name}`);
handleGUMFailure(error);
});
} else {
this.setState({
inputDeviceId: deviceId,
@ -198,15 +330,20 @@ class AudioSettings extends React.Component {
newDeviceId: deviceId,
},
},
`Audio settings: error changing input device - {${error.name}: ${error.message}}`
`Audio settings: error changing input device - {${error.name}: ${error.message}}`,
);
notify(intl.formatMessage(intlMessages.deviceChangeFailed), true);
}
}
setOutputDevice(deviceId) {
const { changeOutputDevice, withEcho, intl, notify } = this.props;
const { outputDeviceId: currentOutputDeviceId } = this.state;
const {
changeOutputDevice,
withEcho,
intl,
notify,
} = this.props;
// withEcho usage (isLive arg): if local echo is enabled we need the device
// change to be performed seamlessly (which is what the isLive parameter guarantees)
@ -217,23 +354,42 @@ class AudioSettings extends React.Component {
});
})
.catch((error) => {
logger.debug(
{
logCode: 'audiosettings_output_device_change_failure',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
deviceId: currentOutputDeviceId,
newDeviceId: deviceId,
},
logger.debug({
logCode: 'audiosettings_output_device_change_failure',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
deviceId: currentOutputDeviceId,
newDeviceId: deviceId,
},
`Audio settings: error changing output device - {${error.name}: ${error.message}}`
);
}, `Audio settings: error changing output device - {${error.name}: ${error.message}}`);
notify(intl.formatMessage(intlMessages.deviceChangeFailed), true);
});
}
updateDeviceList() {
return navigator.mediaDevices.enumerateDevices()
.then((devices) => {
const audioInputDevices = devices.filter((i) => i.kind === 'audioinput');
const audioOutputDevices = devices.filter((i) => i.kind === 'audiooutput');
this.setState({
audioInputDevices,
audioOutputDevices,
});
});
}
unmuteOnExit() {
const { toggleMuteMicrophoneSystem, toggleVoice } = this.props;
const { unmuteOnExit } = this.state;
// Unmutes microphone if flagged to do so
if (unmuteOnExit) toggleMuteMicrophoneSystem(true, toggleVoice);
}
generateInputStream(inputDeviceId) {
const { doGUM, getAudioConstraints } = this.props;
const { stream } = this.state;
if (inputDeviceId && stream) {
@ -244,6 +400,8 @@ class AudioSettings extends React.Component {
MediaStreamUtils.stopMediaStreamTracks(stream);
}
if (inputDeviceId === 'listen-only') return Promise.resolve(null);
const constraints = {
audio: getAudioConstraints({ deviceId: inputDeviceId }),
};
@ -251,107 +409,143 @@ class AudioSettings extends React.Component {
return doGUM(constraints, true);
}
renderOutputTest() {
const { withEcho, intl } = this.props;
const { stream } = this.state;
renderAudioCaptionsSelector() {
const { intl, isTranscriptionEnabled } = this.props;
if (!isTranscriptionEnabled) return null;
return (
<Styled.Row>
<Styled.SpacedLeftCol>
<Styled.LabelSmall htmlFor="audioTest">
{!withEcho ? (
<AudioTestContainer id="audioTest" />
) : (
<LocalEchoContainer intl={intl} stream={stream} />
)}
</Styled.LabelSmall>
</Styled.SpacedLeftCol>
</Styled.Row>
<Styled.FormElement>
<Styled.LabelSmall htmlFor="audioSettingsCaptionsSelector">
{intl.formatMessage(intlMessages.captionsSelectorLabel)}
<AudioCaptionsSelectContainer showTitleLabel={false} />
</Styled.LabelSmall>
</Styled.FormElement>
);
}
renderVolumeMeter() {
const { withVolumeMeter, intl } = this.props;
renderDeviceSelectors() {
const {
inputDeviceId,
outputDeviceId,
producingStreams,
audioInputDevices,
audioOutputDevices,
findingDevices,
} = this.state;
const {
intl,
isConnecting,
supportsTransparentListenOnly,
withEcho,
} = this.props;
const { stream } = this.state;
const blocked = producingStreams || isConnecting || findingDevices;
return withVolumeMeter ? (
<Styled.Row>
return (
<>
<Styled.FormElement>
<Styled.LabelSmall htmlFor="inputDeviceSelector">
{intl.formatMessage(intlMessages.micSourceLabel)}
<DeviceSelector
id="inputDeviceSelector"
deviceId={inputDeviceId}
devices={audioInputDevices}
kind="audioinput"
blocked={blocked}
onChange={this.handleInputChange}
intl={intl}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
</Styled.LabelSmall>
</Styled.FormElement>
<Styled.LabelSmallFullWidth htmlFor="audioStreamVolume">
{intl.formatMessage(intlMessages.streamVolumeLabel)}
<AudioStreamVolume stream={stream} />
</Styled.LabelSmallFullWidth>
</Styled.Row>
) : null;
<Styled.FormElement>
<Styled.LabelSmall htmlFor="outputDeviceSelector">
{intl.formatMessage(intlMessages.speakerSourceLabel)}
<DeviceSelector
id="outputDeviceSelector"
deviceId={outputDeviceId}
devices={audioOutputDevices}
kind="audiooutput"
blocked={blocked}
onChange={this.handleOutputChange}
intl={intl}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
</Styled.LabelSmall>
</Styled.FormElement>
<Styled.LabelSmall htmlFor="audioTest">
{intl.formatMessage(intlMessages.testSpeakerLabel)}
{!withEcho ? (
<AudioTestContainer id="audioTest" />
) : (
<LocalEchoContainer intl={intl} stream={stream} />
)}
</Styled.LabelSmall>
{this.renderAudioCaptionsSelector()}
</>
);
}
renderDeviceSelectors() {
const { inputDeviceId, outputDeviceId, producingStreams } = this.state;
const { intl, isConnecting } = this.props;
const blocked = producingStreams || isConnecting;
renderAudioNote() {
const {
animations,
intl,
} = this.props;
const { findingDevices, inputDeviceId: selectedInputDeviceId } = this.state;
let subtitle = intl.formatMessage(intlMessages.baseSubtitle);
if (findingDevices) {
subtitle = intl.formatMessage(intlMessages.findingDevicesTitle);
} else if (selectedInputDeviceId === 'listen-only') {
subtitle = intl.formatMessage(intlMessages.noMicSelectedWarning);
}
return (
<Styled.Row>
<Styled.Col>
<Styled.FormElement>
<Styled.LabelSmall htmlFor="inputDeviceSelector">
{intl.formatMessage(intlMessages.micSourceLabel)}
<DeviceSelector
id="inputDeviceSelector"
deviceId={inputDeviceId}
kind="audioinput"
blocked={blocked}
onChange={this.handleInputChange}
intl={intl}
/>
</Styled.LabelSmall>
</Styled.FormElement>
</Styled.Col>
<Styled.Col>
<Styled.FormElement>
<Styled.LabelSmall htmlFor="outputDeviceSelector">
{intl.formatMessage(intlMessages.speakerSourceLabel)}
<DeviceSelector
id="outputDeviceSelector"
deviceId={outputDeviceId}
kind="audiooutput"
blocked={blocked}
onChange={this.handleOutputChange}
intl={intl}
/>
</Styled.LabelSmall>
</Styled.FormElement>
</Styled.Col>
</Styled.Row>
<Styled.AudioNote>
<span>{subtitle}</span>
{findingDevices && <Styled.FetchingAnimation animations={animations} />}
</Styled.AudioNote>
);
}
render() {
const { isConnecting, intl, handleBack } = this.props;
const { producingStreams } = this.state;
const {
producingStreams,
} = this.state;
const {
isConnecting,
isConnected,
intl,
} = this.props;
return (
<Styled.FormWrapper data-test="audioSettingsModal">
{this.renderAudioNote()}
<Styled.Form>
<Styled.Row>
<Styled.AudioNote>{intl.formatMessage(intlMessages.descriptionLabel)}</Styled.AudioNote>
</Styled.Row>
{this.renderDeviceSelectors()}
{this.renderOutputTest()}
{this.renderVolumeMeter()}
</Styled.Form>
<Styled.BottomSeparator />
<Styled.EnterAudio>
<Styled.BackButton
label={intl.formatMessage(intlMessages.backLabel)}
label={isConnected
? intl.formatMessage(intlMessages.cancelLabel)
: intl.formatMessage(intlMessages.backLabel)}
color="secondary"
onClick={handleBack}
onClick={this.handleCancelClick}
disabled={isConnecting}
/>
<Button
data-test="joinEchoTestButton"
size="md"
color="primary"
label={intl.formatMessage(intlMessages.retryLabel)}
label={isConnected
? intl.formatMessage(intlMessages.confirmLabel)
: intl.formatMessage(intlMessages.retryLabel)}
onClick={this.handleConfirmationClick}
disabled={isConnecting || producingStreams}
/>

View File

@ -1,58 +1,42 @@
import styled from 'styled-components';
import styled, { css, keyframes } from 'styled-components';
import Button from '/imports/ui/components/common/button/component';
import { smallOnly } from '/imports/ui/stylesheets/styled-components/breakpoints';
import {
borderSizeSmall,
mdPaddingX,
mdPaddingY,
jumboPaddingX,
} from '/imports/ui/stylesheets/styled-components/general';
import {
colorGrayLightest,
} from '/imports/ui/stylesheets/styled-components/palette';
import {
lineHeightComputed,
fontSizeSmall,
} from '/imports/ui/stylesheets/styled-components/typography';
const FormWrapper = styled.div`
min-width: 0;
min-width: 100%;
`;
const Form = styled.div`
display: flex;
flex-flow: column;
margin-top: 1.5rem;
`;
const Row = styled.div`
display: flex;
flex-flow: row;
justify-content: space-between;
margin-bottom: 0.7rem;
`;
const EnterAudio = styled.div`
margin-top: 1.5rem;
display: flex;
justify-content: flex-end;
display: grid;
grid-template-columns: 2fr 1fr;
margin: ${mdPaddingY} ${mdPaddingX} 0 ${mdPaddingX};
column-gap: ${jumboPaddingX};
row-gap: ${mdPaddingY};
@media ${smallOnly} {
grid-template-columns: 1fr;
grid-template-rows: auto;
}
`;
const AudioNote = styled.div`
display: block;
margin: ${mdPaddingY} ${mdPaddingX} 0 ${mdPaddingX};
text-align: center;
@media ${smallOnly} {
font-size: 0.8rem;
}
`;
const Col = styled.div`
min-width: 0;
display: flex;
flex-grow: 1;
flex-basis: 0;
margin: 0 1rem 0 0;
[dir="rtl"] & {
margin: 0 0 0 1rem;
}
&:last-child {
margin-right: 0;
margin-left: inherit;
padding: 0 0.1rem 0 4rem;
[dir="rtl"] & {
margin-right: inherit;
margin-left: 0;
padding: 0 4rem 0 0.1rem;
}
font-size: ${fontSizeSmall};
}
`;
@ -65,11 +49,11 @@ const FormElement = styled.div`
const LabelSmall = styled.label`
color: black;
font-size: 0.85rem;
font-size: ${fontSizeSmall};
font-weight: 600;
& > :first-child {
margin-top: 0.5rem;
margin: 0.5rem 0 0 0 !important;
}
`;
@ -77,85 +61,21 @@ const LabelSmallFullWidth = styled(LabelSmall)`
width: 100%;
`;
const SpacedLeftCol = styled.div`
min-width: 0;
const EnterAudio = styled.div`
margin: 0 ${mdPaddingX} 0 0;
display: flex;
flex-grow: 1;
flex-basis: 0;
margin: 0 1rem 0 0;
justify-content: flex-end;
[dir="rtl"] & {
margin: 0 0 0 1rem;
}
&:last-child {
margin-right: 0;
margin-left: inherit;
padding: 0 0.1rem 0 4rem;
[dir="rtl"] & {
margin-right: inherit;
margin-left: 0;
padding: 0 4rem 0 0.1rem;
}
}
& label {
flex-grow: 1;
flex-basis: 0;
margin-right: 0;
margin-left: inherit;
padding: 0 0.1rem 0 4rem;
[dir="rtl"] & {
margin-right: inherit;
margin-left: 0;
padding: 0 4rem 0 0.1rem;
}
}
&:before {
content: "";
display: block;
flex-grow: 1;
flex-basis: 0;
margin-right: 1rem;
margin-left: inherit;
[dir="rtl"] & {
margin-right: inherit;
margin-left: 1rem;
}
}
&:last-child {
margin-right: 0;
margin-left: inherit;
padding-right: 0;
padding-left: 0;
[dir="rtl"] & {
margin-right: 0;
margin-left: inherit;
}
margin: 0 0 0 ${mdPaddingX};
}
`;
const BackButton = styled(Button)`
margin: 0 0.5rem 0 0;
border: none;
[dir="rtl"] & {
margin: 0 0 0 0.5rem;
}
@media ${smallOnly} {
margin:0 auto 0 0;
[dir="rtl"] & {
margin:0 0 0 auto;
}
margin: 0 0 0 ${mdPaddingX};
}
&:first-child {
@ -163,16 +83,49 @@ const BackButton = styled(Button)`
}
`;
const ellipsis = keyframes`
to {
width: 1.5em;
}
`;
const FetchingAnimation = styled.span`
margin: auto;
display: inline-block;
width: 1.5em;
&:after {
overflow: hidden;
display: inline-block;
vertical-align: bottom;
content: "\\2026"; /* ascii code for the ellipsis character */
width: 0;
margin-left: 0.25em;
${({ animations }) => animations && css`
animation: ${ellipsis} steps(4, end) 900ms infinite;
`}
}
`;
const BottomSeparator = styled.div`
position: relative;
width: inherit;
height: ${borderSizeSmall};
background-color: ${colorGrayLightest};
margin: calc(${lineHeightComputed} * 1.25) ${mdPaddingX} calc(${lineHeightComputed} * 1.25) ${mdPaddingX};
`;
export default {
BottomSeparator,
FormWrapper,
Form,
Row,
EnterAudio,
AudioNote,
Col,
FormElement,
LabelSmall,
LabelSmallFullWidth,
SpacedLeftCol,
BackButton,
FetchingAnimation,
};

View File

@ -182,7 +182,7 @@ const AudioContainer = (props) => {
if (Service.isConnected()) return;
if (userSelectedMicrophone) {
joinMicrophone(true);
joinMicrophone({ skipEchoTest: true });
return;
}

View File

@ -1,7 +1,5 @@
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import logger from '/imports/startup/client/logger';
import browserInfo from '/imports/utils/browserInfo';
import {
defineMessages,
} from 'react-intl';
@ -16,12 +14,18 @@ const propTypes = {
onChange: PropTypes.func.isRequired,
blocked: PropTypes.bool,
deviceId: PropTypes.string,
devices: PropTypes.arrayOf(PropTypes.shape({
deviceId: PropTypes.string,
label: PropTypes.string,
})),
supportsTransparentListenOnly: PropTypes.bool.isRequired,
};
const defaultProps = {
kind: 'audioinput',
blocked: false,
deviceId: '',
devices: [],
};
const intlMessages = defineMessages({
@ -45,6 +49,10 @@ const intlMessages = defineMessages({
id: 'app.audio.noDeviceFound',
description: 'No audio device found',
},
noMicListenOnlyLabel: {
id: 'app.audio.audioSettings.noMicListenOnly',
description: 'No microphone, listen only mode label',
},
});
class DeviceSelector extends Component {
@ -52,52 +60,16 @@ class DeviceSelector extends Component {
super(props);
this.handleSelectChange = this.handleSelectChange.bind(this);
this.state = {
devices: [],
options: [],
};
}
componentDidMount() {
const { blocked } = this.props;
if (!blocked) this.enumerate();
}
componentDidUpdate(prevProps) {
const { blocked } = this.props;
if (prevProps.blocked === true && blocked === false) this.enumerate();
}
handleEnumerateDevicesSuccess(deviceInfos) {
const { kind } = this.props;
const devices = deviceInfos.filter((d) => d.kind === kind);
logger.info({
logCode: 'audiodeviceselector_component_enumeratedevices_success',
extraInfo: {
deviceKind: kind,
devices,
},
}, 'Success on enumerateDevices() for audio');
this.setState({
devices,
options: devices.map((d, i) => ({
label: d.label || this.getFallbackLabel(i),
value: d.deviceId,
key: uniqueId('device-option-'),
})),
});
}
handleSelectChange(event) {
const { value } = event.target;
const { onChange } = this.props;
const { devices } = this.state;
const selectedDevice = devices.find((d) => d.deviceId === value);
onChange(selectedDevice.deviceId, selectedDevice, event);
const { devices, onChange } = this.props;
const selectedDeviceId = (value === 'listen-only')
? value
: devices.find((d) => d.deviceId === value)?.deviceId;
onChange(selectedDeviceId);
}
getFallbackLabel(index) {
@ -107,28 +79,29 @@ class DeviceSelector extends Component {
return intl.formatMessage(label, { 0: index });
}
enumerate() {
const { kind } = this.props;
navigator.mediaDevices
.enumerateDevices()
.then(this.handleEnumerateDevicesSuccess.bind(this))
.catch(() => {
logger.error({
logCode: 'audiodeviceselector_component_enumeratedevices_error',
extraInfo: {
deviceKind: kind,
},
}, 'Error on enumerateDevices(): ');
});
}
render() {
const {
intl, kind, blocked, deviceId,
intl,
kind,
blocked,
deviceId,
devices,
supportsTransparentListenOnly,
} = this.props;
const { options } = this.state;
const options = devices.map((d, i) => ({
label: d.label || this.getFallbackLabel(i),
value: d.deviceId,
key: uniqueId('device-option-'),
}));
if (kind === 'audioinput' && supportsTransparentListenOnly && !blocked) {
options.push({
label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
value: 'listen-only',
key: uniqueId('device-option-'),
});
}
let notFoundOption;

View File

@ -8,6 +8,7 @@ const propTypes = {
formatMessage: PropTypes.func.isRequired,
}).isRequired,
isListenOnly: PropTypes.bool.isRequired,
isConnected: PropTypes.bool.isRequired,
audioErr: PropTypes.shape({
code: PropTypes.number,
message: PropTypes.string,
@ -18,6 +19,8 @@ const propTypes = {
}),
}).isRequired,
handleBack: PropTypes.func.isRequired,
handleRetryMic: PropTypes.func.isRequired,
handleJoinListenOnly: PropTypes.func.isRequired,
troubleshootingLink: PropTypes.string,
};
@ -30,6 +33,10 @@ const intlMessages = defineMessages({
id: 'app.audioModal.helpSubtitleMic',
description: 'Text description for the audio help subtitle (microphones)',
},
helpSubtitlePermission: {
id: 'app.audioModal.helpSubtitlePermission',
description: 'Text description for the audio help subtitle (permission)',
},
helpSubtitleGeneric: {
id: 'app.audioModal.helpSubtitleGeneric',
description: 'Text description for the audio help subtitle (generic)',
@ -46,10 +53,18 @@ const intlMessages = defineMessages({
id: 'app.audioModal.helpPermissionStep3',
description: 'Text description for the audio permission help step 3',
},
retryLabel: {
id: 'app.audio.audioSettings.retryLabel',
backLabel: {
id: 'app.audio.backLabel',
description: 'audio settings back button label',
},
retryMicLabel: {
id: 'app.audio.audioSettings.retryMicLabel',
description: 'audio settings retry button label',
},
listenOnlyLabel: {
id: 'app.audioModal.listenOnlyLabel',
description: 'audio settings listen only button label',
},
noSSL: {
id: 'app.audioModal.help.noSSL',
description: 'Text description for domain not using https',
@ -74,7 +89,12 @@ const intlMessages = defineMessages({
class Help extends Component {
getSubtitle() {
const { intl, isListenOnly } = this.props;
const { audioErr, intl, isListenOnly } = this.props;
const { MIC_ERROR } = audioErr;
if (audioErr.code === MIC_ERROR.NO_PERMISSION) {
return intl.formatMessage(intlMessages.helpSubtitlePermission);
}
return !isListenOnly
? intl.formatMessage(intlMessages.helpSubtitleMic)
@ -155,7 +175,10 @@ class Help extends Component {
render() {
const {
intl,
isConnected,
handleBack,
handleRetryMic,
handleJoinListenOnly,
troubleshootingLink,
} = this.props;
@ -174,11 +197,31 @@ class Help extends Component {
</Styled.Text>
)}
<Styled.EnterAudio>
<Styled.RetryButton
label={intl.formatMessage(intlMessages.retryLabel)}
{!isConnected ? (
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.listenOnlyLabel)}
data-test="helpListenOnlyBtn"
icon="listen"
size="md"
color="secondary"
onClick={handleJoinListenOnly}
/>
) : (
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.backLabel)}
data-test="helpBackBtn"
color="secondary"
size="md"
onClick={handleBack}
/>
)}
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.retryMicLabel)}
data-test="helpRetryMicBtn"
icon="unmute"
size="md"
color="primary"
onClick={handleBack}
onClick={handleRetryMic}
/>
</Styled.EnterAudio>
</Styled.Help>

View File

@ -24,11 +24,11 @@ const Text = styled.div`
const EnterAudio = styled.div`
display: flex;
justify-content: flex-end;
justify-content: center;
margin-top: ${jumboPaddingY};
`;
const RetryButton = styled(Button)`
const HelpActionButton = styled(Button)`
margin-right: 0.5rem;
margin-left: inherit;
@ -72,7 +72,7 @@ export default {
Help,
Text,
EnterAudio,
RetryButton,
HelpActionButton,
TroubleshootLink,
UnknownError,
PermissionHelpSteps,

View File

@ -3,7 +3,6 @@ import PropTypes from 'prop-types';
import { defineMessages, injectIntl } from 'react-intl';
import Styled from './styles';
import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
import Service from '/imports/ui/components/audio/local-echo/service';
const propTypes = {
intl: PropTypes.shape({
@ -14,6 +13,10 @@ const propTypes = {
id: PropTypes.string,
}),
initialHearingState: PropTypes.bool,
playEchoStream: PropTypes.func.isRequired,
deattachEchoStream: PropTypes.func.isRequired,
shouldUseRTCLoopback: PropTypes.func.isRequired,
createAudioRTCLoopback: PropTypes.func.isRequired,
};
const intlMessages = defineMessages({
@ -21,9 +24,9 @@ const intlMessages = defineMessages({
id: 'app.audio.stopAudioFeedback',
description: 'Stop audio feedback button label',
},
testSpeakerLabel: {
id: 'app.audio.audioSettings.testSpeakerLabel',
description: 'Label for the speaker test button',
startAudioFeedback: {
id: 'app.audio.startAudioFeedback',
description: 'Start audio feedback button label',
},
});
@ -31,30 +34,34 @@ const LocalEcho = ({
intl,
stream = null,
initialHearingState = false,
playEchoStream,
deattachEchoStream,
shouldUseRTCLoopback,
createAudioRTCLoopback,
}) => {
const loopbackAgent = useRef(null);
const [hearing, setHearing] = useState(initialHearingState);
const Settings = getSettingsSingletonInstance();
const { animations } = Settings.application;
const icon = hearing ? 'mute' : 'unmute';
const label = hearing ? intlMessages.stopAudioFeedbackLabel : intlMessages.testSpeakerLabel;
const icon = hearing ? 'no_audio' : 'listen';
const label = hearing ? intlMessages.stopAudioFeedbackLabel : intlMessages.startAudioFeedback;
const applyHearingState = (_stream) => {
if (hearing) {
Service.playEchoStream(_stream, loopbackAgent.current);
playEchoStream(_stream, loopbackAgent.current);
} else {
Service.deattachEchoStream();
deattachEchoStream();
}
};
const cleanup = () => {
if (loopbackAgent.current) loopbackAgent.current.stop();
Service.deattachEchoStream();
deattachEchoStream();
};
useEffect(() => {
if (Service.useRTCLoopback()) {
loopbackAgent.current = Service.createAudioRTCLoopback();
if (shouldUseRTCLoopback()) {
loopbackAgent.current = createAudioRTCLoopback();
}
return cleanup;
}, []);

View File

@ -1,10 +1,23 @@
import React from 'react';
import LocalEchoService from '/imports/ui/components/audio/local-echo/service';
import LocalEcho from '/imports/ui/components/audio/local-echo/component';
const LocalEchoContainer = (props) => {
const { initialHearingState } = window.meetingClientSettings.public.media.localEchoTest;
const {
initialHearingState: settingsHearingState,
} = window.meetingClientSettings.public.media.localEchoTest;
const initialHearingState = settingsHearingState;
return <LocalEcho {...props} initialHearingState={initialHearingState} />;
return (
<LocalEcho
{...props}
initialHearingState={initialHearingState}
playEchoStream={LocalEchoService.playEchoStream}
deattachEchoStream={LocalEchoService.deattachEchoStream}
shouldUseRTCLoopback={LocalEchoService.shouldUseRTCLoopback}
createAudioRTCLoopback={LocalEchoService.createAudioRTCLoopback}
/>
);
};
export default LocalEchoContainer;

View File

@ -1,13 +1,15 @@
import LocalPCLoopback from '/imports/ui/services/webrtc-base/local-pc-loopback';
import browserInfo from '/imports/utils/browserInfo';
const LOCAL_MEDIA_TAG = '#local-media';
let audioContext = null;
let sourceContext = null;
let contextDestination = null;
let stubAudioElement = null;
let delayNode = null;
const useRTCLoopback = () => {
const shouldUseRTCLoopback = () => {
const USE_RTC_LOOPBACK_CHR = window.meetingClientSettings.public.media.localEchoTest.useRtcLoopbackInChromium;
return (browserInfo.isChrome || browserInfo.isEdge) && USE_RTC_LOOPBACK_CHR;
@ -44,7 +46,6 @@ const cleanupDelayNode = () => {
};
const addDelayNode = (stream) => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
delayTime = 0.5,
maxDelayTime = 2,
@ -52,7 +53,7 @@ const addDelayNode = (stream) => {
if (stream) {
if (delayNode || audioContext || sourceContext) cleanupDelayNode();
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
// Workaround: attach the stream to a muted stub audio element to be able to play it in
// Chromium-based browsers. See https://bugs.chromium.org/p/chromium/issues/detail?id=933677
stubAudioElement = new Audio();
@ -70,18 +71,17 @@ const addDelayNode = (stream) => {
sourceContext.connect(delayNode);
delayNode.connect(contextDestination);
delayNode.delayTime.setValueAtTime(delayTime, audioContext.currentTime);
// Play the stream with the delay in the default audio element (remote-media)
// Play the stream with the delay in the default audio element (local-media)
audioElement.srcObject = contextDestination.stream;
}
};
const deattachEchoStream = () => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
enabled: DELAY_ENABLED = true,
} = window.meetingClientSettings.public.media.localEchoTest.delay;
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
if (DELAY_ENABLED) {
audioElement.muted = false;
@ -93,7 +93,6 @@ const deattachEchoStream = () => {
};
const playEchoStream = async (stream, loopbackAgent = null) => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
enabled: DELAY_ENABLED = true,
} = window.meetingClientSettings.public.media.localEchoTest.delay;
@ -116,9 +115,9 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
if (DELAY_ENABLED) {
addDelayNode(streamToPlay);
} else {
// No delay: play the stream in the default audio element (remote-media),
// No delay: play the stream in the default audio element (local-media),
// no strings attached.
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
audioElement.srcObject = streamToPlay;
audioElement.muted = false;
audioElement.play();
@ -127,7 +126,7 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
};
export default {
useRTCLoopback,
shouldUseRTCLoopback,
createAudioRTCLoopback,
deattachEchoStream,
playEchoStream,

View File

@ -6,9 +6,10 @@ import {
} from '/imports/ui/stylesheets/styled-components/palette';
const LocalEchoTestButton = styled(Button)`
margin: 0 !important;
font-weight: normal;
border: none !important;
height: 2rem;
width: 100%;
&:hover {
color: #0c5cb2;

View File

@ -1,52 +0,0 @@
import React from 'react';
import { injectIntl, defineMessages } from 'react-intl';
import PropTypes from 'prop-types';
import Styled from './styles';
import browserInfo from '/imports/utils/browserInfo';
import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
const propTypes = {
intl: PropTypes.object.isRequired,
closeModal: PropTypes.func.isRequired,
};
const intlMessages = defineMessages({
title: {
id: 'app.audio.permissionsOverlay.title',
description: 'Title for the overlay',
},
hint: {
id: 'app.audio.permissionsOverlay.hint',
description: 'Hint for the overlay',
},
});
const { isChrome, isFirefox, isSafari } = browserInfo;
const PermissionsOverlay = ({ intl, closeModal }) => {
const Settings = getSettingsSingletonInstance();
const { animations } = Settings.application;
return (
<Styled.PermissionsOverlayModal
overlayClassName={"permissionsOverlay"}
onRequestClose={closeModal}
hideBorder
isFirefox={isFirefox}
isChrome={isChrome}
isSafari={isSafari}
animations={animations}
>
<Styled.Content>
{intl.formatMessage(intlMessages.title)}
<small>
{intl.formatMessage(intlMessages.hint)}
</small>
</Styled.Content>
</Styled.PermissionsOverlayModal>
)
};
PermissionsOverlay.propTypes = propTypes;
export default injectIntl(PermissionsOverlay);

View File

@ -1,108 +0,0 @@
import styled, { css, keyframes } from 'styled-components';
import ModalSimple from '/imports/ui/components/common/modal/simple/component';
import { colorBlack } from '/imports/ui/stylesheets/styled-components/palette';
import { jumboPaddingX } from '/imports/ui/stylesheets/styled-components/general';
const bounce = keyframes`
0%,
20%,
50%,
80%,
100% {
-ms-transform: translateY(0);
transform: translateY(0);
}
40% {
-ms-transform: translateY(10px);
transform: translateY(10px);
}
60% {
-ms-transform: translateY(5px);
transform: translateY(5px);
}
`;
const PermissionsOverlayModal = styled(ModalSimple)`
${({ isFirefox }) => isFirefox && `
top: 8em;
left: 22em;
right: auto;
[dir="rtl"] & {
right: none;
left: none;
top: 15rem;
}
`}
${({ isChrome }) => isChrome && `
top: 5.5em;
left: 18em;
right: auto;
[dir="rtl"] & {
right: none;
left: none;
top: 15rem;
}
`}
${({ isSafari }) => isSafari && `
top: 150px;
left:0;
right:0;
margin-left: auto;
margin-right: auto;
`}
position: absolute;
background: none;
box-shadow: none;
color: #fff;
font-size: 16px;
font-weight: 400;
padding: 0 0 0 ${jumboPaddingX};
line-height: 18px;
width: 340px;
[dir="rtl"] & {
padding: 0 ${jumboPaddingX} 0 0;
}
small {
display: block;
font-size: 12px;
line-height: 14px;
margin-top: 3px;
opacity: .6;
}
&:after {
top: -65px;
left: -20px;
right: auto;
font-size: 20px;
display: block;
font-family: 'bbb-icons';
content: "\\E906";
position: relative;
[dir="rtl"] & {
left: auto;
right: -20px;
}
${({ animations }) => animations && css`
animation: ${bounce} 2s infinite;
`}
}
`;
const Content = styled.div`
color: ${colorBlack};
`;
export default {
PermissionsOverlayModal,
Content,
};

View File

@ -3,13 +3,21 @@ import AudioManager from '/imports/ui/services/audio-manager';
import logger from '/imports/startup/client/logger';
import Storage from '../../services/storage/session';
import { useReactiveVar } from '@apollo/client';
import {
getAudioConstraints,
doGUM,
} from '/imports/api/audio/client/bridge/service';
import {
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
const MUTED_KEY = 'muted';
const recoverMicState = (toggleVoice) => {
const muted = Storage.getItem(MUTED_KEY);
if ((muted === undefined) || (muted === null)) {
if ((muted === undefined) || (muted === null) || AudioManager.inputDeviceId === 'listen-only') {
return;
}
@ -65,13 +73,73 @@ const useIsUsingAudio = () => {
return Boolean(isConnected || isConnecting || isHangingUp || isEchoTest);
};
const hasMicrophonePermission = async ({
permissionStatus,
gumOnPrompt = false,
}) => {
try {
let status = permissionStatus;
// If the browser doesn't support the Permissions API, we can't check
// microphone permissions - return null (unknown)
if (navigator?.permissions?.query == null) return null;
if (!status) {
({ state: status } = await navigator.permissions.query({ name: 'microphone' }));
}
switch (status) {
case 'denied':
return false;
case 'prompt':
// Prompt without any subsequent action is considered unknown
if (!gumOnPrompt) {
return null;
}
return doGUM({ audio: getAudioConstraints() }).then((stream) => {
stream.getTracks().forEach((track) => {
track.stop();
stream.removeTrack(track);
});
return true;
}).catch((error) => {
if (error.name === 'NotAllowedError') {
return false;
}
// Give it the benefit of the doubt. It might be a device mismatch
// or something else that's not a permissions issue, so let's try
// to proceed. Rollbacks that happen downstream might fix the issue,
// otherwise we'll land on the Help screen anyways
return null;
});
case 'granted':
default:
return true;
}
} catch (error) {
logger.error({
logCode: 'audio_check_microphone_permission_error',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
},
}, `Error checking microphone permission: ${error.message}`);
// Null = could not determine permission status
return null;
}
};
export default {
init,
exitAudio: () => AudioManager.exitAudio(),
forceExitAudio: () => AudioManager.forceExitAudio(),
transferCall: () => AudioManager.transferCall(),
joinListenOnly: () => AudioManager.joinListenOnly(),
joinMicrophone: () => AudioManager.joinMicrophone(),
joinMicrophone: (options) => AudioManager.joinMicrophone(options),
joinEchoTest: () => AudioManager.joinEchoTest(),
changeInputDevice: (inputDeviceId) => AudioManager.changeInputDevice(inputDeviceId),
changeInputStream: (newInputStream) => { AudioManager.inputStream = newInputStream; },
@ -80,6 +148,8 @@ export default {
outputDeviceId,
isLive,
) => AudioManager.changeOutputDevice(outputDeviceId, isLive),
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
isConnectedToBreakout: () => {
const transferStatus = AudioManager.getBreakoutAudioTransferStatus();
if (transferStatus.status
@ -95,13 +165,14 @@ export default {
isUsingAudio: () => AudioManager.isUsingAudio(),
isConnecting: () => AudioManager.isConnecting,
isListenOnly: () => AudioManager.isListenOnly,
inputDeviceId: () => AudioManager.inputDeviceId,
outputDeviceId: () => AudioManager.outputDeviceId,
isEchoTest: () => AudioManager.isEchoTest,
isMuted: () => AudioManager.isMuted,
autoplayBlocked: () => AudioManager.autoplayBlocked,
handleAllowAutoplay: () => AudioManager.handleAllowAutoplay(),
playAlertSound: (url) => AudioManager.playAlertSound(url),
updateAudioConstraints:
(constraints) => AudioManager.updateAudioConstraints(constraints),
updateAudioConstraints: (constraints) => AudioManager.updateAudioConstraints(constraints),
recoverMicState,
isReconnecting: () => AudioManager.isReconnecting,
setBreakoutAudioTransferStatus: (status) => AudioManager
@ -109,6 +180,10 @@ export default {
getBreakoutAudioTransferStatus: () => AudioManager
.getBreakoutAudioTransferStatus(),
getStats: () => AudioManager.getStats(),
getAudioConstraints,
doGUM,
supportsTransparentListenOnly: () => AudioManager.supportsTransparentListenOnly(),
hasMicrophonePermission,
notify: (message, error, icon) => { AudioManager.notify(message, error, icon); },
useIsUsingAudio,
};

View File

@ -669,7 +669,6 @@ export const meetingClientSettingsInitialValues: MeetingClientSettings = {
maxDelayTime: 2,
},
},
showVolumeMeter: true,
muteAudioOutputWhenAway: false,
},
stats: {

View File

@ -70,6 +70,9 @@ class AudioManager {
muteHandle: makeVar(null),
autoplayBlocked: makeVar(false),
isReconnecting: makeVar(false),
bypassGUM: makeVar(false),
permissionStatus: makeVar(null),
transparentListenOnlySupported: makeVar(false),
});
this.failedMediaElements = [];
@ -79,7 +82,7 @@ class AudioManager {
this._inputStream = makeVar(null);
this._inputDeviceId = {
value: makeVar(DEFAULT_INPUT_DEVICE_ID),
value: makeVar(null),
};
this._outputDeviceId = {
value: makeVar(null),
@ -90,6 +93,37 @@ class AudioManager {
window.addEventListener('StopAudioTracks', () => this.forceExitAudio());
}
_trackPermissionStatus() {
const handleTrackingError = (error) => {
logger.warn({
logCode: 'audiomanager_permission_tracking_failed',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
},
}, `Failed to track microphone permission status: ${error.message}`);
};
if (navigator?.permissions?.query) {
navigator.permissions.query({ name: 'microphone' })
.then((status) => {
// eslint-disable-next-line no-param-reassign
status.onchange = () => {
logger.debug({
logCode: 'audiomanager_permission_status_changed',
extraInfo: {
newStatus: status.state,
},
}, `Microphone permission status changed: ${status.state}`);
this.permissionStatus = status.state;
};
this.permissionStatus = status.state;
}).catch(handleTrackingError);
} else {
handleTrackingError(new Error('navigator.permissions.query is not available'));
}
}
_applyCachedOutputDeviceId() {
const cachedId = getStoredAudioOutputDeviceId();
@ -123,6 +157,10 @@ class AudioManager {
}
}
// inputDeviceId is a string that represents a MediaDeviceInfo.deviceId OR a static
// 'listen-only' string that represents our "virtual" listen-only device.
// i.e.: the user has a bidirectional audio channel, but did not specify any
// input device to it.
get inputDeviceId() {
return this._inputDeviceId.value();
}
@ -145,17 +183,26 @@ class AudioManager {
return this._outputDeviceId.value();
}
shouldBypassGUM() {
return this.supportsTransparentListenOnly() && this.inputDeviceId === 'listen-only';
}
supportsTransparentListenOnly() {
return this.listenOnlyBridge?.supportsTransparentListenOnly()
&& this.fullAudioBridge?.supportsTransparentListenOnly();
}
async init(userData, audioEventHandler) {
this.inputDeviceId = getStoredAudioInputDeviceId() || DEFAULT_INPUT_DEVICE_ID;
this.outputDeviceId = getCurrentAudioSinkId();
this._applyCachedOutputDeviceId();
this._trackPermissionStatus();
this.loadBridges(userData);
this.userData = userData;
this.initialized = true;
this.audioEventHandler = audioEventHandler;
await this.loadBridges(userData);
this.transparentListenOnlySupported = this.supportsTransparentListenOnly();
}
/**
@ -280,6 +327,7 @@ class AudioManager {
isListenOnly: false,
extension: null,
inputStream: this.inputStream,
bypassGUM: this.shouldBypassGUM(),
};
return this.joinAudio(callOptions, this.callStateCallback.bind(this));
});
@ -309,6 +357,7 @@ class AudioManager {
extension: ECHO_TEST_NUMBER,
inputStream: this.inputStream,
validIceCandidates,
bypassGUM: this.shouldBypassGUM(),
};
logger.info(
{
@ -369,7 +418,6 @@ class AudioManager {
}
this.isConnecting = false;
this.isWaitingPermissions = false;
throw errorPayload;
});
@ -415,17 +463,7 @@ class AudioManager {
}
forceExitAudio() {
this.notifyAudioExit();
this.isConnected = false;
this.isConnecting = false;
this.isHangingUp = false;
if (this.inputStream) {
this.inputStream.getTracks().forEach((track) => track.stop());
this.inputStream = null;
}
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
this.onAudioExit();
return this.bridge && this.bridge.exitAudio();
}
@ -520,7 +558,7 @@ class AudioManager {
if (this.inputStream) {
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(
this.inputStream,
'audio'
'audio',
);
if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
this.changeInputDevice(extractedDeviceId);
@ -639,22 +677,17 @@ class AudioManager {
}
changeInputDevice(deviceId) {
if (typeof deviceId !== 'string') throw new TypeError('Invalid inputDeviceId');
if (deviceId === this.inputDeviceId) return this.inputDeviceId;
const currentDeviceId = this.inputDeviceId ?? 'none';
this.inputDeviceId = deviceId;
logger.debug(
{
logCode: 'audiomanager_input_device_change',
extraInfo: {
deviceId: currentDeviceId,
newDeviceId: deviceId,
},
logger.debug({
logCode: 'audiomanager_input_device_change',
extraInfo: {
deviceId: currentDeviceId,
newDeviceId: deviceId || 'none',
},
`Microphone input device changed: from ${currentDeviceId} to ${deviceId}`
);
}, `Microphone input device changed: from ${currentDeviceId} to ${deviceId || 'none'}`);
return this.inputDeviceId;
}

View File

@ -52,13 +52,13 @@ class AudioBroker extends BaseBroker {
const localStream = this.getLocalStream();
const oldTracks = localStream ? localStream.getAudioTracks() : [];
peerConnection.getSenders().forEach((sender, index) => {
if (sender.track && sender.track.kind === 'audio') {
const newTrack = newTracks[index];
peerConnection.getSenders().forEach((sender) => {
if (sender.track == null || sender?.track?.kind === 'audio') {
const newTrack = newTracks.shift();
if (newTrack == null) return;
// Cleanup old tracks in the local MediaStream
const oldTrack = oldTracks[index];
const oldTrack = oldTracks.shift();
sender.replaceTrack(newTrack);
if (oldTrack) {
oldTrack.stop();
@ -68,6 +68,13 @@ class AudioBroker extends BaseBroker {
}
});
if (oldTracks.length > 0) {
oldTracks.forEach((track) => {
track.stop();
localStream.removeTrack(track);
});
}
return Promise.resolve();
}
@ -90,8 +97,10 @@ class AudioBroker extends BaseBroker {
gatheringTimeout: this.gatheringTimeout,
};
const peerRole = this.role === 'sendrecv' ? this.role : 'recvonly';
const peerRole = BaseBroker.getPeerRole(this.role);
this.webRtcPeer = new WebRtcPeer(peerRole, options);
window.peers = window.peers || [];
window.peers.push(this.webRtcPeer);
this.webRtcPeer.iceQueue = [];
this.webRtcPeer.start();
this.webRtcPeer.peerConnection.onconnectionstatechange = this.handleConnectionStateChange.bind(this);
@ -101,7 +110,9 @@ class AudioBroker extends BaseBroker {
this.webRtcPeer.generateOffer()
.then(this.sendStartReq.bind(this))
.catch(this._handleOfferGenerationFailure.bind(this));
} else if (peerRole === 'recvonly') {
} else if (peerRole === 'recvonly'
|| peerRole === 'recv'
|| peerRole === 'passive-sendrecv') {
// We are the answerer and we are only listening, so we don't need
// to acquire local media
this.sendStartReq();

View File

@ -8,6 +8,20 @@ const WS_HEARTBEAT_OPTS = {
};
class BaseBroker {
static getPeerRole(role) {
switch (role) {
case 'send':
case 'sendrecv':
case 'sendonly':
case 'recvonly':
case 'recv':
case 'passive-sendrecv':
return role;
default:
throw new Error(`Invalid role: ${role}`);
}
}
static assembleError(code, reason) {
const message = reason || SFU_BROKER_ERRORS[code];
const error = new Error(message);

View File

@ -37,6 +37,28 @@ export default class WebRtcPeer extends EventEmitter2 {
this._gatheringTimeout = this.options.gatheringTimeout;
this._assignOverrides();
this.logger.debug('BBB::WebRtcPeer::constructor - created', {
mode: this.mode,
options: this.options,
});
}
_getTransceiverDirection() {
switch (this.mode) {
case 'sendonly':
case 'recvonly':
case 'sendrecv':
return this.mode;
case 'recv':
return 'recvonly';
case 'send':
return 'sendonly';
case 'passive-sendrecv':
return 'sendrecv';
default:
return 'inactive';
}
}
_assignOverrides() {
@ -202,7 +224,7 @@ export default class WebRtcPeer extends EventEmitter2 {
}
return stream;
}
};
if (typeof this._mediaStreamFactory === 'function') {
return this._mediaStreamFactory(this.mediaConstraints).then(handleGUMResolution);
@ -326,6 +348,25 @@ export default class WebRtcPeer extends EventEmitter2 {
}
}
_processMediaStreams() {
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this._getTransceiverDirection();
});
}
async generateOffer() {
switch (this.mode) {
case 'recvonly': {
@ -338,13 +379,13 @@ export default class WebRtcPeer extends EventEmitter2 {
if (useAudio) {
this.peerConnection.addTransceiver('audio', {
direction: 'recvonly',
direction: this._getTransceiverDirection(),
});
}
if (useVideo) {
this.peerConnection.addTransceiver('video', {
direction: 'recvonly',
direction: this._getTransceiverDirection(),
});
}
break;
@ -353,26 +394,14 @@ export default class WebRtcPeer extends EventEmitter2 {
case 'sendonly':
case 'sendrecv': {
await this.mediaStreamFactory();
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this.mode;
});
this._processMediaStreams();
break;
}
case 'passive-sendrecv':
this._processMediaStreams();
break;
default:
break;
}
@ -387,6 +416,10 @@ export default class WebRtcPeer extends EventEmitter2 {
const localDescription = this.getLocalSessionDescriptor();
this.logger.debug('BBB::WebRtcPeer::generateOffer - local description set', localDescription);
return localDescription.sdp;
})
.catch((error) => {
this.logger.error('BBB::WebRtcPeer::generateOffer - failed', error);
throw error;
});
}
@ -409,23 +442,9 @@ export default class WebRtcPeer extends EventEmitter2 {
.then(async () => {
if (this.mode === 'sendonly' || this.mode === 'sendrecv') {
await this.mediaStreamFactory();
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this.mode;
});
this._processMediaStreams();
} else if (this.mode === 'passive-sendrecv') {
this._processMediaStreams();
}
})
.then(() => this.peerConnection.createAnswer())
@ -437,6 +456,10 @@ export default class WebRtcPeer extends EventEmitter2 {
const localDescription = this.getLocalSessionDescriptor();
this.logger.debug('BBB::WebRtcPeer::processOffer - local description set', localDescription.sdp);
return localDescription.sdp;
})
.catch((error) => {
this.logger.error('BBB::WebRtcPeer::processOffer - failed', error);
throw error;
});
}

View File

@ -49,17 +49,6 @@ const GlobalStyle = createGlobalStyle`
}
}
.permissionsOverlay {
position: fixed;
z-index: 1002;
top: 0;
bottom: 0;
left: 0;
right: 0;
background-color: rgba(0, 0, 0, .85);
animation: fade-in .5s ease-in;
}
.modalOverlay {
z-index: 1000;
display: flex;

View File

@ -45,22 +45,25 @@ const getDeviceIdFromTrack = (track) => {
const { deviceId } = track.getSettings();
return deviceId;
}
return '';
return null;
};
const extractDeviceIdFromStream = (stream, kind) => {
if (!stream) return null;
// An empty string is the browser's default...
let tracks = [];
switch (kind) {
case 'audio':
tracks = getAudioTracks(stream);
if (tracks.length === 0) return 'listen-only';
return getDeviceIdFromTrack(tracks[0]);
case 'video':
tracks = getVideoTracks(stream);
return getDeviceIdFromTrack(tracks[0]);
default: {
return '';
return null;
}
}
};

View File

@ -892,8 +892,6 @@ public:
enabled: true
delayTime: 0.5
maxDelayTime: 2
# showVolumeMeter: shows an energy bar for microphones in the AudioSettings view
showVolumeMeter: true
# networkPriorities: DSCP markings for each media type. Chromium only, applies
# to sender flows. See https://datatracker.ietf.org/doc/html/rfc8837#section-5
# for further info.

View File

@ -606,8 +606,8 @@
"app.submenu.notification.userJoinLabel": "User Join",
"app.submenu.notification.userLeaveLabel": "User Leave",
"app.submenu.notification.guestWaitingLabel": "Guest Waiting Approval",
"app.submenu.audio.micSourceLabel": "Microphone source",
"app.submenu.audio.speakerSourceLabel": "Speaker source",
"app.submenu.audio.micSourceLabel": "Microphone",
"app.submenu.audio.speakerSourceLabel": "Speaker",
"app.submenu.audio.streamVolumeLabel": "Your audio stream volume",
"app.submenu.video.title": "Video",
"app.submenu.video.videoSourceLabel": "View source",
@ -723,10 +723,10 @@
"app.audioModal.yes.arialabel": "Echo is audible",
"app.audioModal.no.arialabel": "Echo is inaudible",
"app.audioModal.echoTestTitle": "This is a private echo test. Speak a few words. Did you hear audio?",
"app.audioModal.settingsTitle": "Change your audio settings",
"app.audioModal.helpTitle": "There was an issue with your audio devices",
"app.audioModal.helpSubtitleMic": "We couldn't enable your microphone",
"app.audioModal.helpSubtitleGeneric": "We're having trouble establishing an audio connection",
"app.audioModal.helpSubtitlePermission": "We need access to your microphone",
"app.audioModal.helpPermissionStep1": "When joining a call, accept all requests if prompted to use your microphone.",
"app.audioModal.helpPermissionStep2": "Check browser and device settings to ensure microphone access is allowed.",
"app.audioModal.helpPermissionStep3": "Refresh the page and try again.",
@ -759,23 +759,30 @@
"app.audio.changeAudioDevice": "Change audio device",
"app.audio.enterSessionLabel": "Enter session",
"app.audio.playSoundLabel": "Play sound",
"app.audio.startAudioFeedback": "Start audio feedback",
"app.audio.stopAudioFeedback": "Stop audio feedback",
"app.audio.backLabel": "Back",
"app.audio.loading": "Loading",
"app.audio.microphones": "Microphones",
"app.audio.speakers": "Speakers",
"app.audio.noDeviceFound": "No device found",
"app.audio.audioSettings.titleLabel": "Choose your audio settings",
"app.audio.audioSettings.descriptionLabel": "Please note, a dialog will appear in your browser, requiring you to accept sharing your microphone.",
"app.audio.audioSettings.microphoneSourceLabel": "Microphone source",
"app.audio.audioSettings.speakerSourceLabel": "Speaker source",
"app.audio.noDeviceFound": "No device found (listen only)",
"app.audio.audioSettings.titleLabel": "Adjust your audio settings",
"app.audio.audioSettings.baseSubtitle": "Speak to test your input and output devices",
"app.audio.audioSettings.findingDevicesTitle": "Looking for your audio devices, please accept any requests to use them",
"app.audio.audioSettings.noMicSelectedWarning": "No microphone selected. You'll be able to listen, but not speak.",
"app.audio.audioSettings.noMicListenOnly": "No microphone (listen only)",
"app.audio.audioSettings.microphoneSourceLabel": "Microphone",
"app.audio.audioSettings.speakerSourceLabel": "Speaker",
"app.audio.audioSettings.testSpeakerLabel": "Test your speaker",
"app.audio.audioSettings.microphoneStreamLabel": "Your audio stream volume",
"app.audio.audioSettings.retryLabel": "Retry",
"app.audio.audioSettings.retryMicLabel": "Retry",
"app.audio.audioSettings.fallbackInputLabel": "Audio input {0}",
"app.audio.audioSettings.fallbackOutputLabel": "Audio output {0}",
"app.audio.audioSettings.fallbackNoPermission": "(no device permission)",
"app.audio.audioSettings.defaultOutputDeviceLabel": "Default",
"app.audio.audioSettings.findingDevicesLabel": "Finding devices...",
"app.audio.audioSettings.findingDevicesLabel": "Finding audio devices...",
"app.audio.audioSettings.confirmLabel": "Confirm",
"app.audio.audioSettings.cancelLabel": "Cancel",
"app.audio.listenOnly.backLabel": "Back",
"app.audio.listenOnly.closeLabel": "Close",
"app.audio.permissionsOverlay.title": "Allow access to your microphone",