feat(audio): rework audio join without listen only

This is a rework of the audio join procedure without the explicit listen
only separation in mind. It's supposed to be used in conjunction with
the transparent listen only feature so that the distinction between
modes is seamless with minimal server-side impact. An abridged list of
changes:
  - Let the user pick no input device when joining microphone while
    allowing them to set an input device on the fly later on
  - Give the user the option to join audio with no input device whenever
    we fail to obtain input devices, with the option to try re-enabling
    them on the fly later on
  - Add the option to open the audio settings modal (echo test et al)
    via the in-call device selection chevron
  - Rework the SFU audio bridge and its services to support
    adding/removing tracks on the fly without renegotiation
  - Rework the SFU audio bridge and its services to support a new peer
    role called "passive-sendrecv". That role is used by duplex peers
    that have no active input source on start, but might have one later
    on.
  - Remove stale PermissionsOverlay component from the audio modal
  - Rework how permission errors are detected using the Permissions API
  - Rework the local echo test so that it uses a separate media tag
    rather than the remote
  - Add new, separate dialplans that mute/hold FreeSWITCH channels on
    hold based on UA strings. This is orchestrated server-side via
    webrtc-sfu and akka-apps. The basic difference here is that channels
    now join in their desired state rather than waiting for client side
    observers to sync the state up. It also mitigates transparent listen
    only performance edge cases on multiple audio channels joining at
    the same time.

The old, decoupled listen only mode is still present in code while we
validate this new approach. To test this, transparentListenOnly
must be enabled and listen only mode must be disabled on audio join so
that the user skips straight through microphone join.
This commit is contained in:
prlanzarin 2024-06-05 08:26:27 -03:00
parent cf293fe7fd
commit 325887e325
38 changed files with 1139 additions and 563 deletions

View File

@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.voice
import org.bigbluebutton.common2.msgs._
import org.bigbluebutton.core.running.{ LiveMeeting, MeetingActor, OutMsgRouter }
import org.bigbluebutton.core2.MeetingStatus2x
trait GetMicrophonePermissionReqMsgHdlr {
this: MeetingActor =>
@ -16,7 +17,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
voiceConf: String,
userId: String,
sfuSessionId: String,
allowed: Boolean
allowed: Boolean,
muteOnStart: Boolean
): Unit = {
val routing = Routing.addMsgToClientRouting(MessageTypes.DIRECT, meetingId, userId)
val envelope = BbbCoreEnvelope(GetMicrophonePermissionRespMsg.NAME, routing)
@ -26,7 +28,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
voiceConf,
userId,
sfuSessionId,
allowed
allowed,
muteOnStart
)
val event = GetMicrophonePermissionRespMsg(header, body)
val eventMsg = BbbCommonEnvCoreMsg(envelope, event)
@ -47,7 +50,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
liveMeeting.props.voiceProp.voiceConf,
msg.body.userId,
msg.body.sfuSessionId,
allowed
allowed,
MeetingStatus2x.isMeetingMuted(liveMeeting.status)
)
}
}

View File

@ -613,7 +613,8 @@ case class GetMicrophonePermissionRespMsgBody(
voiceConf: String,
userId: String,
sfuSessionId: String,
allowed: Boolean
allowed: Boolean,
muteOnStart: Boolean
)
/**

View File

@ -8,7 +8,15 @@
<action application="set" data="rtp_jitter_buffer_during_bridge=true" />
<action application="set" data="suppress_cng=true" />
<action application="answer" />
<action application="conference" data="$1@cdquality" />
<!-- Special condition for BBB's "transparent listen only" mechanism - HOLD on creation -->
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-mhos$" break="never">
<action application="log" data="INFO Channel is going to be HELD and MUTED on creation ${uuid}" />
<action application="set" data="api_result=${uuid_hold(${uuid})}" />
</condition>
<!-- Duplicate condition to guarantee line-order (not nested-order) execution of this extension -->
<condition field="destination_number" expression="^(\d{5,11})$" require-nested="false">
<action application="conference" data="$1@cdquality" />
</condition>
</condition>
</extension>
<extension name="bbb_conferences">

View File

@ -1,6 +1,6 @@
<include>
<extension name="bbb_webrtc_call" continue="true">
<condition field="${sip_user_agent}" expression="bbb-webrtc-sfu" break="on-false">
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu$" break="on-false">
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
<action application="set" data="bbb_authorized=true"/>
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>

View File

@ -0,0 +1,12 @@
<include>
  <!-- Dialplan entry for channels originated by bbb-webrtc-sfu that must join
       in a pre-muted (muos) or pre-muted-and-held (mhos) state, as signalled
       via the SIP User-Agent string. -->
  <extension name="bbb_webrtc_sfu_call" continue="true">
    <!-- The alternation must be grouped: in "^a|b$" the "^" anchors only the
         first alternative and "$" only the second, so partial UA strings such
         as "x-bbb-webrtc-sfu-mhos" would slip through. -->
    <condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-(muos|mhos)$" break="on-false">
      <action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
      <action application="set" data="bbb_authorized=true"/>
      <action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>
      <action application="set" data="jb_use_timestamps=true"/>
      <!-- Join the conference muted from creation; the SFU unmutes on demand -->
      <action application="set" data="conference_member_flags=mute"/>
      <action application="transfer" data="${destination_number} XML default"/>
    </condition>
  </extension>
</include>

View File

@ -171,7 +171,7 @@ with BigBlueButton; if not, see <http://www.gnu.org/licenses/>.
</main>
</div>
<span id="destination"></span>
<audio id="remote-media" autoplay>
</audio>
<audio id="remote-media" autoplay></audio>
<audio id="local-media" autoplay></audio>
<div id="modals-container"></div>
</body>

View File

@ -61,7 +61,11 @@ export default class BaseAudioBridge {
get inputDeviceId () {
return this._inputDeviceId;
}
/* eslint-disable class-methods-use-this */
supportsTransparentListenOnly() {
return false;
}
/**
@ -78,6 +82,20 @@ export default class BaseAudioBridge {
let backupStream;
try {
// Remove all input audio tracks from the stream
// This will effectively mute the microphone
// and keep the audio output working
if (deviceId === 'listen-only') {
const stream = this.inputStream;
if (stream) {
stream.getAudioTracks().forEach((track) => {
track.stop();
stream.removeTrack(track);
});
}
return stream;
}
const constraints = {
audio: getAudioConstraints({ deviceId }),
};

View File

@ -36,10 +36,25 @@ const getCurrentAudioSinkId = () => {
return audioElement?.sinkId || DEFAULT_OUTPUT_DEVICE_ID;
};
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance().getItem(INPUT_DEVICE_ID_KEY);
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance().getItem(OUTPUT_DEVICE_ID_KEY);
const storeAudioInputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance()
.getItem(OUTPUT_DEVICE_ID_KEY);
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance()
.setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance()
.getItem(INPUT_DEVICE_ID_KEY);
const storeAudioInputDeviceId = (deviceId) => {
if (deviceId === 'listen-only') {
// Do not store listen-only "devices" and remove any stored device
// So it starts from scratch next time.
getStorageSingletonInstance().removeItem(INPUT_DEVICE_ID_KEY);
return false;
}
getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
return true;
};
/**
* Filter constraints set in audioDeviceConstraints, based on

View File

@ -20,6 +20,7 @@ import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';
const SENDRECV_ROLE = 'sendrecv';
const RECV_ROLE = 'recv';
const PASSIVE_SENDRECV_ROLE = 'passive-sendrecv';
const BRIDGE_NAME = 'fullaudio';
const IS_CHROME = browserInfo.isChrome;
@ -81,7 +82,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
const MEDIA = SETTINGS.public.media;
const LISTEN_ONLY_OFFERING = MEDIA.listenOnlyOffering;
const FULLAUDIO_OFFERING = MEDIA.fullAudioOffering;
return isListenOnly
return isListenOnly && !isTransparentListenOnlyEnabled()
? LISTEN_ONLY_OFFERING
: (!isTransparentListenOnlyEnabled() && FULLAUDIO_OFFERING);
}
@ -95,12 +96,17 @@ export default class SFUAudioBridge extends BaseAudioBridge {
this.reconnecting = false;
this.iceServers = [];
this.bridgeName = BRIDGE_NAME;
this.isListenOnly = false;
this.bypassGUM = false;
this.supportsTransparentListenOnly = isTransparentListenOnlyEnabled;
this.handleTermination = this.handleTermination.bind(this);
}
get inputStream() {
if (this.broker) {
// Only return the stream if the broker is active and the role isn't recvonly
// Input stream == actual input-capturing stream, not the one that's being played
if (this.broker && this.role !== RECV_ROLE) {
return this.broker.getLocalStream();
}
@ -111,6 +117,18 @@ export default class SFUAudioBridge extends BaseAudioBridge {
return this.broker?.role;
}
getBrokerRole({ hasInputStream }) {
if (this.isListenOnly) {
return isTransparentListenOnlyEnabled()
? PASSIVE_SENDRECV_ROLE
: RECV_ROLE;
}
if (this.bypassGUM && !hasInputStream) return PASSIVE_SENDRECV_ROLE;
return SENDRECV_ROLE;
}
setInputStream(stream) {
if (this.broker == null) return null;
@ -326,6 +344,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
extension,
inputStream,
forceRelay: _forceRelay = false,
bypassGUM = false,
} = options;
const SETTINGS = window.meetingClientSettings;
@ -349,6 +368,10 @@ export default class SFUAudioBridge extends BaseAudioBridge {
try {
this.inEchoTest = !!extension;
this.isListenOnly = isListenOnly;
this.bypassGUM = bypassGUM;
const role = this.getBrokerRole({
hasInputStream: !!inputStream,
});
const brokerOptions = {
clientSessionNumber: getAudioSessionNumber(),
@ -365,11 +388,12 @@ export default class SFUAudioBridge extends BaseAudioBridge {
mediaStreamFactory: this.mediaStreamFactory,
gatheringTimeout: GATHERING_TIMEOUT,
transparentListenOnly: isTransparentListenOnlyEnabled(),
bypassGUM,
};
this.broker = new AudioBroker(
Auth.authenticateURL(SFU_URL),
isListenOnly ? RECV_ROLE : SENDRECV_ROLE,
role,
brokerOptions,
);

View File

@ -63,6 +63,8 @@ const AudioControls: React.FC<AudioControlsProps> = ({
const echoTestIntervalRef = React.useRef<ReturnType<typeof setTimeout>>();
const [isAudioModalOpen, setIsAudioModalOpen] = React.useState(false);
const [audioModalContent, setAudioModalContent] = React.useState<string | null>(null);
const [audioModalProps, setAudioModalProps] = React.useState<{ unmuteOnExit?: boolean } | null>(null);
const handleJoinAudio = useCallback((connected: boolean) => {
if (connected) {
@ -72,6 +74,12 @@ const AudioControls: React.FC<AudioControlsProps> = ({
}
}, []);
const openAudioSettings = (props: { unmuteOnExit?: boolean } = {}) => {
setAudioModalContent('settings');
setAudioModalProps(props);
setIsAudioModalOpen(true);
};
const joinButton = useMemo(() => {
const joinAudioLabel = away ? intlMessages.joinAudioAndSetActive : intlMessages.joinAudio;
@ -107,12 +115,18 @@ const AudioControls: React.FC<AudioControlsProps> = ({
return (
<Styled.Container>
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer />}
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer openAudioSettings={openAudioSettings} />}
{isAudioModalOpen && (
<AudioModalContainer
priority="low"
setIsOpen={() => setIsAudioModalOpen(false)}
setIsOpen={() => {
setIsAudioModalOpen(false);
setAudioModalContent(null);
setAudioModalProps(null);
}}
isOpen={isAudioModalOpen}
content={audioModalContent}
unmuteOnExit={audioModalProps?.unmuteOnExit}
/>
)}
</Styled.Container>

View File

@ -56,6 +56,26 @@ const intlMessages = defineMessages({
id: 'app.audioNotification.deviceChangeFailed',
description: 'Device change failed',
},
fallbackInputLabel: {
id: 'app.audio.audioSettings.fallbackInputLabel',
description: 'Audio input device label',
},
fallbackOutputLabel: {
id: 'app.audio.audioSettings.fallbackOutputLabel',
description: 'Audio output device label',
},
fallbackNoPermissionLabel: {
id: 'app.audio.audioSettings.fallbackNoPermission',
description: 'No permission to access audio devices label',
},
audioSettingsTitle: {
id: 'app.audio.audioSettings.titleLabel',
description: 'Audio settings button label',
},
noMicListenOnlyLabel: {
id: 'app.audio.audioSettings.noMicListenOnly',
description: 'No microphone (listen only) label',
},
});
interface MuteToggleProps {
@ -75,6 +95,8 @@ interface LiveSelectionProps extends MuteToggleProps {
outputDeviceId: string;
meetingIsBreakout: boolean;
away: boolean;
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
supportsTransparentListenOnly: boolean;
}
export const LiveSelection: React.FC<LiveSelectionProps> = ({
@ -90,6 +112,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked,
toggleMuteMicrophone,
away,
openAudioSettings,
supportsTransparentListenOnly,
}) => {
const intl = useIntl();
@ -105,6 +129,21 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
];
}
const getFallbackLabel = (device: MediaDeviceInfo, index: number) => {
const baseLabel = device?.kind === AUDIO_OUTPUT
? intlMessages.fallbackOutputLabel
: intlMessages.fallbackInputLabel;
let label = intl.formatMessage(baseLabel, { 0: index });
if (!device?.deviceId) {
label = `${label} ${intl.formatMessage(intlMessages.fallbackNoPermissionLabel)}`;
}
return label;
};
const shouldTreatAsMicrophone = () => !listenOnly || supportsTransparentListenOnly;
const renderDeviceList = useCallback((
deviceKind: string,
list: MediaDeviceInfo[],
@ -134,7 +173,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
{
key: `${device.deviceId}-${deviceKind}`,
dataTest: `${deviceKind}-${index + 1}`,
label: truncateDeviceName(device.label),
label: truncateDeviceName(device.label || getFallbackLabel(device, index + 1)),
customStyles: (device.deviceId === currentDeviceId) ? Styled.SelectedLabel : null,
iconRight: (device.deviceId === currentDeviceId) ? 'check' : null,
onClick: () => onDeviceListClick(device.deviceId, deviceKind, callback),
@ -163,10 +202,37 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
];
}
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
// "None" option for audio input devices - aka listen-only
const listenOnly = deviceKind === AUDIO_INPUT
&& currentDeviceId === 'listen-only';
deviceList.push({
key: `listenOnly-${deviceKind}`,
dataTest: `${deviceKind}-listenOnly`,
label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
customStyles: listenOnly && Styled.SelectedLabel,
iconRight: listenOnly ? 'check' : null,
onClick: () => onDeviceListClick('listen-only', deviceKind, callback),
} as MenuOptionItemType);
}
return listTitle.concat(deviceList);
}, []);
const onDeviceListClick = useCallback((deviceId: string, deviceKind: string, callback: Function) => {
if (!deviceId) {
// If there's no deviceId in an audio input device, it means
// the user doesn't have permission to access it. If we support
// transparent listen-only, fire the mount AudioSettings modal to
// acquire permission and let the user configure their stuff.
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
openAudioSettings({ unmuteOnExit: true });
}
return;
}
if (!deviceId) return;
if (deviceKind === AUDIO_INPUT) {
callback(deviceId).catch(() => {
@ -179,7 +245,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
}
}, []);
const inputDeviceList = !listenOnly
const inputDeviceList = shouldTreatAsMicrophone()
? renderDeviceList(
AUDIO_INPUT,
inputDevices,
@ -196,6 +262,16 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
outputDeviceId,
);
const audioSettingsOption = {
icon: 'settings',
label: intl.formatMessage(intlMessages.audioSettingsTitle),
key: 'audioSettingsOption',
dataTest: 'input-selector-audio-settings',
customStyles: Styled.AudioSettingsOption,
dividerTop: true,
onClick: () => openAudioSettings(),
} as MenuOptionItemType;
const leaveAudioOption = {
icon: 'logout',
label: intl.formatMessage(intlMessages.leaveAudio),
@ -204,12 +280,14 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
customStyles: Styled.DangerColor,
onClick: () => handleLeaveAudio(meetingIsBreakout),
};
const dropdownListComplete = inputDeviceList.concat(outputDeviceList)
const dropdownListComplete = inputDeviceList
.concat(outputDeviceList)
.concat({
key: 'separator-02',
isSeparator: true,
})
.concat(leaveAudioOption);
});
if (shouldTreatAsMicrophone()) dropdownListComplete.push(audioSettingsOption);
dropdownListComplete.push(leaveAudioOption);
audioSettingsDropdownItems.forEach((audioSettingsDropdownItem:
PluginSdk.AudioSettingsDropdownInterface) => {
@ -239,9 +317,11 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
const customStyles = { top: '-1rem' };
const { isMobile } = deviceInfo;
const noInputDevice = inputDeviceId === 'listen-only';
return (
<>
{!listenOnly ? (
{shouldTreatAsMicrophone() ? (
// eslint-disable-next-line jsx-a11y/no-access-key
<span
style={{ display: 'none' }}
@ -250,7 +330,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
aria-hidden="true"
/>
) : null}
{(!listenOnly && isMobile) && (
{(shouldTreatAsMicrophone() && isMobile) && (
<MuteToggle
talking={talking}
muted={muted}
@ -258,13 +338,15 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
noInputDevice={noInputDevice}
openAudioSettings={openAudioSettings}
/>
)}
<BBBMenu
customStyles={!isMobile ? customStyles : null}
trigger={(
<>
{!listenOnly && !isMobile
{shouldTreatAsMicrophone() && !isMobile
? (
<MuteToggle
talking={talking}
@ -273,6 +355,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
noInputDevice={noInputDevice}
openAudioSettings={openAudioSettings}
/>
)
: (

View File

@ -33,6 +33,8 @@ interface MuteToggleProps {
isAudioLocked: boolean;
toggleMuteMicrophone: (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => void;
away: boolean;
noInputDevice?: boolean;
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
}
export const MuteToggle: React.FC<MuteToggleProps> = ({
@ -42,6 +44,8 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
isAudioLocked,
toggleMuteMicrophone,
away,
noInputDevice = false,
openAudioSettings,
}) => {
const intl = useIntl();
const toggleMuteShourtcut = useShortcut('toggleMute');
@ -57,15 +61,22 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
const onClickCallback = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation();
if (muted && away) {
muteAway(muted, true, toggleVoice);
VideoService.setTrackEnabled(true);
setAway({
variables: {
away: false,
},
});
if (muted) {
if (away) {
if (!noInputDevice) muteAway(muted, true, toggleVoice);
VideoService.setTrackEnabled(true);
setAway({
variables: {
away: false,
},
});
} else if (noInputDevice) {
// User is in duplex audio, passive-sendrecv, but has no input device set
// Open the audio settings modal to allow them to select an input device
openAudioSettings();
}
}
toggleMuteMicrophone(muted, toggleVoice);
};
return (

View File

@ -8,18 +8,23 @@ import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
import { User } from '/imports/ui/Types/user';
import { defineMessages, useIntl } from 'react-intl';
import {
handleLeaveAudio, liveChangeInputDevice, liveChangeOutputDevice, notify, toggleMuteMicrophone,
handleLeaveAudio,
liveChangeInputDevice,
liveChangeOutputDevice,
notify,
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
} from './service';
import useMeeting from '/imports/ui/core/hooks/useMeeting';
import { Meeting } from '/imports/ui/Types/meeting';
import logger from '/imports/startup/client/logger';
import Auth from '/imports/ui/services/auth';
import MutedAlert from '/imports/ui/components/muted-alert/component';
import MuteToggle from './buttons/muteToggle';
import ListenOnly from './buttons/listenOnly';
import LiveSelection from './buttons/LiveSelection';
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
import useWhoIsUnmuted from '/imports/ui/core/hooks/useWhoIsUnmuted';
import useToggleVoice from '/imports/ui/components/audio/audio-graphql/hooks/useToggleVoice';
const AUDIO_INPUT = 'audioinput';
const AUDIO_OUTPUT = 'audiooutput';
@ -52,7 +57,11 @@ const intlMessages = defineMessages({
},
});
interface InputStreamLiveSelectorProps {
interface InputStreamLiveSelectorContainerProps {
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
}
interface InputStreamLiveSelectorProps extends InputStreamLiveSelectorContainerProps {
isConnected: boolean;
isPresenter: boolean;
isModerator: boolean;
@ -68,6 +77,8 @@ interface InputStreamLiveSelectorProps {
inputStream: string;
meetingIsBreakout: boolean;
away: boolean;
permissionStatus: string;
supportsTransparentListenOnly: boolean;
}
const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
@ -86,8 +97,12 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
inputStream,
meetingIsBreakout,
away,
permissionStatus,
supportsTransparentListenOnly,
openAudioSettings,
}) => {
const intl = useIntl();
const toggleVoice = useToggleVoice();
// eslint-disable-next-line no-undef
const [inputDevices, setInputDevices] = React.useState<InputDeviceInfo[]>([]);
const [outputDevices, setOutputDevices] = React.useState<MediaDeviceInfo[]>([]);
@ -106,6 +121,15 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const audioOutputDevices = devices.filter((i) => i.kind === AUDIO_OUTPUT);
setInputDevices(audioInputDevices as InputDeviceInfo[]);
setOutputDevices(audioOutputDevices);
})
.catch((error) => {
logger.warn({
logCode: 'audio_device_enumeration_error',
extraInfo: {
errorMessage: error.message,
errorName: error.name,
},
}, `Error enumerating audio devices: ${error.message}`);
});
if (isAudioConnected) {
updateRemovedDevices(inputDevices, outputDevices);
@ -115,11 +139,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const fallbackInputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
if (!fallbackDevice || !fallbackDevice.deviceId) return;
logger.info({
logCode: 'audio_device_live_selector',
logger.warn({
logCode: 'audio_input_live_selector',
extraInfo: {
userId: Auth.userID,
meetingId: Auth.meetingID,
fallbackDeviceId: fallbackDevice?.deviceId,
fallbackDeviceLabel: fallbackDevice?.label,
},
}, 'Current input device was removed. Fallback to default device');
liveChangeInputDevice(fallbackDevice.deviceId).catch(() => {
@ -129,11 +153,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
const fallbackOutputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
if (!fallbackDevice || !fallbackDevice.deviceId) return;
logger.info({
logCode: 'audio_device_live_selector',
logger.warn({
logCode: 'audio_output_live_selector',
extraInfo: {
userId: Auth.userID,
meetingId: Auth.meetingID,
fallbackDeviceId: fallbackDevice?.deviceId,
fallbackDeviceLabel: fallbackDevice?.label,
},
}, 'Current output device was removed. Fallback to default device');
liveChangeOutputDevice(fallbackDevice.deviceId, true).catch(() => {
@ -162,7 +186,16 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
if (enableDynamicAudioDeviceSelection) {
updateDevices(inAudio);
}
}, [inAudio]);
}, [inAudio, permissionStatus]);
useEffect(() => {
// If the user has no input device, is connected to audio and unmuted,
// they need to be *muted* by the system. Further attempts to unmute
// will open the audio settings modal instead.
if (inputDeviceId === 'listen-only' && isConnected && !muted) {
toggleMuteMicrophoneSystem(muted, toggleVoice);
}
}, [inputDeviceId, isConnected, muted]);
return (
<>
@ -190,6 +223,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
supportsTransparentListenOnly={supportsTransparentListenOnly}
openAudioSettings={openAudioSettings}
/>
) : (
<>
@ -201,6 +236,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
isAudioLocked={isAudioLocked}
toggleMuteMicrophone={toggleMuteMicrophone}
away={away}
openAudioSettings={openAudioSettings}
noInputDevice={inputDeviceId === 'listen-only'}
/>
)}
<ListenOnly
@ -216,7 +253,9 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
);
};
const InputStreamLiveSelectorContainer: React.FC = () => {
const InputStreamLiveSelectorContainer: React.FC<InputStreamLiveSelectorContainerProps> = ({
openAudioSettings,
}) => {
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
if (!u.voice) {
return {
@ -261,6 +300,10 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
const outputDeviceId = useReactiveVar(AudioManager._outputDeviceId.value) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const inputStream = useReactiveVar(AudioManager._inputStream) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value) as string;
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
const supportsTransparentListenOnly = useReactiveVar(AudioManager._transparentListenOnlySupported.value) as boolean;
return (
<InputStreamLiveSelector
@ -280,6 +323,9 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
inputStream={inputStream}
meetingIsBreakout={currentMeeting?.isBreakout ?? false}
away={currentUser?.away ?? false}
openAudioSettings={openAudioSettings}
permissionStatus={permissionStatus}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
);
};

View File

@ -40,32 +40,35 @@ export const handleLeaveAudio = (meetingIsBreakout: boolean) => {
);
};
const toggleMuteMicrophoneThrottled = throttle((
const toggleMute = (
muted: boolean,
toggleVoice: (userId: string, muted: boolean) => void,
actionType = 'user_action',
) => {
Storage.setItem(MUTED_KEY, !muted);
if (muted) {
logger.info(
{
logCode: 'audiomanager_unmute_audio',
extraInfo: { logType: 'user_action' },
},
'microphone unmuted by user',
);
if (AudioManager.inputDeviceId === 'listen-only') {
// User is in duplex audio, passive-sendrecv, but has no input device set
// Unmuting should not be allowed at all
return;
}
logger.info({
logCode: 'audiomanager_unmute_audio',
extraInfo: { logType: actionType },
}, 'microphone unmuted');
Storage.setItem(MUTED_KEY, false);
toggleVoice(Auth.userID as string, false);
} else {
logger.info(
{
logCode: 'audiomanager_mute_audio',
extraInfo: { logType: 'user_action' },
},
'microphone muted by user',
);
logger.info({
logCode: 'audiomanager_mute_audio',
extraInfo: { logType: actionType },
}, 'microphone muted');
Storage.setItem(MUTED_KEY, true);
toggleVoice(Auth.userID as string, true);
}
}, TOGGLE_MUTE_THROTTLE_TIME);
};
const toggleMuteMicrophoneThrottled = throttle(toggleMute, TOGGLE_MUTE_THROTTLE_TIME);
const toggleMuteMicrophoneDebounced = debounce(toggleMuteMicrophoneThrottled, TOGGLE_MUTE_DEBOUNCE_TIME,
{ leading: true, trailing: false });
@ -74,6 +77,11 @@ export const toggleMuteMicrophone = (muted: boolean, toggleVoice: (userId: strin
return toggleMuteMicrophoneDebounced(muted, toggleVoice);
};
// Debounce is not needed here, as this function should only called by the system.
export const toggleMuteMicrophoneSystem = (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => {
return toggleMute(muted, toggleVoice, 'system_action');
};
export const truncateDeviceName = (deviceName: string) => {
if (deviceName && deviceName.length <= DEVICE_LABEL_MAX_LENGTH) {
return deviceName;
@ -141,6 +149,7 @@ export const muteAway = (
export default {
handleLeaveAudio,
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
truncateDeviceName,
notify,
liveChangeInputDevice,

View File

@ -56,6 +56,10 @@ export const DisabledLabel = {
opacity: 1,
};
export const AudioSettingsOption = {
paddingLeft: 12,
};
export const SelectedLabel = {
color: colorPrimary,
backgroundColor: colorOffWhite,
@ -80,6 +84,7 @@ export default {
MuteToggleButton,
DisabledLabel,
SelectedLabel,
AudioSettingsOption,
DangerColor,
AudioDropdown,
};

View File

@ -1,11 +1,14 @@
import React, { useEffect, useState } from 'react';
import React, {
useCallback,
useEffect,
useState,
} from 'react';
import PropTypes from 'prop-types';
import {
defineMessages, injectIntl, FormattedMessage,
} from 'react-intl';
import { useMutation } from '@apollo/client';
import Styled from './styles';
import PermissionsOverlay from '../permissions-overlay/component';
import AudioSettings from '../audio-settings/component';
import EchoTest from '../echo-test/component';
import Help from '../help/component';
@ -21,6 +24,7 @@ import {
muteAway,
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
import Session from '/imports/ui/services/storage/in-memory';
import logger from '/imports/startup/client/logger';
const propTypes = {
intl: PropTypes.shape({
@ -39,10 +43,11 @@ const propTypes = {
isConnected: PropTypes.bool.isRequired,
isUsingAudio: PropTypes.bool.isRequired,
isListenOnly: PropTypes.bool.isRequired,
isMuted: PropTypes.bool.isRequired,
toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
inputDeviceId: PropTypes.string,
outputDeviceId: PropTypes.string,
formattedDialNum: PropTypes.string.isRequired,
showPermissionsOvelay: PropTypes.bool.isRequired,
listenOnlyMode: PropTypes.bool.isRequired,
joinFullAudioImmediately: PropTypes.bool,
forceListenOnlyAttendee: PropTypes.bool.isRequired,
@ -72,6 +77,14 @@ const propTypes = {
}).isRequired,
getTroubleshootingLink: PropTypes.func.isRequired,
away: PropTypes.bool,
doGUM: PropTypes.func.isRequired,
hasMicrophonePermission: PropTypes.func.isRequired,
permissionStatus: PropTypes.string,
liveChangeInputDevice: PropTypes.func.isRequired,
content: PropTypes.string,
unmuteOnExit: PropTypes.bool,
supportsTransparentListenOnly: PropTypes.bool.isRequired,
getAudioConstraints: PropTypes.func.isRequired,
};
const intlMessages = defineMessages({
@ -116,7 +129,7 @@ const intlMessages = defineMessages({
description: 'Title for the echo test',
},
settingsTitle: {
id: 'app.audioModal.settingsTitle',
id: 'app.audio.audioSettings.titleLabel',
description: 'Title for the audio modal',
},
helpTitle: {
@ -139,6 +152,10 @@ const intlMessages = defineMessages({
id: 'app.audioModal.autoplayBlockedDesc',
description: 'Message for autoplay audio block',
},
findingDevicesTitle: {
id: 'app.audio.audioSettings.findingDevicesTitle',
description: 'Message for finding audio devices',
},
});
const AudioModal = ({
@ -148,6 +165,8 @@ const AudioModal = ({
audioLocked,
isUsingAudio,
isListenOnly,
isMuted,
toggleMuteMicrophoneSystem,
autoplayBlocked,
closeModal,
isEchoTest,
@ -174,19 +193,27 @@ const AudioModal = ({
notify,
formattedTelVoice,
handleAllowAutoplay,
showPermissionsOvelay,
isIE,
isOpen,
priority,
setIsOpen,
getTroubleshootingLink,
away = false,
doGUM,
getAudioConstraints,
hasMicrophonePermission,
liveChangeInputDevice,
content: initialContent,
supportsTransparentListenOnly,
unmuteOnExit = false,
permissionStatus = null,
}) => {
const [content, setContent] = useState(null);
const [content, setContent] = useState(initialContent);
const [hasError, setHasError] = useState(false);
const [disableActions, setDisableActions] = useState(false);
const [errorInfo, setErrorInfo] = useState(null);
const [autoplayChecked, setAutoplayChecked] = useState(false);
const [findingDevices, setFindingDevices] = useState(false);
const [setAway] = useMutation(SET_AWAY);
const voiceToggle = useToggleVoice();
@ -257,6 +284,55 @@ const AudioModal = ({
});
};
// getUserMedia/permission failure handler: logs the error and routes the
// modal to the Help view with an error code the Help screen understands.
const handleGUMFailure = (error) => {
  const { MIC_ERROR } = AudioError;
  const isPermissionDenied = error?.name === 'NotAllowedError';

  logger.error({
    logCode: 'audio_gum_failed',
    extraInfo: {
      errorMessage: error.message,
      errorName: error.name,
    },
  }, `Audio gUM failed: ${error.name}`);

  setContent('help');
  setDisableActions(false);
  setHasError(true);
  setErrorInfo({
    // NO_PERMISSION for explicit denials; 0 (generic) for everything else.
    errCode: isPermissionDenied ? MIC_ERROR.NO_PERMISSION : 0,
    errMessage: error?.name || 'NotAllowedError',
  });
};
// Checks whether microphone permission is (or may be) granted.
// Resolves to:
//   - true:  permission granted
//   - null:  undetermined — downstream components decide how to proceed
//   - false: permission denied OR the check itself failed; in both cases the
//            Help screen has already been triggered via handleGUMFailure, so
//            callers must NOT proceed with join/echo-test flows.
const checkMicrophonePermission = (options) => {
  setFindingDevices(true);

  return hasMicrophonePermission(options)
    .then((hasPermission) => {
      // null means undetermined, so we don't want to show the error modal
      // and let downstream components figure it out
      if (hasPermission === true || hasPermission === null) {
        return hasPermission;
      }

      handleGUMFailure(new DOMException(
        'Permissions API says denied',
        'NotAllowedError',
      ));

      return false;
    })
    .catch((error) => {
      // The check itself failed. Return false (not null): handleGUMFailure
      // already surfaced the Help screen, and returning null would let
      // callers treat this as "undetermined" and join on top of the error UI.
      handleGUMFailure(error);
      return false;
    })
    .finally(() => {
      setFindingDevices(false);
    });
};
const handleGoToAudioOptions = () => {
setContent(null);
setHasError(true);
@ -318,14 +394,19 @@ const AudioModal = ({
});
};
const handleJoinLocalEcho = (inputStream) => {
// Confirmation handler for the audio settings screen.
// Propagates the freshly produced input stream (if any), then either runs
// the microphone join flow (when not yet in a call) or just closes the
// modal (settings were changed mid-call and applied live).
const handleAudioSettingsConfirmation = useCallback((inputStream) => {
  // Reset the modal to a connecting state - this kind of sucks?
  // prlanzarin Apr 04 2022
  setContent(null);
  if (inputStream) changeInputStream(inputStream);

  if (!isConnected) {
    handleJoinMicrophone();
    disableAwayMode();
  } else {
    closeModal();
  }
  // All referenced callbacks are listed to avoid stale closures
  // (react-hooks/exhaustive-deps).
}, [changeInputStream, isConnected, closeModal, handleJoinMicrophone, disableAwayMode]);
const skipAudioOptions = () => (isConnecting || (forceListenOnlyAttendee && !autoplayChecked))
&& !content
@ -333,7 +414,6 @@ const AudioModal = ({
const renderAudioOptions = () => {
const hideMicrophone = forceListenOnlyAttendee || audioLocked;
const arrow = isRTL ? '←' : '→';
const dialAudioLabel = `${intl.formatMessage(intlMessages.audioDialTitle)} ${arrow}`;
@ -400,40 +480,46 @@ const AudioModal = ({
/>
);
// Back-button handler: while connecting/connected — or when the options
// screen is being skipped entirely — "back" dismisses the modal;
// otherwise it returns to the audio options screen.
const handleBack = useCallback(() => {
  const shouldDismiss = isConnecting || isConnected || skipAudioOptions();

  if (shouldDismiss) {
    closeModal();
    return;
  }

  handleGoToAudioOptions();
}, [isConnecting, isConnected, skipAudioOptions]);
const renderAudioSettings = () => {
const { animations } = getSettingsSingletonInstance().application;
const confirmationCallback = !localEchoEnabled
? handleRetryGoToEchoTest
: handleJoinLocalEcho;
const handleGUMFailure = (error) => {
const code = error?.name === 'NotAllowedError'
? AudioError.MIC_ERROR.NO_PERMISSION
: 0;
setContent('help');
setErrorInfo({
errCode: code,
errMessage: error?.name || 'NotAllowedError',
});
setDisableActions(false);
};
: handleAudioSettingsConfirmation;
return (
<AudioSettings
handleBack={handleGoToAudioOptions}
animations={animations}
handleBack={handleBack}
handleConfirmation={confirmationCallback}
handleGUMFailure={handleGUMFailure}
joinEchoTest={joinEchoTest}
changeInputDevice={changeInputDevice}
liveChangeInputDevice={liveChangeInputDevice}
changeOutputDevice={changeOutputDevice}
isConnecting={isConnecting}
isConnected={isConnected}
isEchoTest={isEchoTest}
isMuted={isMuted}
toggleMuteMicrophoneSystem={toggleMuteMicrophoneSystem}
inputDeviceId={inputDeviceId}
outputDeviceId={outputDeviceId}
withVolumeMeter={showVolumeMeter}
withEcho={localEchoEnabled}
produceStreams={localEchoEnabled || showVolumeMeter}
notify={notify}
unmuteOnExit={unmuteOnExit}
doGUM={doGUM}
getAudioConstraints={getAudioConstraints}
checkMicrophonePermission={checkMicrophonePermission}
supportsTransparentListenOnly={supportsTransparentListenOnly}
toggleVoice={voiceToggle}
/>
);
};
@ -445,9 +531,19 @@ const AudioModal = ({
message: errorInfo?.errMessage,
};
// Joins listen only from the Help screen. Clearing the content state first
// lets the modal fall through to the "connecting" view while joining.
const _joinListenOnly = () => {
  setContent(null);
  handleJoinListenOnly();
};
return (
<Help
handleBack={handleGoToAudioOptions}
isConnected={isConnected}
handleBack={handleBack}
handleJoinListenOnly={_joinListenOnly}
handleRetryMic={handleGoToAudioSettings}
audioErr={audioErr}
isListenOnly={isListenOnly}
troubleshootingLink={getTroubleshootingLink(errorInfo?.errCode)}
@ -495,6 +591,17 @@ const AudioModal = ({
const renderContent = () => {
const { animations } = getSettingsSingletonInstance().application;
if (findingDevices && content === null) {
return (
<Styled.Connecting role="alert">
<span data-test="findingDevicesLabel">
{intl.formatMessage(intlMessages.findingDevicesTitle)}
</span>
<Styled.ConnectingAnimation animations={animations} />
</Styled.Connecting>
);
}
if (skipAudioOptions()) {
return (
<Styled.Connecting role="alert">
@ -505,6 +612,7 @@ const AudioModal = ({
</Styled.Connecting>
);
}
return content ? contents[content].component() : renderAudioOptions();
};
@ -512,16 +620,23 @@ const AudioModal = ({
if (!isUsingAudio) {
if (forceListenOnlyAttendee || audioLocked) {
handleJoinListenOnly();
return;
}
} else if (!listenOnlyMode) {
if (joinFullAudioImmediately) {
checkMicrophonePermission({ doGUM: true, permissionStatus })
.then((hasPermission) => {
// No permission - let the Help screen be shown as it's triggered
// by the checkMicrophonePermission function
if (hasPermission === false) return;
if (joinFullAudioImmediately && !listenOnlyMode) {
handleJoinMicrophone();
return;
}
if (!listenOnlyMode) {
handleGoToEchoTest();
// Permission is granted or undetermined, so we can proceed
handleJoinMicrophone();
});
} else {
checkMicrophonePermission({ doGUM: false, permissionStatus }).then((hasPermission) => {
if (hasPermission === false) return;
handleGoToEchoTest();
});
}
}
}
}, [
@ -551,40 +666,37 @@ const AudioModal = ({
let title = content
? intl.formatMessage(contents[content].title)
: intl.formatMessage(intlMessages.audioChoiceLabel);
title = !skipAudioOptions() ? title : null;
title = !skipAudioOptions() && !findingDevices ? title : null;
return (
<>
{showPermissionsOvelay ? <PermissionsOverlay closeModal={closeModal} /> : null}
<Styled.AudioModal
modalName="AUDIO"
onRequestClose={closeModal}
data-test="audioModal"
contentLabel={intl.formatMessage(intlMessages.ariaModalTitle)}
title={title}
{...{
setIsOpen,
isOpen,
priority,
}}
>
{isIE ? (
<Styled.BrowserWarning>
<FormattedMessage
id="app.audioModal.unsupportedBrowserLabel"
description="Warning when someone joins with a browser that isn't supported"
values={{
0: <a href="https://www.google.com/chrome/">Chrome</a>,
1: <a href="https://getfirefox.com">Firefox</a>,
}}
/>
</Styled.BrowserWarning>
) : null}
<Styled.Content>
{renderContent()}
</Styled.Content>
</Styled.AudioModal>
</>
<Styled.AudioModal
modalName="AUDIO"
onRequestClose={closeModal}
data-test="audioModal"
contentLabel={intl.formatMessage(intlMessages.ariaModalTitle)}
title={title}
{...{
setIsOpen,
isOpen,
priority,
}}
>
{isIE ? (
<Styled.BrowserWarning>
<FormattedMessage
id="app.audioModal.unsupportedBrowserLabel"
description="Warning when someone joins with a browser that isn't supported"
values={{
0: <a href="https://www.google.com/chrome/">Chrome</a>,
1: <a href="https://getfirefox.com">Firefox</a>,
}}
/>
</Styled.BrowserWarning>
) : null}
<Styled.Content>
{renderContent()}
</Styled.Content>
</Styled.AudioModal>
);
};

View File

@ -62,7 +62,6 @@ const AudioModalContainer = (props) => {
combinedDialInNum = `${dialNumber.replace(/\D+/g, '')},,,${telVoice.replace(/\D+/g, '')}`;
}
}
const { isIe } = browserInfo;
const SHOW_VOLUME_METER = window.meetingClientSettings.public.media.showVolumeMeter;
@ -81,26 +80,26 @@ const AudioModalContainer = (props) => {
const isListenOnly = useReactiveVar(AudioManager._isListenOnly.value);
const isEchoTest = useReactiveVar(AudioManager._isEchoTest.value);
const autoplayBlocked = useReactiveVar(AudioManager._autoplayBlocked.value);
const isMuted = useReactiveVar(AudioManager._isMuted.value);
const meetingIsBreakout = AppService.useMeetingIsBreakout();
const supportsTransparentListenOnly = useReactiveVar(
AudioManager._transparentListenOnlySupported.value,
);
const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value);
const { userLocks } = useLockContext();
const isListenOnlyInputDevice = Service.inputDeviceId() === 'listen-only';
const devicesAlreadyConfigured = skipEchoTestIfPreviousDevice
&& Service.inputDeviceId();
const joinFullAudioImmediately = !isListenOnlyInputDevice
&& (skipCheck || (skipCheckOnJoin && !getEchoTest) || devicesAlreadyConfigured);
const { setIsOpen } = props;
const close = useCallback(() => closeModal(() => setIsOpen(false)), [setIsOpen]);
const joinMic = useCallback(
(skipEchoTest) => joinMicrophone(skipEchoTest || skipCheck || skipCheckOnJoin),
(options = {}) => joinMicrophone({
skipEchoTest: options.skipEchoTest || joinFullAudioImmediately,
}),
[skipCheck, skipCheckOnJoin],
);
const joinFullAudioImmediately = (
autoJoin
&& (
skipCheck
|| (skipCheckOnJoin && !getEchoTest)
))
|| (
skipCheck
|| (skipCheckOnJoin && !getEchoTest)
|| (skipEchoTestIfPreviousDevice && (inputDeviceId || outputDeviceId))
);
return (
<AudioModal
@ -114,6 +113,8 @@ const AudioModalContainer = (props) => {
isConnected={isConnected}
isListenOnly={isListenOnly}
isEchoTest={isEchoTest}
isMuted={isMuted}
toggleMuteMicrophoneSystem={Service.toggleMuteMicrophoneSystem}
autoplayBlocked={autoplayBlocked}
getEchoTest={getEchoTest}
joinFullAudioImmediately={joinFullAudioImmediately}
@ -123,6 +124,7 @@ const AudioModalContainer = (props) => {
joinListenOnly={joinListenOnly}
leaveEchoTest={leaveEchoTest}
changeInputDevice={Service.changeInputDevice}
liveChangeInputDevice={Service.liveChangeInputDevice}
changeInputStream={Service.changeInputStream}
changeOutputDevice={Service.changeOutputDevice}
joinEchoTest={Service.joinEchoTest}
@ -144,7 +146,14 @@ const AudioModalContainer = (props) => {
isRTL={isRTL}
AudioError={AudioError}
getTroubleshootingLink={AudioModalService.getTroubleshootingLink}
getMicrophonePermissionStatus={Service.getMicrophonePermissionStatus}
getAudioConstraints={Service.getAudioConstraints}
doGUM={Service.doGUM}
bypassGUM={Service.bypassGUM}
supportsTransparentListenOnly={supportsTransparentListenOnly}
setIsOpen={setIsOpen}
hasMicrophonePermission={Service.hasMicrophonePermission}
permissionStatus={permissionStatus}
{...props}
/>
);

View File

@ -20,7 +20,10 @@ export const didUserSelectedListenOnly = () => (
!!Storage.getItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY)
);
export const joinMicrophone = (skipEchoTest = false) => {
export const joinMicrophone = (options = {}) => {
const { skipEchoTest = false } = options;
const shouldSkipEcho = skipEchoTest && Service.inputDeviceId() !== 'listen-only';
Storage.setItem(CLIENT_DID_USER_SELECTED_MICROPHONE_KEY, true);
Storage.setItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY, false);
@ -30,8 +33,8 @@ export const joinMicrophone = (skipEchoTest = false) => {
const call = new Promise((resolve, reject) => {
try {
if ((skipEchoTest && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
return resolve(Service.joinMicrophone());
if ((shouldSkipEcho && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
return resolve(Service.joinMicrophone(options));
}
return resolve(Service.transferCall());

View File

@ -63,6 +63,7 @@ const Connecting = styled.div`
margin-top: auto;
margin-bottom: auto;
font-size: 2rem;
text-align: center;
`;
const ellipsis = keyframes`

View File

@ -8,36 +8,47 @@ import logger from '/imports/startup/client/logger';
import AudioStreamVolume from '/imports/ui/components/audio/audio-stream-volume/component';
import LocalEchoContainer from '/imports/ui/components/audio/local-echo/container';
import DeviceSelector from '/imports/ui/components/audio/device-selector/component';
import {
getAudioConstraints,
doGUM,
} from '/imports/api/audio/client/bridge/service';
import MediaStreamUtils from '/imports/utils/media-stream-utils';
import audioManager from '/imports/ui/services/audio-manager';
import AudioManager from '/imports/ui/services/audio-manager';
import Session from '/imports/ui/services/storage/in-memory';
const propTypes = {
intl: PropTypes.shape({
formatMessage: PropTypes.func.isRequired,
}).isRequired,
animations: PropTypes.bool,
changeInputDevice: PropTypes.func.isRequired,
liveChangeInputDevice: PropTypes.func.isRequired,
changeOutputDevice: PropTypes.func.isRequired,
handleBack: PropTypes.func.isRequired,
handleConfirmation: PropTypes.func.isRequired,
handleGUMFailure: PropTypes.func.isRequired,
isConnecting: PropTypes.bool.isRequired,
isConnected: PropTypes.bool.isRequired,
isMuted: PropTypes.bool.isRequired,
toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
inputDeviceId: PropTypes.string.isRequired,
outputDeviceId: PropTypes.string.isRequired,
produceStreams: PropTypes.bool,
withEcho: PropTypes.bool,
withVolumeMeter: PropTypes.bool,
notify: PropTypes.func.isRequired,
unmuteOnExit: PropTypes.bool,
doGUM: PropTypes.func.isRequired,
getAudioConstraints: PropTypes.func.isRequired,
checkMicrophonePermission: PropTypes.func.isRequired,
supportsTransparentListenOnly: PropTypes.bool.isRequired,
toggleVoice: PropTypes.func.isRequired,
permissionStatus: PropTypes.string,
};
const defaultProps = {
animations: true,
produceStreams: false,
withEcho: false,
withVolumeMeter: false,
unmuteOnExit: false,
permissionStatus: null,
};
const intlMessages = defineMessages({
@ -45,10 +56,6 @@ const intlMessages = defineMessages({
id: 'app.audio.backLabel',
description: 'audio settings back button label',
},
descriptionLabel: {
id: 'app.audio.audioSettings.descriptionLabel',
description: 'audio settings description label',
},
micSourceLabel: {
id: 'app.audio.audioSettings.microphoneSourceLabel',
description: 'Label for mic source',
@ -69,17 +76,36 @@ const intlMessages = defineMessages({
id: 'app.audioNotification.deviceChangeFailed',
description: 'Device change failed',
},
confirmLabel: {
id: 'app.audio.audioSettings.confirmLabel',
description: 'Audio settings confirmation button label',
},
cancelLabel: {
id: 'app.audio.audioSettings.cancelLabel',
description: 'Audio settings cancel button label',
},
findingDevicesTitle: {
id: 'app.audio.audioSettings.findingDevicesTitle',
description: 'Message for finding audio devices',
},
});
class AudioSettings extends React.Component {
constructor(props) {
super(props);
const { inputDeviceId, outputDeviceId } = props;
const {
inputDeviceId,
outputDeviceId,
unmuteOnExit,
} = props;
this.handleInputChange = this.handleInputChange.bind(this);
this.handleOutputChange = this.handleOutputChange.bind(this);
this.handleConfirmationClick = this.handleConfirmationClick.bind(this);
this.handleCancelClick = this.handleCancelClick.bind(this);
this.unmuteOnExit = this.unmuteOnExit.bind(this);
this.updateDeviceList = this.updateDeviceList.bind(this);
this.state = {
inputDeviceId,
@ -88,32 +114,80 @@ class AudioSettings extends React.Component {
// blocked until at least one stream is generated
producingStreams: props.produceStreams,
stream: null,
unmuteOnExit,
audioInputDevices: [],
audioOutputDevices: [],
findingDevices: true,
};
this._isMounted = false;
}
componentDidMount() {
const { inputDeviceId, outputDeviceId } = this.state;
const {
inputDeviceId,
outputDeviceId,
} = this.state;
const {
isConnected,
isMuted,
toggleMuteMicrophoneSystem,
checkMicrophonePermission,
toggleVoice,
permissionStatus,
} = this.props;
Session.setItem('inEchoTest', true);
this._isMounted = true;
// Guarantee initial in/out devices are initialized on all ends
this.setInputDevice(inputDeviceId);
this.setOutputDevice(outputDeviceId);
audioManager.isEchoTest = true;
AudioManager.isEchoTest = true;
checkMicrophonePermission({ gumOnPrompt: true, permissionStatus })
.then(this.updateDeviceList)
.then(() => {
if (!this._isMounted) return;
navigator.mediaDevices.addEventListener(
'devicechange',
this.updateDeviceList,
);
this.setState({ findingDevices: false });
this.setInputDevice(inputDeviceId);
this.setOutputDevice(outputDeviceId);
});
// If connected and unmuted, we need to mute the audio and revert it
// back to the original state on exit.
if (isConnected && !isMuted) {
toggleMuteMicrophoneSystem(isMuted, toggleVoice);
// We only need to revert the mute state if the user is not listen-only
if (inputDeviceId !== 'listen-only') this.setState({ unmuteOnExit: true });
}
}
componentDidUpdate(prevProps) {
const { permissionStatus } = this.props;
if (prevProps.permissionStatus !== permissionStatus) {
this.updateDeviceList();
}
}
componentWillUnmount() {
const { stream } = this.state;
Session.setItem('inEchoTest', false);
this._mounted = false;
this._isMounted = false;
if (stream) {
MediaStreamUtils.stopMediaStreamTracks(stream);
}
audioManager.isEchoTest = false;
AudioManager.isEchoTest = false;
navigator.mediaDevices.removeEventListener(
'devicechange', this.updateDeviceList,
);
this.unmuteOnExit();
}
handleInputChange(deviceId) {
@ -125,8 +199,17 @@ class AudioSettings extends React.Component {
}
handleConfirmationClick() {
const { stream } = this.state;
const { produceStreams, handleConfirmation } = this.props;
const { stream, inputDeviceId } = this.state;
const {
isConnected,
produceStreams,
handleConfirmation,
liveChangeInputDevice,
} = this.props;
// If connected, we need to use the in-call device change method so that all
// components pick up the change and the peer is properly updated.
if (isConnected) liveChangeInputDevice(inputDeviceId);
// Stream generation disabled or there isn't any stream: just run the provided callback
if (!produceStreams || !stream) return handleConfirmation();
@ -139,49 +222,63 @@ class AudioSettings extends React.Component {
return handleConfirmation(clonedStream);
}
setInputDevice(deviceId) {
const { handleGUMFailure, changeInputDevice, produceStreams, intl, notify } = this.props;
const { inputDeviceId: currentInputDeviceId } = this.state;
handleCancelClick() {
const { handleBack } = this.props;
handleBack();
}
setInputDevice(deviceId) {
const {
isConnected,
handleGUMFailure,
changeInputDevice,
produceStreams,
intl,
notify,
} = this.props;
const { inputDeviceId: currentInputDeviceId } = this.state;
try {
changeInputDevice(deviceId);
if (!isConnected) changeInputDevice(deviceId);
// Only generate input streams if they're going to be used with something
// In this case, the volume meter or local echo test.
if (produceStreams) {
this.generateInputStream(deviceId)
.then((stream) => {
// Extract the deviceId again from the stream to guarantee consistency
// between stream DID vs chosen DID. That's necessary in scenarios where,
// eg, there's no default/pre-set deviceId ('') and the browser's
// default device has been altered by the user (browser default != system's
// default).
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
if (extractedDeviceId && extractedDeviceId !== deviceId)
this.generateInputStream(deviceId).then((stream) => {
// Extract the deviceId again from the stream to guarantee consistency
// between stream DID vs chosen DID. That's necessary in scenarios where,
// eg, there's no default/pre-set deviceId ('') and the browser's
// default device has been altered by the user (browser default != system's
// default).
let extractedDeviceId = deviceId;
if (stream) {
extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
if (extractedDeviceId !== deviceId && !isConnected) {
changeInputDevice(extractedDeviceId);
}
}
// Component unmounted after gUM resolution -> skip echo rendering
if (!this._isMounted) return;
// Component unmounted after gUM resolution -> skip echo rendering
if (!this._isMounted) return;
this.setState({
inputDeviceId: extractedDeviceId,
stream,
producingStreams: false,
});
})
.catch((error) => {
logger.warn(
{
logCode: 'audiosettings_gum_failed',
extraInfo: {
deviceId,
errorMessage: error.message,
errorName: error.name,
},
},
`Audio settings gUM failed: ${error.name}`
);
handleGUMFailure(error);
this.setState({
inputDeviceId: extractedDeviceId,
stream,
producingStreams: false,
});
}).catch((error) => {
logger.warn({
logCode: 'audiosettings_gum_failed',
extraInfo: {
deviceId,
errorMessage: error.message,
errorName: error.name,
},
}, `Audio settings gUM failed: ${error.name}`);
handleGUMFailure(error);
});
} else {
this.setState({
inputDeviceId: deviceId,
@ -198,7 +295,7 @@ class AudioSettings extends React.Component {
newDeviceId: deviceId,
},
},
`Audio settings: error changing input device - {${error.name}: ${error.message}}`
`Audio settings: error changing input device - {${error.name}: ${error.message}}`,
);
notify(intl.formatMessage(intlMessages.deviceChangeFailed), true);
}
@ -233,7 +330,29 @@ class AudioSettings extends React.Component {
});
}
updateDeviceList() {
return navigator.mediaDevices.enumerateDevices()
.then((devices) => {
const audioInputDevices = devices.filter((i) => i.kind === 'audioinput');
const audioOutputDevices = devices.filter((i) => i.kind === 'audiooutput');
this.setState({
audioInputDevices,
audioOutputDevices,
});
});
}
unmuteOnExit() {
const { toggleMuteMicrophoneSystem, toggleVoice } = this.props;
const { unmuteOnExit } = this.state;
// Unmutes microphone if flagged to do so
if (unmuteOnExit) toggleMuteMicrophoneSystem(true, toggleVoice);
}
generateInputStream(inputDeviceId) {
const { doGUM, getAudioConstraints } = this.props;
const { stream } = this.state;
if (inputDeviceId && stream) {
@ -244,6 +363,8 @@ class AudioSettings extends React.Component {
MediaStreamUtils.stopMediaStreamTracks(stream);
}
if (inputDeviceId === 'listen-only') return Promise.resolve(null);
const constraints = {
audio: getAudioConstraints({ deviceId: inputDeviceId }),
};
@ -285,9 +406,16 @@ class AudioSettings extends React.Component {
}
renderDeviceSelectors() {
const { inputDeviceId, outputDeviceId, producingStreams } = this.state;
const { intl, isConnecting } = this.props;
const blocked = producingStreams || isConnecting;
const {
inputDeviceId,
outputDeviceId,
producingStreams,
audioInputDevices,
audioOutputDevices,
findingDevices,
} = this.state;
const { intl, isConnecting, supportsTransparentListenOnly } = this.props;
const blocked = producingStreams || isConnecting || findingDevices;
return (
<Styled.Row>
@ -298,10 +426,12 @@ class AudioSettings extends React.Component {
<DeviceSelector
id="inputDeviceSelector"
deviceId={inputDeviceId}
devices={audioInputDevices}
kind="audioinput"
blocked={blocked}
onChange={this.handleInputChange}
intl={intl}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
</Styled.LabelSmall>
</Styled.FormElement>
@ -313,10 +443,12 @@ class AudioSettings extends React.Component {
<DeviceSelector
id="outputDeviceSelector"
deviceId={outputDeviceId}
devices={audioOutputDevices}
kind="audiooutput"
blocked={blocked}
onChange={this.handleOutputChange}
intl={intl}
supportsTransparentListenOnly={supportsTransparentListenOnly}
/>
</Styled.LabelSmall>
</Styled.FormElement>
@ -326,32 +458,46 @@ class AudioSettings extends React.Component {
}
render() {
const { isConnecting, intl, handleBack } = this.props;
const { producingStreams } = this.state;
const {
findingDevices,
producingStreams,
} = this.state;
const {
isConnecting,
isConnected,
intl,
animations,
} = this.props;
return (
<Styled.FormWrapper data-test="audioSettingsModal">
<Styled.Form>
<Styled.Row>
<Styled.AudioNote>{intl.formatMessage(intlMessages.descriptionLabel)}</Styled.AudioNote>
</Styled.Row>
{this.renderDeviceSelectors()}
{this.renderOutputTest()}
{this.renderVolumeMeter()}
</Styled.Form>
{findingDevices && (
<Styled.AudioNote>
<span>{intl.formatMessage(intlMessages.findingDevicesTitle)}</span>
<Styled.FetchingAnimation animations={animations} />
</Styled.AudioNote>
)}
<Styled.EnterAudio>
<Styled.BackButton
label={intl.formatMessage(intlMessages.backLabel)}
label={isConnected
? intl.formatMessage(intlMessages.cancelLabel)
: intl.formatMessage(intlMessages.backLabel)}
color="secondary"
onClick={handleBack}
onClick={this.handleCancelClick}
disabled={isConnecting}
/>
<Button
data-test="joinEchoTestButton"
size="md"
color="primary"
label={intl.formatMessage(intlMessages.retryLabel)}
label={isConnected
? intl.formatMessage(intlMessages.confirmLabel)
: intl.formatMessage(intlMessages.retryLabel)}
onClick={this.handleConfirmationClick}
disabled={isConnecting || producingStreams}
/>

View File

@ -1,9 +1,9 @@
import styled from 'styled-components';
import styled, { css, keyframes } from 'styled-components';
import Button from '/imports/ui/components/common/button/component';
import { smallOnly } from '/imports/ui/stylesheets/styled-components/breakpoints';
const FormWrapper = styled.div`
min-width: 0;
min-width: 100%;
`;
const Form = styled.div`
@ -26,6 +26,10 @@ const EnterAudio = styled.div`
`;
const AudioNote = styled.div`
display: flex;
flex-flow: column;
text-align: center;
justify-content: center;
@media ${smallOnly} {
font-size: 0.8rem;
}
@ -163,6 +167,31 @@ const BackButton = styled(Button)`
}
`;
// Keyframes that grow an element's width to 1.5em; paired with
// FetchingAnimation's stepped animation to render a "typing ellipsis".
const ellipsis = keyframes`
  to {
    width: 1.5em;
  }
`;
// Animated ellipsis ("…") shown while device discovery is in progress.
// The dots are revealed by growing the ::after pseudo-element's width in
// 4 steps; the animation is gated on the `animations` prop.
const FetchingAnimation = styled.span`
  margin: auto;
  display: inline-block;
  width: 1.5em;

  &:after {
    overflow: hidden;
    display: inline-block;
    vertical-align: bottom;
    content: "\\2026"; /* ascii code for the ellipsis character */
    width: 0;
    margin-left: 0.25em;

    ${({ animations }) => animations && css`
      animation: ${ellipsis} steps(4, end) 900ms infinite;
    `}
  }
`;
export default {
FormWrapper,
Form,
@ -175,4 +204,5 @@ export default {
LabelSmallFullWidth,
SpacedLeftCol,
BackButton,
FetchingAnimation,
};

View File

@ -182,7 +182,7 @@ const AudioContainer = (props) => {
if (Service.isConnected()) return;
if (userSelectedMicrophone) {
joinMicrophone(true);
joinMicrophone({ skipEchoTest: true });
return;
}

View File

@ -1,7 +1,5 @@
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import logger from '/imports/startup/client/logger';
import browserInfo from '/imports/utils/browserInfo';
import {
defineMessages,
} from 'react-intl';
@ -16,12 +14,18 @@ const propTypes = {
onChange: PropTypes.func.isRequired,
blocked: PropTypes.bool,
deviceId: PropTypes.string,
devices: PropTypes.arrayOf(PropTypes.shape({
deviceId: PropTypes.string,
label: PropTypes.string,
})),
supportsTransparentListenOnly: PropTypes.bool.isRequired,
};
const defaultProps = {
kind: 'audioinput',
blocked: false,
deviceId: '',
devices: [],
};
const intlMessages = defineMessages({
@ -45,6 +49,10 @@ const intlMessages = defineMessages({
id: 'app.audio.noDeviceFound',
description: 'No audio device found',
},
noMicListenOnlyLabel: {
id: 'app.audio.audioSettings.noMicListenOnly',
description: 'No microphone, listen only mode label',
},
});
class DeviceSelector extends Component {
@ -52,52 +60,16 @@ class DeviceSelector extends Component {
super(props);
this.handleSelectChange = this.handleSelectChange.bind(this);
this.state = {
devices: [],
options: [],
};
}
componentDidMount() {
const { blocked } = this.props;
if (!blocked) this.enumerate();
}
componentDidUpdate(prevProps) {
const { blocked } = this.props;
if (prevProps.blocked === true && blocked === false) this.enumerate();
}
handleEnumerateDevicesSuccess(deviceInfos) {
const { kind } = this.props;
const devices = deviceInfos.filter((d) => d.kind === kind);
logger.info({
logCode: 'audiodeviceselector_component_enumeratedevices_success',
extraInfo: {
deviceKind: kind,
devices,
},
}, 'Success on enumerateDevices() for audio');
this.setState({
devices,
options: devices.map((d, i) => ({
label: d.label || this.getFallbackLabel(i),
value: d.deviceId,
key: uniqueId('device-option-'),
})),
});
}
handleSelectChange(event) {
const { value } = event.target;
const { onChange } = this.props;
const { devices } = this.state;
const selectedDevice = devices.find((d) => d.deviceId === value);
onChange(selectedDevice.deviceId, selectedDevice, event);
const { devices, onChange } = this.props;
const selectedDeviceId = (value === 'listen-only')
? value
: devices.find((d) => d.deviceId === value)?.deviceId;
onChange(selectedDeviceId);
}
getFallbackLabel(index) {
@ -107,28 +79,29 @@ class DeviceSelector extends Component {
return intl.formatMessage(label, { 0: index });
}
enumerate() {
const { kind } = this.props;
navigator.mediaDevices
.enumerateDevices()
.then(this.handleEnumerateDevicesSuccess.bind(this))
.catch(() => {
logger.error({
logCode: 'audiodeviceselector_component_enumeratedevices_error',
extraInfo: {
deviceKind: kind,
},
}, 'Error on enumerateDevices(): ');
});
}
render() {
const {
intl, kind, blocked, deviceId,
intl,
kind,
blocked,
deviceId,
devices,
supportsTransparentListenOnly,
} = this.props;
const { options } = this.state;
const options = devices.map((d, i) => ({
label: d.label || this.getFallbackLabel(i),
value: d.deviceId,
key: uniqueId('device-option-'),
}));
if (kind === 'audioinput' && supportsTransparentListenOnly && !blocked) {
options.push({
label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
value: 'listen-only',
key: uniqueId('device-option-'),
});
}
let notFoundOption;

View File

@ -8,6 +8,7 @@ const propTypes = {
formatMessage: PropTypes.func.isRequired,
}).isRequired,
isListenOnly: PropTypes.bool.isRequired,
isConnected: PropTypes.bool.isRequired,
audioErr: PropTypes.shape({
code: PropTypes.number,
message: PropTypes.string,
@ -18,6 +19,8 @@ const propTypes = {
}),
}).isRequired,
handleBack: PropTypes.func.isRequired,
handleRetryMic: PropTypes.func.isRequired,
handleJoinListenOnly: PropTypes.func.isRequired,
troubleshootingLink: PropTypes.string,
};
@ -30,6 +33,10 @@ const intlMessages = defineMessages({
id: 'app.audioModal.helpSubtitleMic',
description: 'Text description for the audio help subtitle (microphones)',
},
helpSubtitlePermission: {
id: 'app.audioModal.helpSubtitlePermission',
description: 'Text description for the audio help subtitle (permission)',
},
helpSubtitleGeneric: {
id: 'app.audioModal.helpSubtitleGeneric',
description: 'Text description for the audio help subtitle (generic)',
@ -46,10 +53,18 @@ const intlMessages = defineMessages({
id: 'app.audioModal.helpPermissionStep3',
description: 'Text description for the audio permission help step 3',
},
retryLabel: {
id: 'app.audio.audioSettings.retryLabel',
backLabel: {
id: 'app.audio.backLabel',
description: 'audio settings back button label',
},
retryMicLabel: {
id: 'app.audio.audioSettings.retryMicLabel',
description: 'audio settings retry button label',
},
listenOnlyLabel: {
id: 'app.audioModal.listenOnlyLabel',
description: 'audio settings listen only button label',
},
noSSL: {
id: 'app.audioModal.help.noSSL',
description: 'Text description for domain not using https',
@ -74,7 +89,12 @@ const intlMessages = defineMessages({
class Help extends Component {
getSubtitle() {
const { intl, isListenOnly } = this.props;
const { audioErr, intl, isListenOnly } = this.props;
const { MIC_ERROR } = audioErr;
if (audioErr.code === MIC_ERROR.NO_PERMISSION) {
return intl.formatMessage(intlMessages.helpSubtitlePermission);
}
return !isListenOnly
? intl.formatMessage(intlMessages.helpSubtitleMic)
@ -155,7 +175,10 @@ class Help extends Component {
render() {
const {
intl,
isConnected,
handleBack,
handleRetryMic,
handleJoinListenOnly,
troubleshootingLink,
} = this.props;
@ -174,11 +197,31 @@ class Help extends Component {
</Styled.Text>
)}
<Styled.EnterAudio>
<Styled.RetryButton
label={intl.formatMessage(intlMessages.retryLabel)}
{!isConnected ? (
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.listenOnlyLabel)}
data-test="helpListenOnlyBtn"
icon="listen"
size="md"
color="secondary"
onClick={handleJoinListenOnly}
/>
) : (
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.backLabel)}
data-test="helpBackBtn"
color="secondary"
size="md"
onClick={handleBack}
/>
)}
<Styled.HelpActionButton
label={intl.formatMessage(intlMessages.retryMicLabel)}
data-test="helpRetryMicBtn"
icon="unmute"
size="md"
color="primary"
onClick={handleBack}
onClick={handleRetryMic}
/>
</Styled.EnterAudio>
</Styled.Help>

View File

@ -24,11 +24,11 @@ const Text = styled.div`
const EnterAudio = styled.div`
display: flex;
justify-content: flex-end;
justify-content: center;
margin-top: ${jumboPaddingY};
`;
const RetryButton = styled(Button)`
const HelpActionButton = styled(Button)`
margin-right: 0.5rem;
margin-left: inherit;
@ -72,7 +72,7 @@ export default {
Help,
Text,
EnterAudio,
RetryButton,
HelpActionButton,
TroubleshootLink,
UnknownError,
PermissionHelpSteps,

View File

@ -3,7 +3,6 @@ import PropTypes from 'prop-types';
import { defineMessages, injectIntl } from 'react-intl';
import Styled from './styles';
import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
import Service from '/imports/ui/components/audio/local-echo/service';
const propTypes = {
intl: PropTypes.shape({
@ -14,6 +13,10 @@ const propTypes = {
id: PropTypes.string,
}),
initialHearingState: PropTypes.bool,
playEchoStream: PropTypes.func.isRequired,
deattachEchoStream: PropTypes.func.isRequired,
shouldUseRTCLoopback: PropTypes.func.isRequired,
createAudioRTCLoopback: PropTypes.func.isRequired,
};
const intlMessages = defineMessages({
@ -31,6 +34,10 @@ const LocalEcho = ({
intl,
stream = null,
initialHearingState = false,
playEchoStream,
deattachEchoStream,
shouldUseRTCLoopback,
createAudioRTCLoopback,
}) => {
const loopbackAgent = useRef(null);
const [hearing, setHearing] = useState(initialHearingState);
@ -41,20 +48,20 @@ const LocalEcho = ({
const applyHearingState = (_stream) => {
if (hearing) {
Service.playEchoStream(_stream, loopbackAgent.current);
playEchoStream(_stream, loopbackAgent.current);
} else {
Service.deattachEchoStream();
deattachEchoStream();
}
};
const cleanup = () => {
if (loopbackAgent.current) loopbackAgent.current.stop();
Service.deattachEchoStream();
deattachEchoStream();
};
useEffect(() => {
if (Service.useRTCLoopback()) {
loopbackAgent.current = Service.createAudioRTCLoopback();
if (shouldUseRTCLoopback()) {
loopbackAgent.current = createAudioRTCLoopback();
}
return cleanup;
}, []);

View File

@ -1,10 +1,24 @@
import React from 'react';
import AudioService from '/imports/ui/components/audio/service';
import LocalEchoService from '/imports/ui/components/audio/local-echo/service';
import LocalEcho from '/imports/ui/components/audio/local-echo/component';
const LocalEchoContainer = (props) => {
const { initialHearingState } = window.meetingClientSettings.public.media.localEchoTest;
const {
initialHearingState: settingsHearingState,
} = window.meetingClientSettings.public.media.localEchoTest;
const initialHearingState = settingsHearingState && !AudioService.isConnected();
return <LocalEcho {...props} initialHearingState={initialHearingState} />;
return (
<LocalEcho
{...props}
initialHearingState={initialHearingState}
playEchoStream={LocalEchoService.playEchoStream}
deattachEchoStream={LocalEchoService.deattachEchoStream}
shouldUseRTCLoopback={LocalEchoService.shouldUseRTCLoopback}
createAudioRTCLoopback={LocalEchoService.createAudioRTCLoopback}
/>
);
};
export default LocalEchoContainer;

View File

@ -1,13 +1,15 @@
import LocalPCLoopback from '/imports/ui/services/webrtc-base/local-pc-loopback';
import browserInfo from '/imports/utils/browserInfo';
const LOCAL_MEDIA_TAG = '#local-media';
let audioContext = null;
let sourceContext = null;
let contextDestination = null;
let stubAudioElement = null;
let delayNode = null;
const useRTCLoopback = () => {
const shouldUseRTCLoopback = () => {
const USE_RTC_LOOPBACK_CHR = window.meetingClientSettings.public.media.localEchoTest.useRtcLoopbackInChromium;
return (browserInfo.isChrome || browserInfo.isEdge) && USE_RTC_LOOPBACK_CHR;
@ -44,7 +46,6 @@ const cleanupDelayNode = () => {
};
const addDelayNode = (stream) => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
delayTime = 0.5,
maxDelayTime = 2,
@ -52,7 +53,7 @@ const addDelayNode = (stream) => {
if (stream) {
if (delayNode || audioContext || sourceContext) cleanupDelayNode();
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
// Workaround: attach the stream to a muted stub audio element to be able to play it in
// Chromium-based browsers. See https://bugs.chromium.org/p/chromium/issues/detail?id=933677
stubAudioElement = new Audio();
@ -70,18 +71,17 @@ const addDelayNode = (stream) => {
sourceContext.connect(delayNode);
delayNode.connect(contextDestination);
delayNode.delayTime.setValueAtTime(delayTime, audioContext.currentTime);
// Play the stream with the delay in the default audio element (remote-media)
// Play the stream with the delay in the default audio element (local-media)
audioElement.srcObject = contextDestination.stream;
}
};
const deattachEchoStream = () => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
enabled: DELAY_ENABLED = true,
} = window.meetingClientSettings.public.media.localEchoTest.delay;
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
if (DELAY_ENABLED) {
audioElement.muted = false;
@ -93,7 +93,6 @@ const deattachEchoStream = () => {
};
const playEchoStream = async (stream, loopbackAgent = null) => {
const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
const {
enabled: DELAY_ENABLED = true,
} = window.meetingClientSettings.public.media.localEchoTest.delay;
@ -116,9 +115,9 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
if (DELAY_ENABLED) {
addDelayNode(streamToPlay);
} else {
// No delay: play the stream in the default audio element (remote-media),
// No delay: play the stream in the default audio element (local-media),
// no strings attached.
const audioElement = document.querySelector(MEDIA_TAG);
const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
audioElement.srcObject = streamToPlay;
audioElement.muted = false;
audioElement.play();
@ -127,7 +126,7 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
};
export default {
useRTCLoopback,
shouldUseRTCLoopback,
createAudioRTCLoopback,
deattachEchoStream,
playEchoStream,

View File

@ -1,52 +0,0 @@
import React from 'react';
import { injectIntl, defineMessages } from 'react-intl';
import PropTypes from 'prop-types';
import Styled from './styles';
import browserInfo from '/imports/utils/browserInfo';
import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
const propTypes = {
intl: PropTypes.object.isRequired,
closeModal: PropTypes.func.isRequired,
};
const intlMessages = defineMessages({
title: {
id: 'app.audio.permissionsOverlay.title',
description: 'Title for the overlay',
},
hint: {
id: 'app.audio.permissionsOverlay.hint',
description: 'Hint for the overlay',
},
});
const { isChrome, isFirefox, isSafari } = browserInfo;
const PermissionsOverlay = ({ intl, closeModal }) => {
const Settings = getSettingsSingletonInstance();
const { animations } = Settings.application;
return (
<Styled.PermissionsOverlayModal
overlayClassName={"permissionsOverlay"}
onRequestClose={closeModal}
hideBorder
isFirefox={isFirefox}
isChrome={isChrome}
isSafari={isSafari}
animations={animations}
>
<Styled.Content>
{intl.formatMessage(intlMessages.title)}
<small>
{intl.formatMessage(intlMessages.hint)}
</small>
</Styled.Content>
</Styled.PermissionsOverlayModal>
)
};
PermissionsOverlay.propTypes = propTypes;
export default injectIntl(PermissionsOverlay);

View File

@ -1,108 +0,0 @@
import styled, { css, keyframes } from 'styled-components';
import ModalSimple from '/imports/ui/components/common/modal/simple/component';
import { colorBlack } from '/imports/ui/stylesheets/styled-components/palette';
import { jumboPaddingX } from '/imports/ui/stylesheets/styled-components/general';
const bounce = keyframes`
0%,
20%,
50%,
80%,
100% {
-ms-transform: translateY(0);
transform: translateY(0);
}
40% {
-ms-transform: translateY(10px);
transform: translateY(10px);
}
60% {
-ms-transform: translateY(5px);
transform: translateY(5px);
}
`;
const PermissionsOverlayModal = styled(ModalSimple)`
${({ isFirefox }) => isFirefox && `
top: 8em;
left: 22em;
right: auto;
[dir="rtl"] & {
right: none;
left: none;
top: 15rem;
}
`}
${({ isChrome }) => isChrome && `
top: 5.5em;
left: 18em;
right: auto;
[dir="rtl"] & {
right: none;
left: none;
top: 15rem;
}
`}
${({ isSafari }) => isSafari && `
top: 150px;
left:0;
right:0;
margin-left: auto;
margin-right: auto;
`}
position: absolute;
background: none;
box-shadow: none;
color: #fff;
font-size: 16px;
font-weight: 400;
padding: 0 0 0 ${jumboPaddingX};
line-height: 18px;
width: 340px;
[dir="rtl"] & {
padding: 0 ${jumboPaddingX} 0 0;
}
small {
display: block;
font-size: 12px;
line-height: 14px;
margin-top: 3px;
opacity: .6;
}
&:after {
top: -65px;
left: -20px;
right: auto;
font-size: 20px;
display: block;
font-family: 'bbb-icons';
content: "\\E906";
position: relative;
[dir="rtl"] & {
left: auto;
right: -20px;
}
${({ animations }) => animations && css`
animation: ${bounce} 2s infinite;
`}
}
`;
const Content = styled.div`
color: ${colorBlack};
`;
export default {
PermissionsOverlayModal,
Content,
};

View File

@ -3,13 +3,21 @@ import AudioManager from '/imports/ui/services/audio-manager';
import logger from '/imports/startup/client/logger';
import Storage from '../../services/storage/session';
import { useReactiveVar } from '@apollo/client';
import {
getAudioConstraints,
doGUM,
} from '/imports/api/audio/client/bridge/service';
import {
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
const MUTED_KEY = 'muted';
const recoverMicState = (toggleVoice) => {
const muted = Storage.getItem(MUTED_KEY);
if ((muted === undefined) || (muted === null)) {
if ((muted === undefined) || (muted === null) || AudioManager.inputDeviceId === 'listen-only') {
return;
}
@ -65,13 +73,73 @@ const useIsUsingAudio = () => {
return Boolean(isConnected || isConnecting || isHangingUp || isEchoTest);
};
/**
 * Checks whether the user has granted microphone permission.
 *
 * Tri-state result:
 *   - true:  permission granted (or assumed granted — see 'granted'/default case)
 *   - false: permission explicitly denied
 *   - null:  unknown / could not be determined
 *
 * @param {Object} options
 * @param {string} [options.permissionStatus] - A pre-fetched Permissions API
 *   state ('granted'|'denied'|'prompt'). When absent, the Permissions API is
 *   queried here.
 * @param {boolean} [options.gumOnPrompt=false] - When the state is 'prompt',
 *   actively trigger getUserMedia to force the browser's permission prompt
 *   and resolve the real answer. When false, 'prompt' resolves to null.
 * @returns {Promise<boolean|null>}
 */
const hasMicrophonePermission = async ({
  permissionStatus,
  gumOnPrompt = false,
}) => {
  try {
    let status = permissionStatus;

    // If the browser doesn't support the Permissions API, we can't check
    // microphone permissions - return null (unknown)
    if (navigator?.permissions?.query == null) return null;

    if (!status) {
      // No cached state provided; query the Permissions API directly.
      ({ state: status } = await navigator.permissions.query({ name: 'microphone' }));
    }

    switch (status) {
      case 'denied':
        return false;
      case 'prompt':
        // Prompt without any subsequent action is considered unknown
        if (!gumOnPrompt) {
          return null;
        }

        // Trigger gUM so the browser shows its permission prompt; the tracks
        // are stopped immediately — we only care about the permission outcome.
        return doGUM({ audio: getAudioConstraints() }).then((stream) => {
          stream.getTracks().forEach((track) => {
            track.stop();
            stream.removeTrack(track);
          });

          return true;
        }).catch((error) => {
          if (error.name === 'NotAllowedError') {
            return false;
          }

          // Give it the benefit of the doubt. It might be a device mismatch
          // or something else that's not a permissions issue, so let's try
          // to proceed. Rollbacks that happen downstream might fix the issue,
          // otherwise we'll land on the Help screen anyways
          return null;
        });
      case 'granted':
      default:
        // Unrecognized states are treated optimistically as granted.
        return true;
    }
  } catch (error) {
    logger.error({
      logCode: 'audio_check_microphone_permission_error',
      extraInfo: {
        errorName: error.name,
        errorMessage: error.message,
      },
    }, `Error checking microphone permission: ${error.message}`);

    // Null = could not determine permission status
    return null;
  }
};
export default {
init,
exitAudio: () => AudioManager.exitAudio(),
forceExitAudio: () => AudioManager.forceExitAudio(),
transferCall: () => AudioManager.transferCall(),
joinListenOnly: () => AudioManager.joinListenOnly(),
joinMicrophone: () => AudioManager.joinMicrophone(),
joinMicrophone: (options) => AudioManager.joinMicrophone(options),
joinEchoTest: () => AudioManager.joinEchoTest(),
changeInputDevice: (inputDeviceId) => AudioManager.changeInputDevice(inputDeviceId),
changeInputStream: (newInputStream) => { AudioManager.inputStream = newInputStream; },
@ -80,6 +148,8 @@ export default {
outputDeviceId,
isLive,
) => AudioManager.changeOutputDevice(outputDeviceId, isLive),
toggleMuteMicrophone,
toggleMuteMicrophoneSystem,
isConnectedToBreakout: () => {
const transferStatus = AudioManager.getBreakoutAudioTransferStatus();
if (transferStatus.status
@ -95,13 +165,14 @@ export default {
isUsingAudio: () => AudioManager.isUsingAudio(),
isConnecting: () => AudioManager.isConnecting,
isListenOnly: () => AudioManager.isListenOnly,
inputDeviceId: () => AudioManager.inputDeviceId,
outputDeviceId: () => AudioManager.outputDeviceId,
isEchoTest: () => AudioManager.isEchoTest,
isMuted: () => AudioManager.isMuted,
autoplayBlocked: () => AudioManager.autoplayBlocked,
handleAllowAutoplay: () => AudioManager.handleAllowAutoplay(),
playAlertSound: (url) => AudioManager.playAlertSound(url),
updateAudioConstraints:
(constraints) => AudioManager.updateAudioConstraints(constraints),
updateAudioConstraints: (constraints) => AudioManager.updateAudioConstraints(constraints),
recoverMicState,
isReconnecting: () => AudioManager.isReconnecting,
setBreakoutAudioTransferStatus: (status) => AudioManager
@ -109,6 +180,10 @@ export default {
getBreakoutAudioTransferStatus: () => AudioManager
.getBreakoutAudioTransferStatus(),
getStats: () => AudioManager.getStats(),
getAudioConstraints,
doGUM,
supportsTransparentListenOnly: () => AudioManager.supportsTransparentListenOnly(),
hasMicrophonePermission,
notify: (message, error, icon) => { AudioManager.notify(message, error, icon); },
useIsUsingAudio,
};

View File

@ -70,6 +70,9 @@ class AudioManager {
muteHandle: makeVar(null),
autoplayBlocked: makeVar(false),
isReconnecting: makeVar(false),
bypassGUM: makeVar(false),
permissionStatus: makeVar(null),
transparentListenOnlySupported: makeVar(false),
});
this.failedMediaElements = [];
@ -79,7 +82,7 @@ class AudioManager {
this._inputStream = makeVar(null);
this._inputDeviceId = {
value: makeVar(DEFAULT_INPUT_DEVICE_ID),
value: makeVar(null),
};
this._outputDeviceId = {
value: makeVar(null),
@ -90,6 +93,37 @@ class AudioManager {
window.addEventListener('StopAudioTracks', () => this.forceExitAudio());
}
_trackPermissionStatus() {
const handleTrackingError = (error) => {
logger.warn({
logCode: 'audiomanager_permission_tracking_failed',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
},
}, `Failed to track microphone permission status: ${error.message}`);
};
if (navigator?.permissions?.query) {
navigator.permissions.query({ name: 'microphone' })
.then((status) => {
// eslint-disable-next-line no-param-reassign
status.onchange = () => {
logger.debug({
logCode: 'audiomanager_permission_status_changed',
extraInfo: {
newStatus: status.state,
},
}, `Microphone permission status changed: ${status.state}`);
this.permissionStatus = status.state;
};
this.permissionStatus = status.state;
}).catch(handleTrackingError);
} else {
handleTrackingError(new Error('navigator.permissions.query is not available'));
}
}
_applyCachedOutputDeviceId() {
const cachedId = getStoredAudioOutputDeviceId();
@ -145,17 +179,26 @@ class AudioManager {
return this._outputDeviceId.value();
}
// Whether audio join can skip getUserMedia entirely: only when both bridges
// support transparent listen only AND the user picked the 'listen-only'
// pseudo input device (i.e. no active microphone source).
shouldBypassGUM() {
  return this.supportsTransparentListenOnly() && this.inputDeviceId === 'listen-only';
}
// True only when BOTH the listen-only and full audio bridges advertise
// transparent listen only support. Note: optional chaining means this may
// yield undefined when a bridge is not loaded — callers treat falsy as
// unsupported.
supportsTransparentListenOnly() {
  return this.listenOnlyBridge?.supportsTransparentListenOnly()
    && this.fullAudioBridge?.supportsTransparentListenOnly();
}
async init(userData, audioEventHandler) {
this.inputDeviceId = getStoredAudioInputDeviceId() || DEFAULT_INPUT_DEVICE_ID;
this.outputDeviceId = getCurrentAudioSinkId();
this._applyCachedOutputDeviceId();
this._trackPermissionStatus();
this.loadBridges(userData);
this.userData = userData;
this.initialized = true;
this.audioEventHandler = audioEventHandler;
await this.loadBridges(userData);
this.transparentListenOnlySupported = this.supportsTransparentListenOnly();
}
/**
@ -280,6 +323,7 @@ class AudioManager {
isListenOnly: false,
extension: null,
inputStream: this.inputStream,
bypassGUM: this.shouldBypassGUM(),
};
return this.joinAudio(callOptions, this.callStateCallback.bind(this));
});
@ -309,6 +353,7 @@ class AudioManager {
extension: ECHO_TEST_NUMBER,
inputStream: this.inputStream,
validIceCandidates,
bypassGUM: this.shouldBypassGUM(),
};
logger.info(
{
@ -369,7 +414,6 @@ class AudioManager {
}
this.isConnecting = false;
this.isWaitingPermissions = false;
throw errorPayload;
});
@ -415,17 +459,7 @@ class AudioManager {
}
forceExitAudio() {
this.notifyAudioExit();
this.isConnected = false;
this.isConnecting = false;
this.isHangingUp = false;
if (this.inputStream) {
this.inputStream.getTracks().forEach((track) => track.stop());
this.inputStream = null;
}
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
this.onAudioExit();
return this.bridge && this.bridge.exitAudio();
}
@ -520,7 +554,7 @@ class AudioManager {
if (this.inputStream) {
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(
this.inputStream,
'audio'
'audio',
);
if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
this.changeInputDevice(extractedDeviceId);
@ -639,22 +673,17 @@ class AudioManager {
}
changeInputDevice(deviceId) {
if (typeof deviceId !== 'string') throw new TypeError('Invalid inputDeviceId');
if (deviceId === this.inputDeviceId) return this.inputDeviceId;
const currentDeviceId = this.inputDeviceId ?? 'none';
this.inputDeviceId = deviceId;
logger.debug(
{
logCode: 'audiomanager_input_device_change',
extraInfo: {
deviceId: currentDeviceId,
newDeviceId: deviceId,
},
logger.debug({
logCode: 'audiomanager_input_device_change',
extraInfo: {
deviceId: currentDeviceId,
newDeviceId: deviceId || 'none',
},
`Microphone input device changed: from ${currentDeviceId} to ${deviceId}`
);
}, `Microphone input device changed: from ${currentDeviceId} to ${deviceId || 'none'}`);
return this.inputDeviceId;
}

View File

@ -52,13 +52,13 @@ class AudioBroker extends BaseBroker {
const localStream = this.getLocalStream();
const oldTracks = localStream ? localStream.getAudioTracks() : [];
peerConnection.getSenders().forEach((sender, index) => {
if (sender.track && sender.track.kind === 'audio') {
const newTrack = newTracks[index];
peerConnection.getSenders().forEach((sender) => {
if (sender.track == null || sender?.track?.kind === 'audio') {
const newTrack = newTracks.shift();
if (newTrack == null) return;
// Cleanup old tracks in the local MediaStream
const oldTrack = oldTracks[index];
const oldTrack = oldTracks.shift();
sender.replaceTrack(newTrack);
if (oldTrack) {
oldTrack.stop();
@ -68,6 +68,13 @@ class AudioBroker extends BaseBroker {
}
});
if (oldTracks.length > 0) {
oldTracks.forEach((track) => {
track.stop();
localStream.removeTrack(track);
});
}
return Promise.resolve();
}
@ -90,8 +97,10 @@ class AudioBroker extends BaseBroker {
gatheringTimeout: this.gatheringTimeout,
};
const peerRole = this.role === 'sendrecv' ? this.role : 'recvonly';
const peerRole = BaseBroker.getPeerRole(this.role);
this.webRtcPeer = new WebRtcPeer(peerRole, options);
window.peers = window.peers || [];
window.peers.push(this.webRtcPeer);
this.webRtcPeer.iceQueue = [];
this.webRtcPeer.start();
this.webRtcPeer.peerConnection.onconnectionstatechange = this.handleConnectionStateChange.bind(this);
@ -101,7 +110,9 @@ class AudioBroker extends BaseBroker {
this.webRtcPeer.generateOffer()
.then(this.sendStartReq.bind(this))
.catch(this._handleOfferGenerationFailure.bind(this));
} else if (peerRole === 'recvonly') {
} else if (peerRole === 'recvonly'
|| peerRole === 'recv'
|| peerRole === 'passive-sendrecv') {
// We are the answerer and we are only listening, so we don't need
// to acquire local media
this.sendStartReq();

View File

@ -8,6 +8,20 @@ const WS_HEARTBEAT_OPTS = {
};
class BaseBroker {
static getPeerRole(role) {
switch (role) {
case 'send':
case 'sendrecv':
case 'sendonly':
case 'recvonly':
case 'recv':
case 'passive-sendrecv':
return role;
default:
throw new Error(`Invalid role: ${role}`);
}
}
static assembleError(code, reason) {
const message = reason || SFU_BROKER_ERRORS[code];
const error = new Error(message);

View File

@ -37,6 +37,28 @@ export default class WebRtcPeer extends EventEmitter2 {
this._gatheringTimeout = this.options.gatheringTimeout;
this._assignOverrides();
this.logger.debug('BBB::WebRtcPeer::constructor - created', {
mode: this.mode,
options: this.options,
});
}
_getTransceiverDirection() {
switch (this.mode) {
case 'sendonly':
case 'recvonly':
case 'sendrecv':
return this.mode;
case 'recv':
return 'recvonly';
case 'send':
return 'sendonly';
case 'passive-sendrecv':
return 'sendrecv';
default:
return 'inactive';
}
}
_assignOverrides() {
@ -202,7 +224,7 @@ export default class WebRtcPeer extends EventEmitter2 {
}
return stream;
}
};
if (typeof this._mediaStreamFactory === 'function') {
return this._mediaStreamFactory(this.mediaConstraints).then(handleGUMResolution);
@ -326,6 +348,25 @@ export default class WebRtcPeer extends EventEmitter2 {
}
}
_processMediaStreams() {
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this._getTransceiverDirection();
});
}
async generateOffer() {
switch (this.mode) {
case 'recvonly': {
@ -338,13 +379,13 @@ export default class WebRtcPeer extends EventEmitter2 {
if (useAudio) {
this.peerConnection.addTransceiver('audio', {
direction: 'recvonly',
direction: this._getTransceiverDirection(),
});
}
if (useVideo) {
this.peerConnection.addTransceiver('video', {
direction: 'recvonly',
direction: this._getTransceiverDirection(),
});
}
break;
@ -353,26 +394,14 @@ export default class WebRtcPeer extends EventEmitter2 {
case 'sendonly':
case 'sendrecv': {
await this.mediaStreamFactory();
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this.mode;
});
this._processMediaStreams();
break;
}
case 'passive-sendrecv':
this._processMediaStreams();
break;
default:
break;
}
@ -387,6 +416,10 @@ export default class WebRtcPeer extends EventEmitter2 {
const localDescription = this.getLocalSessionDescriptor();
this.logger.debug('BBB::WebRtcPeer::generateOffer - local description set', localDescription);
return localDescription.sdp;
})
.catch((error) => {
this.logger.error('BBB::WebRtcPeer::generateOffer - failed', error);
throw error;
});
}
@ -409,23 +442,9 @@ export default class WebRtcPeer extends EventEmitter2 {
.then(async () => {
if (this.mode === 'sendonly' || this.mode === 'sendrecv') {
await this.mediaStreamFactory();
if (this.videoStream) {
this.videoStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.videoStream);
});
}
if (this.audioStream) {
this.audioStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.audioStream);
});
}
this.peerConnection.getTransceivers().forEach((transceiver) => {
// eslint-disable-next-line no-param-reassign
transceiver.direction = this.mode;
});
this._processMediaStreams();
} else if (this.mode === 'passive-sendrecv') {
this._processMediaStreams();
}
})
.then(() => this.peerConnection.createAnswer())
@ -437,6 +456,10 @@ export default class WebRtcPeer extends EventEmitter2 {
const localDescription = this.getLocalSessionDescriptor();
this.logger.debug('BBB::WebRtcPeer::processOffer - local description set', localDescription.sdp);
return localDescription.sdp;
})
.catch((error) => {
this.logger.error('BBB::WebRtcPeer::processOffer - failed', error);
throw error;
});
}

View File

@ -49,17 +49,6 @@ const GlobalStyle = createGlobalStyle`
}
}
.permissionsOverlay {
position: fixed;
z-index: 1002;
top: 0;
bottom: 0;
left: 0;
right: 0;
background-color: rgba(0, 0, 0, .85);
animation: fade-in .5s ease-in;
}
.modalOverlay {
z-index: 1000;
display: flex;

View File

@ -45,22 +45,25 @@ const getDeviceIdFromTrack = (track) => {
const { deviceId } = track.getSettings();
return deviceId;
}
return '';
return null;
};
const extractDeviceIdFromStream = (stream, kind) => {
if (!stream) return null;
// An empty string is the browser's default...
let tracks = [];
switch (kind) {
case 'audio':
tracks = getAudioTracks(stream);
if (tracks.length === 0) return 'listen-only';
return getDeviceIdFromTrack(tracks[0]);
case 'video':
tracks = getVideoTracks(stream);
return getDeviceIdFromTrack(tracks[0]);
default: {
return '';
return null;
}
}
};

View File

@ -606,8 +606,8 @@
"app.submenu.notification.userJoinLabel": "User Join",
"app.submenu.notification.userLeaveLabel": "User Leave",
"app.submenu.notification.guestWaitingLabel": "Guest Waiting Approval",
"app.submenu.audio.micSourceLabel": "Microphone source",
"app.submenu.audio.speakerSourceLabel": "Speaker source",
"app.submenu.audio.micSourceLabel": "Microphone",
"app.submenu.audio.speakerSourceLabel": "Speaker",
"app.submenu.audio.streamVolumeLabel": "Your audio stream volume",
"app.submenu.video.title": "Video",
"app.submenu.video.videoSourceLabel": "View source",
@ -723,10 +723,10 @@
"app.audioModal.yes.arialabel": "Echo is audible",
"app.audioModal.no.arialabel": "Echo is inaudible",
"app.audioModal.echoTestTitle": "This is a private echo test. Speak a few words. Did you hear audio?",
"app.audioModal.settingsTitle": "Change your audio settings",
"app.audioModal.helpTitle": "There was an issue with your audio devices",
"app.audioModal.helpSubtitleMic": "We couldn't enable your microphone",
"app.audioModal.helpSubtitleGeneric": "We're having trouble establishing an audio connection",
"app.audioModal.helpSubtitlePermission": "We need access to your microphone",
"app.audioModal.helpPermissionStep1": "When joining a call, accept all requests if prompted to use your microphone.",
"app.audioModal.helpPermissionStep2": "Check browser and device settings to ensure microphone access is allowed.",
"app.audioModal.helpPermissionStep3": "Refresh the page and try again.",
@ -764,18 +764,22 @@
"app.audio.loading": "Loading",
"app.audio.microphones": "Microphones",
"app.audio.speakers": "Speakers",
"app.audio.noDeviceFound": "No device found",
"app.audio.audioSettings.titleLabel": "Choose your audio settings",
"app.audio.audioSettings.descriptionLabel": "Please note, a dialog will appear in your browser, requiring you to accept sharing your microphone.",
"app.audio.audioSettings.microphoneSourceLabel": "Microphone source",
"app.audio.audioSettings.speakerSourceLabel": "Speaker source",
"app.audio.noDeviceFound": "No device found (listen only)",
"app.audio.audioSettings.titleLabel": "Adjust your audio settings",
"app.audio.audioSettings.findingDevicesTitle": "Looking for your audio devices, please accept any requests to use them",
"app.audio.audioSettings.noMicListenOnly": "No microphone (listen only)",
"app.audio.audioSettings.microphoneSourceLabel": "Microphone",
"app.audio.audioSettings.speakerSourceLabel": "Speaker",
"app.audio.audioSettings.testSpeakerLabel": "Test your speaker",
"app.audio.audioSettings.microphoneStreamLabel": "Your audio stream volume",
"app.audio.audioSettings.retryLabel": "Retry",
"app.audio.audioSettings.retryMicLabel": "Retry",
"app.audio.audioSettings.fallbackInputLabel": "Audio input {0}",
"app.audio.audioSettings.fallbackOutputLabel": "Audio output {0}",
"app.audio.audioSettings.fallbackNoPermission": "(no device permission)",
"app.audio.audioSettings.defaultOutputDeviceLabel": "Default",
"app.audio.audioSettings.findingDevicesLabel": "Finding devices...",
"app.audio.audioSettings.findingDevicesLabel": "Finding audio devices...",
"app.audio.audioSettings.confirmLabel": "Confirm",
"app.audio.audioSettings.cancelLabel": "Cancel",
"app.audio.listenOnly.backLabel": "Back",
"app.audio.listenOnly.closeLabel": "Close",
"app.audio.permissionsOverlay.title": "Allow access to your microphone",