feat(audio): rework audio join without listen only
This is a rework of the audio join procedure that drops the explicit listen only separation. It is meant to be used in conjunction with the transparent listen only feature so that the distinction between modes is seamless, with minimal server-side impact.

An abridged list of changes:
- Let the user pick no input device when joining microphone, while still allowing them to set an input device on the fly later on
- Give the user the option to join audio with no input device whenever we fail to obtain input devices, with the option to try re-enabling them on the fly later on
- Add the option to open the audio settings modal (echo test et al.) via the in-call device selection chevron
- Rework the SFU audio bridge and its services to support adding/removing tracks on the fly without renegotiation
- Rework the SFU audio bridge and its services to support a new peer role called "passive-sendrecv", used by duplex peers that have no active input source on start but might acquire one later on
- Remove the stale PermissionsOverlay component from the audio modal
- Rework how permission errors are detected, using the Permissions API
- Rework the local echo test so that it uses a separate media tag rather than the remote one
- Add new, separate dialplans that mute/hold FreeSWITCH channels on creation based on UA strings, orchestrated server-side via webrtc-sfu and akka-apps. The basic difference is that channels now join in their desired state rather than waiting for client-side observers to sync the state up. This also mitigates transparent listen only performance edge cases when multiple audio channels join at the same time.

The old, decoupled listen only mode is still present in the code while we validate this new approach.

To test this, transparentListenOnly must be enabled and listen only mode must be disabled on audio join so that the user skips straight to microphone join.
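For reference, a minimal sketch (not part of the commit) of the peer-role selection described above. The role constants mirror the SFU audio bridge changes in this diff; the standalone function and its boolean inputs are illustrative rather than the actual class method:

    // Sketch only: summarizes the role selection added to the SFU audio bridge.
    // The flag names mirror this commit's bridge fields; the standalone function
    // form is illustrative, not the real implementation.
    const SENDRECV_ROLE = 'sendrecv';
    const RECV_ROLE = 'recv';
    const PASSIVE_SENDRECV_ROLE = 'passive-sendrecv';

    interface RoleInputs {
      isListenOnly: boolean; // user joined without picking a microphone
      transparentListenOnly: boolean; // transparentListenOnly feature enabled
      bypassGUM: boolean; // join proceeded without a getUserMedia capture
      hasInputStream: boolean; // an input track is already available
    }

    function getBrokerRole({
      isListenOnly, transparentListenOnly, bypassGUM, hasInputStream,
    }: RoleInputs): string {
      if (isListenOnly) {
        // With transparent listen only, listeners join as passive-sendrecv so a
        // track can be attached later without renegotiation.
        return transparentListenOnly ? PASSIVE_SENDRECV_ROLE : RECV_ROLE;
      }
      // Microphone join with no active input source on start.
      if (bypassGUM && !hasInputStream) return PASSIVE_SENDRECV_ROLE;
      return SENDRECV_ROLE;
    }

Either way, a peer that starts as passive-sendrecv can have an input track added on the fly later, which is what keeps the listen-only/microphone distinction invisible on the server side.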
This commit is contained in:
parent cf293fe7fd
commit 325887e325
@ -2,6 +2,7 @@ package org.bigbluebutton.core.apps.voice
|
|||||||
|
|
||||||
import org.bigbluebutton.common2.msgs._
|
import org.bigbluebutton.common2.msgs._
|
||||||
import org.bigbluebutton.core.running.{ LiveMeeting, MeetingActor, OutMsgRouter }
|
import org.bigbluebutton.core.running.{ LiveMeeting, MeetingActor, OutMsgRouter }
|
||||||
|
import org.bigbluebutton.core2.MeetingStatus2x
|
||||||
|
|
||||||
trait GetMicrophonePermissionReqMsgHdlr {
|
trait GetMicrophonePermissionReqMsgHdlr {
|
||||||
this: MeetingActor =>
|
this: MeetingActor =>
|
||||||
@ -16,7 +17,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
|
|||||||
voiceConf: String,
|
voiceConf: String,
|
||||||
userId: String,
|
userId: String,
|
||||||
sfuSessionId: String,
|
sfuSessionId: String,
|
||||||
allowed: Boolean
|
allowed: Boolean,
|
||||||
|
muteOnStart: Boolean
|
||||||
): Unit = {
|
): Unit = {
|
||||||
val routing = Routing.addMsgToClientRouting(MessageTypes.DIRECT, meetingId, userId)
|
val routing = Routing.addMsgToClientRouting(MessageTypes.DIRECT, meetingId, userId)
|
||||||
val envelope = BbbCoreEnvelope(GetMicrophonePermissionRespMsg.NAME, routing)
|
val envelope = BbbCoreEnvelope(GetMicrophonePermissionRespMsg.NAME, routing)
|
||||||
@ -26,7 +28,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
|
|||||||
voiceConf,
|
voiceConf,
|
||||||
userId,
|
userId,
|
||||||
sfuSessionId,
|
sfuSessionId,
|
||||||
allowed
|
allowed,
|
||||||
|
muteOnStart
|
||||||
)
|
)
|
||||||
val event = GetMicrophonePermissionRespMsg(header, body)
|
val event = GetMicrophonePermissionRespMsg(header, body)
|
||||||
val eventMsg = BbbCommonEnvCoreMsg(envelope, event)
|
val eventMsg = BbbCommonEnvCoreMsg(envelope, event)
|
||||||
@ -47,7 +50,8 @@ trait GetMicrophonePermissionReqMsgHdlr {
|
|||||||
liveMeeting.props.voiceProp.voiceConf,
|
liveMeeting.props.voiceProp.voiceConf,
|
||||||
msg.body.userId,
|
msg.body.userId,
|
||||||
msg.body.sfuSessionId,
|
msg.body.sfuSessionId,
|
||||||
allowed
|
allowed,
|
||||||
|
MeetingStatus2x.isMeetingMuted(liveMeeting.status)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -613,7 +613,8 @@ case class GetMicrophonePermissionRespMsgBody(
|
|||||||
voiceConf: String,
|
voiceConf: String,
|
||||||
userId: String,
|
userId: String,
|
||||||
sfuSessionId: String,
|
sfuSessionId: String,
|
||||||
allowed: Boolean
|
allowed: Boolean,
|
||||||
|
muteOnStart: Boolean
|
||||||
)
|
)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -8,8 +8,16 @@
|
|||||||
<action application="set" data="rtp_jitter_buffer_during_bridge=true" />
|
<action application="set" data="rtp_jitter_buffer_during_bridge=true" />
|
||||||
<action application="set" data="suppress_cng=true" />
|
<action application="set" data="suppress_cng=true" />
|
||||||
<action application="answer" />
|
<action application="answer" />
|
||||||
|
<!-- Special condition for BBB's "transparent listen only" mechanism - HOLD on creation -->
|
||||||
|
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-mhos$" break="never">
|
||||||
|
<action application="log" data="INFO Channel is going to be HELD and MUTED on creation ${uuid}" />
|
||||||
|
<action application="set" data="api_result=${uuid_hold(${uuid})}" />
|
||||||
|
</condition>
|
||||||
|
<!-- Duplicate condition to guarantee line-order (not nested-order) execution of this extension -->
|
||||||
|
<condition field="destination_number" expression="^(\d{5,11})$" require-nested="false">
|
||||||
<action application="conference" data="$1@cdquality" />
|
<action application="conference" data="$1@cdquality" />
|
||||||
</condition>
|
</condition>
|
||||||
|
</condition>
|
||||||
</extension>
|
</extension>
|
||||||
<extension name="bbb_conferences">
|
<extension name="bbb_conferences">
|
||||||
<condition field="${bbb_authorized}" expression="true" break="on-false" />
|
<condition field="${bbb_authorized}" expression="true" break="on-false" />
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
<include>
|
<include>
|
||||||
<extension name="bbb_webrtc_call" continue="true">
|
<extension name="bbb_webrtc_call" continue="true">
|
||||||
<condition field="${sip_user_agent}" expression="bbb-webrtc-sfu" break="on-false">
|
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu$" break="on-false">
|
||||||
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
|
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
|
||||||
<action application="set" data="bbb_authorized=true"/>
|
<action application="set" data="bbb_authorized=true"/>
|
||||||
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>
|
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>
|
||||||
|
@ -0,0 +1,12 @@
|
|||||||
|
<include>
|
||||||
|
<extension name="bbb_webrtc_sfu_call" continue="true">
|
||||||
|
<condition field="${sip_user_agent}" expression="^bbb-webrtc-sfu-muos|bbb-webrtc-sfu-mhos$" break="on-false">
|
||||||
|
<action application="set" data="presence_data=from_bbb-webrtc-sfu"/>
|
||||||
|
<action application="set" data="bbb_authorized=true"/>
|
||||||
|
<action application="set" data="rtp_manual_rtp_bugs=ACCEPT_ANY_PACKETS"/>
|
||||||
|
<action application="set" data="jb_use_timestamps=true"/>
|
||||||
|
<action application="set" data="conference_member_flags=mute"/>
|
||||||
|
<action application="transfer" data="${destination_number} XML default"/>
|
||||||
|
</condition>
|
||||||
|
</extension>
|
||||||
|
</include>
|
@ -171,7 +171,7 @@ with BigBlueButton; if not, see <http://www.gnu.org/licenses/>.
|
|||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
<span id="destination"></span>
|
<span id="destination"></span>
|
||||||
<audio id="remote-media" autoplay>
|
<audio id="remote-media" autoplay></audio>
|
||||||
</audio>
|
<audio id="local-media" autoplay></audio>
|
||||||
<div id="modals-container"></div>
|
<div id="modals-container"></div>
|
||||||
</body>
|
</body>
|
||||||
|
@ -61,7 +61,11 @@ export default class BaseAudioBridge {
|
|||||||
|
|
||||||
get inputDeviceId () {
|
get inputDeviceId () {
|
||||||
return this._inputDeviceId;
|
return this._inputDeviceId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* eslint-disable class-methods-use-this */
|
||||||
|
supportsTransparentListenOnly() {
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -78,6 +82,20 @@ export default class BaseAudioBridge {
|
|||||||
let backupStream;
|
let backupStream;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// Remove all input audio tracks from the stream
|
||||||
|
// This will effectively mute the microphone
|
||||||
|
// and keep the audio output working
|
||||||
|
if (deviceId === 'listen-only') {
|
||||||
|
const stream = this.inputStream;
|
||||||
|
if (stream) {
|
||||||
|
stream.getAudioTracks().forEach((track) => {
|
||||||
|
track.stop();
|
||||||
|
stream.removeTrack(track);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
|
||||||
const constraints = {
|
const constraints = {
|
||||||
audio: getAudioConstraints({ deviceId }),
|
audio: getAudioConstraints({ deviceId }),
|
||||||
};
|
};
|
||||||
|
@ -36,10 +36,25 @@ const getCurrentAudioSinkId = () => {
|
|||||||
return audioElement?.sinkId || DEFAULT_OUTPUT_DEVICE_ID;
|
return audioElement?.sinkId || DEFAULT_OUTPUT_DEVICE_ID;
|
||||||
};
|
};
|
||||||
|
|
||||||
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance().getItem(INPUT_DEVICE_ID_KEY);
|
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance()
|
||||||
const getStoredAudioOutputDeviceId = () => getStorageSingletonInstance().getItem(OUTPUT_DEVICE_ID_KEY);
|
.getItem(OUTPUT_DEVICE_ID_KEY);
|
||||||
const storeAudioInputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
|
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance()
|
||||||
const storeAudioOutputDeviceId = (deviceId) => getStorageSingletonInstance().setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
|
.setItem(OUTPUT_DEVICE_ID_KEY, deviceId);
|
||||||
|
const getStoredAudioInputDeviceId = () => getStorageSingletonInstance()
|
||||||
|
.getItem(INPUT_DEVICE_ID_KEY);
|
||||||
|
const storeAudioInputDeviceId = (deviceId) => {
|
||||||
|
if (deviceId === 'listen-only') {
|
||||||
|
// Do not store listen-only "devices" and remove any stored device
|
||||||
|
// So it starts from scratch next time.
|
||||||
|
getStorageSingletonInstance().removeItem(INPUT_DEVICE_ID_KEY);
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
getStorageSingletonInstance().setItem(INPUT_DEVICE_ID_KEY, deviceId);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Filter constraints set in audioDeviceConstraints, based on
|
* Filter constraints set in audioDeviceConstraints, based on
|
||||||
|
@ -20,6 +20,7 @@ import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';
|
|||||||
|
|
||||||
const SENDRECV_ROLE = 'sendrecv';
|
const SENDRECV_ROLE = 'sendrecv';
|
||||||
const RECV_ROLE = 'recv';
|
const RECV_ROLE = 'recv';
|
||||||
|
const PASSIVE_SENDRECV_ROLE = 'passive-sendrecv';
|
||||||
const BRIDGE_NAME = 'fullaudio';
|
const BRIDGE_NAME = 'fullaudio';
|
||||||
const IS_CHROME = browserInfo.isChrome;
|
const IS_CHROME = browserInfo.isChrome;
|
||||||
|
|
||||||
@ -81,7 +82,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
const MEDIA = SETTINGS.public.media;
|
const MEDIA = SETTINGS.public.media;
|
||||||
const LISTEN_ONLY_OFFERING = MEDIA.listenOnlyOffering;
|
const LISTEN_ONLY_OFFERING = MEDIA.listenOnlyOffering;
|
||||||
const FULLAUDIO_OFFERING = MEDIA.fullAudioOffering;
|
const FULLAUDIO_OFFERING = MEDIA.fullAudioOffering;
|
||||||
return isListenOnly
|
return isListenOnly && !isTransparentListenOnlyEnabled()
|
||||||
? LISTEN_ONLY_OFFERING
|
? LISTEN_ONLY_OFFERING
|
||||||
: (!isTransparentListenOnlyEnabled() && FULLAUDIO_OFFERING);
|
: (!isTransparentListenOnlyEnabled() && FULLAUDIO_OFFERING);
|
||||||
}
|
}
|
||||||
@ -95,12 +96,17 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
this.reconnecting = false;
|
this.reconnecting = false;
|
||||||
this.iceServers = [];
|
this.iceServers = [];
|
||||||
this.bridgeName = BRIDGE_NAME;
|
this.bridgeName = BRIDGE_NAME;
|
||||||
|
this.isListenOnly = false;
|
||||||
|
this.bypassGUM = false;
|
||||||
|
this.supportsTransparentListenOnly = isTransparentListenOnlyEnabled;
|
||||||
|
|
||||||
this.handleTermination = this.handleTermination.bind(this);
|
this.handleTermination = this.handleTermination.bind(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
get inputStream() {
|
get inputStream() {
|
||||||
if (this.broker) {
|
// Only return the stream if the broker is active and the role isn't recvonly
|
||||||
|
// Input stream == actual input-capturing stream, not the one that's being played
|
||||||
|
if (this.broker && this.role !== RECV_ROLE) {
|
||||||
return this.broker.getLocalStream();
|
return this.broker.getLocalStream();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -111,6 +117,18 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
return this.broker?.role;
|
return this.broker?.role;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getBrokerRole({ hasInputStream }) {
|
||||||
|
if (this.isListenOnly) {
|
||||||
|
return isTransparentListenOnlyEnabled()
|
||||||
|
? PASSIVE_SENDRECV_ROLE
|
||||||
|
: RECV_ROLE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.bypassGUM && !hasInputStream) return PASSIVE_SENDRECV_ROLE;
|
||||||
|
|
||||||
|
return SENDRECV_ROLE;
|
||||||
|
}
|
||||||
|
|
||||||
setInputStream(stream) {
|
setInputStream(stream) {
|
||||||
if (this.broker == null) return null;
|
if (this.broker == null) return null;
|
||||||
|
|
||||||
@ -326,6 +344,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
extension,
|
extension,
|
||||||
inputStream,
|
inputStream,
|
||||||
forceRelay: _forceRelay = false,
|
forceRelay: _forceRelay = false,
|
||||||
|
bypassGUM = false,
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
const SETTINGS = window.meetingClientSettings;
|
const SETTINGS = window.meetingClientSettings;
|
||||||
@ -349,6 +368,10 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
try {
|
try {
|
||||||
this.inEchoTest = !!extension;
|
this.inEchoTest = !!extension;
|
||||||
this.isListenOnly = isListenOnly;
|
this.isListenOnly = isListenOnly;
|
||||||
|
this.bypassGUM = bypassGUM;
|
||||||
|
const role = this.getBrokerRole({
|
||||||
|
hasInputStream: !!inputStream,
|
||||||
|
});
|
||||||
|
|
||||||
const brokerOptions = {
|
const brokerOptions = {
|
||||||
clientSessionNumber: getAudioSessionNumber(),
|
clientSessionNumber: getAudioSessionNumber(),
|
||||||
@ -365,11 +388,12 @@ export default class SFUAudioBridge extends BaseAudioBridge {
|
|||||||
mediaStreamFactory: this.mediaStreamFactory,
|
mediaStreamFactory: this.mediaStreamFactory,
|
||||||
gatheringTimeout: GATHERING_TIMEOUT,
|
gatheringTimeout: GATHERING_TIMEOUT,
|
||||||
transparentListenOnly: isTransparentListenOnlyEnabled(),
|
transparentListenOnly: isTransparentListenOnlyEnabled(),
|
||||||
|
bypassGUM,
|
||||||
};
|
};
|
||||||
|
|
||||||
this.broker = new AudioBroker(
|
this.broker = new AudioBroker(
|
||||||
Auth.authenticateURL(SFU_URL),
|
Auth.authenticateURL(SFU_URL),
|
||||||
isListenOnly ? RECV_ROLE : SENDRECV_ROLE,
|
role,
|
||||||
brokerOptions,
|
brokerOptions,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -63,6 +63,8 @@ const AudioControls: React.FC<AudioControlsProps> = ({
|
|||||||
const echoTestIntervalRef = React.useRef<ReturnType<typeof setTimeout>>();
|
const echoTestIntervalRef = React.useRef<ReturnType<typeof setTimeout>>();
|
||||||
|
|
||||||
const [isAudioModalOpen, setIsAudioModalOpen] = React.useState(false);
|
const [isAudioModalOpen, setIsAudioModalOpen] = React.useState(false);
|
||||||
|
const [audioModalContent, setAudioModalContent] = React.useState<string | null>(null);
|
||||||
|
const [audioModalProps, setAudioModalProps] = React.useState<{ unmuteOnExit?: boolean } | null>(null);
|
||||||
|
|
||||||
const handleJoinAudio = useCallback((connected: boolean) => {
|
const handleJoinAudio = useCallback((connected: boolean) => {
|
||||||
if (connected) {
|
if (connected) {
|
||||||
@ -72,6 +74,12 @@ const AudioControls: React.FC<AudioControlsProps> = ({
|
|||||||
}
|
}
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
const openAudioSettings = (props: { unmuteOnExit?: boolean } = {}) => {
|
||||||
|
setAudioModalContent('settings');
|
||||||
|
setAudioModalProps(props);
|
||||||
|
setIsAudioModalOpen(true);
|
||||||
|
};
|
||||||
|
|
||||||
const joinButton = useMemo(() => {
|
const joinButton = useMemo(() => {
|
||||||
const joinAudioLabel = away ? intlMessages.joinAudioAndSetActive : intlMessages.joinAudio;
|
const joinAudioLabel = away ? intlMessages.joinAudioAndSetActive : intlMessages.joinAudio;
|
||||||
|
|
||||||
@ -107,12 +115,18 @@ const AudioControls: React.FC<AudioControlsProps> = ({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<Styled.Container>
|
<Styled.Container>
|
||||||
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer />}
|
{!inAudio ? joinButton : <InputStreamLiveSelectorContainer openAudioSettings={openAudioSettings} />}
|
||||||
{isAudioModalOpen && (
|
{isAudioModalOpen && (
|
||||||
<AudioModalContainer
|
<AudioModalContainer
|
||||||
priority="low"
|
priority="low"
|
||||||
setIsOpen={() => setIsAudioModalOpen(false)}
|
setIsOpen={() => {
|
||||||
|
setIsAudioModalOpen(false);
|
||||||
|
setAudioModalContent(null);
|
||||||
|
setAudioModalProps(null);
|
||||||
|
}}
|
||||||
isOpen={isAudioModalOpen}
|
isOpen={isAudioModalOpen}
|
||||||
|
content={audioModalContent}
|
||||||
|
unmuteOnExit={audioModalProps?.unmuteOnExit}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</Styled.Container>
|
</Styled.Container>
|
||||||
|
@ -56,6 +56,26 @@ const intlMessages = defineMessages({
|
|||||||
id: 'app.audioNotification.deviceChangeFailed',
|
id: 'app.audioNotification.deviceChangeFailed',
|
||||||
description: 'Device change failed',
|
description: 'Device change failed',
|
||||||
},
|
},
|
||||||
|
fallbackInputLabel: {
|
||||||
|
id: 'app.audio.audioSettings.fallbackInputLabel',
|
||||||
|
description: 'Audio input device label',
|
||||||
|
},
|
||||||
|
fallbackOutputLabel: {
|
||||||
|
id: 'app.audio.audioSettings.fallbackOutputLabel',
|
||||||
|
description: 'Audio output device label',
|
||||||
|
},
|
||||||
|
fallbackNoPermissionLabel: {
|
||||||
|
id: 'app.audio.audioSettings.fallbackNoPermission',
|
||||||
|
description: 'No permission to access audio devices label',
|
||||||
|
},
|
||||||
|
audioSettingsTitle: {
|
||||||
|
id: 'app.audio.audioSettings.titleLabel',
|
||||||
|
description: 'Audio settings button label',
|
||||||
|
},
|
||||||
|
noMicListenOnlyLabel: {
|
||||||
|
id: 'app.audio.audioSettings.noMicListenOnly',
|
||||||
|
description: 'No microphone (listen only) label',
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
interface MuteToggleProps {
|
interface MuteToggleProps {
|
||||||
@ -75,6 +95,8 @@ interface LiveSelectionProps extends MuteToggleProps {
|
|||||||
outputDeviceId: string;
|
outputDeviceId: string;
|
||||||
meetingIsBreakout: boolean;
|
meetingIsBreakout: boolean;
|
||||||
away: boolean;
|
away: boolean;
|
||||||
|
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
|
||||||
|
supportsTransparentListenOnly: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
||||||
@ -90,6 +112,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
isAudioLocked,
|
isAudioLocked,
|
||||||
toggleMuteMicrophone,
|
toggleMuteMicrophone,
|
||||||
away,
|
away,
|
||||||
|
openAudioSettings,
|
||||||
|
supportsTransparentListenOnly,
|
||||||
}) => {
|
}) => {
|
||||||
const intl = useIntl();
|
const intl = useIntl();
|
||||||
|
|
||||||
@ -105,6 +129,21 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const getFallbackLabel = (device: MediaDeviceInfo, index: number) => {
|
||||||
|
const baseLabel = device?.kind === AUDIO_OUTPUT
|
||||||
|
? intlMessages.fallbackOutputLabel
|
||||||
|
: intlMessages.fallbackInputLabel;
|
||||||
|
let label = intl.formatMessage(baseLabel, { 0: index });
|
||||||
|
|
||||||
|
if (!device?.deviceId) {
|
||||||
|
label = `${label} ${intl.formatMessage(intlMessages.fallbackNoPermissionLabel)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return label;
|
||||||
|
};
|
||||||
|
|
||||||
|
const shouldTreatAsMicrophone = () => !listenOnly || supportsTransparentListenOnly;
|
||||||
|
|
||||||
const renderDeviceList = useCallback((
|
const renderDeviceList = useCallback((
|
||||||
deviceKind: string,
|
deviceKind: string,
|
||||||
list: MediaDeviceInfo[],
|
list: MediaDeviceInfo[],
|
||||||
@ -134,7 +173,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
{
|
{
|
||||||
key: `${device.deviceId}-${deviceKind}`,
|
key: `${device.deviceId}-${deviceKind}`,
|
||||||
dataTest: `${deviceKind}-${index + 1}`,
|
dataTest: `${deviceKind}-${index + 1}`,
|
||||||
label: truncateDeviceName(device.label),
|
label: truncateDeviceName(device.label || getFallbackLabel(device, index + 1)),
|
||||||
customStyles: (device.deviceId === currentDeviceId) ? Styled.SelectedLabel : null,
|
customStyles: (device.deviceId === currentDeviceId) ? Styled.SelectedLabel : null,
|
||||||
iconRight: (device.deviceId === currentDeviceId) ? 'check' : null,
|
iconRight: (device.deviceId === currentDeviceId) ? 'check' : null,
|
||||||
onClick: () => onDeviceListClick(device.deviceId, deviceKind, callback),
|
onClick: () => onDeviceListClick(device.deviceId, deviceKind, callback),
|
||||||
@ -163,10 +202,37 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
|
||||||
|
// "None" option for audio input devices - aka listen-only
|
||||||
|
const listenOnly = deviceKind === AUDIO_INPUT
|
||||||
|
&& currentDeviceId === 'listen-only';
|
||||||
|
|
||||||
|
deviceList.push({
|
||||||
|
key: `listenOnly-${deviceKind}`,
|
||||||
|
dataTest: `${deviceKind}-listenOnly`,
|
||||||
|
label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
|
||||||
|
customStyles: listenOnly && Styled.SelectedLabel,
|
||||||
|
iconRight: listenOnly ? 'check' : null,
|
||||||
|
onClick: () => onDeviceListClick('listen-only', deviceKind, callback),
|
||||||
|
} as MenuOptionItemType);
|
||||||
|
}
|
||||||
|
|
||||||
return listTitle.concat(deviceList);
|
return listTitle.concat(deviceList);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const onDeviceListClick = useCallback((deviceId: string, deviceKind: string, callback: Function) => {
|
const onDeviceListClick = useCallback((deviceId: string, deviceKind: string, callback: Function) => {
|
||||||
|
if (!deviceId) {
|
||||||
|
// If there's no deviceId in an audio input device, it means
|
||||||
|
// the user doesn't have permission to access it. If we support
|
||||||
|
// transparent listen-only, fire the mount AudioSettings modal to
|
||||||
|
// acquire permission and let the user configure their stuff.
|
||||||
|
if (deviceKind === AUDIO_INPUT && supportsTransparentListenOnly) {
|
||||||
|
openAudioSettings({ unmuteOnExit: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (!deviceId) return;
|
if (!deviceId) return;
|
||||||
if (deviceKind === AUDIO_INPUT) {
|
if (deviceKind === AUDIO_INPUT) {
|
||||||
callback(deviceId).catch(() => {
|
callback(deviceId).catch(() => {
|
||||||
@ -179,7 +245,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
}
|
}
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const inputDeviceList = !listenOnly
|
const inputDeviceList = shouldTreatAsMicrophone()
|
||||||
? renderDeviceList(
|
? renderDeviceList(
|
||||||
AUDIO_INPUT,
|
AUDIO_INPUT,
|
||||||
inputDevices,
|
inputDevices,
|
||||||
@ -196,6 +262,16 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
outputDeviceId,
|
outputDeviceId,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const audioSettingsOption = {
|
||||||
|
icon: 'settings',
|
||||||
|
label: intl.formatMessage(intlMessages.audioSettingsTitle),
|
||||||
|
key: 'audioSettingsOption',
|
||||||
|
dataTest: 'input-selector-audio-settings',
|
||||||
|
customStyles: Styled.AudioSettingsOption,
|
||||||
|
dividerTop: true,
|
||||||
|
onClick: () => openAudioSettings(),
|
||||||
|
} as MenuOptionItemType;
|
||||||
|
|
||||||
const leaveAudioOption = {
|
const leaveAudioOption = {
|
||||||
icon: 'logout',
|
icon: 'logout',
|
||||||
label: intl.formatMessage(intlMessages.leaveAudio),
|
label: intl.formatMessage(intlMessages.leaveAudio),
|
||||||
@ -204,12 +280,14 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
customStyles: Styled.DangerColor,
|
customStyles: Styled.DangerColor,
|
||||||
onClick: () => handleLeaveAudio(meetingIsBreakout),
|
onClick: () => handleLeaveAudio(meetingIsBreakout),
|
||||||
};
|
};
|
||||||
const dropdownListComplete = inputDeviceList.concat(outputDeviceList)
|
const dropdownListComplete = inputDeviceList
|
||||||
|
.concat(outputDeviceList)
|
||||||
.concat({
|
.concat({
|
||||||
key: 'separator-02',
|
key: 'separator-02',
|
||||||
isSeparator: true,
|
isSeparator: true,
|
||||||
})
|
});
|
||||||
.concat(leaveAudioOption);
|
if (shouldTreatAsMicrophone()) dropdownListComplete.push(audioSettingsOption);
|
||||||
|
dropdownListComplete.push(leaveAudioOption);
|
||||||
|
|
||||||
audioSettingsDropdownItems.forEach((audioSettingsDropdownItem:
|
audioSettingsDropdownItems.forEach((audioSettingsDropdownItem:
|
||||||
PluginSdk.AudioSettingsDropdownInterface) => {
|
PluginSdk.AudioSettingsDropdownInterface) => {
|
||||||
@ -239,9 +317,11 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
|
|
||||||
const customStyles = { top: '-1rem' };
|
const customStyles = { top: '-1rem' };
|
||||||
const { isMobile } = deviceInfo;
|
const { isMobile } = deviceInfo;
|
||||||
|
const noInputDevice = inputDeviceId === 'listen-only';
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
{!listenOnly ? (
|
{shouldTreatAsMicrophone() ? (
|
||||||
// eslint-disable-next-line jsx-a11y/no-access-key
|
// eslint-disable-next-line jsx-a11y/no-access-key
|
||||||
<span
|
<span
|
||||||
style={{ display: 'none' }}
|
style={{ display: 'none' }}
|
||||||
@ -250,7 +330,7 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
aria-hidden="true"
|
aria-hidden="true"
|
||||||
/>
|
/>
|
||||||
) : null}
|
) : null}
|
||||||
{(!listenOnly && isMobile) && (
|
{(shouldTreatAsMicrophone() && isMobile) && (
|
||||||
<MuteToggle
|
<MuteToggle
|
||||||
talking={talking}
|
talking={talking}
|
||||||
muted={muted}
|
muted={muted}
|
||||||
@ -258,13 +338,15 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
isAudioLocked={isAudioLocked}
|
isAudioLocked={isAudioLocked}
|
||||||
toggleMuteMicrophone={toggleMuteMicrophone}
|
toggleMuteMicrophone={toggleMuteMicrophone}
|
||||||
away={away}
|
away={away}
|
||||||
|
noInputDevice={noInputDevice}
|
||||||
|
openAudioSettings={openAudioSettings}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
<BBBMenu
|
<BBBMenu
|
||||||
customStyles={!isMobile ? customStyles : null}
|
customStyles={!isMobile ? customStyles : null}
|
||||||
trigger={(
|
trigger={(
|
||||||
<>
|
<>
|
||||||
{!listenOnly && !isMobile
|
{shouldTreatAsMicrophone() && !isMobile
|
||||||
? (
|
? (
|
||||||
<MuteToggle
|
<MuteToggle
|
||||||
talking={talking}
|
talking={talking}
|
||||||
@ -273,6 +355,8 @@ export const LiveSelection: React.FC<LiveSelectionProps> = ({
|
|||||||
isAudioLocked={isAudioLocked}
|
isAudioLocked={isAudioLocked}
|
||||||
toggleMuteMicrophone={toggleMuteMicrophone}
|
toggleMuteMicrophone={toggleMuteMicrophone}
|
||||||
away={away}
|
away={away}
|
||||||
|
noInputDevice={noInputDevice}
|
||||||
|
openAudioSettings={openAudioSettings}
|
||||||
/>
|
/>
|
||||||
)
|
)
|
||||||
: (
|
: (
|
||||||
|
@ -33,6 +33,8 @@ interface MuteToggleProps {
|
|||||||
isAudioLocked: boolean;
|
isAudioLocked: boolean;
|
||||||
toggleMuteMicrophone: (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => void;
|
toggleMuteMicrophone: (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => void;
|
||||||
away: boolean;
|
away: boolean;
|
||||||
|
noInputDevice?: boolean;
|
||||||
|
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const MuteToggle: React.FC<MuteToggleProps> = ({
|
export const MuteToggle: React.FC<MuteToggleProps> = ({
|
||||||
@ -42,6 +44,8 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
|
|||||||
isAudioLocked,
|
isAudioLocked,
|
||||||
toggleMuteMicrophone,
|
toggleMuteMicrophone,
|
||||||
away,
|
away,
|
||||||
|
noInputDevice = false,
|
||||||
|
openAudioSettings,
|
||||||
}) => {
|
}) => {
|
||||||
const intl = useIntl();
|
const intl = useIntl();
|
||||||
const toggleMuteShourtcut = useShortcut('toggleMute');
|
const toggleMuteShourtcut = useShortcut('toggleMute');
|
||||||
@ -57,15 +61,22 @@ export const MuteToggle: React.FC<MuteToggleProps> = ({
|
|||||||
const onClickCallback = (e: React.MouseEvent<HTMLButtonElement>) => {
|
const onClickCallback = (e: React.MouseEvent<HTMLButtonElement>) => {
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
if (muted && away) {
|
if (muted) {
|
||||||
muteAway(muted, true, toggleVoice);
|
if (away) {
|
||||||
|
if (!noInputDevice) muteAway(muted, true, toggleVoice);
|
||||||
VideoService.setTrackEnabled(true);
|
VideoService.setTrackEnabled(true);
|
||||||
setAway({
|
setAway({
|
||||||
variables: {
|
variables: {
|
||||||
away: false,
|
away: false,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
} else if (noInputDevice) {
|
||||||
|
// User is in duplex audio, passive-sendrecv, but has no input device set
|
||||||
|
// Open the audio settings modal to allow them to select an input device
|
||||||
|
openAudioSettings();
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
toggleMuteMicrophone(muted, toggleVoice);
|
toggleMuteMicrophone(muted, toggleVoice);
|
||||||
};
|
};
|
||||||
return (
|
return (
|
||||||
|
@ -8,18 +8,23 @@ import useCurrentUser from '/imports/ui/core/hooks/useCurrentUser';
|
|||||||
import { User } from '/imports/ui/Types/user';
|
import { User } from '/imports/ui/Types/user';
|
||||||
import { defineMessages, useIntl } from 'react-intl';
|
import { defineMessages, useIntl } from 'react-intl';
|
||||||
import {
|
import {
|
||||||
handleLeaveAudio, liveChangeInputDevice, liveChangeOutputDevice, notify, toggleMuteMicrophone,
|
handleLeaveAudio,
|
||||||
|
liveChangeInputDevice,
|
||||||
|
liveChangeOutputDevice,
|
||||||
|
notify,
|
||||||
|
toggleMuteMicrophone,
|
||||||
|
toggleMuteMicrophoneSystem,
|
||||||
} from './service';
|
} from './service';
|
||||||
import useMeeting from '/imports/ui/core/hooks/useMeeting';
|
import useMeeting from '/imports/ui/core/hooks/useMeeting';
|
||||||
import { Meeting } from '/imports/ui/Types/meeting';
|
import { Meeting } from '/imports/ui/Types/meeting';
|
||||||
import logger from '/imports/startup/client/logger';
|
import logger from '/imports/startup/client/logger';
|
||||||
import Auth from '/imports/ui/services/auth';
|
|
||||||
import MutedAlert from '/imports/ui/components/muted-alert/component';
|
import MutedAlert from '/imports/ui/components/muted-alert/component';
|
||||||
import MuteToggle from './buttons/muteToggle';
|
import MuteToggle from './buttons/muteToggle';
|
||||||
import ListenOnly from './buttons/listenOnly';
|
import ListenOnly from './buttons/listenOnly';
|
||||||
import LiveSelection from './buttons/LiveSelection';
|
import LiveSelection from './buttons/LiveSelection';
|
||||||
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
|
import useWhoIsTalking from '/imports/ui/core/hooks/useWhoIsTalking';
|
||||||
import useWhoIsUnmuted from '/imports/ui/core/hooks/useWhoIsUnmuted';
|
import useWhoIsUnmuted from '/imports/ui/core/hooks/useWhoIsUnmuted';
|
||||||
|
import useToggleVoice from '/imports/ui/components/audio/audio-graphql/hooks/useToggleVoice';
|
||||||
|
|
||||||
const AUDIO_INPUT = 'audioinput';
|
const AUDIO_INPUT = 'audioinput';
|
||||||
const AUDIO_OUTPUT = 'audiooutput';
|
const AUDIO_OUTPUT = 'audiooutput';
|
||||||
@ -52,7 +57,11 @@ const intlMessages = defineMessages({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
interface InputStreamLiveSelectorProps {
|
interface InputStreamLiveSelectorContainerProps {
|
||||||
|
openAudioSettings: (props?: { unmuteOnExit?: boolean }) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InputStreamLiveSelectorProps extends InputStreamLiveSelectorContainerProps {
|
||||||
isConnected: boolean;
|
isConnected: boolean;
|
||||||
isPresenter: boolean;
|
isPresenter: boolean;
|
||||||
isModerator: boolean;
|
isModerator: boolean;
|
||||||
@ -68,6 +77,8 @@ interface InputStreamLiveSelectorProps {
|
|||||||
inputStream: string;
|
inputStream: string;
|
||||||
meetingIsBreakout: boolean;
|
meetingIsBreakout: boolean;
|
||||||
away: boolean;
|
away: boolean;
|
||||||
|
permissionStatus: string;
|
||||||
|
supportsTransparentListenOnly: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
||||||
@ -86,8 +97,12 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
inputStream,
|
inputStream,
|
||||||
meetingIsBreakout,
|
meetingIsBreakout,
|
||||||
away,
|
away,
|
||||||
|
permissionStatus,
|
||||||
|
supportsTransparentListenOnly,
|
||||||
|
openAudioSettings,
|
||||||
}) => {
|
}) => {
|
||||||
const intl = useIntl();
|
const intl = useIntl();
|
||||||
|
const toggleVoice = useToggleVoice();
|
||||||
// eslint-disable-next-line no-undef
|
// eslint-disable-next-line no-undef
|
||||||
const [inputDevices, setInputDevices] = React.useState<InputDeviceInfo[]>([]);
|
const [inputDevices, setInputDevices] = React.useState<InputDeviceInfo[]>([]);
|
||||||
const [outputDevices, setOutputDevices] = React.useState<MediaDeviceInfo[]>([]);
|
const [outputDevices, setOutputDevices] = React.useState<MediaDeviceInfo[]>([]);
|
||||||
@ -106,6 +121,15 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
const audioOutputDevices = devices.filter((i) => i.kind === AUDIO_OUTPUT);
|
const audioOutputDevices = devices.filter((i) => i.kind === AUDIO_OUTPUT);
|
||||||
setInputDevices(audioInputDevices as InputDeviceInfo[]);
|
setInputDevices(audioInputDevices as InputDeviceInfo[]);
|
||||||
setOutputDevices(audioOutputDevices);
|
setOutputDevices(audioOutputDevices);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
logger.warn({
|
||||||
|
logCode: 'audio_device_enumeration_error',
|
||||||
|
extraInfo: {
|
||||||
|
errorMessage: error.message,
|
||||||
|
errorName: error.name,
|
||||||
|
},
|
||||||
|
}, `Error enumerating audio devices: ${error.message}`);
|
||||||
});
|
});
|
||||||
if (isAudioConnected) {
|
if (isAudioConnected) {
|
||||||
updateRemovedDevices(inputDevices, outputDevices);
|
updateRemovedDevices(inputDevices, outputDevices);
|
||||||
@ -115,11 +139,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
const fallbackInputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
|
const fallbackInputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
|
||||||
if (!fallbackDevice || !fallbackDevice.deviceId) return;
|
if (!fallbackDevice || !fallbackDevice.deviceId) return;
|
||||||
|
|
||||||
logger.info({
|
logger.warn({
|
||||||
logCode: 'audio_device_live_selector',
|
logCode: 'audio_input_live_selector',
|
||||||
extraInfo: {
|
extraInfo: {
|
||||||
userId: Auth.userID,
|
fallbackDeviceId: fallbackDevice?.deviceId,
|
||||||
meetingId: Auth.meetingID,
|
fallbackDeviceLabel: fallbackDevice?.label,
|
||||||
},
|
},
|
||||||
}, 'Current input device was removed. Fallback to default device');
|
}, 'Current input device was removed. Fallback to default device');
|
||||||
liveChangeInputDevice(fallbackDevice.deviceId).catch(() => {
|
liveChangeInputDevice(fallbackDevice.deviceId).catch(() => {
|
||||||
@ -129,11 +153,11 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
|
|
||||||
const fallbackOutputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
|
const fallbackOutputDevice = useCallback((fallbackDevice: MediaDeviceInfo) => {
|
||||||
if (!fallbackDevice || !fallbackDevice.deviceId) return;
|
if (!fallbackDevice || !fallbackDevice.deviceId) return;
|
||||||
logger.info({
|
logger.warn({
|
||||||
logCode: 'audio_device_live_selector',
|
logCode: 'audio_output_live_selector',
|
||||||
extraInfo: {
|
extraInfo: {
|
||||||
userId: Auth.userID,
|
fallbackDeviceId: fallbackDevice?.deviceId,
|
||||||
meetingId: Auth.meetingID,
|
fallbackDeviceLabel: fallbackDevice?.label,
|
||||||
},
|
},
|
||||||
}, 'Current output device was removed. Fallback to default device');
|
}, 'Current output device was removed. Fallback to default device');
|
||||||
liveChangeOutputDevice(fallbackDevice.deviceId, true).catch(() => {
|
liveChangeOutputDevice(fallbackDevice.deviceId, true).catch(() => {
|
||||||
@ -162,7 +186,16 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
if (enableDynamicAudioDeviceSelection) {
|
if (enableDynamicAudioDeviceSelection) {
|
||||||
updateDevices(inAudio);
|
updateDevices(inAudio);
|
||||||
}
|
}
|
||||||
}, [inAudio]);
|
}, [inAudio, permissionStatus]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// If the user has no input device, is connected to audio and unmuted,
|
||||||
|
// they need to be *muted* by the system. Further attempts to unmute
|
||||||
|
// will open the audio settings modal instead.
|
||||||
|
if (inputDeviceId === 'listen-only' && isConnected && !muted) {
|
||||||
|
toggleMuteMicrophoneSystem(muted, toggleVoice);
|
||||||
|
}
|
||||||
|
}, [inputDeviceId, isConnected, muted]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
@ -190,6 +223,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
isAudioLocked={isAudioLocked}
|
isAudioLocked={isAudioLocked}
|
||||||
toggleMuteMicrophone={toggleMuteMicrophone}
|
toggleMuteMicrophone={toggleMuteMicrophone}
|
||||||
away={away}
|
away={away}
|
||||||
|
supportsTransparentListenOnly={supportsTransparentListenOnly}
|
||||||
|
openAudioSettings={openAudioSettings}
|
||||||
/>
|
/>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
@ -201,6 +236,8 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
isAudioLocked={isAudioLocked}
|
isAudioLocked={isAudioLocked}
|
||||||
toggleMuteMicrophone={toggleMuteMicrophone}
|
toggleMuteMicrophone={toggleMuteMicrophone}
|
||||||
away={away}
|
away={away}
|
||||||
|
openAudioSettings={openAudioSettings}
|
||||||
|
noInputDevice={inputDeviceId === 'listen-only'}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
<ListenOnly
|
<ListenOnly
|
||||||
@ -216,7 +253,9 @@ const InputStreamLiveSelector: React.FC<InputStreamLiveSelectorProps> = ({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
const InputStreamLiveSelectorContainer: React.FC = () => {
|
const InputStreamLiveSelectorContainer: React.FC<InputStreamLiveSelectorContainerProps> = ({
|
||||||
|
openAudioSettings,
|
||||||
|
}) => {
|
||||||
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
|
const { data: currentUser } = useCurrentUser((u: Partial<User>) => {
|
||||||
if (!u.voice) {
|
if (!u.voice) {
|
||||||
return {
|
return {
|
||||||
@ -261,6 +300,10 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
|
|||||||
const outputDeviceId = useReactiveVar(AudioManager._outputDeviceId.value) as string;
|
const outputDeviceId = useReactiveVar(AudioManager._outputDeviceId.value) as string;
|
||||||
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
|
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
|
||||||
const inputStream = useReactiveVar(AudioManager._inputStream) as string;
|
const inputStream = useReactiveVar(AudioManager._inputStream) as string;
|
||||||
|
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
|
||||||
|
const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value) as string;
|
||||||
|
// @ts-ignore - temporary while hybrid (meteor+GraphQl)
|
||||||
|
const supportsTransparentListenOnly = useReactiveVar(AudioManager._transparentListenOnlySupported.value) as boolean;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<InputStreamLiveSelector
|
<InputStreamLiveSelector
|
||||||
@ -280,6 +323,9 @@ const InputStreamLiveSelectorContainer: React.FC = () => {
|
|||||||
inputStream={inputStream}
|
inputStream={inputStream}
|
||||||
meetingIsBreakout={currentMeeting?.isBreakout ?? false}
|
meetingIsBreakout={currentMeeting?.isBreakout ?? false}
|
||||||
away={currentUser?.away ?? false}
|
away={currentUser?.away ?? false}
|
||||||
|
openAudioSettings={openAudioSettings}
|
||||||
|
permissionStatus={permissionStatus}
|
||||||
|
supportsTransparentListenOnly={supportsTransparentListenOnly}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
@ -40,32 +40,35 @@ export const handleLeaveAudio = (meetingIsBreakout: boolean) => {
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
const toggleMuteMicrophoneThrottled = throttle((
|
const toggleMute = (
|
||||||
muted: boolean,
|
muted: boolean,
|
||||||
toggleVoice: (userId: string, muted: boolean) => void,
|
toggleVoice: (userId: string, muted: boolean) => void,
|
||||||
|
actionType = 'user_action',
|
||||||
) => {
|
) => {
|
||||||
Storage.setItem(MUTED_KEY, !muted);
|
|
||||||
|
|
||||||
if (muted) {
|
if (muted) {
|
||||||
logger.info(
|
if (AudioManager.inputDeviceId === 'listen-only') {
|
||||||
{
|
// User is in duplex audio, passive-sendrecv, but has no input device set
|
||||||
|
// Unmuting should not be allowed at all
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info({
|
||||||
logCode: 'audiomanager_unmute_audio',
|
logCode: 'audiomanager_unmute_audio',
|
||||||
extraInfo: { logType: 'user_action' },
|
extraInfo: { logType: actionType },
|
||||||
},
|
}, 'microphone unmuted');
|
||||||
'microphone unmuted by user',
|
Storage.setItem(MUTED_KEY, false);
|
||||||
);
|
|
||||||
toggleVoice(Auth.userID as string, false);
|
toggleVoice(Auth.userID as string, false);
|
||||||
} else {
|
} else {
|
||||||
logger.info(
|
logger.info({
|
||||||
{
|
|
||||||
logCode: 'audiomanager_mute_audio',
|
logCode: 'audiomanager_mute_audio',
|
||||||
extraInfo: { logType: 'user_action' },
|
extraInfo: { logType: actionType },
|
||||||
},
|
}, 'microphone muted');
|
||||||
'microphone muted by user',
|
Storage.setItem(MUTED_KEY, true);
|
||||||
);
|
|
||||||
toggleVoice(Auth.userID as string, true);
|
toggleVoice(Auth.userID as string, true);
|
||||||
}
|
}
|
||||||
}, TOGGLE_MUTE_THROTTLE_TIME);
|
};
|
||||||
|
|
||||||
|
const toggleMuteMicrophoneThrottled = throttle(toggleMute, TOGGLE_MUTE_THROTTLE_TIME);
|
||||||
|
|
||||||
const toggleMuteMicrophoneDebounced = debounce(toggleMuteMicrophoneThrottled, TOGGLE_MUTE_DEBOUNCE_TIME,
|
const toggleMuteMicrophoneDebounced = debounce(toggleMuteMicrophoneThrottled, TOGGLE_MUTE_DEBOUNCE_TIME,
|
||||||
{ leading: true, trailing: false });
|
{ leading: true, trailing: false });
|
||||||
@ -74,6 +77,11 @@ export const toggleMuteMicrophone = (muted: boolean, toggleVoice: (userId: strin
|
|||||||
return toggleMuteMicrophoneDebounced(muted, toggleVoice);
|
return toggleMuteMicrophoneDebounced(muted, toggleVoice);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Debounce is not needed here, as this function should only called by the system.
|
||||||
|
export const toggleMuteMicrophoneSystem = (muted: boolean, toggleVoice: (userId: string, muted: boolean) => void) => {
|
||||||
|
return toggleMute(muted, toggleVoice, 'system_action');
|
||||||
|
};
|
||||||
|
|
||||||
export const truncateDeviceName = (deviceName: string) => {
|
export const truncateDeviceName = (deviceName: string) => {
|
||||||
if (deviceName && deviceName.length <= DEVICE_LABEL_MAX_LENGTH) {
|
if (deviceName && deviceName.length <= DEVICE_LABEL_MAX_LENGTH) {
|
||||||
return deviceName;
|
return deviceName;
|
||||||
@ -141,6 +149,7 @@ export const muteAway = (
|
|||||||
export default {
|
export default {
|
||||||
handleLeaveAudio,
|
handleLeaveAudio,
|
||||||
toggleMuteMicrophone,
|
toggleMuteMicrophone,
|
||||||
|
toggleMuteMicrophoneSystem,
|
||||||
truncateDeviceName,
|
truncateDeviceName,
|
||||||
notify,
|
notify,
|
||||||
liveChangeInputDevice,
|
liveChangeInputDevice,
|
||||||
|
@ -56,6 +56,10 @@ export const DisabledLabel = {
|
|||||||
opacity: 1,
|
opacity: 1,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const AudioSettingsOption = {
|
||||||
|
paddingLeft: 12,
|
||||||
|
};
|
||||||
|
|
||||||
export const SelectedLabel = {
|
export const SelectedLabel = {
|
||||||
color: colorPrimary,
|
color: colorPrimary,
|
||||||
backgroundColor: colorOffWhite,
|
backgroundColor: colorOffWhite,
|
||||||
@ -80,6 +84,7 @@ export default {
|
|||||||
MuteToggleButton,
|
MuteToggleButton,
|
||||||
DisabledLabel,
|
DisabledLabel,
|
||||||
SelectedLabel,
|
SelectedLabel,
|
||||||
|
AudioSettingsOption,
|
||||||
DangerColor,
|
DangerColor,
|
||||||
AudioDropdown,
|
AudioDropdown,
|
||||||
};
|
};
|
||||||
|
@ -1,11 +1,14 @@
|
|||||||
import React, { useEffect, useState } from 'react';
|
import React, {
|
||||||
|
useCallback,
|
||||||
|
useEffect,
|
||||||
|
useState,
|
||||||
|
} from 'react';
|
||||||
import PropTypes from 'prop-types';
|
import PropTypes from 'prop-types';
|
||||||
import {
|
import {
|
||||||
defineMessages, injectIntl, FormattedMessage,
|
defineMessages, injectIntl, FormattedMessage,
|
||||||
} from 'react-intl';
|
} from 'react-intl';
|
||||||
import { useMutation } from '@apollo/client';
|
import { useMutation } from '@apollo/client';
|
||||||
import Styled from './styles';
|
import Styled from './styles';
|
||||||
import PermissionsOverlay from '../permissions-overlay/component';
|
|
||||||
import AudioSettings from '../audio-settings/component';
|
import AudioSettings from '../audio-settings/component';
|
||||||
import EchoTest from '../echo-test/component';
|
import EchoTest from '../echo-test/component';
|
||||||
import Help from '../help/component';
|
import Help from '../help/component';
|
||||||
@ -21,6 +24,7 @@ import {
|
|||||||
muteAway,
|
muteAway,
|
||||||
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
|
} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';
|
||||||
import Session from '/imports/ui/services/storage/in-memory';
|
import Session from '/imports/ui/services/storage/in-memory';
|
||||||
|
import logger from '/imports/startup/client/logger';
|
||||||
|
|
||||||
const propTypes = {
|
const propTypes = {
|
||||||
intl: PropTypes.shape({
|
intl: PropTypes.shape({
|
||||||
@ -39,10 +43,11 @@ const propTypes = {
|
|||||||
isConnected: PropTypes.bool.isRequired,
|
isConnected: PropTypes.bool.isRequired,
|
||||||
isUsingAudio: PropTypes.bool.isRequired,
|
isUsingAudio: PropTypes.bool.isRequired,
|
||||||
isListenOnly: PropTypes.bool.isRequired,
|
isListenOnly: PropTypes.bool.isRequired,
|
||||||
|
isMuted: PropTypes.bool.isRequired,
|
||||||
|
toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
|
||||||
inputDeviceId: PropTypes.string,
|
inputDeviceId: PropTypes.string,
|
||||||
outputDeviceId: PropTypes.string,
|
outputDeviceId: PropTypes.string,
|
||||||
formattedDialNum: PropTypes.string.isRequired,
|
formattedDialNum: PropTypes.string.isRequired,
|
||||||
showPermissionsOvelay: PropTypes.bool.isRequired,
|
|
||||||
listenOnlyMode: PropTypes.bool.isRequired,
|
listenOnlyMode: PropTypes.bool.isRequired,
|
||||||
joinFullAudioImmediately: PropTypes.bool,
|
joinFullAudioImmediately: PropTypes.bool,
|
||||||
forceListenOnlyAttendee: PropTypes.bool.isRequired,
|
forceListenOnlyAttendee: PropTypes.bool.isRequired,
|
||||||
@ -72,6 +77,14 @@ const propTypes = {
|
|||||||
}).isRequired,
|
}).isRequired,
|
||||||
getTroubleshootingLink: PropTypes.func.isRequired,
|
getTroubleshootingLink: PropTypes.func.isRequired,
|
||||||
away: PropTypes.bool,
|
away: PropTypes.bool,
|
||||||
|
doGUM: PropTypes.func.isRequired,
|
||||||
|
hasMicrophonePermission: PropTypes.func.isRequired,
|
||||||
|
permissionStatus: PropTypes.string,
|
||||||
|
liveChangeInputDevice: PropTypes.func.isRequired,
|
||||||
|
content: PropTypes.string,
|
||||||
|
unmuteOnExit: PropTypes.bool,
|
||||||
|
supportsTransparentListenOnly: PropTypes.bool.isRequired,
|
||||||
|
getAudioConstraints: PropTypes.func.isRequired,
|
||||||
};
|
};
|
||||||
|
|
||||||
const intlMessages = defineMessages({
|
const intlMessages = defineMessages({
|
||||||
@ -116,7 +129,7 @@ const intlMessages = defineMessages({
|
|||||||
description: 'Title for the echo test',
|
description: 'Title for the echo test',
|
||||||
},
|
},
|
||||||
settingsTitle: {
|
settingsTitle: {
|
||||||
id: 'app.audioModal.settingsTitle',
|
id: 'app.audio.audioSettings.titleLabel',
|
||||||
description: 'Title for the audio modal',
|
description: 'Title for the audio modal',
|
||||||
},
|
},
|
||||||
helpTitle: {
|
helpTitle: {
|
||||||
@ -139,6 +152,10 @@ const intlMessages = defineMessages({
|
|||||||
id: 'app.audioModal.autoplayBlockedDesc',
|
id: 'app.audioModal.autoplayBlockedDesc',
|
||||||
description: 'Message for autoplay audio block',
|
description: 'Message for autoplay audio block',
|
||||||
},
|
},
|
||||||
|
findingDevicesTitle: {
|
||||||
|
id: 'app.audio.audioSettings.findingDevicesTitle',
|
||||||
|
description: 'Message for finding audio devices',
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const AudioModal = ({
|
const AudioModal = ({
|
||||||
@ -148,6 +165,8 @@ const AudioModal = ({
|
|||||||
audioLocked,
|
audioLocked,
|
||||||
isUsingAudio,
|
isUsingAudio,
|
||||||
isListenOnly,
|
isListenOnly,
|
||||||
|
isMuted,
|
||||||
|
toggleMuteMicrophoneSystem,
|
||||||
autoplayBlocked,
|
autoplayBlocked,
|
||||||
closeModal,
|
closeModal,
|
||||||
isEchoTest,
|
isEchoTest,
|
||||||
@ -174,19 +193,27 @@ const AudioModal = ({
|
|||||||
notify,
|
notify,
|
||||||
formattedTelVoice,
|
formattedTelVoice,
|
||||||
handleAllowAutoplay,
|
handleAllowAutoplay,
|
||||||
showPermissionsOvelay,
|
|
||||||
isIE,
|
isIE,
|
||||||
isOpen,
|
isOpen,
|
||||||
priority,
|
priority,
|
||||||
setIsOpen,
|
setIsOpen,
|
||||||
getTroubleshootingLink,
|
getTroubleshootingLink,
|
||||||
away = false,
|
away = false,
|
||||||
|
doGUM,
|
||||||
|
getAudioConstraints,
|
||||||
|
hasMicrophonePermission,
|
||||||
|
liveChangeInputDevice,
|
||||||
|
content: initialContent,
|
||||||
|
supportsTransparentListenOnly,
|
||||||
|
unmuteOnExit = false,
|
||||||
|
permissionStatus = null,
|
||||||
}) => {
|
}) => {
|
||||||
const [content, setContent] = useState(null);
|
const [content, setContent] = useState(initialContent);
|
||||||
const [hasError, setHasError] = useState(false);
|
const [hasError, setHasError] = useState(false);
|
||||||
const [disableActions, setDisableActions] = useState(false);
|
const [disableActions, setDisableActions] = useState(false);
|
||||||
const [errorInfo, setErrorInfo] = useState(null);
|
const [errorInfo, setErrorInfo] = useState(null);
|
||||||
const [autoplayChecked, setAutoplayChecked] = useState(false);
|
const [autoplayChecked, setAutoplayChecked] = useState(false);
|
||||||
|
const [findingDevices, setFindingDevices] = useState(false);
|
||||||
const [setAway] = useMutation(SET_AWAY);
|
const [setAway] = useMutation(SET_AWAY);
|
||||||
const voiceToggle = useToggleVoice();
|
const voiceToggle = useToggleVoice();
|
||||||
|
|
||||||
@ -257,6 +284,55 @@ const AudioModal = ({
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleGUMFailure = (error) => {
|
||||||
|
const { MIC_ERROR } = AudioError;
|
||||||
|
|
||||||
|
logger.error({
|
||||||
|
logCode: 'audio_gum_failed',
|
||||||
|
extraInfo: {
|
||||||
|
errorMessage: error.message,
|
||||||
|
errorName: error.name,
|
||||||
|
},
|
||||||
|
}, `Audio gUM failed: ${error.name}`);
|
||||||
|
|
||||||
|
setContent('help');
|
||||||
|
setDisableActions(false);
|
||||||
|
setHasError(true);
|
||||||
|
setErrorInfo({
|
||||||
|
errCode: error?.name === 'NotAllowedError'
|
||||||
|
? MIC_ERROR.NO_PERMISSION
|
||||||
|
: 0,
|
||||||
|
errMessage: error?.name || 'NotAllowedError',
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const checkMicrophonePermission = (options) => {
|
||||||
|
setFindingDevices(true);
|
||||||
|
|
||||||
|
return hasMicrophonePermission(options)
|
||||||
|
.then((hasPermission) => {
|
||||||
|
// null means undetermined, so we don't want to show the error modal
|
||||||
|
// and let downstream components figure it out
|
||||||
|
if (hasPermission === true || hasPermission === null) {
|
||||||
|
return hasPermission;
|
||||||
|
}
|
||||||
|
|
||||||
|
handleGUMFailure(new DOMException(
|
||||||
|
'Permissions API says denied',
|
||||||
|
'NotAllowedError',
|
||||||
|
));
|
||||||
|
|
||||||
|
return false;
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
handleGUMFailure(error);
|
||||||
|
return null;
|
||||||
|
})
|
||||||
|
.finally(() => {
|
||||||
|
setFindingDevices(false);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
const handleGoToAudioOptions = () => {
|
const handleGoToAudioOptions = () => {
|
||||||
setContent(null);
|
setContent(null);
|
||||||
setHasError(true);
|
setHasError(true);
|
||||||
@ -318,14 +394,19 @@ const AudioModal = ({
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleJoinLocalEcho = (inputStream) => {
|
const handleAudioSettingsConfirmation = useCallback((inputStream) => {
|
||||||
// Reset the modal to a connecting state - this kind of sucks?
|
// Reset the modal to a connecting state - this kind of sucks?
|
||||||
// prlanzarin Apr 04 2022
|
// prlanzarin Apr 04 2022
|
||||||
setContent(null);
|
setContent(null);
|
||||||
if (inputStream) changeInputStream(inputStream);
|
if (inputStream) changeInputStream(inputStream);
|
||||||
|
|
||||||
|
if (!isConnected) {
|
||||||
handleJoinMicrophone();
|
handleJoinMicrophone();
|
||||||
disableAwayMode();
|
disableAwayMode();
|
||||||
};
|
} else {
|
||||||
|
closeModal();
|
||||||
|
}
|
||||||
|
}, [changeInputStream, isConnected]);
|
||||||
|
|
||||||
const skipAudioOptions = () => (isConnecting || (forceListenOnlyAttendee && !autoplayChecked))
|
const skipAudioOptions = () => (isConnecting || (forceListenOnlyAttendee && !autoplayChecked))
|
||||||
&& !content
|
&& !content
|
||||||
@ -333,7 +414,6 @@ const AudioModal = ({
|
|||||||
|
|
||||||
const renderAudioOptions = () => {
|
const renderAudioOptions = () => {
|
||||||
const hideMicrophone = forceListenOnlyAttendee || audioLocked;
|
const hideMicrophone = forceListenOnlyAttendee || audioLocked;
|
||||||
|
|
||||||
const arrow = isRTL ? '←' : '→';
|
const arrow = isRTL ? '←' : '→';
|
||||||
const dialAudioLabel = `${intl.formatMessage(intlMessages.audioDialTitle)} ${arrow}`;
|
const dialAudioLabel = `${intl.formatMessage(intlMessages.audioDialTitle)} ${arrow}`;
|
||||||
|
|
||||||
@@ -400,40 +480,46 @@ const AudioModal = ({
       />
     );
 
+  const handleBack = useCallback(() => {
+    if (isConnecting || isConnected || skipAudioOptions()) {
+      closeModal();
+    } else {
+      handleGoToAudioOptions();
+    }
+  }, [isConnecting, isConnected, skipAudioOptions]);
+
   const renderAudioSettings = () => {
+    const { animations } = getSettingsSingletonInstance().application;
     const confirmationCallback = !localEchoEnabled
       ? handleRetryGoToEchoTest
-      : handleJoinLocalEcho;
+      : handleAudioSettingsConfirmation;
 
-    const handleGUMFailure = (error) => {
-      const code = error?.name === 'NotAllowedError'
-        ? AudioError.MIC_ERROR.NO_PERMISSION
-        : 0;
-      setContent('help');
-      setErrorInfo({
-        errCode: code,
-        errMessage: error?.name || 'NotAllowedError',
-      });
-      setDisableActions(false);
-    };
-
     return (
       <AudioSettings
-        handleBack={handleGoToAudioOptions}
+        animations={animations}
+        handleBack={handleBack}
         handleConfirmation={confirmationCallback}
         handleGUMFailure={handleGUMFailure}
         joinEchoTest={joinEchoTest}
         changeInputDevice={changeInputDevice}
+        liveChangeInputDevice={liveChangeInputDevice}
         changeOutputDevice={changeOutputDevice}
         isConnecting={isConnecting}
         isConnected={isConnected}
-        isEchoTest={isEchoTest}
+        isMuted={isMuted}
+        toggleMuteMicrophoneSystem={toggleMuteMicrophoneSystem}
         inputDeviceId={inputDeviceId}
         outputDeviceId={outputDeviceId}
         withVolumeMeter={showVolumeMeter}
         withEcho={localEchoEnabled}
         produceStreams={localEchoEnabled || showVolumeMeter}
         notify={notify}
+        unmuteOnExit={unmuteOnExit}
+        doGUM={doGUM}
+        getAudioConstraints={getAudioConstraints}
+        checkMicrophonePermission={checkMicrophonePermission}
+        supportsTransparentListenOnly={supportsTransparentListenOnly}
+        toggleVoice={voiceToggle}
       />
     );
   };
@@ -445,9 +531,19 @@ const AudioModal = ({
       message: errorInfo?.errMessage,
     };
 
+    const _joinListenOnly = () => {
+      // Erase the content state so that the modal transitions to the connecting
+      // state if the user chooses listen only
+      setContent(null);
+      handleJoinListenOnly();
+    };
+
     return (
       <Help
-        handleBack={handleGoToAudioOptions}
+        isConnected={isConnected}
+        handleBack={handleBack}
+        handleJoinListenOnly={_joinListenOnly}
+        handleRetryMic={handleGoToAudioSettings}
         audioErr={audioErr}
         isListenOnly={isListenOnly}
         troubleshootingLink={getTroubleshootingLink(errorInfo?.errCode)}
@@ -495,6 +591,17 @@ const AudioModal = ({
   const renderContent = () => {
     const { animations } = getSettingsSingletonInstance().application;
 
+    if (findingDevices && content === null) {
+      return (
+        <Styled.Connecting role="alert">
+          <span data-test="findingDevicesLabel">
+            {intl.formatMessage(intlMessages.findingDevicesTitle)}
+          </span>
+          <Styled.ConnectingAnimation animations={animations} />
+        </Styled.Connecting>
+      );
+    }
+
     if (skipAudioOptions()) {
       return (
         <Styled.Connecting role="alert">
@@ -505,6 +612,7 @@ const AudioModal = ({
         </Styled.Connecting>
       );
     }
 
     return content ? contents[content].component() : renderAudioOptions();
   };
 
@@ -512,16 +620,23 @@ const AudioModal = ({
     if (!isUsingAudio) {
       if (forceListenOnlyAttendee || audioLocked) {
         handleJoinListenOnly();
-        return;
-      }
-
-      if (joinFullAudioImmediately && !listenOnlyMode) {
-        handleJoinMicrophone();
-        return;
-      }
-
-      if (!listenOnlyMode) {
-        handleGoToEchoTest();
+      } else if (!listenOnlyMode) {
+        if (joinFullAudioImmediately) {
+          checkMicrophonePermission({ doGUM: true, permissionStatus })
+            .then((hasPermission) => {
+              // No permission - let the Help screen be shown as it's triggered
+              // by the checkMicrophonePermission function
+              if (hasPermission === false) return;
+
+              // Permission is granted or undetermined, so we can proceed
+              handleJoinMicrophone();
+            });
+        } else {
+          checkMicrophonePermission({ doGUM: false, permissionStatus }).then((hasPermission) => {
+            if (hasPermission === false) return;
+            handleGoToEchoTest();
+          });
+        }
       }
     }
   }, [
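Note on the block above: the permission gate depends on the Permissions API exposing a `microphone` descriptor, which not every browser implements. A minimal standalone sketch of that tri-state check (the helper name is illustrative, not the project's actual service API):

```javascript
// Sketch only: resolve to true (granted), false (denied) or null (undetermined),
// mirroring the tri-state the modal code above relies on.
const queryMicrophonePermission = async () => {
  // No Permissions API: treat as undetermined and let getUserMedia decide later.
  if (!navigator.permissions || !navigator.permissions.query) return null;

  try {
    const status = await navigator.permissions.query({ name: 'microphone' });

    if (status.state === 'granted') return true;
    if (status.state === 'denied') return false;

    return null; // 'prompt' -> undetermined
  } catch (error) {
    // Some browsers reject the 'microphone' permission name entirely.
    return null;
  }
};
```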
@@ -551,11 +666,9 @@ const AudioModal = ({
   let title = content
     ? intl.formatMessage(contents[content].title)
     : intl.formatMessage(intlMessages.audioChoiceLabel);
-  title = !skipAudioOptions() ? title : null;
+  title = !skipAudioOptions() && !findingDevices ? title : null;
 
   return (
-    <>
-      {showPermissionsOvelay ? <PermissionsOverlay closeModal={closeModal} /> : null}
     <Styled.AudioModal
       modalName="AUDIO"
       onRequestClose={closeModal}
@@ -584,7 +697,6 @@ const AudioModal = ({
         {renderContent()}
       </Styled.Content>
     </Styled.AudioModal>
-    </>
   );
 };
 
@@ -62,7 +62,6 @@ const AudioModalContainer = (props) => {
       combinedDialInNum = `${dialNumber.replace(/\D+/g, '')},,,${telVoice.replace(/\D+/g, '')}`;
     }
   }
-
   const { isIe } = browserInfo;
 
   const SHOW_VOLUME_METER = window.meetingClientSettings.public.media.showVolumeMeter;
@@ -81,26 +80,26 @@ const AudioModalContainer = (props) => {
   const isListenOnly = useReactiveVar(AudioManager._isListenOnly.value);
   const isEchoTest = useReactiveVar(AudioManager._isEchoTest.value);
   const autoplayBlocked = useReactiveVar(AudioManager._autoplayBlocked.value);
+  const isMuted = useReactiveVar(AudioManager._isMuted.value);
   const meetingIsBreakout = AppService.useMeetingIsBreakout();
+  const supportsTransparentListenOnly = useReactiveVar(
+    AudioManager._transparentListenOnlySupported.value,
+  );
+  const permissionStatus = useReactiveVar(AudioManager._permissionStatus.value);
   const { userLocks } = useLockContext();
+  const isListenOnlyInputDevice = Service.inputDeviceId() === 'listen-only';
+  const devicesAlreadyConfigured = skipEchoTestIfPreviousDevice
+    && Service.inputDeviceId();
+  const joinFullAudioImmediately = !isListenOnlyInputDevice
+    && (skipCheck || (skipCheckOnJoin && !getEchoTest) || devicesAlreadyConfigured);
   const { setIsOpen } = props;
   const close = useCallback(() => closeModal(() => setIsOpen(false)), [setIsOpen]);
   const joinMic = useCallback(
-    (skipEchoTest) => joinMicrophone(skipEchoTest || skipCheck || skipCheckOnJoin),
+    (options = {}) => joinMicrophone({
+      skipEchoTest: options.skipEchoTest || joinFullAudioImmediately,
+    }),
     [skipCheck, skipCheckOnJoin],
   );
-  const joinFullAudioImmediately = (
-    autoJoin
-    && (
-      skipCheck
-      || (skipCheckOnJoin && !getEchoTest)
-    ))
-    || (
-      skipCheck
-      || (skipCheckOnJoin && !getEchoTest)
-      || (skipEchoTestIfPreviousDevice && (inputDeviceId || outputDeviceId))
-    );
-
   return (
     <AudioModal
@@ -114,6 +113,8 @@ const AudioModalContainer = (props) => {
       isConnected={isConnected}
       isListenOnly={isListenOnly}
       isEchoTest={isEchoTest}
+      isMuted={isMuted}
+      toggleMuteMicrophoneSystem={Service.toggleMuteMicrophoneSystem}
       autoplayBlocked={autoplayBlocked}
       getEchoTest={getEchoTest}
       joinFullAudioImmediately={joinFullAudioImmediately}
@@ -123,6 +124,7 @@ const AudioModalContainer = (props) => {
       joinListenOnly={joinListenOnly}
       leaveEchoTest={leaveEchoTest}
       changeInputDevice={Service.changeInputDevice}
+      liveChangeInputDevice={Service.liveChangeInputDevice}
       changeInputStream={Service.changeInputStream}
       changeOutputDevice={Service.changeOutputDevice}
       joinEchoTest={Service.joinEchoTest}
@@ -144,7 +146,14 @@ const AudioModalContainer = (props) => {
       isRTL={isRTL}
       AudioError={AudioError}
       getTroubleshootingLink={AudioModalService.getTroubleshootingLink}
+      getMicrophonePermissionStatus={Service.getMicrophonePermissionStatus}
+      getAudioConstraints={Service.getAudioConstraints}
+      doGUM={Service.doGUM}
+      bypassGUM={Service.bypassGUM}
+      supportsTransparentListenOnly={supportsTransparentListenOnly}
       setIsOpen={setIsOpen}
+      hasMicrophonePermission={Service.hasMicrophonePermission}
+      permissionStatus={permissionStatus}
       {...props}
     />
   );
@@ -20,7 +20,10 @@ export const didUserSelectedListenOnly = () => (
   !!Storage.getItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY)
 );
 
-export const joinMicrophone = (skipEchoTest = false) => {
+export const joinMicrophone = (options = {}) => {
+  const { skipEchoTest = false } = options;
+  const shouldSkipEcho = skipEchoTest && Service.inputDeviceId() !== 'listen-only';
+
   Storage.setItem(CLIENT_DID_USER_SELECTED_MICROPHONE_KEY, true);
   Storage.setItem(CLIENT_DID_USER_SELECTED_LISTEN_ONLY_KEY, false);
 
@@ -30,8 +33,8 @@ export const joinMicrophone = (skipEchoTest = false) => {
 
   const call = new Promise((resolve, reject) => {
     try {
-      if ((skipEchoTest && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
-        return resolve(Service.joinMicrophone());
+      if ((shouldSkipEcho && !Service.isConnected()) || LOCAL_ECHO_TEST_ENABLED) {
+        return resolve(Service.joinMicrophone(options));
       }
 
       return resolve(Service.transferCall());
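A quick usage note on the reworked signature above: the echo-test skip now travels inside an options object, so extra flags can be added later without breaking callers. Call shapes are illustrative:

```javascript
// Rejoin flow: skip the echo test explicitly.
joinMicrophone({ skipEchoTest: true });

// Regular flow: defaults apply ({ skipEchoTest: false }).
joinMicrophone();
```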
@@ -63,6 +63,7 @@ const Connecting = styled.div`
   margin-top: auto;
   margin-bottom: auto;
   font-size: 2rem;
+  text-align: center;
 `;
 
 const ellipsis = keyframes`
@@ -8,36 +8,47 @@ import logger from '/imports/startup/client/logger';
 import AudioStreamVolume from '/imports/ui/components/audio/audio-stream-volume/component';
 import LocalEchoContainer from '/imports/ui/components/audio/local-echo/container';
 import DeviceSelector from '/imports/ui/components/audio/device-selector/component';
-import {
-  getAudioConstraints,
-  doGUM,
-} from '/imports/api/audio/client/bridge/service';
 import MediaStreamUtils from '/imports/utils/media-stream-utils';
-import audioManager from '/imports/ui/services/audio-manager';
+import AudioManager from '/imports/ui/services/audio-manager';
 import Session from '/imports/ui/services/storage/in-memory';
 
 const propTypes = {
   intl: PropTypes.shape({
     formatMessage: PropTypes.func.isRequired,
   }).isRequired,
+  animations: PropTypes.bool,
   changeInputDevice: PropTypes.func.isRequired,
+  liveChangeInputDevice: PropTypes.func.isRequired,
   changeOutputDevice: PropTypes.func.isRequired,
   handleBack: PropTypes.func.isRequired,
   handleConfirmation: PropTypes.func.isRequired,
   handleGUMFailure: PropTypes.func.isRequired,
   isConnecting: PropTypes.bool.isRequired,
+  isConnected: PropTypes.bool.isRequired,
+  isMuted: PropTypes.bool.isRequired,
+  toggleMuteMicrophoneSystem: PropTypes.func.isRequired,
   inputDeviceId: PropTypes.string.isRequired,
   outputDeviceId: PropTypes.string.isRequired,
   produceStreams: PropTypes.bool,
   withEcho: PropTypes.bool,
   withVolumeMeter: PropTypes.bool,
   notify: PropTypes.func.isRequired,
+  unmuteOnExit: PropTypes.bool,
+  doGUM: PropTypes.func.isRequired,
+  getAudioConstraints: PropTypes.func.isRequired,
+  checkMicrophonePermission: PropTypes.func.isRequired,
+  supportsTransparentListenOnly: PropTypes.bool.isRequired,
+  toggleVoice: PropTypes.func.isRequired,
+  permissionStatus: PropTypes.string,
 };
 
 const defaultProps = {
+  animations: true,
   produceStreams: false,
   withEcho: false,
   withVolumeMeter: false,
+  unmuteOnExit: false,
+  permissionStatus: null,
 };
 
 const intlMessages = defineMessages({
@@ -45,10 +56,6 @@ const intlMessages = defineMessages({
     id: 'app.audio.backLabel',
     description: 'audio settings back button label',
   },
-  descriptionLabel: {
-    id: 'app.audio.audioSettings.descriptionLabel',
-    description: 'audio settings description label',
-  },
   micSourceLabel: {
     id: 'app.audio.audioSettings.microphoneSourceLabel',
     description: 'Label for mic source',
@@ -69,17 +76,36 @@ const intlMessages = defineMessages({
     id: 'app.audioNotification.deviceChangeFailed',
     description: 'Device change failed',
   },
+  confirmLabel: {
+    id: 'app.audio.audioSettings.confirmLabel',
+    description: 'Audio settings confirmation button label',
+  },
+  cancelLabel: {
+    id: 'app.audio.audioSettings.cancelLabel',
+    description: 'Audio settings cancel button label',
+  },
+  findingDevicesTitle: {
+    id: 'app.audio.audioSettings.findingDevicesTitle',
+    description: 'Message for finding audio devices',
+  },
 });
 
 class AudioSettings extends React.Component {
   constructor(props) {
     super(props);
 
-    const { inputDeviceId, outputDeviceId } = props;
+    const {
+      inputDeviceId,
+      outputDeviceId,
+      unmuteOnExit,
+    } = props;
 
     this.handleInputChange = this.handleInputChange.bind(this);
     this.handleOutputChange = this.handleOutputChange.bind(this);
     this.handleConfirmationClick = this.handleConfirmationClick.bind(this);
+    this.handleCancelClick = this.handleCancelClick.bind(this);
+    this.unmuteOnExit = this.unmuteOnExit.bind(this);
+    this.updateDeviceList = this.updateDeviceList.bind(this);
 
     this.state = {
       inputDeviceId,
@@ -88,32 +114,80 @@ class AudioSettings extends React.Component {
       // blocked until at least one stream is generated
       producingStreams: props.produceStreams,
       stream: null,
+      unmuteOnExit,
+      audioInputDevices: [],
+      audioOutputDevices: [],
+      findingDevices: true,
     };
 
     this._isMounted = false;
   }
 
   componentDidMount() {
-    const { inputDeviceId, outputDeviceId } = this.state;
+    const {
+      inputDeviceId,
+      outputDeviceId,
+    } = this.state;
+    const {
+      isConnected,
+      isMuted,
+      toggleMuteMicrophoneSystem,
+      checkMicrophonePermission,
+      toggleVoice,
+      permissionStatus,
+    } = this.props;
+
     Session.setItem('inEchoTest', true);
     this._isMounted = true;
     // Guarantee initial in/out devices are initialized on all ends
+    AudioManager.isEchoTest = true;
+    checkMicrophonePermission({ gumOnPrompt: true, permissionStatus })
+      .then(this.updateDeviceList)
+      .then(() => {
+        if (!this._isMounted) return;
+
+        navigator.mediaDevices.addEventListener(
+          'devicechange',
+          this.updateDeviceList,
+        );
+        this.setState({ findingDevices: false });
         this.setInputDevice(inputDeviceId);
         this.setOutputDevice(outputDeviceId);
-    audioManager.isEchoTest = true;
+      });
+
+    // If connected and unmuted, we need to mute the audio and revert it
+    // back to the original state on exit.
+    if (isConnected && !isMuted) {
+      toggleMuteMicrophoneSystem(isMuted, toggleVoice);
+      // We only need to revert the mute state if the user is not listen-only
+      if (inputDeviceId !== 'listen-only') this.setState({ unmuteOnExit: true });
+    }
+  }
+
+  componentDidUpdate(prevProps) {
+    const { permissionStatus } = this.props;
+
+    if (prevProps.permissionStatus !== permissionStatus) {
+      this.updateDeviceList();
+    }
   }
 
   componentWillUnmount() {
     const { stream } = this.state;
 
     Session.setItem('inEchoTest', false);
-    this._mounted = false;
+    this._isMounted = false;
 
     if (stream) {
       MediaStreamUtils.stopMediaStreamTracks(stream);
     }
-    audioManager.isEchoTest = false;
+    AudioManager.isEchoTest = false;
+    navigator.mediaDevices.removeEventListener(
+      'devicechange', this.updateDeviceList,
+    );
+
+    this.unmuteOnExit();
   }
 
   handleInputChange(deviceId) {
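The mount sequence above follows the usual MediaDevices pattern: check permission, enumerate devices, then keep the list fresh via `devicechange`. A self-contained sketch of that pattern outside React (handler names are hypothetical):

```javascript
// Sketch only: enumerate audio devices and react to hot-plug events.
const listAudioDevices = async () => {
  const devices = await navigator.mediaDevices.enumerateDevices();

  return {
    inputs: devices.filter((device) => device.kind === 'audioinput'),
    outputs: devices.filter((device) => device.kind === 'audiooutput'),
  };
};

const onDeviceChange = async () => {
  // Labels are only populated once microphone permission has been granted,
  // which is why the component also re-enumerates when permissionStatus changes.
  const { inputs, outputs } = await listAudioDevices();
  console.debug('audio devices changed', inputs.length, outputs.length);
};

navigator.mediaDevices.addEventListener('devicechange', onDeviceChange);
// ...and on teardown:
// navigator.mediaDevices.removeEventListener('devicechange', onDeviceChange);
```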
@@ -125,8 +199,17 @@ class AudioSettings extends React.Component {
   }
 
   handleConfirmationClick() {
-    const { stream } = this.state;
-    const { produceStreams, handleConfirmation } = this.props;
+    const { stream, inputDeviceId } = this.state;
+    const {
+      isConnected,
+      produceStreams,
+      handleConfirmation,
+      liveChangeInputDevice,
+    } = this.props;
+
+    // If connected, we need to use the in-call device change method so that all
+    // components pick up the change and the peer is properly updated.
+    if (isConnected) liveChangeInputDevice(inputDeviceId);
+
     // Stream generation disabled or there isn't any stream: just run the provided callback
     if (!produceStreams || !stream) return handleConfirmation();
@@ -139,25 +222,43 @@ class AudioSettings extends React.Component {
     return handleConfirmation(clonedStream);
   }
 
-  setInputDevice(deviceId) {
-    const { handleGUMFailure, changeInputDevice, produceStreams, intl, notify } = this.props;
-    const { inputDeviceId: currentInputDeviceId } = this.state;
+  handleCancelClick() {
+    const { handleBack } = this.props;
+
+    handleBack();
+  }
+
+  setInputDevice(deviceId) {
+    const {
+      isConnected,
+      handleGUMFailure,
+      changeInputDevice,
+      produceStreams,
+      intl,
+      notify,
+    } = this.props;
+    const { inputDeviceId: currentInputDeviceId } = this.state;
     try {
-      changeInputDevice(deviceId);
+      if (!isConnected) changeInputDevice(deviceId);
 
       // Only generate input streams if they're going to be used with something
       // In this case, the volume meter or local echo test.
       if (produceStreams) {
-        this.generateInputStream(deviceId)
-          .then((stream) => {
+        this.generateInputStream(deviceId).then((stream) => {
           // Extract the deviceId again from the stream to guarantee consistency
           // between stream DID vs chosen DID. That's necessary in scenarios where,
           // eg, there's no default/pre-set deviceId ('') and the browser's
           // default device has been altered by the user (browser default != system's
           // default).
-            const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
-            if (extractedDeviceId && extractedDeviceId !== deviceId)
+          let extractedDeviceId = deviceId;
+
+          if (stream) {
+            extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio');
+
+            if (extractedDeviceId !== deviceId && !isConnected) {
               changeInputDevice(extractedDeviceId);
+            }
+          }
+
           // Component unmounted after gUM resolution -> skip echo rendering
           if (!this._isMounted) return;
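The deviceId re-extraction above exists because the device the browser actually opened can differ from the one requested (for example, an empty/default deviceId). A sketch of how such an extraction helper can work; the real implementation lives in MediaStreamUtils and may differ:

```javascript
// Sketch only: read the effective deviceId from the first track's settings.
const extractDeviceIdFromStream = (stream, kind = 'audio') => {
  if (!stream) return null;

  const tracks = kind === 'audio' ? stream.getAudioTracks() : stream.getVideoTracks();
  if (tracks.length === 0) return null;

  return tracks[0].getSettings().deviceId || null;
};
```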
@@ -167,19 +268,15 @@ class AudioSettings extends React.Component {
             stream,
             producingStreams: false,
           });
-        })
-          .catch((error) => {
-            logger.warn(
-              {
+        }).catch((error) => {
+          logger.warn({
             logCode: 'audiosettings_gum_failed',
             extraInfo: {
               deviceId,
               errorMessage: error.message,
               errorName: error.name,
             },
-              },
-              `Audio settings gUM failed: ${error.name}`
-            );
+          }, `Audio settings gUM failed: ${error.name}`);
           handleGUMFailure(error);
         });
       } else {
@@ -198,7 +295,7 @@ class AudioSettings extends React.Component {
             newDeviceId: deviceId,
           },
         },
-        `Audio settings: error changing input device - {${error.name}: ${error.message}}`
+        `Audio settings: error changing input device - {${error.name}: ${error.message}}`,
       );
       notify(intl.formatMessage(intlMessages.deviceChangeFailed), true);
     }
@@ -233,7 +330,29 @@ class AudioSettings extends React.Component {
     });
   }
 
+  updateDeviceList() {
+    return navigator.mediaDevices.enumerateDevices()
+      .then((devices) => {
+        const audioInputDevices = devices.filter((i) => i.kind === 'audioinput');
+        const audioOutputDevices = devices.filter((i) => i.kind === 'audiooutput');
+
+        this.setState({
+          audioInputDevices,
+          audioOutputDevices,
+        });
+      });
+  }
+
+  unmuteOnExit() {
+    const { toggleMuteMicrophoneSystem, toggleVoice } = this.props;
+    const { unmuteOnExit } = this.state;
+
+    // Unmutes microphone if flagged to do so
+    if (unmuteOnExit) toggleMuteMicrophoneSystem(true, toggleVoice);
+  }
+
   generateInputStream(inputDeviceId) {
+    const { doGUM, getAudioConstraints } = this.props;
     const { stream } = this.state;
 
     if (inputDeviceId && stream) {
@@ -244,6 +363,8 @@ class AudioSettings extends React.Component {
       MediaStreamUtils.stopMediaStreamTracks(stream);
     }
 
+    if (inputDeviceId === 'listen-only') return Promise.resolve(null);
+
     const constraints = {
       audio: getAudioConstraints({ deviceId: inputDeviceId }),
     };
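Since the synthetic 'listen-only' device short-circuits to a null stream above, getUserMedia only runs for real input devices. A hedged sketch of the constraint/acquisition shape implied by `getAudioConstraints` and `doGUM` (the actual bridge service may build different constraints):

```javascript
// Sketch only: device-scoped audio constraints plus a thin gUM wrapper.
const getAudioConstraints = ({ deviceId } = {}) => ({
  ...(deviceId ? { deviceId: { exact: deviceId } } : {}),
  echoCancellation: true,
  noiseSuppression: true,
});

const doGUM = (constraints) => navigator.mediaDevices.getUserMedia(constraints);

const acquireInputStream = (inputDeviceId) => {
  // Mirror the short-circuit above: no gUM for the synthetic device.
  if (inputDeviceId === 'listen-only') return Promise.resolve(null);

  return doGUM({ audio: getAudioConstraints({ deviceId: inputDeviceId }) });
};
```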
@@ -285,9 +406,16 @@ class AudioSettings extends React.Component {
   }
 
   renderDeviceSelectors() {
-    const { inputDeviceId, outputDeviceId, producingStreams } = this.state;
-    const { intl, isConnecting } = this.props;
-    const blocked = producingStreams || isConnecting;
+    const {
+      inputDeviceId,
+      outputDeviceId,
+      producingStreams,
+      audioInputDevices,
+      audioOutputDevices,
+      findingDevices,
+    } = this.state;
+    const { intl, isConnecting, supportsTransparentListenOnly } = this.props;
+    const blocked = producingStreams || isConnecting || findingDevices;
 
     return (
       <Styled.Row>
@@ -298,10 +426,12 @@ class AudioSettings extends React.Component {
             <DeviceSelector
               id="inputDeviceSelector"
               deviceId={inputDeviceId}
+              devices={audioInputDevices}
               kind="audioinput"
               blocked={blocked}
               onChange={this.handleInputChange}
               intl={intl}
+              supportsTransparentListenOnly={supportsTransparentListenOnly}
             />
           </Styled.LabelSmall>
         </Styled.FormElement>
@@ -313,10 +443,12 @@ class AudioSettings extends React.Component {
             <DeviceSelector
               id="outputDeviceSelector"
               deviceId={outputDeviceId}
+              devices={audioOutputDevices}
               kind="audiooutput"
               blocked={blocked}
               onChange={this.handleOutputChange}
               intl={intl}
+              supportsTransparentListenOnly={supportsTransparentListenOnly}
             />
           </Styled.LabelSmall>
         </Styled.FormElement>
@@ -326,32 +458,46 @@ class AudioSettings extends React.Component {
   }
 
   render() {
-    const { isConnecting, intl, handleBack } = this.props;
-    const { producingStreams } = this.state;
+    const {
+      findingDevices,
+      producingStreams,
+    } = this.state;
+    const {
+      isConnecting,
+      isConnected,
+      intl,
+      animations,
+    } = this.props;
 
     return (
       <Styled.FormWrapper data-test="audioSettingsModal">
         <Styled.Form>
-          <Styled.Row>
-            <Styled.AudioNote>{intl.formatMessage(intlMessages.descriptionLabel)}</Styled.AudioNote>
-          </Styled.Row>
           {this.renderDeviceSelectors()}
           {this.renderOutputTest()}
           {this.renderVolumeMeter()}
         </Styled.Form>
+        {findingDevices && (
+          <Styled.AudioNote>
+            <span>{intl.formatMessage(intlMessages.findingDevicesTitle)}</span>
+            <Styled.FetchingAnimation animations={animations} />
+          </Styled.AudioNote>
+        )}
         <Styled.EnterAudio>
           <Styled.BackButton
-            label={intl.formatMessage(intlMessages.backLabel)}
+            label={isConnected
+              ? intl.formatMessage(intlMessages.cancelLabel)
+              : intl.formatMessage(intlMessages.backLabel)}
             color="secondary"
-            onClick={handleBack}
+            onClick={this.handleCancelClick}
             disabled={isConnecting}
           />
           <Button
             data-test="joinEchoTestButton"
             size="md"
             color="primary"
-            label={intl.formatMessage(intlMessages.retryLabel)}
+            label={isConnected
+              ? intl.formatMessage(intlMessages.confirmLabel)
+              : intl.formatMessage(intlMessages.retryLabel)}
             onClick={this.handleConfirmationClick}
             disabled={isConnecting || producingStreams}
           />
@@ -1,9 +1,9 @@
-import styled from 'styled-components';
+import styled, { css, keyframes } from 'styled-components';
 import Button from '/imports/ui/components/common/button/component';
 import { smallOnly } from '/imports/ui/stylesheets/styled-components/breakpoints';
 
 const FormWrapper = styled.div`
-  min-width: 0;
+  min-width: 100%;
 `;
 
 const Form = styled.div`
@@ -26,6 +26,10 @@ const EnterAudio = styled.div`
 `;
 
 const AudioNote = styled.div`
+  display: flex;
+  flex-flow: column;
+  text-align: center;
+  justify-content: center;
   @media ${smallOnly} {
     font-size: 0.8rem;
   }
@@ -163,6 +167,31 @@ const BackButton = styled(Button)`
   }
 `;
 
+const ellipsis = keyframes`
+  to {
+    width: 1.5em;
+  }
+`;
+
+const FetchingAnimation = styled.span`
+  margin: auto;
+  display: inline-block;
+  width: 1.5em;
+
+  &:after {
+    overflow: hidden;
+    display: inline-block;
+    vertical-align: bottom;
+    content: "\\2026"; /* ascii code for the ellipsis character */
+    width: 0;
+    margin-left: 0.25em;
+
+    ${({ animations }) => animations && css`
+      animation: ${ellipsis} steps(4, end) 900ms infinite;
+    `}
+  }
+`;
+
 export default {
   FormWrapper,
   Form,
@@ -175,4 +204,5 @@ export default {
   LabelSmallFullWidth,
   SpacedLeftCol,
   BackButton,
+  FetchingAnimation,
 };
@@ -182,7 +182,7 @@ const AudioContainer = (props) => {
     if (Service.isConnected()) return;
 
     if (userSelectedMicrophone) {
-      joinMicrophone(true);
+      joinMicrophone({ skipEchoTest: true });
       return;
     }
 
@@ -1,7 +1,5 @@
 import React, { Component } from 'react';
 import PropTypes from 'prop-types';
-import logger from '/imports/startup/client/logger';
-import browserInfo from '/imports/utils/browserInfo';
 import {
   defineMessages,
 } from 'react-intl';
@@ -16,12 +14,18 @@ const propTypes = {
   onChange: PropTypes.func.isRequired,
   blocked: PropTypes.bool,
   deviceId: PropTypes.string,
+  devices: PropTypes.arrayOf(PropTypes.shape({
+    deviceId: PropTypes.string,
+    label: PropTypes.string,
+  })),
+  supportsTransparentListenOnly: PropTypes.bool.isRequired,
 };
 
 const defaultProps = {
   kind: 'audioinput',
   blocked: false,
   deviceId: '',
+  devices: [],
 };
 
 const intlMessages = defineMessages({
@@ -45,6 +49,10 @@ const intlMessages = defineMessages({
     id: 'app.audio.noDeviceFound',
     description: 'No audio device found',
   },
+  noMicListenOnlyLabel: {
+    id: 'app.audio.audioSettings.noMicListenOnly',
+    description: 'No microphone, listen only mode label',
+  },
 });
 
 class DeviceSelector extends Component {
@@ -52,52 +60,16 @@ class DeviceSelector extends Component {
     super(props);
 
     this.handleSelectChange = this.handleSelectChange.bind(this);
-
-    this.state = {
-      devices: [],
-      options: [],
-    };
-  }
-
-  componentDidMount() {
-    const { blocked } = this.props;
-
-    if (!blocked) this.enumerate();
-  }
-
-  componentDidUpdate(prevProps) {
-    const { blocked } = this.props;
-
-    if (prevProps.blocked === true && blocked === false) this.enumerate();
-  }
-
-  handleEnumerateDevicesSuccess(deviceInfos) {
-    const { kind } = this.props;
-
-    const devices = deviceInfos.filter((d) => d.kind === kind);
-    logger.info({
-      logCode: 'audiodeviceselector_component_enumeratedevices_success',
-      extraInfo: {
-        deviceKind: kind,
-        devices,
-      },
-    }, 'Success on enumerateDevices() for audio');
-    this.setState({
-      devices,
-      options: devices.map((d, i) => ({
-        label: d.label || this.getFallbackLabel(i),
-        value: d.deviceId,
-        key: uniqueId('device-option-'),
-      })),
-    });
   }
 
   handleSelectChange(event) {
     const { value } = event.target;
-    const { onChange } = this.props;
-    const { devices } = this.state;
-    const selectedDevice = devices.find((d) => d.deviceId === value);
-    onChange(selectedDevice.deviceId, selectedDevice, event);
+    const { devices, onChange } = this.props;
+    const selectedDeviceId = (value === 'listen-only')
+      ? value
+      : devices.find((d) => d.deviceId === value)?.deviceId;
+
+    onChange(selectedDeviceId);
   }
 
   getFallbackLabel(index) {
@@ -107,28 +79,29 @@ class DeviceSelector extends Component {
     return intl.formatMessage(label, { 0: index });
   }
 
-  enumerate() {
-    const { kind } = this.props;
-
-    navigator.mediaDevices
-      .enumerateDevices()
-      .then(this.handleEnumerateDevicesSuccess.bind(this))
-      .catch(() => {
-        logger.error({
-          logCode: 'audiodeviceselector_component_enumeratedevices_error',
-          extraInfo: {
-            deviceKind: kind,
-          },
-        }, 'Error on enumerateDevices(): ');
-      });
-  }
-
   render() {
     const {
-      intl, kind, blocked, deviceId,
+      intl,
+      kind,
+      blocked,
+      deviceId,
+      devices,
+      supportsTransparentListenOnly,
     } = this.props;
 
-    const { options } = this.state;
+    const options = devices.map((d, i) => ({
+      label: d.label || this.getFallbackLabel(i),
+      value: d.deviceId,
+      key: uniqueId('device-option-'),
+    }));
+
+    if (kind === 'audioinput' && supportsTransparentListenOnly && !blocked) {
+      options.push({
+        label: intl.formatMessage(intlMessages.noMicListenOnlyLabel),
+        value: 'listen-only',
+        key: uniqueId('device-option-'),
+      });
+    }
+
     let notFoundOption;
 
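With the change above the selector is fully controlled by its `devices` prop, and "no microphone" becomes just one more option whenever transparent listen only is supported. A trimmed sketch of that option-building step (labels are placeholders):

```javascript
// Sketch only: map devices to select options and append the synthetic entry.
const buildInputOptions = (devices, supportsTransparentListenOnly) => {
  const options = devices.map((device, index) => ({
    label: device.label || `Microphone ${index + 1}`,
    value: device.deviceId,
  }));

  if (supportsTransparentListenOnly) {
    options.push({ label: 'No microphone (listen only)', value: 'listen-only' });
  }

  return options;
};
```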
@@ -8,6 +8,7 @@ const propTypes = {
     formatMessage: PropTypes.func.isRequired,
   }).isRequired,
   isListenOnly: PropTypes.bool.isRequired,
+  isConnected: PropTypes.bool.isRequired,
   audioErr: PropTypes.shape({
     code: PropTypes.number,
     message: PropTypes.string,
@@ -18,6 +19,8 @@ const propTypes = {
     }),
   }).isRequired,
   handleBack: PropTypes.func.isRequired,
+  handleRetryMic: PropTypes.func.isRequired,
+  handleJoinListenOnly: PropTypes.func.isRequired,
   troubleshootingLink: PropTypes.string,
 };
 
@@ -30,6 +33,10 @@ const intlMessages = defineMessages({
     id: 'app.audioModal.helpSubtitleMic',
     description: 'Text description for the audio help subtitle (microphones)',
   },
+  helpSubtitlePermission: {
+    id: 'app.audioModal.helpSubtitlePermission',
+    description: 'Text description for the audio help subtitle (permission)',
+  },
   helpSubtitleGeneric: {
     id: 'app.audioModal.helpSubtitleGeneric',
     description: 'Text description for the audio help subtitle (generic)',
@@ -46,10 +53,18 @@ const intlMessages = defineMessages({
     id: 'app.audioModal.helpPermissionStep3',
     description: 'Text description for the audio permission help step 3',
   },
-  retryLabel: {
-    id: 'app.audio.audioSettings.retryLabel',
+  backLabel: {
+    id: 'app.audio.backLabel',
+    description: 'audio settings back button label',
+  },
+  retryMicLabel: {
+    id: 'app.audio.audioSettings.retryMicLabel',
     description: 'audio settings retry button label',
   },
+  listenOnlyLabel: {
+    id: 'app.audioModal.listenOnlyLabel',
+    description: 'audio settings listen only button label',
+  },
   noSSL: {
     id: 'app.audioModal.help.noSSL',
     description: 'Text description for domain not using https',
@@ -74,7 +89,12 @@ const intlMessages = defineMessages({
 
 class Help extends Component {
   getSubtitle() {
-    const { intl, isListenOnly } = this.props;
+    const { audioErr, intl, isListenOnly } = this.props;
+    const { MIC_ERROR } = audioErr;
+
+    if (audioErr.code === MIC_ERROR.NO_PERMISSION) {
+      return intl.formatMessage(intlMessages.helpSubtitlePermission);
+    }
 
     return !isListenOnly
       ? intl.formatMessage(intlMessages.helpSubtitleMic)
@@ -155,7 +175,10 @@ class Help extends Component {
   render() {
     const {
       intl,
+      isConnected,
       handleBack,
+      handleRetryMic,
+      handleJoinListenOnly,
       troubleshootingLink,
     } = this.props;
 
@@ -174,11 +197,31 @@ class Help extends Component {
           </Styled.Text>
         )}
         <Styled.EnterAudio>
-          <Styled.RetryButton
-            label={intl.formatMessage(intlMessages.retryLabel)}
+          {!isConnected ? (
+            <Styled.HelpActionButton
+              label={intl.formatMessage(intlMessages.listenOnlyLabel)}
+              data-test="helpListenOnlyBtn"
+              icon="listen"
+              size="md"
+              color="secondary"
+              onClick={handleJoinListenOnly}
+            />
+          ) : (
+            <Styled.HelpActionButton
+              label={intl.formatMessage(intlMessages.backLabel)}
+              data-test="helpBackBtn"
+              color="secondary"
+              size="md"
+              onClick={handleBack}
+            />
+          )}
+          <Styled.HelpActionButton
+            label={intl.formatMessage(intlMessages.retryMicLabel)}
+            data-test="helpRetryMicBtn"
+            icon="unmute"
             size="md"
             color="primary"
-            onClick={handleBack}
+            onClick={handleRetryMic}
           />
         </Styled.EnterAudio>
       </Styled.Help>
@@ -24,11 +24,11 @@ const Text = styled.div`
 
 const EnterAudio = styled.div`
   display: flex;
-  justify-content: flex-end;
+  justify-content: center;
   margin-top: ${jumboPaddingY};
 `;
 
-const RetryButton = styled(Button)`
+const HelpActionButton = styled(Button)`
   margin-right: 0.5rem;
   margin-left: inherit;
 
@@ -72,7 +72,7 @@ export default {
   Help,
   Text,
   EnterAudio,
-  RetryButton,
+  HelpActionButton,
   TroubleshootLink,
   UnknownError,
   PermissionHelpSteps,
@@ -3,7 +3,6 @@ import PropTypes from 'prop-types';
 import { defineMessages, injectIntl } from 'react-intl';
 import Styled from './styles';
 import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
-import Service from '/imports/ui/components/audio/local-echo/service';
 
 const propTypes = {
   intl: PropTypes.shape({
@@ -14,6 +13,10 @@ const propTypes = {
     id: PropTypes.string,
   }),
   initialHearingState: PropTypes.bool,
+  playEchoStream: PropTypes.func.isRequired,
+  deattachEchoStream: PropTypes.func.isRequired,
+  shouldUseRTCLoopback: PropTypes.func.isRequired,
+  createAudioRTCLoopback: PropTypes.func.isRequired,
 };
 
 const intlMessages = defineMessages({
@@ -31,6 +34,10 @@ const LocalEcho = ({
   intl,
   stream = null,
   initialHearingState = false,
+  playEchoStream,
+  deattachEchoStream,
+  shouldUseRTCLoopback,
+  createAudioRTCLoopback,
 }) => {
   const loopbackAgent = useRef(null);
   const [hearing, setHearing] = useState(initialHearingState);
@@ -41,20 +48,20 @@ const LocalEcho = ({
 
   const applyHearingState = (_stream) => {
     if (hearing) {
-      Service.playEchoStream(_stream, loopbackAgent.current);
+      playEchoStream(_stream, loopbackAgent.current);
     } else {
-      Service.deattachEchoStream();
+      deattachEchoStream();
     }
   };
 
   const cleanup = () => {
     if (loopbackAgent.current) loopbackAgent.current.stop();
-    Service.deattachEchoStream();
+    deattachEchoStream();
   };
 
   useEffect(() => {
-    if (Service.useRTCLoopback()) {
-      loopbackAgent.current = Service.createAudioRTCLoopback();
+    if (shouldUseRTCLoopback()) {
+      loopbackAgent.current = createAudioRTCLoopback();
     }
     return cleanup;
   }, []);
@@ -1,10 +1,24 @@
 import React from 'react';
+import AudioService from '/imports/ui/components/audio/service';
+import LocalEchoService from '/imports/ui/components/audio/local-echo/service';
 import LocalEcho from '/imports/ui/components/audio/local-echo/component';
 
 const LocalEchoContainer = (props) => {
-  const { initialHearingState } = window.meetingClientSettings.public.media.localEchoTest;
+  const {
+    initialHearingState: settingsHearingState,
+  } = window.meetingClientSettings.public.media.localEchoTest;
+  const initialHearingState = settingsHearingState && !AudioService.isConnected();
 
-  return <LocalEcho {...props} initialHearingState={initialHearingState} />;
+  return (
+    <LocalEcho
+      {...props}
+      initialHearingState={initialHearingState}
+      playEchoStream={LocalEchoService.playEchoStream}
+      deattachEchoStream={LocalEchoService.deattachEchoStream}
+      shouldUseRTCLoopback={LocalEchoService.shouldUseRTCLoopback}
+      createAudioRTCLoopback={LocalEchoService.createAudioRTCLoopback}
+    />
+  );
 };
 
 export default LocalEchoContainer;
@@ -1,13 +1,15 @@
 import LocalPCLoopback from '/imports/ui/services/webrtc-base/local-pc-loopback';
 import browserInfo from '/imports/utils/browserInfo';
 
+const LOCAL_MEDIA_TAG = '#local-media';
+
 let audioContext = null;
 let sourceContext = null;
 let contextDestination = null;
 let stubAudioElement = null;
 let delayNode = null;
 
-const useRTCLoopback = () => {
+const shouldUseRTCLoopback = () => {
   const USE_RTC_LOOPBACK_CHR = window.meetingClientSettings.public.media.localEchoTest.useRtcLoopbackInChromium;
 
   return (browserInfo.isChrome || browserInfo.isEdge) && USE_RTC_LOOPBACK_CHR;
@@ -44,7 +46,6 @@ const cleanupDelayNode = () => {
 };
 
 const addDelayNode = (stream) => {
-  const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
   const {
     delayTime = 0.5,
     maxDelayTime = 2,
@@ -52,7 +53,7 @@ const addDelayNode = (stream) => {
 
   if (stream) {
     if (delayNode || audioContext || sourceContext) cleanupDelayNode();
-    const audioElement = document.querySelector(MEDIA_TAG);
+    const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
     // Workaround: attach the stream to a muted stub audio element to be able to play it in
     // Chromium-based browsers. See https://bugs.chromium.org/p/chromium/issues/detail?id=933677
     stubAudioElement = new Audio();
@@ -70,18 +71,17 @@ const addDelayNode = (stream) => {
     sourceContext.connect(delayNode);
     delayNode.connect(contextDestination);
     delayNode.delayTime.setValueAtTime(delayTime, audioContext.currentTime);
-    // Play the stream with the delay in the default audio element (remote-media)
+    // Play the stream with the delay in the default audio element (local-media)
     audioElement.srcObject = contextDestination.stream;
   }
 };
 
 const deattachEchoStream = () => {
-  const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
   const {
     enabled: DELAY_ENABLED = true,
   } = window.meetingClientSettings.public.media.localEchoTest.delay;
 
-  const audioElement = document.querySelector(MEDIA_TAG);
+  const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
 
   if (DELAY_ENABLED) {
     audioElement.muted = false;
@@ -93,7 +93,6 @@ const deattachEchoStream = () => {
 };
 
 const playEchoStream = async (stream, loopbackAgent = null) => {
-  const MEDIA_TAG = window.meetingClientSettings.public.media.mediaTag;
   const {
     enabled: DELAY_ENABLED = true,
   } = window.meetingClientSettings.public.media.localEchoTest.delay;
@@ -116,9 +115,9 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
   if (DELAY_ENABLED) {
     addDelayNode(streamToPlay);
   } else {
-    // No delay: play the stream in the default audio element (remote-media),
+    // No delay: play the stream in the default audio element (local-media),
     // no strings attached.
-    const audioElement = document.querySelector(MEDIA_TAG);
+    const audioElement = document.querySelector(LOCAL_MEDIA_TAG);
     audioElement.srcObject = streamToPlay;
     audioElement.muted = false;
     audioElement.play();
@ -127,7 +126,7 @@ const playEchoStream = async (stream, loopbackAgent = null) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
useRTCLoopback,
|
shouldUseRTCLoopback,
|
||||||
createAudioRTCLoopback,
|
createAudioRTCLoopback,
|
||||||
deattachEchoStream,
|
deattachEchoStream,
|
||||||
playEchoStream,
|
playEchoStream,
|
||||||
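
For reference, a rough usage sketch of the reworked local echo service (not part of this changeset): it assumes a dedicated local media element exists for LOCAL_MEDIA_TAG to resolve to, and `localEchoService` stands in for this module's default export.

    // Hedged sketch: driving the reworked local echo test through its public API.
    // The element id below is an assumption, not taken from this diff.
    const localAudioTag = document.createElement('audio');
    localAudioTag.id = 'local-media';
    document.body.appendChild(localAudioTag);

    navigator.mediaDevices.getUserMedia({ audio: true }).then(async (stream) => {
      // Chromium/Edge route the stream through an RTC loopback peer first.
      const loopbackAgent = localEchoService.shouldUseRTCLoopback()
        ? localEchoService.createAudioRTCLoopback()
        : null;

      await localEchoService.playEchoStream(stream, loopbackAgent);
      // ...once the user confirms they can hear themselves:
      localEchoService.deattachEchoStream();
    });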
@@ -1,52 +0,0 @@
-import React from 'react';
-import { injectIntl, defineMessages } from 'react-intl';
-import PropTypes from 'prop-types';
-import Styled from './styles';
-import browserInfo from '/imports/utils/browserInfo';
-import { getSettingsSingletonInstance } from '/imports/ui/services/settings';
-
-const propTypes = {
-  intl: PropTypes.object.isRequired,
-  closeModal: PropTypes.func.isRequired,
-};
-
-const intlMessages = defineMessages({
-  title: {
-    id: 'app.audio.permissionsOverlay.title',
-    description: 'Title for the overlay',
-  },
-  hint: {
-    id: 'app.audio.permissionsOverlay.hint',
-    description: 'Hint for the overlay',
-  },
-});
-
-const { isChrome, isFirefox, isSafari } = browserInfo;
-
-const PermissionsOverlay = ({ intl, closeModal }) => {
-  const Settings = getSettingsSingletonInstance();
-  const { animations } = Settings.application;
-
-  return (
-    <Styled.PermissionsOverlayModal
-      overlayClassName={"permissionsOverlay"}
-      onRequestClose={closeModal}
-      hideBorder
-      isFirefox={isFirefox}
-      isChrome={isChrome}
-      isSafari={isSafari}
-      animations={animations}
-    >
-      <Styled.Content>
-        {intl.formatMessage(intlMessages.title)}
-        <small>
-          {intl.formatMessage(intlMessages.hint)}
-        </small>
-      </Styled.Content>
-    </Styled.PermissionsOverlayModal>
-  )
-};
-
-PermissionsOverlay.propTypes = propTypes;
-
-export default injectIntl(PermissionsOverlay);
@@ -1,108 +0,0 @@
-import styled, { css, keyframes } from 'styled-components';
-import ModalSimple from '/imports/ui/components/common/modal/simple/component';
-import { colorBlack } from '/imports/ui/stylesheets/styled-components/palette';
-import { jumboPaddingX } from '/imports/ui/stylesheets/styled-components/general';
-
-const bounce = keyframes`
-  0%,
-  20%,
-  50%,
-  80%,
-  100% {
-    -ms-transform: translateY(0);
-    transform: translateY(0);
-  }
-  40% {
-    -ms-transform: translateY(10px);
-    transform: translateY(10px);
-  }
-  60% {
-    -ms-transform: translateY(5px);
-    transform: translateY(5px);
-  }
-`;
-
-const PermissionsOverlayModal = styled(ModalSimple)`
-  ${({ isFirefox }) => isFirefox && `
-    top: 8em;
-    left: 22em;
-    right: auto;
-
-    [dir="rtl"] & {
-      right: none;
-      left: none;
-      top: 15rem;
-    }
-  `}
-
-  ${({ isChrome }) => isChrome && `
-    top: 5.5em;
-    left: 18em;
-    right: auto;
-
-    [dir="rtl"] & {
-      right: none;
-      left: none;
-      top: 15rem;
-    }
-  `}
-
-  ${({ isSafari }) => isSafari && `
-    top: 150px;
-    left:0;
-    right:0;
-    margin-left: auto;
-    margin-right: auto;
-  `}
-
-  position: absolute;
-  background: none;
-  box-shadow: none;
-  color: #fff;
-  font-size: 16px;
-  font-weight: 400;
-  padding: 0 0 0 ${jumboPaddingX};
-  line-height: 18px;
-  width: 340px;
-
-  [dir="rtl"] & {
-    padding: 0 ${jumboPaddingX} 0 0;
-  }
-
-  small {
-    display: block;
-    font-size: 12px;
-    line-height: 14px;
-    margin-top: 3px;
-    opacity: .6;
-  }
-
-  &:after {
-    top: -65px;
-    left: -20px;
-    right: auto;
-    font-size: 20px;
-    display: block;
-    font-family: 'bbb-icons';
-    content: "\\E906";
-    position: relative;
-
-    [dir="rtl"] & {
-      left: auto;
-      right: -20px;
-    }
-
-    ${({ animations }) => animations && css`
-      animation: ${bounce} 2s infinite;
-    `}
-  }
-`;
-
-const Content = styled.div`
-  color: ${colorBlack};
-`;
-
-export default {
-  PermissionsOverlayModal,
-  Content,
-};
@@ -3,13 +3,21 @@ import AudioManager from '/imports/ui/services/audio-manager';
 import logger from '/imports/startup/client/logger';
 import Storage from '../../services/storage/session';
 import { useReactiveVar } from '@apollo/client';
+import {
+  getAudioConstraints,
+  doGUM,
+} from '/imports/api/audio/client/bridge/service';
+import {
+  toggleMuteMicrophone,
+  toggleMuteMicrophoneSystem,
+} from '/imports/ui/components/audio/audio-graphql/audio-controls/input-stream-live-selector/service';

 const MUTED_KEY = 'muted';

 const recoverMicState = (toggleVoice) => {
   const muted = Storage.getItem(MUTED_KEY);

-  if ((muted === undefined) || (muted === null)) {
+  if ((muted === undefined) || (muted === null) || AudioManager.inputDeviceId === 'listen-only') {
     return;
   }

@@ -65,13 +73,73 @@ const useIsUsingAudio = () => {
   return Boolean(isConnected || isConnecting || isHangingUp || isEchoTest);
 };

+const hasMicrophonePermission = async ({
+  permissionStatus,
+  gumOnPrompt = false,
+}) => {
+  try {
+    let status = permissionStatus;
+
+    // If the browser doesn't support the Permissions API, we can't check
+    // microphone permissions - return null (unknown)
+    if (navigator?.permissions?.query == null) return null;
+
+    if (!status) {
+      ({ state: status } = await navigator.permissions.query({ name: 'microphone' }));
+    }
+
+    switch (status) {
+      case 'denied':
+        return false;
+      case 'prompt':
+        // Prompt without any subsequent action is considered unknown
+        if (!gumOnPrompt) {
+          return null;
+        }
+
+        return doGUM({ audio: getAudioConstraints() }).then((stream) => {
+          stream.getTracks().forEach((track) => {
+            track.stop();
+            stream.removeTrack(track);
+          });
+          return true;
+        }).catch((error) => {
+          if (error.name === 'NotAllowedError') {
+            return false;
+          }
+
+          // Give it the benefit of the doubt. It might be a device mismatch
+          // or something else that's not a permissions issue, so let's try
+          // to proceed. Rollbacks that happen downstream might fix the issue,
+          // otherwise we'll land on the Help screen anyways
+          return null;
+        });
+
+      case 'granted':
+      default:
+        return true;
+    }
+  } catch (error) {
+    logger.error({
+      logCode: 'audio_check_microphone_permission_error',
+      extraInfo: {
+        errorName: error.name,
+        errorMessage: error.message,
+      },
+    }, `Error checking microphone permission: ${error.message}`);
+
+    // Null = could not determine permission status
+    return null;
+  }
+};

 export default {
   init,
   exitAudio: () => AudioManager.exitAudio(),
   forceExitAudio: () => AudioManager.forceExitAudio(),
   transferCall: () => AudioManager.transferCall(),
   joinListenOnly: () => AudioManager.joinListenOnly(),
-  joinMicrophone: () => AudioManager.joinMicrophone(),
+  joinMicrophone: (options) => AudioManager.joinMicrophone(options),
   joinEchoTest: () => AudioManager.joinEchoTest(),
   changeInputDevice: (inputDeviceId) => AudioManager.changeInputDevice(inputDeviceId),
   changeInputStream: (newInputStream) => { AudioManager.inputStream = newInputStream; },
@@ -80,6 +148,8 @@ export default {
     outputDeviceId,
     isLive,
   ) => AudioManager.changeOutputDevice(outputDeviceId, isLive),
+  toggleMuteMicrophone,
+  toggleMuteMicrophoneSystem,
   isConnectedToBreakout: () => {
     const transferStatus = AudioManager.getBreakoutAudioTransferStatus();
     if (transferStatus.status
@@ -95,13 +165,14 @@ export default {
   isUsingAudio: () => AudioManager.isUsingAudio(),
   isConnecting: () => AudioManager.isConnecting,
   isListenOnly: () => AudioManager.isListenOnly,
+  inputDeviceId: () => AudioManager.inputDeviceId,
+  outputDeviceId: () => AudioManager.outputDeviceId,
   isEchoTest: () => AudioManager.isEchoTest,
   isMuted: () => AudioManager.isMuted,
   autoplayBlocked: () => AudioManager.autoplayBlocked,
   handleAllowAutoplay: () => AudioManager.handleAllowAutoplay(),
   playAlertSound: (url) => AudioManager.playAlertSound(url),
-  updateAudioConstraints:
-    (constraints) => AudioManager.updateAudioConstraints(constraints),
+  updateAudioConstraints: (constraints) => AudioManager.updateAudioConstraints(constraints),
   recoverMicState,
   isReconnecting: () => AudioManager.isReconnecting,
   setBreakoutAudioTransferStatus: (status) => AudioManager
@@ -109,6 +180,10 @@ export default {
   getBreakoutAudioTransferStatus: () => AudioManager
     .getBreakoutAudioTransferStatus(),
   getStats: () => AudioManager.getStats(),
+  getAudioConstraints,
+  doGUM,
+  supportsTransparentListenOnly: () => AudioManager.supportsTransparentListenOnly(),
+  hasMicrophonePermission,
   notify: (message, error, icon) => { AudioManager.notify(message, error, icon); },
   useIsUsingAudio,
 };
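
A minimal, illustrative sketch (not taken from this diff) of how the new hasMicrophonePermission helper can gate the join flow; `AudioService` stands in for this module's default export and the fallback orchestration is an assumption.

    // Hedged sketch: pick a join path based on the Permissions API result.
    // null means "unknown" (API unavailable or the user was never prompted).
    async function joinWithPermissionCheck() {
      const allowed = await AudioService.hasMicrophonePermission({
        permissionStatus: null,
        gumOnPrompt: false,
      });

      if (allowed === false) {
        // Explicitly denied: fall back to the 'listen-only' sentinel device so
        // the user still joins audio and can enable a microphone later on.
        AudioService.changeInputDevice('listen-only');
      }

      return AudioService.joinMicrophone();
    }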
@@ -70,6 +70,9 @@ class AudioManager {
       muteHandle: makeVar(null),
       autoplayBlocked: makeVar(false),
       isReconnecting: makeVar(false),
+      bypassGUM: makeVar(false),
+      permissionStatus: makeVar(null),
+      transparentListenOnlySupported: makeVar(false),
     });

     this.failedMediaElements = [];
@@ -79,7 +82,7 @@ class AudioManager {

     this._inputStream = makeVar(null);
     this._inputDeviceId = {
-      value: makeVar(DEFAULT_INPUT_DEVICE_ID),
+      value: makeVar(null),
     };
     this._outputDeviceId = {
       value: makeVar(null),
@@ -90,6 +93,37 @@ class AudioManager {
     window.addEventListener('StopAudioTracks', () => this.forceExitAudio());
   }

+  _trackPermissionStatus() {
+    const handleTrackingError = (error) => {
+      logger.warn({
+        logCode: 'audiomanager_permission_tracking_failed',
+        extraInfo: {
+          errorName: error.name,
+          errorMessage: error.message,
+        },
+      }, `Failed to track microphone permission status: ${error.message}`);
+    };
+
+    if (navigator?.permissions?.query) {
+      navigator.permissions.query({ name: 'microphone' })
+        .then((status) => {
+          // eslint-disable-next-line no-param-reassign
+          status.onchange = () => {
+            logger.debug({
+              logCode: 'audiomanager_permission_status_changed',
+              extraInfo: {
+                newStatus: status.state,
+              },
+            }, `Microphone permission status changed: ${status.state}`);
+            this.permissionStatus = status.state;
+          };
+          this.permissionStatus = status.state;
+        }).catch(handleTrackingError);
+    } else {
+      handleTrackingError(new Error('navigator.permissions.query is not available'));
+    }
+  }
+
   _applyCachedOutputDeviceId() {
     const cachedId = getStoredAudioOutputDeviceId();

@@ -145,17 +179,26 @@ class AudioManager {
     return this._outputDeviceId.value();
   }

+  shouldBypassGUM() {
+    return this.supportsTransparentListenOnly() && this.inputDeviceId === 'listen-only';
+  }
+
+  supportsTransparentListenOnly() {
+    return this.listenOnlyBridge?.supportsTransparentListenOnly()
+      && this.fullAudioBridge?.supportsTransparentListenOnly();
+  }
+
   async init(userData, audioEventHandler) {
     this.inputDeviceId = getStoredAudioInputDeviceId() || DEFAULT_INPUT_DEVICE_ID;
     this.outputDeviceId = getCurrentAudioSinkId();

     this._applyCachedOutputDeviceId();
+    this._trackPermissionStatus();
     this.loadBridges(userData);
     this.userData = userData;
     this.initialized = true;
     this.audioEventHandler = audioEventHandler;
     await this.loadBridges(userData);
+    this.transparentListenOnlySupported = this.supportsTransparentListenOnly();
   }

   /**
@@ -280,6 +323,7 @@ class AudioManager {
         isListenOnly: false,
         extension: null,
         inputStream: this.inputStream,
+        bypassGUM: this.shouldBypassGUM(),
       };
       return this.joinAudio(callOptions, this.callStateCallback.bind(this));
     });
@@ -309,6 +353,7 @@ class AudioManager {
       extension: ECHO_TEST_NUMBER,
       inputStream: this.inputStream,
       validIceCandidates,
+      bypassGUM: this.shouldBypassGUM(),
     };
     logger.info(
       {
@@ -369,7 +414,6 @@ class AudioManager {
       }

       this.isConnecting = false;
-      this.isWaitingPermissions = false;

       throw errorPayload;
     });
@@ -415,17 +459,7 @@ class AudioManager {
   }

   forceExitAudio() {
-    this.notifyAudioExit();
-    this.isConnected = false;
-    this.isConnecting = false;
-    this.isHangingUp = false;
-
-    if (this.inputStream) {
-      this.inputStream.getTracks().forEach((track) => track.stop());
-      this.inputStream = null;
-    }
-
-    window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
-
+    this.onAudioExit();
     return this.bridge && this.bridge.exitAudio();
   }
@@ -520,7 +554,7 @@ class AudioManager {
     if (this.inputStream) {
       const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(
         this.inputStream,
-        'audio'
+        'audio',
       );
       if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
         this.changeInputDevice(extractedDeviceId);
@@ -639,22 +673,17 @@ class AudioManager {
   }

   changeInputDevice(deviceId) {
-    if (typeof deviceId !== 'string') throw new TypeError('Invalid inputDeviceId');
-
     if (deviceId === this.inputDeviceId) return this.inputDeviceId;

     const currentDeviceId = this.inputDeviceId ?? 'none';
     this.inputDeviceId = deviceId;
-    logger.debug(
-      {
+    logger.debug({
       logCode: 'audiomanager_input_device_change',
       extraInfo: {
         deviceId: currentDeviceId,
-        newDeviceId: deviceId,
+        newDeviceId: deviceId || 'none',
       },
-      },
-      `Microphone input device changed: from ${currentDeviceId} to ${deviceId}`
-    );
+    }, `Microphone input device changed: from ${currentDeviceId} to ${deviceId || 'none'}`);

     return this.inputDeviceId;
   }
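
Illustrative only (not part of the diff): the relationship between the 'listen-only' sentinel device, transparent listen only support, and the new bypassGUM call option. `manager` stands in for an AudioManager instance.

    // Hedged sketch of the bypass condition wired up above.
    manager.changeInputDevice('listen-only');

    // gUM is bypassed only when both bridges support transparent listen only
    // and the selected input device is the 'listen-only' sentinel.
    const bypass = manager.supportsTransparentListenOnly()
      && manager.inputDeviceId === 'listen-only';
    console.assert(bypass === manager.shouldBypassGUM());

    // joinMicrophone()/joinEchoTest() then pass `bypassGUM: true` down to the
    // bridge, which can open a "passive-sendrecv" peer with no local tracks and
    // attach a microphone track later without renegotiation.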
@@ -52,13 +52,13 @@ class AudioBroker extends BaseBroker {
     const localStream = this.getLocalStream();
     const oldTracks = localStream ? localStream.getAudioTracks() : [];

-    peerConnection.getSenders().forEach((sender, index) => {
-      if (sender.track && sender.track.kind === 'audio') {
-        const newTrack = newTracks[index];
+    peerConnection.getSenders().forEach((sender) => {
+      if (sender.track == null || sender?.track?.kind === 'audio') {
+        const newTrack = newTracks.shift();
         if (newTrack == null) return;

         // Cleanup old tracks in the local MediaStream
-        const oldTrack = oldTracks[index];
+        const oldTrack = oldTracks.shift();
         sender.replaceTrack(newTrack);
         if (oldTrack) {
           oldTrack.stop();
@@ -68,6 +68,13 @@ class AudioBroker extends BaseBroker {
       }
     });

+    if (oldTracks.length > 0) {
+      oldTracks.forEach((track) => {
+        track.stop();
+        localStream.removeTrack(track);
+      });
+    }
+
     return Promise.resolve();
   }

@@ -90,8 +97,10 @@ class AudioBroker extends BaseBroker {
       gatheringTimeout: this.gatheringTimeout,
     };

-    const peerRole = this.role === 'sendrecv' ? this.role : 'recvonly';
+    const peerRole = BaseBroker.getPeerRole(this.role);
     this.webRtcPeer = new WebRtcPeer(peerRole, options);
+    window.peers = window.peers || [];
+    window.peers.push(this.webRtcPeer);
     this.webRtcPeer.iceQueue = [];
     this.webRtcPeer.start();
     this.webRtcPeer.peerConnection.onconnectionstatechange = this.handleConnectionStateChange.bind(this);
@@ -101,7 +110,9 @@ class AudioBroker extends BaseBroker {
       this.webRtcPeer.generateOffer()
         .then(this.sendStartReq.bind(this))
         .catch(this._handleOfferGenerationFailure.bind(this));
-    } else if (peerRole === 'recvonly') {
+    } else if (peerRole === 'recvonly'
+      || peerRole === 'recv'
+      || peerRole === 'passive-sendrecv') {
       // We are the answerer and we are only listening, so we don't need
       // to acquire local media
       this.sendStartReq();
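
The rework above swaps tracks on the existing sender instead of renegotiating. A standalone sketch of the underlying WebRTC pattern (names are illustrative, not taken from this changeset):

    // Hedged sketch: replace the outbound audio track in-place via
    // RTCRtpSender.replaceTrack(), which does not require an SDP renegotiation.
    async function swapMicrophoneTrack(peerConnection, localStream, deviceId) {
      const gumStream = await navigator.mediaDevices.getUserMedia({
        audio: { deviceId: { exact: deviceId } },
      });
      const [newTrack] = gumStream.getAudioTracks();

      // With "passive-sendrecv" the audio sender may exist with a null track
      // until the first microphone is attached.
      const sender = peerConnection.getSenders()
        .find((s) => s.track == null || s.track.kind === 'audio');
      const [oldTrack] = localStream.getAudioTracks();

      await sender.replaceTrack(newTrack);

      // Keep the local MediaStream consistent and release the old device.
      if (oldTrack) {
        oldTrack.stop();
        localStream.removeTrack(oldTrack);
      }
      localStream.addTrack(newTrack);
    }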
@@ -8,6 +8,20 @@ const WS_HEARTBEAT_OPTS = {
 };

 class BaseBroker {
+  static getPeerRole(role) {
+    switch (role) {
+      case 'send':
+      case 'sendrecv':
+      case 'sendonly':
+      case 'recvonly':
+      case 'recv':
+      case 'passive-sendrecv':
+        return role;
+      default:
+        throw new Error(`Invalid role: ${role}`);
+    }
+  }
+
   static assembleError(code, reason) {
     const message = reason || SFU_BROKER_ERRORS[code];
     const error = new Error(message);
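
The new role validation is a plain pass-through for known roles and fails fast otherwise, e.g.:

    BaseBroker.getPeerRole('passive-sendrecv'); // => 'passive-sendrecv'
    BaseBroker.getPeerRole('sendrecv');         // => 'sendrecv'
    BaseBroker.getPeerRole('listen-only');      // throws Error: Invalid role: listen-only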
@@ -37,6 +37,28 @@ export default class WebRtcPeer extends EventEmitter2 {
     this._gatheringTimeout = this.options.gatheringTimeout;

     this._assignOverrides();
+
+    this.logger.debug('BBB::WebRtcPeer::constructor - created', {
+      mode: this.mode,
+      options: this.options,
+    });
+  }
+
+  _getTransceiverDirection() {
+    switch (this.mode) {
+      case 'sendonly':
+      case 'recvonly':
+      case 'sendrecv':
+        return this.mode;
+      case 'recv':
+        return 'recvonly';
+      case 'send':
+        return 'sendonly';
+      case 'passive-sendrecv':
+        return 'sendrecv';
+      default:
+        return 'inactive';
+    }
   }

   _assignOverrides() {
@@ -202,7 +224,7 @@ export default class WebRtcPeer extends EventEmitter2 {
       }

       return stream;
-    }
+    };

     if (typeof this._mediaStreamFactory === 'function') {
       return this._mediaStreamFactory(this.mediaConstraints).then(handleGUMResolution);
@@ -326,34 +348,7 @@ export default class WebRtcPeer extends EventEmitter2 {
     }
   }

-  async generateOffer() {
-    switch (this.mode) {
-      case 'recvonly': {
-        const useAudio = this.mediaConstraints
-          && ((typeof this.mediaConstraints.audio === 'boolean' && this.mediaConstraints.audio)
-          || (typeof this.mediaConstraints.audio === 'object'));
-        const useVideo = this.mediaConstraints
-          && ((typeof this.mediaConstraints.video === 'boolean' && this.mediaConstraints.video)
-          || (typeof this.mediaConstraints.video === 'object'));
-
-        if (useAudio) {
-          this.peerConnection.addTransceiver('audio', {
-            direction: 'recvonly',
-          });
-        }
-
-        if (useVideo) {
-          this.peerConnection.addTransceiver('video', {
-            direction: 'recvonly',
-          });
-        }
-        break;
-      }
-
-      case 'sendonly':
-      case 'sendrecv': {
-        await this.mediaStreamFactory();
-
+  _processMediaStreams() {
     if (this.videoStream) {
       this.videoStream.getTracks().forEach((track) => {
         this.peerConnection.addTrack(track, this.videoStream);
@@ -368,11 +363,45 @@ export default class WebRtcPeer extends EventEmitter2 {

     this.peerConnection.getTransceivers().forEach((transceiver) => {
       // eslint-disable-next-line no-param-reassign
-      transceiver.direction = this.mode;
+      transceiver.direction = this._getTransceiverDirection();
     });
+  }
+
+  async generateOffer() {
+    switch (this.mode) {
+      case 'recvonly': {
+        const useAudio = this.mediaConstraints
+          && ((typeof this.mediaConstraints.audio === 'boolean' && this.mediaConstraints.audio)
+          || (typeof this.mediaConstraints.audio === 'object'));
+        const useVideo = this.mediaConstraints
+          && ((typeof this.mediaConstraints.video === 'boolean' && this.mediaConstraints.video)
+          || (typeof this.mediaConstraints.video === 'object'));
+
+        if (useAudio) {
+          this.peerConnection.addTransceiver('audio', {
+            direction: this._getTransceiverDirection(),
+          });
+        }
+
+        if (useVideo) {
+          this.peerConnection.addTransceiver('video', {
+            direction: this._getTransceiverDirection(),
+          });
+        }
         break;
       }

+      case 'sendonly':
+      case 'sendrecv': {
+        await this.mediaStreamFactory();
+        this._processMediaStreams();
+        break;
+      }
+
+      case 'passive-sendrecv':
+        this._processMediaStreams();
+        break;
+
       default:
         break;
     }
@@ -387,6 +416,10 @@ export default class WebRtcPeer extends EventEmitter2 {
         const localDescription = this.getLocalSessionDescriptor();
         this.logger.debug('BBB::WebRtcPeer::generateOffer - local description set', localDescription);
         return localDescription.sdp;
+      })
+      .catch((error) => {
+        this.logger.error('BBB::WebRtcPeer::generateOffer - failed', error);
+        throw error;
       });
   }

@@ -409,23 +442,9 @@ export default class WebRtcPeer extends EventEmitter2 {
       .then(async () => {
         if (this.mode === 'sendonly' || this.mode === 'sendrecv') {
           await this.mediaStreamFactory();
-
-          if (this.videoStream) {
-            this.videoStream.getTracks().forEach((track) => {
-              this.peerConnection.addTrack(track, this.videoStream);
-            });
-          }
-
-          if (this.audioStream) {
-            this.audioStream.getTracks().forEach((track) => {
-              this.peerConnection.addTrack(track, this.audioStream);
-            });
-          }
-
-          this.peerConnection.getTransceivers().forEach((transceiver) => {
-            // eslint-disable-next-line no-param-reassign
-            transceiver.direction = this.mode;
-          });
+          this._processMediaStreams();
+        } else if (this.mode === 'passive-sendrecv') {
+          this._processMediaStreams();
         }
       })
       .then(() => this.peerConnection.createAnswer())
@@ -437,6 +456,10 @@ export default class WebRtcPeer extends EventEmitter2 {
         const localDescription = this.getLocalSessionDescriptor();
         this.logger.debug('BBB::WebRtcPeer::processOffer - local description set', localDescription.sdp);
         return localDescription.sdp;
+      })
+      .catch((error) => {
+        this.logger.error('BBB::WebRtcPeer::processOffer - failed', error);
+        throw error;
       });
   }

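
Roughly, a "passive-sendrecv" peer negotiates sendrecv transceivers without acquiring local media up front, so a microphone track can be attached later via replaceTrack. A hedged sketch; constructor option names beyond the mode argument are assumptions:

    // Mode to transceiver direction, per _getTransceiverDirection():
    //   send -> sendonly, recv -> recvonly, passive-sendrecv -> sendrecv
    const peer = new WebRtcPeer('passive-sendrecv', {
      mediaConstraints: { audio: true, video: false },
    });

    peer.start();
    // generateOffer()/processOffer() call _processMediaStreams(), find no local
    // audioStream/videoStream, and force every transceiver's direction to
    // 'sendrecv' - the peer starts silent but is ready to send later.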
@@ -49,17 +49,6 @@ const GlobalStyle = createGlobalStyle`
     }
   }

-  .permissionsOverlay {
-    position: fixed;
-    z-index: 1002;
-    top: 0;
-    bottom: 0;
-    left: 0;
-    right: 0;
-    background-color: rgba(0, 0, 0, .85);
-    animation: fade-in .5s ease-in;
-  }
-
   .modalOverlay {
     z-index: 1000;
     display: flex;
@@ -45,22 +45,25 @@ const getDeviceIdFromTrack = (track) => {
     const { deviceId } = track.getSettings();
     return deviceId;
   }
-  return '';
+  return null;
 };

 const extractDeviceIdFromStream = (stream, kind) => {
+  if (!stream) return null;
+
   // An empty string is the browser's default...
   let tracks = [];

   switch (kind) {
     case 'audio':
       tracks = getAudioTracks(stream);
+      if (tracks.length === 0) return 'listen-only';
       return getDeviceIdFromTrack(tracks[0]);
     case 'video':
       tracks = getVideoTracks(stream);
       return getDeviceIdFromTrack(tracks[0]);
     default: {
-      return '';
+      return null;
     }
   }
 };
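
The sentinel behaviour added above, in short:

    // A stream with no audio tracks now maps to the 'listen-only' sentinel,
    // and a missing stream maps to null (previously both resolved to '').
    extractDeviceIdFromStream(new MediaStream(), 'audio'); // => 'listen-only'
    extractDeviceIdFromStream(null, 'audio');              // => null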
@@ -606,8 +606,8 @@
     "app.submenu.notification.userJoinLabel": "User Join",
     "app.submenu.notification.userLeaveLabel": "User Leave",
     "app.submenu.notification.guestWaitingLabel": "Guest Waiting Approval",
-    "app.submenu.audio.micSourceLabel": "Microphone source",
-    "app.submenu.audio.speakerSourceLabel": "Speaker source",
+    "app.submenu.audio.micSourceLabel": "Microphone",
+    "app.submenu.audio.speakerSourceLabel": "Speaker",
     "app.submenu.audio.streamVolumeLabel": "Your audio stream volume",
     "app.submenu.video.title": "Video",
     "app.submenu.video.videoSourceLabel": "View source",
@@ -723,10 +723,10 @@
     "app.audioModal.yes.arialabel": "Echo is audible",
     "app.audioModal.no.arialabel": "Echo is inaudible",
     "app.audioModal.echoTestTitle": "This is a private echo test. Speak a few words. Did you hear audio?",
-    "app.audioModal.settingsTitle": "Change your audio settings",
     "app.audioModal.helpTitle": "There was an issue with your audio devices",
     "app.audioModal.helpSubtitleMic": "We couldn't enable your microphone",
     "app.audioModal.helpSubtitleGeneric": "We're having trouble establishing an audio connection",
+    "app.audioModal.helpSubtitlePermission": "We need access to your microphone",
     "app.audioModal.helpPermissionStep1": "When joining a call, accept all requests if prompted to use your microphone.",
     "app.audioModal.helpPermissionStep2": "Check browser and device settings to ensure microphone access is allowed.",
     "app.audioModal.helpPermissionStep3": "Refresh the page and try again.",
@@ -764,18 +764,22 @@
     "app.audio.loading": "Loading",
     "app.audio.microphones": "Microphones",
     "app.audio.speakers": "Speakers",
-    "app.audio.noDeviceFound": "No device found",
-    "app.audio.audioSettings.titleLabel": "Choose your audio settings",
-    "app.audio.audioSettings.descriptionLabel": "Please note, a dialog will appear in your browser, requiring you to accept sharing your microphone.",
-    "app.audio.audioSettings.microphoneSourceLabel": "Microphone source",
-    "app.audio.audioSettings.speakerSourceLabel": "Speaker source",
+    "app.audio.noDeviceFound": "No device found (listen only)",
+    "app.audio.audioSettings.titleLabel": "Adjust your audio settings",
+    "app.audio.audioSettings.findingDevicesTitle": "Looking for your audio devices, please accept any requests to use them",
+    "app.audio.audioSettings.noMicListenOnly": "No microphone (listen only)",
+    "app.audio.audioSettings.microphoneSourceLabel": "Microphone",
+    "app.audio.audioSettings.speakerSourceLabel": "Speaker",
     "app.audio.audioSettings.testSpeakerLabel": "Test your speaker",
     "app.audio.audioSettings.microphoneStreamLabel": "Your audio stream volume",
-    "app.audio.audioSettings.retryLabel": "Retry",
+    "app.audio.audioSettings.retryMicLabel": "Retry",
     "app.audio.audioSettings.fallbackInputLabel": "Audio input {0}",
     "app.audio.audioSettings.fallbackOutputLabel": "Audio output {0}",
+    "app.audio.audioSettings.fallbackNoPermission": "(no device permission)",
     "app.audio.audioSettings.defaultOutputDeviceLabel": "Default",
-    "app.audio.audioSettings.findingDevicesLabel": "Finding devices...",
+    "app.audio.audioSettings.findingDevicesLabel": "Finding audio devices...",
+    "app.audio.audioSettings.confirmLabel": "Confirm",
+    "app.audio.audioSettings.cancelLabel": "Cancel",
     "app.audio.listenOnly.backLabel": "Back",
     "app.audio.listenOnly.closeLabel": "Close",
     "app.audio.permissionsOverlay.title": "Allow access to your microphone",