2017-11-17 19:52:48 +08:00
|
|
|
import Auth from '/imports/ui/services/auth';
|
2017-10-13 03:22:10 +08:00
|
|
|
import SIPBridge from '/imports/api/audio/client/bridge/sip';
|
2022-04-21 04:38:52 +08:00
|
|
|
import SFUAudioBridge from '/imports/api/audio/client/bridge/sfu-audio-bridge';
|
2018-07-12 06:03:56 +08:00
|
|
|
import logger from '/imports/startup/client/logger';
|
2017-10-24 21:19:58 +08:00
|
|
|
import { notify } from '/imports/ui/services/notification';
|
2019-09-30 22:54:34 +08:00
|
|
|
import playAndRetry from '/imports/utils/mediaElementPlayRetry';
|
2019-11-30 05:48:04 +08:00
|
|
|
import { monitorAudioConnection } from '/imports/utils/stats';
|
2021-07-02 23:51:00 +08:00
|
|
|
import browserInfo from '/imports/utils/browserInfo';
|
2021-10-12 04:35:53 +08:00
|
|
|
import getFromMeetingSettings from '/imports/ui/services/meeting-settings';
|
2023-05-10 01:14:31 +08:00
|
|
|
import getFromUserSettings from '/imports/ui/services/users-settings';
|
2022-04-08 03:45:17 +08:00
|
|
|
import {
|
|
|
|
DEFAULT_INPUT_DEVICE_ID,
|
2022-08-20 01:22:42 +08:00
|
|
|
reloadAudioElement,
|
|
|
|
getCurrentAudioSinkId,
|
|
|
|
getStoredAudioInputDeviceId,
|
|
|
|
storeAudioInputDeviceId,
|
|
|
|
getStoredAudioOutputDeviceId,
|
|
|
|
storeAudioOutputDeviceId,
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
getAudioConstraints,
|
|
|
|
doGUM,
|
2022-04-08 03:45:17 +08:00
|
|
|
} from '/imports/api/audio/client/bridge/service';
|
2022-08-24 22:36:40 +08:00
|
|
|
import MediaStreamUtils from '/imports/utils/media-stream-utils';
|
2023-07-25 02:56:40 +08:00
|
|
|
import { makeVar } from '@apollo/client';
|
2024-04-10 02:17:18 +08:00
|
|
|
import AudioErrors from '/imports/ui/services/audio-manager/error-codes';
|
2024-06-06 21:50:03 +08:00
|
|
|
import Session from '/imports/ui/services/storage/in-memory';
|
2024-06-07 02:47:41 +08:00
|
|
|
import GrahqlSubscriptionStore, { stringToHash } from '/imports/ui/core/singletons/subscriptionStore';
|
2024-06-29 03:58:29 +08:00
|
|
|
import VOICE_ACTIVITY from '../../core/graphql/queries/whoIsTalking';
|
2017-09-20 01:47:57 +08:00
|
|
|
|
2021-09-01 02:50:53 +08:00
|
|
|
// Base path used when dynamically resolving audio bridge modules.
const DEFAULT_AUDIO_BRIDGES_PATH = '/imports/api/audio/client/';

// Call state values reported by the audio bridges via the call-state callback.
const CALL_STATES = {
  STARTED: 'started',
  ENDED: 'ended',
  FAILED: 'failed',
  RECONNECTING: 'reconnecting',
  AUTOPLAY_BLOCKED: 'autoplayBlocked',
};

// States of the audio transfer between a breakout room and its parent
// meeting (tracked in _breakoutAudioTransferStatus).
const BREAKOUT_AUDIO_TRANSFER_STATES = {
  CONNECTED: 'connected',
  DISCONNECTED: 'disconnected',
  RETURNING: 'returning',
};

/**
 * Audio status to be filtered in getStats()
 */
const FILTER_AUDIO_STATS = [
  'outbound-rtp',
  'inbound-rtp',
  'candidate-pair',
  'local-candidate',
  'transport',
];
|
|
|
|
|
2017-09-20 01:47:57 +08:00
|
|
|
class AudioManager {
|
|
|
|
constructor() {
  // Tracks whether this client's audio is currently transferred to/from a
  // breakout room (see BREAKOUT_AUDIO_TRANSFER_STATES).
  this._breakoutAudioTransferStatus = {
    status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
    breakoutMeetingId: null,
  };

  // Reactive state: defineProperties installs a plain getter/setter pair for
  // each key that proxies reads/writes to the underlying makeVar reactive var.
  this.defineProperties({
    isMuted: makeVar(true),
    isConnected: makeVar(false),
    isConnecting: makeVar(false),
    isHangingUp: makeVar(false),
    isListenOnly: makeVar(false),
    isEchoTest: makeVar(false),
    isTalking: makeVar(false),
    isWaitingPermissions: makeVar(false),
    error: makeVar(null),
    autoplayBlocked: makeVar(false),
    isReconnecting: makeVar(false),
    bypassGUM: makeVar(false),
    permissionStatus: makeVar(null),
    transparentListenOnlySupported: makeVar(false),
  });

  this.failedMediaElements = [];
  // Bind handlers that are handed around as bare function references.
  this.handlePlayElementFailed = this.handlePlayElementFailed.bind(this);
  this.monitor = this.monitor.bind(this);
  this.isUsingAudio = this.isUsingAudio.bind(this);

  this._inputStream = makeVar(null);
  // Device ids are wrapped as { value: reactiveVar }, mirroring the internal
  // shape defineProperties produces for the reactive keys above.
  this._inputDeviceId = {
    value: makeVar(null),
  };
  this._outputDeviceId = {
    value: makeVar(null),
  };
  // Last non-empty device enumerations (see the inputDevices/outputDevices
  // setters, which ignore empty lists).
  this._inputDevices = [];
  this._outputDevices = [];

  // Exposed so consumers can reference the state constants via the instance.
  this.BREAKOUT_AUDIO_TRANSFER_STATES = BREAKOUT_AUDIO_TRANSFER_STATES;
  this._voiceActivityObserver = null;

  // Allows external code to force-release capture devices/tracks.
  window.addEventListener('StopAudioTracks', () => this.forceExitAudio());
}
|
|
|
|
|
2024-06-05 19:26:27 +08:00
|
|
|
_trackPermissionStatus() {
|
|
|
|
const handleTrackingError = (error) => {
|
|
|
|
logger.warn({
|
|
|
|
logCode: 'audiomanager_permission_tracking_failed',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
},
|
|
|
|
}, `Failed to track microphone permission status: ${error.message}`);
|
|
|
|
};
|
|
|
|
|
|
|
|
if (navigator?.permissions?.query) {
|
|
|
|
navigator.permissions.query({ name: 'microphone' })
|
|
|
|
.then((status) => {
|
|
|
|
// eslint-disable-next-line no-param-reassign
|
|
|
|
status.onchange = () => {
|
|
|
|
logger.debug({
|
|
|
|
logCode: 'audiomanager_permission_status_changed',
|
|
|
|
extraInfo: {
|
|
|
|
newStatus: status.state,
|
|
|
|
},
|
|
|
|
}, `Microphone permission status changed: ${status.state}`);
|
|
|
|
this.permissionStatus = status.state;
|
|
|
|
};
|
|
|
|
this.permissionStatus = status.state;
|
|
|
|
}).catch(handleTrackingError);
|
|
|
|
} else {
|
|
|
|
handleTrackingError(new Error('navigator.permissions.query is not available'));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-20 01:22:42 +08:00
|
|
|
_applyCachedOutputDeviceId() {
|
|
|
|
const cachedId = getStoredAudioOutputDeviceId();
|
|
|
|
|
|
|
|
if (typeof cachedId === 'string') {
|
2023-07-25 02:56:40 +08:00
|
|
|
this.changeOutputDevice(cachedId, false)
|
|
|
|
.then(() => {
|
|
|
|
this.outputDeviceId = cachedId;
|
|
|
|
})
|
|
|
|
.catch((error) => {
|
|
|
|
logger.warn(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_output_device_storage_failed',
|
|
|
|
extraInfo: {
|
|
|
|
deviceId: cachedId,
|
|
|
|
errorMessage: error.message,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
`Failed to apply output audio device from storage: ${error.message}`
|
|
|
|
);
|
|
|
|
});
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
set inputDeviceId(value) {
|
2023-07-25 02:56:40 +08:00
|
|
|
if (this._inputDeviceId.value() !== value) {
|
|
|
|
this._inputDeviceId.value(value);
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if (this.fullAudioBridge) {
|
2023-07-25 02:56:40 +08:00
|
|
|
this.fullAudioBridge.inputDeviceId = this._inputDeviceId.value();
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-08-15 11:30:18 +08:00
|
|
|
// inputDeviceId is a string that represents a MediaDeviceInfo.deviceId OR a static
|
|
|
|
// 'listen-only' string that represents our "virtual" listen-only device.
|
|
|
|
// i.e.: the user has a bidirectional audio channel, but did not specify any
|
|
|
|
// input device to it.
|
2022-08-20 01:22:42 +08:00
|
|
|
get inputDeviceId() {
|
2023-07-25 02:56:40 +08:00
|
|
|
return this._inputDeviceId.value();
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
set outputDeviceId(value) {
|
2023-07-25 02:56:40 +08:00
|
|
|
if (this._outputDeviceId.value() !== value) {
|
|
|
|
this._outputDeviceId.value(value);
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if (this.fullAudioBridge) {
|
2023-07-25 02:56:40 +08:00
|
|
|
this.fullAudioBridge.outputDeviceId = this._outputDeviceId.value();
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if (this.listenOnlyBridge) {
|
2023-07-25 02:56:40 +08:00
|
|
|
this.listenOnlyBridge.outputDeviceId = this._outputDeviceId.value();
|
2022-08-20 01:22:42 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
get outputDeviceId() {
|
2023-07-25 02:56:40 +08:00
|
|
|
return this._outputDeviceId.value();
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2024-10-25 09:19:21 +08:00
|
|
|
set inputDevices(value) {
|
|
|
|
if (value?.length) {
|
|
|
|
this._inputDevices = value;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
get inputDevices() {
|
|
|
|
return this._inputDevices;
|
|
|
|
}
|
|
|
|
|
|
|
|
get inputDevicesJSON() {
|
|
|
|
return this.inputDevices.map((device) => device.toJSON());
|
|
|
|
}
|
|
|
|
|
|
|
|
set outputDevices(value) {
|
|
|
|
if (value?.length) {
|
|
|
|
this._outputDevices = value;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
get outputDevices() {
|
|
|
|
return this._outputDevices;
|
|
|
|
}
|
|
|
|
|
|
|
|
get outputDevicesJSON() {
|
|
|
|
return this.outputDevices.map((device) => device.toJSON());
|
|
|
|
}
|
|
|
|
|
2024-06-05 19:26:27 +08:00
|
|
|
shouldBypassGUM() {
|
|
|
|
return this.supportsTransparentListenOnly() && this.inputDeviceId === 'listen-only';
|
|
|
|
}
|
|
|
|
|
|
|
|
supportsTransparentListenOnly() {
|
|
|
|
return this.listenOnlyBridge?.supportsTransparentListenOnly()
|
|
|
|
&& this.fullAudioBridge?.supportsTransparentListenOnly();
|
|
|
|
}
|
|
|
|
|
2024-09-12 02:47:40 +08:00
|
|
|
observeVoiceActivity() {
  // Observe voice activity changes to update any relevant *local* states
  // (see onVoiceUserChanges)
  // Guarded so repeated calls create at most one subscription/listener pair.
  if (!this._voiceActivityObserver) {
    // Hash of the subscription payload, used below to match incoming
    // 'graphqlSubscription' events to *this* subscription.
    // NOTE(review): assumes the store derives its event hash the same way
    // (stringToHash over { subscription }) — confirm against
    // GrahqlSubscriptionStore.
    const subHash = stringToHash(JSON.stringify({
      subscription: VOICE_ACTIVITY,
    }));
    this._voiceActivityObserver = GrahqlSubscriptionStore.makeSubscription(VOICE_ACTIVITY);
    window.addEventListener('graphqlSubscription', (e) => {
      const { subscriptionHash, response } = e.detail;
      if (subscriptionHash === subHash) {
        if (response) {
          const { data } = response;
          // Only this user's own voice activity entry is relevant here.
          const voiceUser = data.user_voice_activity_stream.find((v) => v.userId === Auth.userID);
          this.onVoiceUserChanges(voiceUser);
        }
      }
    });
  }
}
|
|
|
|
|
|
|
|
init(userData, audioEventHandler) {
  this.userData = userData;
  // Restore device choices BEFORE bridges are created, so loadBridges can
  // copy the current ids into each bridge instance.
  this.inputDeviceId = getStoredAudioInputDeviceId() || DEFAULT_INPUT_DEVICE_ID;
  this.outputDeviceId = getCurrentAudioSinkId();
  this._applyCachedOutputDeviceId();
  this._trackPermissionStatus();
  this.loadBridges(userData);
  // Must run after loadBridges: it queries both bridge instances.
  this.transparentListenOnlySupported = this.supportsTransparentListenOnly();
  this.audioEventHandler = audioEventHandler;
  this.initialized = true;
}
|
2018-12-22 01:14:05 +08:00
|
|
|
|
2021-09-01 02:50:53 +08:00
|
|
|
/**
|
|
|
|
* Load audio bridges modules to be used the manager.
|
|
|
|
*
|
|
|
|
* Bridges can be configured in settings.yml file.
|
|
|
|
* @param {Object} userData The Object representing user data to be passed to
|
|
|
|
* the bridge.
|
|
|
|
*/
|
2024-09-12 02:47:40 +08:00
|
|
|
loadBridges(userData) {
  // Fallback bridge classes when no (valid) configuration is found.
  let FullAudioBridge = SIPBridge;
  let ListenOnlyBridge = SFUAudioBridge;

  const MEDIA = window.meetingClientSettings.public.media;

  if (MEDIA.audio) {
    const { defaultFullAudioBridge, defaultListenOnlyBridge } = MEDIA.audio;

    // Resolution precedence: user setting > meeting setting > settings.yml.
    const _fullAudioBridge = getFromUserSettings(
      'bbb_fullaudio_bridge',
      getFromMeetingSettings('fullaudio-bridge', defaultFullAudioBridge),
    );

    // NOTE(review): the map keys whatever name _fullAudioBridge resolved to
    // directly to SIPBridge, so any configured full-audio bridge name maps
    // to SIPBridge — confirm this is intentional.
    this.bridges = {
      [_fullAudioBridge]: SIPBridge,
      [defaultListenOnlyBridge]: SFUAudioBridge,
    };

    if (_fullAudioBridge && this.bridges[_fullAudioBridge]) {
      FullAudioBridge = this.bridges[_fullAudioBridge];
    }

    if (defaultListenOnlyBridge && this.bridges[defaultListenOnlyBridge]) {
      ListenOnlyBridge = this.bridges[defaultListenOnlyBridge];
    }
  }

  this.fullAudioBridge = new FullAudioBridge(userData);
  this.listenOnlyBridge = new ListenOnlyBridge(userData);
  // Initialize device IDs in configured bridges
  this.fullAudioBridge.inputDeviceId = this.inputDeviceId;
  this.fullAudioBridge.outputDeviceId = this.outputDeviceId;
  this.listenOnlyBridge.outputDeviceId = this.outputDeviceId;
}
|
|
|
|
|
2019-02-21 05:58:37 +08:00
|
|
|
setAudioMessages(messages, intl) {
|
2018-04-20 03:57:54 +08:00
|
|
|
this.messages = messages;
|
2019-02-21 05:58:37 +08:00
|
|
|
this.intl = intl;
|
2018-04-20 03:57:54 +08:00
|
|
|
}
|
2017-10-18 03:16:42 +08:00
|
|
|
|
2017-09-20 01:47:57 +08:00
|
|
|
defineProperties(obj) {
|
2017-09-29 21:38:10 +08:00
|
|
|
Object.keys(obj).forEach((key) => {
|
|
|
|
const privateKey = `_${key}`;
|
|
|
|
this[privateKey] = {
|
2024-06-12 21:25:46 +08:00
|
|
|
value: obj[key],
|
2017-09-29 21:38:10 +08:00
|
|
|
};
|
2017-09-20 01:47:57 +08:00
|
|
|
|
|
|
|
Object.defineProperty(this, key, {
|
|
|
|
set: (value) => {
|
2024-06-12 21:25:46 +08:00
|
|
|
this[privateKey].value(value);
|
2017-09-29 21:38:10 +08:00
|
|
|
},
|
2024-06-12 21:25:46 +08:00
|
|
|
get: () => this[privateKey].value(),
|
2023-07-25 02:56:40 +08:00
|
|
|
[`getReferece${key}`]: () => this[privateKey],
|
2017-09-29 21:38:10 +08:00
|
|
|
});
|
|
|
|
});
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2021-06-30 01:47:59 +08:00
|
|
|
async trickleIce() {
|
2021-07-02 23:51:00 +08:00
|
|
|
const { isFirefox, isIe, isSafari } = browserInfo;
|
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
if (
|
|
|
|
!this.listenOnlyBridge ||
|
|
|
|
typeof this.listenOnlyBridge.trickleIce !== 'function' ||
|
|
|
|
isFirefox ||
|
|
|
|
isIe ||
|
|
|
|
isSafari
|
|
|
|
) {
|
2022-04-21 04:38:52 +08:00
|
|
|
return [];
|
|
|
|
}
|
2021-06-30 01:47:59 +08:00
|
|
|
|
|
|
|
if (this.validIceCandidates && this.validIceCandidates.length) {
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{ logCode: 'audiomanager_trickle_ice_reuse_candidate' },
|
2023-07-25 02:56:40 +08:00
|
|
|
'Reusing trickle ICE information before activating microphone'
|
2021-12-10 04:37:05 +08:00
|
|
|
);
|
2021-06-30 01:47:59 +08:00
|
|
|
return this.validIceCandidates;
|
|
|
|
}
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{ logCode: 'audiomanager_trickle_ice_get_local_candidate' },
|
2023-07-25 02:56:40 +08:00
|
|
|
'Performing trickle ICE before activating microphone'
|
2021-12-10 04:37:05 +08:00
|
|
|
);
|
2022-04-26 00:34:39 +08:00
|
|
|
|
|
|
|
try {
|
|
|
|
this.validIceCandidates = await this.listenOnlyBridge.trickleIce();
|
|
|
|
return this.validIceCandidates;
|
|
|
|
} catch (error) {
|
2023-07-25 02:56:40 +08:00
|
|
|
logger.error(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_trickle_ice_failed',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
},
|
2022-04-26 00:34:39 +08:00
|
|
|
},
|
2023-07-25 02:56:40 +08:00
|
|
|
`Trickle ICE before activating microphone failed: ${error.message}`
|
|
|
|
);
|
2022-04-26 00:34:39 +08:00
|
|
|
return [];
|
|
|
|
}
|
2021-06-30 01:47:59 +08:00
|
|
|
}
|
|
|
|
|
2018-03-16 02:57:25 +08:00
|
|
|
joinMicrophone() {
|
|
|
|
this.isListenOnly = false;
|
|
|
|
this.isEchoTest = false;
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
return this.onAudioJoining
|
|
|
|
.bind(this)()
|
2019-06-13 05:01:20 +08:00
|
|
|
.then(() => {
|
|
|
|
const callOptions = {
|
|
|
|
isListenOnly: false,
|
|
|
|
extension: null,
|
|
|
|
inputStream: this.inputStream,
|
2024-06-05 19:26:27 +08:00
|
|
|
bypassGUM: this.shouldBypassGUM(),
|
2019-06-13 05:01:20 +08:00
|
|
|
};
|
2020-09-26 07:11:44 +08:00
|
|
|
return this.joinAudio(callOptions, this.callStateCallback.bind(this));
|
2019-06-13 05:01:20 +08:00
|
|
|
});
|
2018-03-16 02:57:25 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
joinEchoTest() {
|
|
|
|
this.isListenOnly = false;
|
|
|
|
this.isEchoTest = true;
|
|
|
|
|
2024-05-29 21:26:11 +08:00
|
|
|
const MEDIA = window.meetingClientSettings.public.media;
|
|
|
|
const ECHO_TEST_NUMBER = MEDIA.echoTestNumber;
|
|
|
|
const EXPERIMENTAL_USE_KMS_TRICKLE_ICE_FOR_MICROPHONE =
|
|
|
|
window.meetingClientSettings.public.app.experimentalUseKmsTrickleIceForMicrophone;
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
return this.onAudioJoining
|
|
|
|
.bind(this)()
|
2021-06-30 01:47:59 +08:00
|
|
|
.then(async () => {
|
2021-07-02 23:51:00 +08:00
|
|
|
let validIceCandidates = [];
|
|
|
|
if (EXPERIMENTAL_USE_KMS_TRICKLE_ICE_FOR_MICROPHONE) {
|
|
|
|
validIceCandidates = await this.trickleIce();
|
|
|
|
}
|
|
|
|
|
2019-06-13 05:01:20 +08:00
|
|
|
const callOptions = {
|
|
|
|
isListenOnly: false,
|
|
|
|
extension: ECHO_TEST_NUMBER,
|
|
|
|
inputStream: this.inputStream,
|
2021-06-30 01:47:59 +08:00
|
|
|
validIceCandidates,
|
2024-06-05 19:26:27 +08:00
|
|
|
bypassGUM: this.shouldBypassGUM(),
|
2019-06-13 05:01:20 +08:00
|
|
|
};
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_join_echotest',
|
|
|
|
extraInfo: { logType: 'user_action' },
|
|
|
|
},
|
|
|
|
'User requested to join audio conference with mic'
|
|
|
|
);
|
2020-09-26 07:11:44 +08:00
|
|
|
return this.joinAudio(callOptions, this.callStateCallback.bind(this));
|
2019-06-13 05:01:20 +08:00
|
|
|
});
|
2018-03-16 02:57:25 +08:00
|
|
|
}
|
|
|
|
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
async joinAudio(callOptions, callStateCallback) {
|
|
|
|
try {
|
|
|
|
// If there's no input stream, we need to get one via getUserMedia
|
|
|
|
if (callOptions?.inputStream == null
|
|
|
|
&& !this.shouldBypassGUM()
|
|
|
|
&& !callOptions.isListenOnly) {
|
|
|
|
const constraints = getAudioConstraints({ deviceId: this?.bridge?.inputDeviceId });
|
|
|
|
this.inputStream = await doGUM({ audio: constraints });
|
|
|
|
// eslint-disable-next-line no-param-reassign
|
|
|
|
callOptions.inputStream = this.inputStream;
|
|
|
|
}
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
// Start tracking audio join time here to avoid counting time spent on
|
|
|
|
// getUserMedia prompts. We're primarily focused on negotiation times here.
|
|
|
|
// We're only concerned with gUM timeouts - and it'll throw an error which
|
|
|
|
// is logged accordingly whenever it times out.
|
|
|
|
this.audioJoinStartTime = new Date();
|
|
|
|
this.logAudioJoinTime = false;
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
await this.bridge.joinAudio(callOptions, callStateCallback.bind(this));
|
|
|
|
} catch (error) {
|
|
|
|
// Reset audio join time tracking if an error occurs
|
|
|
|
this.audioJoinStartTime = null;
|
|
|
|
this.logAudioJoinTime = false;
|
|
|
|
const { name, message } = error;
|
|
|
|
const errorPayload = {
|
|
|
|
type: 'MEDIA_ERROR',
|
|
|
|
errMessage: message || 'MEDIA_ERROR',
|
|
|
|
errCode: AudioErrors.MIC_ERROR.UNKNOWN,
|
|
|
|
};
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
switch (name) {
|
|
|
|
case 'NotAllowedError':
|
|
|
|
errorPayload.errCode = AudioErrors.MIC_ERROR.NO_PERMISSION;
|
|
|
|
logger.error({
|
|
|
|
logCode: 'audiomanager_error_getting_device',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
},
|
|
|
|
}, `Error getting microphone - {${error.name}: ${error.message}}`);
|
|
|
|
break;
|
|
|
|
case 'NotFoundError':
|
|
|
|
errorPayload.errCode = AudioErrors.MIC_ERROR.DEVICE_NOT_FOUND;
|
|
|
|
logger.error({
|
|
|
|
logCode: 'audiomanager_error_device_not_found',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
2024-10-25 09:19:21 +08:00
|
|
|
inputDeviceId: this.inputDeviceId,
|
|
|
|
inputDevices: this.inputDevicesJSON,
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
},
|
|
|
|
}, `Error getting microphone - {${error.name}: ${error.message}}`);
|
2024-10-25 09:19:21 +08:00
|
|
|
// Reset the input device ID so the user can select a new one
|
|
|
|
this.changeInputDevice(null);
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
logger.error({
|
|
|
|
logCode: 'audiomanager_error_unknown',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
2024-10-25 09:19:21 +08:00
|
|
|
errorStack: error?.stack,
|
|
|
|
inputDeviceId: this.inputDeviceId,
|
|
|
|
inputDevices: this.inputDevicesJSON,
|
|
|
|
outputDeviceId: this.outputDeviceId,
|
|
|
|
outputDevices: this.outputDevicesJSON,
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
},
|
|
|
|
}, `Error enabling audio - {${name}: ${message}}`);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
this.isConnecting = false;
|
|
|
|
|
|
|
|
throw errorPayload;
|
|
|
|
}
|
2020-09-26 07:11:44 +08:00
|
|
|
}
|
|
|
|
|
2024-04-17 01:27:39 +08:00
|
|
|
async joinListenOnly() {
|
2018-03-16 02:57:25 +08:00
|
|
|
this.isListenOnly = true;
|
|
|
|
this.isEchoTest = false;
|
2019-06-13 05:01:20 +08:00
|
|
|
|
2024-04-17 01:27:39 +08:00
|
|
|
logger.info({
|
|
|
|
logCode: 'audiomanager_join_listenonly',
|
|
|
|
extraInfo: { logType: 'user_action' },
|
|
|
|
}, 'user requested to connect to audio conference as listen only');
|
2019-07-26 02:41:24 +08:00
|
|
|
|
2019-08-03 05:32:42 +08:00
|
|
|
window.addEventListener('audioPlayFailed', this.handlePlayElementFailed);
|
|
|
|
|
2024-04-17 01:27:39 +08:00
|
|
|
return this.onAudioJoining.bind(this)()
|
|
|
|
.then(() => {
|
|
|
|
const callOptions = {
|
|
|
|
isListenOnly: true,
|
|
|
|
extension: null,
|
|
|
|
};
|
|
|
|
return this.joinAudio(callOptions, this.callStateCallback.bind(this));
|
2017-11-02 20:10:01 +08:00
|
|
|
});
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2018-03-16 02:57:25 +08:00
|
|
|
onAudioJoining() {
|
|
|
|
this.isConnecting = true;
|
2024-09-12 02:47:40 +08:00
|
|
|
this.isMuted = true;
|
2018-03-16 02:57:25 +08:00
|
|
|
this.error = false;
|
2024-09-12 02:47:40 +08:00
|
|
|
this.observeVoiceActivity();
|
|
|
|
// Ensure the local mute state (this.isMuted) is aligned with the initial
|
|
|
|
// placeholder value before joining audio.
|
|
|
|
// Currently, the server sets the placeholder mute state to *true*, and this
|
|
|
|
// is only communicated via observeVoiceActivity's subscription if the initial
|
|
|
|
// state differs from the placeholder or when the state changes.
|
|
|
|
// Refer to user_voice_activity DB schema for details.
|
|
|
|
// tl;dr: without enforcing the initial mute state here, the client won't be
|
|
|
|
// locally muted if the audio channel starts muted (e.g., dialplan-level
|
|
|
|
// muteOnStart).
|
|
|
|
this.setSenderTrackEnabled(!this.isMuted);
|
2018-03-16 02:57:25 +08:00
|
|
|
|
|
|
|
return Promise.resolve();
|
|
|
|
}
|
|
|
|
|
2017-09-20 01:47:57 +08:00
|
|
|
exitAudio() {
|
2017-11-02 20:10:01 +08:00
|
|
|
if (!this.isConnected) return Promise.resolve();
|
|
|
|
|
2017-10-27 01:14:56 +08:00
|
|
|
this.isHangingUp = true;
|
2018-06-28 01:44:11 +08:00
|
|
|
|
2022-04-21 04:38:52 +08:00
|
|
|
return this.bridge.exitAudio();
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2021-12-03 19:45:07 +08:00
|
|
|
forceExitAudio() {
|
2024-06-05 19:26:27 +08:00
|
|
|
this.onAudioExit();
|
2021-12-03 19:45:07 +08:00
|
|
|
|
2024-05-29 21:26:11 +08:00
|
|
|
return this.bridge && this.bridge.exitAudio();
|
2021-12-03 19:45:07 +08:00
|
|
|
}
|
|
|
|
|
2017-10-12 20:50:23 +08:00
|
|
|
transferCall() {
|
2017-10-18 03:16:42 +08:00
|
|
|
this.onTransferStart();
|
|
|
|
return this.bridge.transferCall(this.onAudioJoin.bind(this));
|
2017-10-12 20:50:23 +08:00
|
|
|
}
|
|
|
|
|
2024-06-11 00:14:50 +08:00
|
|
|
onVoiceUserChanges(fields = {}) {
|
2020-08-27 05:23:01 +08:00
|
|
|
if (fields.muted !== undefined && fields.muted !== this.isMuted) {
|
2020-09-10 01:03:27 +08:00
|
|
|
let muteState;
|
2020-08-27 05:23:01 +08:00
|
|
|
this.isMuted = fields.muted;
|
2020-09-10 01:03:27 +08:00
|
|
|
|
|
|
|
if (this.isMuted) {
|
|
|
|
muteState = 'selfMuted';
|
|
|
|
this.mute();
|
|
|
|
} else {
|
|
|
|
muteState = 'selfUnmuted';
|
|
|
|
this.unmute();
|
|
|
|
}
|
2020-08-27 05:23:01 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if (fields.talking !== undefined && fields.talking !== this.isTalking) {
|
|
|
|
this.isTalking = fields.talking;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (this.isMuted) {
|
|
|
|
this.isTalking = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-09-20 01:47:57 +08:00
|
|
|
onAudioJoin() {
|
2017-10-19 03:40:01 +08:00
|
|
|
this.isConnected = true;
|
2024-08-16 09:51:10 +08:00
|
|
|
this.isConnecting = false;
|
2017-10-20 18:11:51 +08:00
|
|
|
|
2024-05-29 21:26:11 +08:00
|
|
|
const STATS = window.meetingClientSettings.public.stats;
|
fix(audio): prevent overlapping sessions from gUM-induced timeouts
`getUserMedia` is called by each audio bridge if it hasn't been
triggered during pre-flight screens. This ties gUM to the bridge's
negotiation timers and audio-manager's activation tracking, leading to
two issues:
- A gUM prompt left unanswered for over 30 seconds can cause an
incorrect 1010 (negotiation timeout) audio error.
- If `retryThroughRelay: true`, and a joinAudio timeout occurs due to an
unanswered gUM prompt, but the user responds while the system retries
the connection, it can create overlapping audio sessions, resulting in
mute state inconsistencies when `muteOnStart: true`.
This commit addresses these issues by moving gUM handling to the
audio-manager before any bridge action. This removes gUM from the
negotiation timeout trackers, ensuring that gUM errors are treated as
browser API errors (as expected).
Additionally, audio activation tracking in audio-manager has been updated
to exclude gUM times. Initially, gUM was included to catch unintended
browser-related gUM timeouts (e.g., Chrome bugs), but this skewed the
metric intended for tracking *negotiation times*. With this change,
`secondsToActivateAudio` will now focus solely on negotiation.
2024-10-19 06:41:41 +08:00
|
|
|
// If we don't have a start time, something went wrong with the tracking code
|
|
|
|
// Log it as 0 seconds to keep things consistent, but 0 should be treated
|
|
|
|
// as an invalid value and be ignored in any log analysis.
|
|
|
|
const secondsToActivateAudio = this.audioJoinStartTime > 0
|
|
|
|
? (new Date() - this.audioJoinStartTime) / 1000
|
|
|
|
: 0;
|
2021-06-30 01:47:59 +08:00
|
|
|
|
|
|
|
if (!this.logAudioJoinTime) {
|
|
|
|
this.logAudioJoinTime = true;
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{
|
|
|
|
logCode: 'audio_mic_join_time',
|
|
|
|
extraInfo: {
|
|
|
|
secondsToActivateAudio,
|
|
|
|
},
|
2021-07-10 00:43:43 +08:00
|
|
|
},
|
2021-12-10 04:37:05 +08:00
|
|
|
`Time needed to connect audio (seconds): ${secondsToActivateAudio}`
|
|
|
|
);
|
2021-06-30 01:47:59 +08:00
|
|
|
}
|
2018-01-16 05:01:57 +08:00
|
|
|
|
2024-09-20 18:37:51 +08:00
|
|
|
try {
|
|
|
|
this.inputStream = this.bridge ? this.bridge.inputStream : null;
|
|
|
|
// Enforce correct output device on audio join
|
|
|
|
this.changeOutputDevice(this.outputDeviceId, true);
|
|
|
|
storeAudioOutputDeviceId(this.outputDeviceId);
|
|
|
|
// Extract the deviceId again from the stream to guarantee consistency
|
|
|
|
// between stream DID vs chosen DID. That's necessary in scenarios where,
|
|
|
|
// eg, there's no default/pre-set deviceId ('') and the browser's
|
|
|
|
// default device has been altered by the user (browser default != system's
|
|
|
|
// default).
|
|
|
|
if (this.inputStream) {
|
|
|
|
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(
|
|
|
|
this.inputStream,
|
|
|
|
'audio',
|
|
|
|
);
|
|
|
|
if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
|
|
|
|
this.changeInputDevice(extractedDeviceId);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Audio joined successfully - add device IDs to session storage so they
|
|
|
|
// can be re-used on refreshes/other sessions
|
|
|
|
storeAudioInputDeviceId(this.inputDeviceId);
|
|
|
|
} catch (error) {
|
|
|
|
logger.warn({
|
|
|
|
logCode: 'audiomanager_device_enforce_failed',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
inputDeviceId: this.inputDeviceId,
|
|
|
|
outputDeviceId: this.outputDeviceId,
|
|
|
|
},
|
|
|
|
}, `Failed to enforce input/output devices: ${error.message}`);
|
|
|
|
}
|
|
|
|
|
2017-10-20 18:11:51 +08:00
|
|
|
if (!this.isEchoTest) {
|
2019-02-21 05:58:37 +08:00
|
|
|
this.notify(this.intl.formatMessage(this.messages.info.JOINED_AUDIO));
|
2024-09-20 18:37:51 +08:00
|
|
|
logger.info({
|
|
|
|
logCode: 'audio_joined',
|
|
|
|
extraInfo: {
|
|
|
|
secondsToActivateAudio,
|
|
|
|
inputDeviceId: this.inputDeviceId,
|
2024-10-25 09:19:21 +08:00
|
|
|
inputDevices: this.inputDevicesJSON,
|
2024-09-20 18:37:51 +08:00
|
|
|
outputDeviceId: this.outputDeviceId,
|
2024-10-25 09:19:21 +08:00
|
|
|
outputDevices: this.outputDevicesJSON,
|
2024-09-20 18:37:51 +08:00
|
|
|
isListenOnly: this.isListenOnly,
|
|
|
|
},
|
|
|
|
}, 'Audio Joined');
|
2024-10-25 09:19:21 +08:00
|
|
|
|
2020-01-28 21:07:21 +08:00
|
|
|
if (STATS.enabled) this.monitor();
|
2021-02-27 02:52:11 +08:00
|
|
|
this.audioEventHandler({
|
|
|
|
name: 'started',
|
|
|
|
isListenOnly: this.isListenOnly,
|
|
|
|
});
|
2017-10-20 18:11:51 +08:00
|
|
|
}
|
2024-06-06 21:50:03 +08:00
|
|
|
Session.setItem('audioModalIsOpen', false);
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2017-10-12 20:50:23 +08:00
|
|
|
onTransferStart() {
|
|
|
|
this.isEchoTest = false;
|
|
|
|
this.isConnecting = true;
|
|
|
|
}
|
|
|
|
|
2024-04-12 05:30:55 +08:00
|
|
|
// Must be called before the call is actually torn down (this.isConnected = true)
|
|
|
|
notifyAudioExit() {
|
|
|
|
try {
|
|
|
|
if (!this.error && (this.isConnected && !this.isEchoTest)) {
|
|
|
|
this.notify(
|
|
|
|
this.intl.formatMessage(this.messages.info.LEFT_AUDIO),
|
|
|
|
false,
|
|
|
|
'no_audio',
|
|
|
|
);
|
|
|
|
}
|
|
|
|
} catch {}
|
|
|
|
}
|
|
|
|
|
2017-09-20 01:47:57 +08:00
|
|
|
onAudioExit() {
|
2024-04-12 05:30:55 +08:00
|
|
|
this.notifyAudioExit();
|
2017-09-20 01:47:57 +08:00
|
|
|
this.isConnected = false;
|
2017-10-05 04:49:11 +08:00
|
|
|
this.isConnecting = false;
|
2017-10-27 01:14:56 +08:00
|
|
|
this.isHangingUp = false;
|
2019-08-03 05:32:42 +08:00
|
|
|
this.autoplayBlocked = false;
|
|
|
|
this.failedMediaElements = [];
|
2017-09-29 21:38:10 +08:00
|
|
|
|
2018-04-18 01:09:05 +08:00
|
|
|
if (this.inputStream) {
|
2021-04-16 21:45:40 +08:00
|
|
|
this.inputStream.getTracks().forEach((track) => track.stop());
|
|
|
|
this.inputStream = null;
|
2018-04-18 01:09:05 +08:00
|
|
|
}
|
2018-04-13 20:39:26 +08:00
|
|
|
|
2019-06-13 05:01:20 +08:00
|
|
|
if (!this.isEchoTest) {
|
|
|
|
this.playHangUpSound();
|
|
|
|
}
|
|
|
|
|
2019-08-03 05:32:42 +08:00
|
|
|
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
2017-10-05 04:49:11 +08:00
|
|
|
callStateCallback(response) {
|
2017-09-29 21:38:10 +08:00
|
|
|
return new Promise((resolve) => {
|
2023-07-25 02:56:40 +08:00
|
|
|
const { STARTED, ENDED, FAILED, RECONNECTING, AUTOPLAY_BLOCKED } = CALL_STATES;
|
2021-12-10 04:37:05 +08:00
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
const { status, error, bridgeError, silenceNotifications, bridge } = response;
|
2017-10-05 04:49:11 +08:00
|
|
|
|
|
|
|
if (status === STARTED) {
|
2021-02-27 02:05:17 +08:00
|
|
|
this.isReconnecting = false;
|
2017-09-29 21:38:10 +08:00
|
|
|
this.onAudioJoin();
|
2017-10-05 04:49:11 +08:00
|
|
|
resolve(STARTED);
|
|
|
|
} else if (status === ENDED) {
|
2021-02-27 02:05:17 +08:00
|
|
|
this.isReconnecting = false;
|
2021-03-09 01:51:03 +08:00
|
|
|
this.setBreakoutAudioTransferStatus({
|
|
|
|
breakoutMeetingId: '',
|
|
|
|
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
|
|
|
|
});
|
2017-09-29 21:38:10 +08:00
|
|
|
this.onAudioExit();
|
2024-10-25 09:19:21 +08:00
|
|
|
logger.info({
|
|
|
|
logCode: 'audio_ended',
|
|
|
|
extraInfo: {
|
|
|
|
inputDeviceId: this.inputDeviceId,
|
|
|
|
inputDevices: this.inputDevicesJSON,
|
|
|
|
outputDeviceId: this.outputDeviceId,
|
|
|
|
outputDevices: this.outputDevicesJSON,
|
|
|
|
isListenOnly: this.isListenOnly,
|
|
|
|
},
|
|
|
|
}, 'Audio ended without issue');
|
2017-10-05 04:49:11 +08:00
|
|
|
} else if (status === FAILED) {
|
2021-02-27 02:05:17 +08:00
|
|
|
this.isReconnecting = false;
|
2021-03-09 01:51:03 +08:00
|
|
|
this.setBreakoutAudioTransferStatus({
|
|
|
|
breakoutMeetingId: '',
|
|
|
|
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
|
2021-12-10 04:37:05 +08:00
|
|
|
});
|
2023-07-25 02:56:40 +08:00
|
|
|
const errorKey = this.messages.error[error] || this.messages.error.GENERIC_ERROR;
|
2019-02-21 05:58:37 +08:00
|
|
|
const errorMsg = this.intl.formatMessage(errorKey, { 0: bridgeError });
|
|
|
|
this.error = !!error;
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.error(
|
|
|
|
{
|
|
|
|
logCode: 'audio_failure',
|
|
|
|
extraInfo: {
|
|
|
|
errorCode: error,
|
|
|
|
cause: bridgeError,
|
|
|
|
bridge,
|
2024-09-20 18:37:51 +08:00
|
|
|
inputDeviceId: this.inputDeviceId,
|
2024-10-25 09:19:21 +08:00
|
|
|
inputDevices: this.inputDevicesJSON,
|
2024-09-20 18:37:51 +08:00
|
|
|
outputDeviceId: this.outputDeviceId,
|
2024-10-25 09:19:21 +08:00
|
|
|
outputDevices: this.outputDevicesJSON,
|
2024-09-20 18:37:51 +08:00
|
|
|
isListenOnly: this.isListenOnly,
|
2021-12-10 04:37:05 +08:00
|
|
|
},
|
2019-06-29 05:45:50 +08:00
|
|
|
},
|
2021-12-10 04:37:05 +08:00
|
|
|
`Audio error - errorCode=${error}, cause=${bridgeError}`
|
|
|
|
);
|
2019-06-13 05:01:20 +08:00
|
|
|
if (silenceNotifications !== true) {
|
|
|
|
this.notify(errorMsg, true);
|
|
|
|
this.exitAudio();
|
|
|
|
this.onAudioExit();
|
|
|
|
}
|
|
|
|
} else if (status === RECONNECTING) {
|
2021-02-27 02:05:17 +08:00
|
|
|
this.isReconnecting = true;
|
2021-03-09 01:51:03 +08:00
|
|
|
this.setBreakoutAudioTransferStatus({
|
|
|
|
breakoutMeetingId: '',
|
|
|
|
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
|
2021-12-10 04:37:05 +08:00
|
|
|
});
|
2023-07-25 02:56:40 +08:00
|
|
|
logger.info({ logCode: 'audio_reconnecting' }, 'Attempting to reconnect audio');
|
|
|
|
this.notify(this.intl.formatMessage(this.messages.info.RECONNECTING_AUDIO), true);
|
2019-06-13 05:01:20 +08:00
|
|
|
this.playHangUpSound();
|
2019-08-03 05:32:42 +08:00
|
|
|
} else if (status === AUTOPLAY_BLOCKED) {
|
2021-03-09 01:51:03 +08:00
|
|
|
this.setBreakoutAudioTransferStatus({
|
|
|
|
breakoutMeetingId: '',
|
|
|
|
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
|
2021-12-10 04:37:05 +08:00
|
|
|
});
|
2021-02-27 02:05:17 +08:00
|
|
|
this.isReconnecting = false;
|
2019-08-03 05:32:42 +08:00
|
|
|
this.autoplayBlocked = true;
|
|
|
|
this.onAudioJoin();
|
|
|
|
resolve(AUTOPLAY_BLOCKED);
|
2017-09-29 21:38:10 +08:00
|
|
|
}
|
2017-10-11 20:05:57 +08:00
|
|
|
});
|
2017-09-29 21:38:10 +08:00
|
|
|
}
|
|
|
|
|
2018-06-20 23:36:26 +08:00
|
|
|
isUsingAudio() {
|
fix(audio): prevent permission check loop in Safari
Safari may enter a microphone permission check loop due to buggy behavior
in the Permissions API. When permission isn't permanently denied, gUM
requests fail with a NotAllowedError for a few seconds. During this time,
the permission state remains 'prompt' instead of transitioning to 'denied'
and back to 'prompt' after the timeout.
This leads to an issue where, on retrying while in 'prompt' + blocked,
the client loops through gUM checks via: 1) checking permission in the API,
2) receiving 'prompt', so trying gUM, 3) gUM fails, 4) returning to the
modal and checking permission again because the API still says 'prompt'.
Additionally, the `isUsingAudio` flag incorrectly counts the local echo
test/audio settings modal as "using audio," which toggles the flag on/off,
triggering the useEffect that causes the loop more frequently.
To fix this, remove the unnecessary AudioModal permission check that
causes the loop. Also, exclude "isEchoTest" from the `isUsingAudio` flag.
2024-08-21 03:09:59 +08:00
|
|
|
return Boolean(this.isConnected || this.isConnecting || this.isHangingUp);
|
2018-06-20 23:36:26 +08:00
|
|
|
}
|
|
|
|
|
2017-11-02 20:10:01 +08:00
|
|
|
changeInputDevice(deviceId) {
|
2022-08-20 01:22:42 +08:00
|
|
|
if (deviceId === this.inputDeviceId) return this.inputDeviceId;
|
2019-09-27 21:52:29 +08:00
|
|
|
|
2022-08-20 01:22:42 +08:00
|
|
|
const currentDeviceId = this.inputDeviceId ?? 'none';
|
|
|
|
this.inputDeviceId = deviceId;
|
2024-06-05 19:26:27 +08:00
|
|
|
logger.debug({
|
|
|
|
logCode: 'audiomanager_input_device_change',
|
|
|
|
extraInfo: {
|
|
|
|
deviceId: currentDeviceId,
|
|
|
|
newDeviceId: deviceId || 'none',
|
2022-08-20 01:22:42 +08:00
|
|
|
},
|
2024-06-05 19:26:27 +08:00
|
|
|
}, `Microphone input device changed: from ${currentDeviceId} to ${deviceId || 'none'}`);
|
2017-11-02 20:10:01 +08:00
|
|
|
|
2022-08-20 01:22:42 +08:00
|
|
|
return this.inputDeviceId;
|
2017-10-18 03:16:42 +08:00
|
|
|
}
|
2017-09-30 04:42:34 +08:00
|
|
|
|
2020-07-28 03:49:26 +08:00
|
|
|
liveChangeInputDevice(deviceId) {
|
2022-08-20 01:22:42 +08:00
|
|
|
const currentDeviceId = this.inputDeviceId ?? 'none';
|
2021-04-16 21:45:40 +08:00
|
|
|
// we force stream to be null, so MutedAlert will deallocate it and
|
|
|
|
// a new one will be created for the new stream
|
|
|
|
this.inputStream = null;
|
2023-07-25 02:56:40 +08:00
|
|
|
return this.bridge
|
|
|
|
.liveChangeInputDevice(deviceId)
|
|
|
|
.then((stream) => {
|
|
|
|
this.inputStream = stream;
|
|
|
|
const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(
|
|
|
|
this.inputStream,
|
|
|
|
'audio'
|
|
|
|
);
|
|
|
|
if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
|
|
|
|
this.changeInputDevice(extractedDeviceId);
|
|
|
|
}
|
|
|
|
// Live input device change - add device ID to session storage so it
|
|
|
|
// can be re-used on refreshes/other sessions
|
|
|
|
storeAudioInputDeviceId(extractedDeviceId);
|
|
|
|
this.setSenderTrackEnabled(!this.isMuted);
|
|
|
|
})
|
|
|
|
.catch((error) => {
|
2024-10-25 09:19:21 +08:00
|
|
|
logger.error({
|
|
|
|
logCode: 'audiomanager_input_live_device_change_failure',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
deviceId: currentDeviceId,
|
|
|
|
newDeviceId: deviceId,
|
|
|
|
inputDevices: this.inputDevicesJSON,
|
2023-07-25 02:56:40 +08:00
|
|
|
},
|
2024-10-25 09:19:21 +08:00
|
|
|
}, `Input device live change failed - {${error.name}: ${error.message}}`);
|
2022-08-19 04:41:18 +08:00
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
throw error;
|
|
|
|
});
|
2017-09-30 04:42:34 +08:00
|
|
|
}
|
|
|
|
|
2022-08-20 01:22:42 +08:00
|
|
|
async changeOutputDevice(deviceId, isLive) {
|
|
|
|
const targetDeviceId = deviceId;
|
|
|
|
const currentDeviceId = this.outputDeviceId ?? getCurrentAudioSinkId();
|
2024-05-29 21:26:11 +08:00
|
|
|
|
|
|
|
const MEDIA = window.meetingClientSettings.public.media;
|
|
|
|
const MEDIA_TAG = MEDIA.mediaTag;
|
2022-08-20 01:22:42 +08:00
|
|
|
const audioElement = document.querySelector(MEDIA_TAG);
|
|
|
|
const sinkIdSupported = audioElement && typeof audioElement.setSinkId === 'function';
|
|
|
|
|
|
|
|
if (typeof deviceId === 'string' && sinkIdSupported && currentDeviceId !== targetDeviceId) {
|
|
|
|
try {
|
|
|
|
if (!isLive) audioElement.srcObject = null;
|
|
|
|
|
|
|
|
await audioElement.setSinkId(deviceId);
|
|
|
|
reloadAudioElement(audioElement);
|
2023-07-25 02:56:40 +08:00
|
|
|
logger.debug(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_output_device_change',
|
|
|
|
extraInfo: {
|
|
|
|
deviceId: currentDeviceId,
|
|
|
|
newDeviceId: deviceId,
|
|
|
|
},
|
2022-08-20 01:22:42 +08:00
|
|
|
},
|
2023-07-25 02:56:40 +08:00
|
|
|
`Audio output device changed: from ${currentDeviceId || 'default'} to ${
|
|
|
|
deviceId || 'default'
|
|
|
|
}`
|
|
|
|
);
|
2022-08-20 01:22:42 +08:00
|
|
|
this.outputDeviceId = deviceId;
|
|
|
|
|
|
|
|
// Live output device change - add device ID to session storage so it
|
|
|
|
// can be re-used on refreshes/other sessions
|
|
|
|
if (isLive) storeAudioOutputDeviceId(deviceId);
|
|
|
|
|
|
|
|
return this.outputDeviceId;
|
|
|
|
} catch (error) {
|
2024-10-25 09:19:21 +08:00
|
|
|
logger.error({
|
|
|
|
logCode: 'audiomanager_output_device_change_failure',
|
|
|
|
extraInfo: {
|
|
|
|
errorName: error.name,
|
|
|
|
errorMessage: error.message,
|
|
|
|
deviceId: currentDeviceId,
|
|
|
|
newDeviceId: targetDeviceId,
|
|
|
|
outputDevices: this.outputDevicesJSON,
|
|
|
|
}
|
|
|
|
}, `Error changing output device - {${error.name}: ${error.message}}`);
|
2022-08-20 01:22:42 +08:00
|
|
|
|
|
|
|
// Rollback/enforce current sinkId (if possible)
|
|
|
|
if (sinkIdSupported) {
|
|
|
|
this.outputDeviceId = getCurrentAudioSinkId();
|
|
|
|
} else {
|
|
|
|
this.outputDeviceId = currentDeviceId;
|
|
|
|
}
|
|
|
|
|
|
|
|
throw error;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return this.outputDeviceId;
|
2017-10-10 04:48:10 +08:00
|
|
|
}
|
|
|
|
|
2017-10-11 02:03:29 +08:00
|
|
|
get inputStream() {
|
2023-07-25 02:56:40 +08:00
|
|
|
return this._inputStream();
|
2021-04-16 21:45:40 +08:00
|
|
|
}
|
|
|
|
|
2022-04-21 04:38:52 +08:00
|
|
|
get bridge() {
|
|
|
|
return this.isListenOnly ? this.listenOnlyBridge : this.fullAudioBridge;
|
|
|
|
}
|
|
|
|
|
2021-04-16 21:45:40 +08:00
|
|
|
set inputStream(stream) {
|
|
|
|
// We store reactive information about input stream
|
|
|
|
// because mutedalert component needs to track when it changes
|
|
|
|
// and then update hark with the new value for inputStream
|
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
this._inputStream(stream);
|
2017-09-30 04:42:34 +08:00
|
|
|
}
|
|
|
|
|
2021-03-07 09:09:43 +08:00
|
|
|
/**
|
|
|
|
* Sets the current status for breakout audio transfer
|
2021-03-08 02:01:12 +08:00
|
|
|
* @param {Object} newStatus The status Object to be set for
|
2021-03-07 09:09:43 +08:00
|
|
|
* audio transfer.
|
2021-03-08 02:01:12 +08:00
|
|
|
* @param {string} newStatus.breakoutMeetingId The meeting id of the current
|
|
|
|
* breakout audio transfer.
|
|
|
|
* @param {string} newStatus.status The status of the current audio
|
|
|
|
* transfer. Valid values are
|
|
|
|
* 'connected', 'disconnected' and
|
|
|
|
* 'returning'.
|
2021-03-07 09:09:43 +08:00
|
|
|
*/
|
2021-03-08 02:01:12 +08:00
|
|
|
setBreakoutAudioTransferStatus(newStatus) {
|
|
|
|
const currentStatus = this._breakoutAudioTransferStatus;
|
|
|
|
const { breakoutMeetingId, status } = newStatus;
|
2021-03-07 09:09:43 +08:00
|
|
|
|
|
|
|
if (typeof breakoutMeetingId === 'string') {
|
2021-03-08 02:01:12 +08:00
|
|
|
currentStatus.breakoutMeetingId = breakoutMeetingId;
|
2023-07-31 22:24:25 +08:00
|
|
|
} else {
|
|
|
|
currentStatus.breakoutMeetingId = null;
|
2021-03-07 09:09:43 +08:00
|
|
|
}
|
2021-03-17 22:30:07 +08:00
|
|
|
|
2021-03-07 09:09:43 +08:00
|
|
|
if (typeof status === 'string') {
|
2021-03-08 02:01:12 +08:00
|
|
|
currentStatus.status = status;
|
2021-09-21 22:22:05 +08:00
|
|
|
|
|
|
|
if (this.bridge && !this.isListenOnly) {
|
|
|
|
if (status !== BREAKOUT_AUDIO_TRANSFER_STATES.CONNECTED) {
|
|
|
|
this.bridge.ignoreCallState = false;
|
|
|
|
} else {
|
|
|
|
this.bridge.ignoreCallState = true;
|
|
|
|
}
|
|
|
|
}
|
2021-03-07 09:09:43 +08:00
|
|
|
}
|
2021-02-26 02:36:11 +08:00
|
|
|
}
|
|
|
|
|
2021-03-07 09:09:43 +08:00
|
|
|
getBreakoutAudioTransferStatus() {
|
|
|
|
return this._breakoutAudioTransferStatus;
|
2021-02-26 02:36:11 +08:00
|
|
|
}
|
|
|
|
|
2017-10-18 03:16:42 +08:00
|
|
|
set userData(value) {
|
|
|
|
this._userData = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
get userData() {
|
|
|
|
return this._userData;
|
|
|
|
}
|
2017-10-23 20:41:09 +08:00
|
|
|
|
2019-06-13 05:01:20 +08:00
|
|
|
playHangUpSound() {
|
2021-12-10 04:37:05 +08:00
|
|
|
this.playAlertSound(
|
|
|
|
`${
|
2024-03-07 01:28:18 +08:00
|
|
|
window.meetingClientSettings.public.app.cdn +
|
2024-08-03 02:10:39 +08:00
|
|
|
window.meetingClientSettings.public.app.basename
|
2021-12-10 04:37:05 +08:00
|
|
|
}` + '/resources/sounds/LeftCall.mp3'
|
|
|
|
);
|
2019-06-13 05:01:20 +08:00
|
|
|
}
|
|
|
|
|
2019-04-12 21:55:14 +08:00
|
|
|
/**
 * Shows an audio-related toast, using the listen-only icon when applicable.
 * @param {string} message toast text.
 * @param {boolean} [error=false] whether to render as an error toast.
 * @param {string} [icon='unmute'] icon name for non-listen-only sessions.
 */
notify(message, error = false, icon = 'unmute') {
  const audioIcon = this.isListenOnly ? 'listen' : icon;
  const severity = error ? 'error' : 'info';

  notify(message, severity, audioIcon);
}
|
2019-08-03 05:32:42 +08:00
|
|
|
|
2019-11-30 05:48:04 +08:00
|
|
|
monitor() {
|
2022-04-21 04:38:52 +08:00
|
|
|
const peer = this.bridge.getPeerConnection();
|
2019-11-30 05:48:04 +08:00
|
|
|
monitorAudioConnection(peer);
|
|
|
|
}
|
|
|
|
|
2019-08-03 05:32:42 +08:00
|
|
|
handleAllowAutoplay() {
|
|
|
|
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
|
2019-09-07 02:58:22 +08:00
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_autoplay_allowed',
|
|
|
|
},
|
|
|
|
'Listen only autoplay allowed by the user'
|
|
|
|
);
|
2019-09-07 02:58:22 +08:00
|
|
|
|
2019-08-03 05:32:42 +08:00
|
|
|
while (this.failedMediaElements.length) {
|
|
|
|
const mediaElement = this.failedMediaElements.shift();
|
|
|
|
if (mediaElement) {
|
2019-09-07 02:58:22 +08:00
|
|
|
playAndRetry(mediaElement).then((played) => {
|
|
|
|
if (!played) {
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.error(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_autoplay_handling_failed',
|
|
|
|
},
|
|
|
|
'Listen only autoplay handling failed to play media'
|
|
|
|
);
|
2019-09-07 02:58:22 +08:00
|
|
|
} else {
|
|
|
|
// logCode is listenonly_* to make it consistent with the other tag play log
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{
|
|
|
|
logCode: 'listenonly_media_play_success',
|
|
|
|
},
|
|
|
|
'Listen only media played successfully'
|
|
|
|
);
|
2019-09-07 02:58:22 +08:00
|
|
|
}
|
2019-08-03 05:32:42 +08:00
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
this.autoplayBlocked = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
handlePlayElementFailed(e) {
|
|
|
|
const { mediaElement } = e.detail;
|
|
|
|
|
|
|
|
e.stopPropagation();
|
|
|
|
this.failedMediaElements.push(mediaElement);
|
|
|
|
if (!this.autoplayBlocked) {
|
2021-12-10 04:37:05 +08:00
|
|
|
logger.info(
|
|
|
|
{
|
|
|
|
logCode: 'audiomanager_autoplay_prompt',
|
|
|
|
},
|
|
|
|
'Prompting user for action to play listen only media'
|
|
|
|
);
|
2019-08-03 05:32:42 +08:00
|
|
|
this.autoplayBlocked = true;
|
|
|
|
}
|
|
|
|
}
|
2020-09-10 01:03:27 +08:00
|
|
|
|
2020-09-17 22:37:28 +08:00
|
|
|
setSenderTrackEnabled(shouldEnable) {
|
2020-09-12 07:06:56 +08:00
|
|
|
// If the bridge is set to listen only mode, nothing to do here. This method
|
|
|
|
// is solely for muting outbound tracks.
|
|
|
|
if (this.isListenOnly) return;
|
|
|
|
|
2020-09-10 22:50:50 +08:00
|
|
|
// Bridge -> SIP.js bridge, the only full audio capable one right now
|
|
|
|
const peer = this.bridge.getPeerConnection();
|
2020-09-26 07:11:44 +08:00
|
|
|
|
|
|
|
if (!peer) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
peer.getSenders().forEach((sender) => {
|
2020-09-10 01:03:27 +08:00
|
|
|
const { track } = sender;
|
|
|
|
if (track && track.kind === 'audio') {
|
2020-09-10 22:50:50 +08:00
|
|
|
track.enabled = shouldEnable;
|
2020-09-10 01:03:27 +08:00
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2020-09-17 22:37:28 +08:00
|
|
|
mute() {
|
2020-09-10 22:50:50 +08:00
|
|
|
this.setSenderTrackEnabled(false);
|
|
|
|
}
|
|
|
|
|
2020-09-17 22:37:28 +08:00
|
|
|
unmute() {
|
2020-09-10 22:50:50 +08:00
|
|
|
this.setSenderTrackEnabled(true);
|
2020-09-10 01:03:27 +08:00
|
|
|
}
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
|
2021-04-02 02:53:43 +08:00
|
|
|
playAlertSound(url) {
|
2021-04-13 00:57:02 +08:00
|
|
|
if (!url || !this.bridge) {
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
return Promise.resolve();
|
|
|
|
}
|
|
|
|
|
|
|
|
const audioAlert = new Audio(url);
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
audioAlert.addEventListener('ended', () => {
|
|
|
|
audioAlert.src = null;
|
|
|
|
});
|
2021-04-02 02:53:43 +08:00
|
|
|
|
|
|
|
const { outputDeviceId } = this.bridge;
|
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
if (outputDeviceId && typeof audioAlert.setSinkId === 'function') {
|
|
|
|
return audioAlert.setSinkId(outputDeviceId).then(() => audioAlert.play());
|
Correctly set audio input/output devices
When refusing ("thumbs down" button) echo test, user is able to select a different input device. This should work fine for chrome, firefox and safari (once user grants permission when asked by html5client).
For output devices, we depend on setSinkId function, which is enabled by default on current chrome release (2020) but not in Firefox (user needs to enable "setSinkId in about:config page). This implementation is listed as (?) in MDN.
In other words, output device selection should work out of the box for chrome, only.
When selecting an outputDevice, all alert sounds (hangup, screenshare , polling, etc) also goes to the same output device.
This solves #10592
2020-10-07 07:37:55 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
return audioAlert.play();
|
|
|
|
}
|
2021-01-23 03:30:42 +08:00
|
|
|
|
|
|
|
async updateAudioConstraints(constraints) {
|
|
|
|
await this.bridge.updateAudioConstraints(constraints);
|
|
|
|
}
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2021-08-13 20:46:19 +08:00
|
|
|
/**
|
2021-08-26 03:27:46 +08:00
|
|
|
* Get the info about candidate-pair that is being used by the current peer.
|
|
|
|
* For firefox, or any other browser that doesn't support iceTransport
|
|
|
|
* property of RTCDtlsTransport, we retrieve the selected local candidate
|
|
|
|
* by looking into stats returned from getStats() api. For other browsers,
|
|
|
|
* we should use getSelectedCandidatePairFromPeer instead, because it has
|
|
|
|
* relatedAddress and relatedPort information about local candidate.
|
2021-08-13 20:46:19 +08:00
|
|
|
*
|
2021-08-26 03:27:46 +08:00
|
|
|
* @param {Object} stats object returned by getStats() api
|
|
|
|
* @returns An Object of type RTCIceCandidatePairStats containing information
|
|
|
|
* about the candidate-pair being used by the peer.
|
2021-08-13 20:46:19 +08:00
|
|
|
*
|
2021-08-26 03:27:46 +08:00
|
|
|
* For firefox, we can use the 'selected' flag to find the candidate pair
|
|
|
|
* being used, while in chrome we can retrieved the selected pair
|
|
|
|
* by looking for the corresponding transport of the active peer.
|
2021-08-13 20:46:19 +08:00
|
|
|
* For more information see:
|
2021-08-26 03:27:46 +08:00
|
|
|
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
|
2021-08-13 20:46:19 +08:00
|
|
|
* and
|
2021-08-26 03:27:46 +08:00
|
|
|
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/selected#value
|
|
|
|
*/
|
|
|
|
static getSelectedCandidatePairFromStats(stats) {
|
|
|
|
if (!stats || typeof stats !== 'object') return null;
|
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
const transport = Object.values(stats).find((stat) => stat.type === 'transport') || {};
|
2021-08-26 03:27:46 +08:00
|
|
|
|
2021-12-10 04:37:05 +08:00
|
|
|
return Object.values(stats).find(
|
|
|
|
(stat) =>
|
|
|
|
stat.type === 'candidate-pair' &&
|
|
|
|
stat.nominated &&
|
|
|
|
(stat.selected || stat.id === transport.selectedCandidatePairId)
|
|
|
|
);
|
2021-08-26 03:27:46 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Get the info about candidate-pair that is being used by the current peer.
|
|
|
|
* This function's return value (RTCIceCandidatePair object ) is different
|
|
|
|
* from getSelectedCandidatePairFromStats (RTCIceCandidatePairStats object).
|
|
|
|
* The information returned here contains the relatedAddress and relatedPort
|
|
|
|
* fields (only for candidates that are derived from another candidate, for
|
|
|
|
* host candidates, these fields are null). These field can be helpful for
|
|
|
|
* debugging network issues. For all the browsers that support iceTransport
|
|
|
|
* field of RTCDtlsTransport, we use this function as default to retrieve
|
|
|
|
* information about current selected-pair. For other browsers we retrieve it
|
|
|
|
* from getSelectedCandidatePairFromStats
|
|
|
|
*
|
|
|
|
* @returns {Object} An RTCIceCandidatePair represented the selected
|
|
|
|
* candidate-pair of the active peer.
|
|
|
|
*
|
|
|
|
* For more info see:
|
|
|
|
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidatepair
|
|
|
|
* and
|
|
|
|
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePair
|
|
|
|
* and
|
|
|
|
* https://developer.mozilla.org/en-US/docs/Web/API/RTCDtlsTransport
|
2021-08-13 20:46:19 +08:00
|
|
|
*/
|
2021-08-26 03:27:46 +08:00
|
|
|
getSelectedCandidatePairFromPeer() {
|
2022-04-21 04:38:52 +08:00
|
|
|
if (!this.bridge) return null;
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2022-04-21 04:38:52 +08:00
|
|
|
const peer = this.bridge.getPeerConnection();
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2021-08-26 03:27:46 +08:00
|
|
|
if (!peer) return null;
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2021-08-26 03:27:46 +08:00
|
|
|
let selectedPair = null;
|
2021-08-13 03:39:04 +08:00
|
|
|
|
|
|
|
const receivers = peer.getReceivers();
|
2021-12-10 04:37:05 +08:00
|
|
|
if (
|
|
|
|
receivers &&
|
|
|
|
receivers[0] &&
|
|
|
|
receivers[0].transport &&
|
|
|
|
receivers[0].transport.iceTransport &&
|
2024-05-01 00:44:29 +08:00
|
|
|
typeof receivers[0].transport.iceTransport.getSelectedCandidatePair === 'function'
|
2021-12-10 04:37:05 +08:00
|
|
|
) {
|
2023-07-25 02:56:40 +08:00
|
|
|
selectedPair = receivers[0].transport.iceTransport.getSelectedCandidatePair();
|
2021-08-26 03:27:46 +08:00
|
|
|
}
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2021-08-26 03:27:46 +08:00
|
|
|
return selectedPair;
|
|
|
|
}
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2021-08-26 03:27:46 +08:00
|
|
|
/**
|
|
|
|
* Gets the selected local-candidate information. For browsers that support
|
|
|
|
* iceTransport property (see getSelectedCandidatePairFromPeer) we get this
|
|
|
|
* info from peer, otherwise we retrieve this information from getStats() api
|
|
|
|
*
|
|
|
|
* @param {Object} [stats] The status object returned from getStats() api
|
|
|
|
* @returns {Object} An Object containing the information about the
|
|
|
|
* local-candidate. For browsers that support iceTransport
|
|
|
|
* property, the object's type is RCIceCandidate. A
|
|
|
|
* RTCIceCandidateStats is returned, otherwise.
|
|
|
|
*
|
|
|
|
* For more info see:
|
|
|
|
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidate
|
|
|
|
* and
|
|
|
|
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
|
|
|
|
*
|
|
|
|
*/
|
|
|
|
getSelectedLocalCandidate(stats) {
|
|
|
|
let selectedPair = this.getSelectedCandidatePairFromPeer();
|
|
|
|
|
|
|
|
if (selectedPair) return selectedPair.local;
|
|
|
|
|
|
|
|
if (!stats) return null;
|
|
|
|
|
|
|
|
selectedPair = AudioManager.getSelectedCandidatePairFromStats(stats);
|
|
|
|
|
|
|
|
if (selectedPair) return stats[selectedPair.localCandidateId];
|
|
|
|
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Gets the information about private/public ip address from peer
|
|
|
|
* stats. The information retrieved from selected pair from the current
|
|
|
|
* RTCIceTransport and returned in a new Object with format:
|
|
|
|
* {
|
|
|
|
* address: String,
|
|
|
|
* relatedAddress: String,
|
|
|
|
* port: Number,
|
|
|
|
* relatedPort: Number,
|
|
|
|
* candidateType: String,
|
|
|
|
* selectedLocalCandidate: Object,
|
|
|
|
* }
|
|
|
|
*
|
|
|
|
* If users isn't behind NAT, relatedAddress and relatedPort may be null.
|
|
|
|
*
|
|
|
|
* @returns An Object containing the information about private/public IP
|
|
|
|
* addresses and ports.
|
|
|
|
*
|
|
|
|
* For more information see:
|
|
|
|
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
|
|
|
|
* and
|
|
|
|
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
|
|
|
|
* and
|
|
|
|
* https://www.w3.org/TR/webrtc/#rtcicecandidatetype-enum
|
|
|
|
*/
|
|
|
|
async getInternalExternalIpAddresses(stats) {
|
|
|
|
let transports = {};
|
|
|
|
|
|
|
|
if (stats) {
|
|
|
|
const selectedLocalCandidate = this.getSelectedLocalCandidate(stats);
|
|
|
|
|
|
|
|
if (!selectedLocalCandidate) return transports;
|
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
const candidateType = selectedLocalCandidate.candidateType || selectedLocalCandidate.type;
|
2021-08-13 03:39:04 +08:00
|
|
|
|
|
|
|
transports = {
|
2021-12-10 04:37:05 +08:00
|
|
|
isUsingTurn: candidateType === 'relay',
|
2021-08-26 03:27:46 +08:00
|
|
|
address: selectedLocalCandidate.address,
|
|
|
|
relatedAddress: selectedLocalCandidate.relatedAddress,
|
|
|
|
port: selectedLocalCandidate.port,
|
|
|
|
relatedPort: selectedLocalCandidate.relatedPort,
|
|
|
|
candidateType,
|
|
|
|
selectedLocalCandidate,
|
2021-08-13 03:39:04 +08:00
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
return transports;
|
|
|
|
}
|
|
|
|
|
2021-08-13 20:46:19 +08:00
|
|
|
/**
|
|
|
|
* Get stats about active audio peer.
|
2021-08-23 22:07:12 +08:00
|
|
|
* We filter the status based on FILTER_AUDIO_STATS constant.
|
2021-08-13 20:46:19 +08:00
|
|
|
* We also append to the returned object the information about peer's
|
|
|
|
* transport. This transport information is retrieved by
|
|
|
|
* getInternalExternalIpAddressesFromPeer().
|
|
|
|
*
|
|
|
|
* @returns An Object containing the status about the active audio peer.
|
|
|
|
*
|
|
|
|
* For more information see:
|
|
|
|
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
|
2021-08-23 23:36:01 +08:00
|
|
|
* and
|
|
|
|
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
|
2021-08-13 20:46:19 +08:00
|
|
|
*/
|
2021-08-13 03:39:04 +08:00
|
|
|
async getStats() {
|
2022-04-21 04:38:52 +08:00
|
|
|
if (!this.bridge) return null;
|
2021-08-13 03:39:04 +08:00
|
|
|
|
2022-04-21 04:38:52 +08:00
|
|
|
const peer = this.bridge.getPeerConnection();
|
2021-08-13 03:39:04 +08:00
|
|
|
|
|
|
|
if (!peer) return null;
|
|
|
|
|
|
|
|
const peerStats = await peer.getStats();
|
|
|
|
|
|
|
|
const audioStats = {};
|
|
|
|
|
|
|
|
peerStats.forEach((stat) => {
|
|
|
|
if (FILTER_AUDIO_STATS.includes(stat.type)) {
|
2021-08-26 03:27:46 +08:00
|
|
|
audioStats[stat.id] = stat;
|
2021-08-13 03:39:04 +08:00
|
|
|
}
|
|
|
|
});
|
|
|
|
|
2023-07-25 02:56:40 +08:00
|
|
|
const transportStats = await this.getInternalExternalIpAddresses(audioStats);
|
2021-08-13 03:39:04 +08:00
|
|
|
|
|
|
|
return { transportStats, ...audioStats };
|
|
|
|
}
|
2017-09-20 01:47:57 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Module-level singleton: every importer shares this one AudioManager
// instance, so audio/bridge state is global to the client session.
const audioManager = new AudioManager();
export default audioManager;
|