Merge pull request #15695 from prlanzarin/u26/fix/audio-rollback-overconstrained
fix(audio): retry gUM without pre-set deviceIds on OverconstrainedErr…
commit da39134d98
@@ -5,6 +5,7 @@ import logger from '/imports/startup/client/logger';
 import Auth from '/imports/ui/services/auth';
 import {
   getAudioConstraints,
+  doGUM,
 } from '/imports/api/audio/client/bridge/service';

 const MEDIA = Meteor.settings.public.media;
@@ -94,9 +95,8 @@ export default class BaseAudioBridge {
         this.inputStream.getAudioTracks().forEach((track) => track.stop());
       }
-
-      newStream = await navigator.mediaDevices.getUserMedia(constraints);
+      newStream = await doGUM(constraints);
       await this.setInputStream(newStream);
       this.inputDeviceId = deviceId;
       if (backupStream && backupStream.active) {
         backupStream.getAudioTracks().forEach((track) => track.stop());
         backupStream = null;
@@ -75,7 +75,8 @@ const filterSupportedConstraints = (audioDeviceConstraints) => {
   }
 };

-const getAudioConstraints = ({ deviceId = '' }) => {
+const getAudioConstraints = (constraintFields = {}) => {
+  const { deviceId = '' } = constraintFields;
   const userSettingsConstraints = Settings.application.microphoneConstraints;
   const audioDeviceConstraints = userSettingsConstraints
     || AUDIO_MICROPHONE_CONSTRAINTS || {};
@@ -91,6 +92,29 @@ const getAudioConstraints = ({ deviceId = '' }) => {
   return matchConstraints;
 };

+const doGUM = async (constraints, retryOnFailure = false) => {
+  try {
+    const stream = await navigator.mediaDevices.getUserMedia(constraints);
+    return stream;
+  } catch (error) {
+    // This is probably a deviceId mismatch. Retry with base constraints
+    // without an exact deviceId.
+    if (error.name === 'OverconstrainedError' && retryOnFailure) {
+      logger.warn({
+        logCode: 'audio_overconstrainederror_rollback',
+        extraInfo: {
+          constraints,
+        },
+      }, 'Audio getUserMedia returned OverconstrainedError, rollback');
+
+      return navigator.mediaDevices.getUserMedia({ audio: getAudioConstraints() });
+    }
+
+    // Not OverconstrainedError - bubble up the error.
+    throw error;
+  }
+};
+
 export {
   DEFAULT_INPUT_DEVICE_ID,
   DEFAULT_OUTPUT_DEVICE_ID,
@@ -106,4 +130,5 @@ export {
   storeAudioInputDeviceId,
   getStoredAudioOutputDeviceId,
   storeAudioOutputDeviceId,
+  doGUM,
 };
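Taken together, the service changes give the bridges a single gUM wrapper: doGUM(constraints) behaves like navigator.mediaDevices.getUserMedia, while doGUM(constraints, true) adds one fallback attempt with the default, deviceId-less constraints when the browser rejects the request with OverconstrainedError. A rough usage sketch follows; storedDeviceId and the surrounding handling are illustrative, not part of this commit:

// Hypothetical caller: prefer a previously stored input device, but accept
// whatever default microphone the fallback constraints resolve to.
const constraints = { audio: getAudioConstraints({ deviceId: storedDeviceId }) };

doGUM(constraints, true)
  .then((stream) => {
    // After a rollback the stream may come from a different device than the
    // one requested, so re-read the effective deviceId from the track settings.
    const [track] = stream.getAudioTracks();
    return { stream, effectiveDeviceId: track && track.getSettings().deviceId };
  })
  .catch((error) => {
    // Anything other than a retried OverconstrainedError still rejects here
    // (e.g. NotAllowedError when the user denies the permission prompt).
    throw error;
  });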
@@ -13,6 +13,7 @@ import {
   getAudioSessionNumber,
   getAudioConstraints,
   filterSupportedConstraints,
+  doGUM,
 } from '/imports/api/audio/client/bridge/service';
 import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';

@@ -104,6 +105,11 @@ export default class SFUAudioBridge extends BaseAudioBridge {
     return null;
   }

+  // eslint-disable-next-line class-methods-use-this
+  mediaStreamFactory(constraints) {
+    return doGUM(constraints, true);
+  }
+
   handleTermination() {
     return this.callback({ status: this.baseCallStates.ended, bridge: this.bridgeName });
   }
@@ -258,6 +264,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {
       signalCandidates: SIGNAL_CANDIDATES,
       traceLogs: TRACE_LOGS,
       networkPriority: NETWORK_PRIORITY,
+      mediaStreamFactory: this.mediaStreamFactory,
     };

     this.broker = new AudioBroker(
@@ -316,9 +323,7 @@ export default class SFUAudioBridge extends BaseAudioBridge {

       if (IS_CHROME) {
         matchConstraints.deviceId = this.inputDeviceId;
-        const stream = await navigator.mediaDevices.getUserMedia(
-          { audio: matchConstraints },
-        );
+        const stream = await doGUM({ audio: matchConstraints });
         await this.setInputStream(stream);
       } else {
         this.inputStream.getAudioTracks()
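The mediaStreamFactory option threaded through here is just a function with the same shape as navigator.mediaDevices.getUserMedia: it receives the constraints the peer would have used and resolves to a MediaStream. A minimal sketch of that contract, assuming the injection pattern shown in this diff (acquireStream is an illustrative name, not code from the repository):

// Bridge side: wrap gUM with the rollback behaviour and hand it down.
const mediaStreamFactory = (constraints) => doGUM(constraints, true);

// Peer side: prefer an injected factory, otherwise fall back to plain gUM.
const acquireStream = (constraints, factory) => (
  typeof factory === 'function'
    ? factory(constraints)
    : navigator.mediaDevices.getUserMedia(constraints)
);

acquireStream({ audio: getAudioConstraints() }, mediaStreamFactory)
  .then((stream) => {
    // The resulting tracks are then attached to the peer connection as usual.
    stream.getAudioTracks().forEach((track) => { /* e.g. pc.addTrack(track, stream) */ });
  });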
@@ -23,6 +23,7 @@ import {
   getAudioSessionNumber,
   getAudioConstraints,
   filterSupportedConstraints,
+  doGUM,
 } from '/imports/api/audio/client/bridge/service';

 const MEDIA = Meteor.settings.public.media;
@@ -384,7 +385,8 @@ class SIPSession {
     if (!constraints.audio && !constraints.video) {
       return Promise.resolve(new MediaStream());
     }
-    return navigator.mediaDevices.getUserMedia(constraints);
+
+    return doGUM(constraints, true);
   }

   createUserAgent(iceServers) {
@@ -1117,9 +1119,7 @@ class SIPSession {
       if (isChrome) {
         matchConstraints.deviceId = this.inputDeviceId;

-        const stream = await navigator.mediaDevices.getUserMedia(
-          { audio: matchConstraints },
-        );
+        const stream = await doGUM({ audio: matchConstraints });

         this.currentSession.sessionDescriptionHandler
           .setLocalMediaStream(stream);
@@ -11,6 +11,7 @@ import LocalEchoContainer from '/imports/ui/components/audio/local-echo/containe
 import DeviceSelector from '/imports/ui/components/audio/device-selector/component';
 import {
   getAudioConstraints,
+  doGUM,
 } from '/imports/api/audio/client/bridge/service';
 import MediaStreamUtils from '/imports/utils/media-stream-utils';

@@ -245,7 +246,7 @@ class AudioSettings extends React.Component {
       audio: getAudioConstraints({ deviceId: inputDeviceId }),
     };

-    return navigator.mediaDevices.getUserMedia(constraints);
+    return doGUM(constraints, true);
   }

   renderOutputTest() {
@@ -717,19 +717,15 @@ class AudioManager {
       // a new one will be created for the new stream
       this.inputStream = null;
       return this.bridge.liveChangeInputDevice(deviceId).then((stream) => {
-        logger.debug({
-          logCode: 'audiomanager_input_live_device_change',
-          extraInfo: {
-            deviceId: currentDeviceId,
-            newDeviceId: deviceId,
-          },
-        }, `Microphone input device (live) changed: from ${currentDeviceId} to ${deviceId}`);
-        this.setSenderTrackEnabled(!this.isMuted);
-        this.inputDeviceId = deviceId;
-        this.inputStream = stream;
+        const extractedDeviceId = MediaStreamUtils.extractDeviceIdFromStream(this.inputStream, 'audio');
+        if (extractedDeviceId && extractedDeviceId !== this.inputDeviceId) {
+          this.changeInputDevice(extractedDeviceId);
+        }
         // Live input device change - add device ID to session storage so it
         // can be re-used on refreshes/other sessions
-        storeAudioInputDeviceId(deviceId);
+        this.inputStream = stream;
+        storeAudioInputDeviceId(extractedDeviceId);
+        this.setSenderTrackEnabled(!this.isMuted);
       }).catch((error) => {
         logger.error({
           logCode: 'audiomanager_input_live_device_change_failure',
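The audio-manager change assumes that, after a rollback, the device actually opened can differ from the one requested, so the effective deviceId is re-read from the resulting stream and that value is what gets stored and propagated. MediaStreamUtils.extractDeviceIdFromStream presumably reads the audio track's settings; a minimal sketch of that idea (an assumption for illustration, not the helper's actual code):

// Assumed equivalent of MediaStreamUtils.extractDeviceIdFromStream(stream, 'audio').
const extractDeviceIdFromStream = (stream, kind) => {
  if (!stream) return null;
  const tracks = kind === 'audio' ? stream.getAudioTracks() : stream.getVideoTracks();
  // MediaStreamTrack.getSettings().deviceId reports the device actually in use,
  // which may differ from the exact deviceId originally requested.
  return tracks.length > 0 ? tracks[0].getSettings().deviceId : null;
};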
@@ -84,6 +84,7 @@ class AudioBroker extends BaseBroker {
       },
       trace: this.traceLogs,
       networkPriorities: this.networkPriority ? { audio: this.networkPriority } : undefined,
+      mediaStreamFactory: this.mediaStreamFactory,
     };

     const peerRole = this.role === 'sendrecv' ? this.role : 'recvonly';
@@ -43,7 +43,7 @@ export default class WebRtcPeer extends EventEmitter2 {
       this.on('candidategatheringdone', this.oncandidategatheringdone);
     }
     if (typeof this.options.mediaStreamFactory === 'function') {
-      this.mediaStreamFactory = this.options.mediaStreamFactory.bind(this);
+      this._mediaStreamFactory = this.options.mediaStreamFactory.bind(this);
     }
   }

@@ -145,8 +145,7 @@ export default class WebRtcPeer extends EventEmitter2 {
       return Promise.resolve();
     }

-    this.logger.info('BBB::WebRtcPeer::mediaStreamFactory - running default factory', this.mediaConstraints);
-    return navigator.mediaDevices.getUserMedia(this.mediaConstraints).then((stream) => {
+    const handleGUMResolution = (stream) => {
       if (stream.getAudioTracks().length > 0) {
         this.audioStream = stream;
         this.logger.debug('BBB::WebRtcPeer::mediaStreamFactory - generated audio', this.audioStream);
@@ -155,10 +154,22 @@
         this.videoStream = stream;
         this.logger.debug('BBB::WebRtcPeer::mediaStreamFactory - generated video', this.videoStream);
       }
-    }).catch((error) => {
-      this.logger.error('BBB::WebRtcPeer::mediaStreamFactory - gUM failed', error);
-      throw error;
-    });
+
+      return stream;
+    }
+
+    if (typeof this._mediaStreamFactory === 'function') {
+      return this._mediaStreamFactory(this.mediaConstraints).then(handleGUMResolution);
+    }
+
+    this.logger.info('BBB::WebRtcPeer::mediaStreamFactory - running default factory', this.mediaConstraints);
+
+    return navigator.mediaDevices.getUserMedia(this.mediaConstraints)
+      .then(handleGUMResolution)
+      .catch((error) => {
+        this.logger.error('BBB::WebRtcPeer::mediaStreamFactory - gUM failed', error);
+        throw error;
+      });
   }

   set peerConnection(pc) {
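With the peer-level changes, both acquisition paths (an injected _mediaStreamFactory and the default getUserMedia call) funnel their result through the shared handleGUMResolution bookkeeping before the stream is used. A hedged sketch of how a consumer could wire the factory in through the options object; the constructor shape and option names beyond mediaStreamFactory are simplified assumptions, not taken from this commit:

// Illustrative wiring only - preferredDeviceId is a placeholder.
const options = {
  mediaConstraints: { audio: getAudioConstraints({ deviceId: preferredDeviceId }) },
  // The peer binds this to _mediaStreamFactory and calls it instead of gUM,
  // so OverconstrainedError rollbacks happen transparently.
  mediaStreamFactory: (constraints) => doGUM(constraints, true),
};

const peer = new WebRtcPeer('sendrecv', options);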