Merge pull request #21036 from prlanzarin/u27/feat/rtc-logs-info

feat: add WebRTC stats information to client logs
This commit is contained in:
Anton Georgiev 2024-09-11 16:30:59 -04:00 committed by GitHub
commit 11ac9d2184
12 changed files with 769 additions and 386 deletions

View File

@ -7,11 +7,22 @@ import {
getAudioConstraints,
doGUM,
} from '/imports/api/audio/client/bridge/service';
import { getTransportStats } from '/imports/utils/stats';
const MEDIA = Meteor.settings.public.media;
const BASE_BRIDGE_NAME = 'base';
const CALL_TRANSFER_TIMEOUT = MEDIA.callTransferTimeout;
const TRANSFER_TONE = '1';
/**
* Audio stats to be filtered in getStats()
*/
const FILTER_AUDIO_STATS = [
'outbound-rtp',
'inbound-rtp',
'candidate-pair',
'local-candidate',
'transport',
];
export default class BaseAudioBridge {
constructor(userData) {
@ -174,4 +185,55 @@ export default class BaseAudioBridge {
});
});
}
/**
* Get stats about active audio peer.
* We filter the stats based on the FILTER_AUDIO_STATS constant.
* We also append to the returned object the information about the peer's
* transport. This transport information is retrieved by
* getTransportStats().
*
* @returns An Object containing the stats about the active audio peer.
*
* For more information see:
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
*/
async getStats(stats) {
let peer;
let peerStats = stats;
let transportStats = {};
if (!peerStats) {
peer = this.getPeerConnection();
if (!peer) return null;
peerStats = await peer.getStats();
}
const audioStats = {};
peerStats.forEach((stat) => {
if (FILTER_AUDIO_STATS.includes(stat.type)) {
audioStats[stat.id] = stat;
}
});
try {
transportStats = await getTransportStats(peer, audioStats);
} catch (error) {
logger.debug({
logCode: 'audio_transport_stats_failed',
extraInfo: {
errorCode: error.errorCode,
errorMessage: error.errorMessage,
bridgeName: this.bridgeName,
},
}, 'Failed to get transport stats for audio');
}
return { transportStats, ...audioStats };
}
}
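// For illustration only (mock data, not part of the bridge): the filtering that getStats()
// applies to an RTCStatsReport. Entries whose type is listed in FILTER_AUDIO_STATS are kept
// and re-keyed by their stat id; everything else (codecs, certificates, ...) is dropped.
const filterReportSketch = (report) => {
  const filtered = {};
  report.forEach((stat) => {
    if (FILTER_AUDIO_STATS.includes(stat.type)) filtered[stat.id] = stat;
  });
  return filtered;
};
// An RTCStatsReport behaves like a read-only Map of id -> stats dictionary:
const mockReport = new Map([
  ['RTCInboundRTPAudioStream_1', { id: 'RTCInboundRTPAudioStream_1', type: 'inbound-rtp', packetsReceived: 1200 }],
  ['RTCCodec_0', { id: 'RTCCodec_0', type: 'codec' }],
]);
// filterReportSketch(mockReport) keeps only the inbound-rtp entry.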

View File

@ -17,6 +17,7 @@ import {
doGUM,
} from '/imports/api/audio/client/bridge/service';
import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';
import { getRTCStatsLogMetadata } from '/imports/utils/stats';
const SFU_URL = Meteor.settings.public.kurento.wsUrl;
const DEFAULT_LISTENONLY_MEDIA_SERVER = Meteor.settings.public.kurento.listenOnlyMediaServer;
@ -125,6 +126,24 @@ export default class SFUAudioBridge extends BaseAudioBridge {
return null;
}
async getStatsLogData() {
try {
const stats = await this.getStats();
return getRTCStatsLogMetadata(stats);
} catch (error) {
logger.warn({
logCode: 'sfuaudio_stats_log_error',
extraInfo: {
errorMessage: error.message,
bridge: this.bridgeName,
role: this.role,
},
}, 'Failed to get audio stats log data');
return null;
}
}
// eslint-disable-next-line class-methods-use-this
mediaStreamFactory(constraints) {
return doGUM(constraints, true);
@ -195,59 +214,69 @@ export default class SFUAudioBridge extends BaseAudioBridge {
if (!this.reconnecting) {
if (this.broker.started) {
logger.error({
logCode: 'sfuaudio_error_try_to_reconnect',
extraInfo: {
errorMessage,
errorCode,
errorCause,
bridge: this.bridgeName,
role: this.role,
},
}, 'SFU audio failed, try to reconnect');
this.reconnect();
return resolve();
return this.getStatsLogData().then((stats) => {
logger.error({
logCode: 'sfuaudio_error_try_to_reconnect',
extraInfo: {
errorMessage,
errorCode,
errorCause,
bridge: this.bridgeName,
role: this.role,
stats,
},
}, 'SFU audio failed, try to reconnect');
this.reconnect();
resolve();
});
}
if (RETRYABLE_ERRORS.includes(errorCode) && RETRY_THROUGH_RELAY) {
logger.error({
logCode: 'sfuaudio_error_retry_through_relay',
extraInfo: {
errorMessage,
errorCode,
errorCause,
bridge: this.bridgeName,
role: this.role,
},
}, 'SFU audio failed to connect, retry through relay');
this.reconnect({ forceRelay: true });
return resolve();
return this.getStatsLogData().then((stats) => {
logger.error({
logCode: 'sfuaudio_error_retry_through_relay',
extraInfo: {
errorMessage,
errorCode,
errorCause,
bridge: this.bridgeName,
role: this.role,
stats,
},
}, 'SFU audio failed to connect, retry through relay');
this.reconnect({ forceRelay: true });
resolve();
});
}
}
// Already tried reconnecting once OR the user hadn't successfully
// connected in the first place and retrying isn't an option. Finish the session
// and reject with the error
logger.error({
logCode: 'sfuaudio_error',
extraInfo: {
errorMessage,
errorCode,
errorCause,
reconnecting: this.reconnecting,
bridge: this.bridgeName,
role: this.role,
},
}, 'SFU audio failed');
this.clearConnectionTimeout();
this.broker.stop();
this.callback({
status: this.baseCallStates.failed,
error: errorCode,
bridgeError: errorMessage,
bridge: this.bridgeName,
this.getStatsLogData().then((stats) => {
logger.error({
logCode: 'sfuaudio_error',
extraInfo: {
errorMessage,
errorCode,
errorCause,
reconnecting: this.reconnecting,
bridge: this.bridgeName,
role: this.role,
},
}, 'SFU audio failed');
this.broker.stop();
this.callback({
status: this.baseCallStates.failed,
error: errorCode,
bridgeError: errorMessage,
bridge: this.bridgeName,
stats,
});
reject(error);
});
return reject(error);
});
}
@ -301,6 +330,10 @@ export default class SFUAudioBridge extends BaseAudioBridge {
});
}
get clientSessionNumber() {
return this.broker ? this.broker.clientSessionNumber : null;
}
async _startBroker(options) {
try {
this.iceServers = await fetchWebRTCMappedStunTurnServers(this.sessionToken);

View File

@ -7,6 +7,7 @@ import { SCREENSHARING_ERRORS } from './errors';
import { shouldForceRelay } from '/imports/ui/services/bbb-webrtc-sfu/utils';
import MediaStreamUtils from '/imports/utils/media-stream-utils';
import { notifyStreamStateChange } from '/imports/ui/services/bbb-webrtc-sfu/stream-state-service';
import { getRTCStatsLogMetadata, getTransportStats } from '/imports/utils/stats';
const SFU_CONFIG = Meteor.settings.public.kurento;
const SFU_URL = SFU_CONFIG.wsUrl;
@ -16,11 +17,18 @@ const TRACE_LOGS = Meteor.settings.public.kurento.traceLogs;
const { screenshare: NETWORK_PRIORITY } = Meteor.settings.public.media.networkPriorities || {};
const GATHERING_TIMEOUT = Meteor.settings.public.kurento.gatheringTimeout;
const BRIDGE_NAME = 'kurento'
const BRIDGE_NAME = 'kurento';
const SCREENSHARE_VIDEO_TAG = 'screenshareVideo';
const SEND_ROLE = 'send';
const RECV_ROLE = 'recv';
const DEFAULT_VOLUME = 1;
const DEFAULT_SCREENSHARE_STATS_TYPES = [
'outbound-rtp',
'inbound-rtp',
'candidate-pair',
'local-candidate',
'transport',
];
// the error-code mapping is bridge specific; that's why it's not in the errors util
const ERROR_MAP = {
@ -90,6 +98,79 @@ export default class KurentoScreenshareBridge {
}
}
/**
* Get stats about all active screenshare peers.
*
* For more information see:
* - https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
* - https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
* @param {Array[String]} statsTypes - An array containing valid RTCStatsType
* values to include in the return object
*
* @returns {Object} The information about each active screen sharing peer.
* The returned format follows the one returned by the video service's
* getStats, which allows more than one peer connection to be returned.
* The format is given by:
* {
* peerIdString: RTCStatsReport
* }
*/
async getStats(statsTypes = DEFAULT_SCREENSHARE_STATS_TYPES) {
const stats = {};
let transportStats = {};
let peerStats = null;
const peer = this.getPeerConnection();
if (!peer) return null;
try {
peerStats = await peer.getStats();
} catch (error) {
return null;
}
peerStats.forEach((stat) => {
if (statsTypes.includes(stat.type)) {
stats[stat.type] = stat;
}
});
try {
transportStats = await getTransportStats(peer, stats);
} catch (error) {
logger.debug({
logCode: 'screenshare_transport_stats_failed',
extraInfo: {
errorCode: error.errorCode,
errorMessage: error.errorMessage,
role: this.role,
bridge: BRIDGE_NAME,
},
}, 'Failed to get transport stats for screenshare');
}
return { screenshareStats: { transportStats, ...stats } };
}
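// For illustration only: how a caller might read the report resolved by getStats() above.
// Note that, unlike the audio bridge (which keys stats by id), entries here are keyed by
// stat type; "bridge" below stands for a KurentoScreenshareBridge instance.
const inspectScreenshareStatsSketch = async (bridge) => {
  const report = await bridge.getStats(['outbound-rtp', 'transport']);
  if (!report) return null;
  const { screenshareStats } = report;
  return {
    framesEncoded: screenshareStats['outbound-rtp']?.framesEncoded,
    usingTurn: screenshareStats.transportStats?.isUsingTurn,
  };
};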
async getStatsLogData() {
try {
const { screenshareStats } = await this.getStats();
return getRTCStatsLogMetadata(screenshareStats);
} catch (error) {
logger.warn({
logCode: 'screenshare_stats_log_error',
extraInfo: {
errorMessage: error.message,
bridge: BRIDGE_NAME,
role: this.role,
},
}, 'Failed to get screenshare stats log data');
return null;
}
}
inboundStreamReconnect() {
const currentRestartIntervalMs = this.restartIntervalMs;
@ -212,43 +293,56 @@ export default class KurentoScreenshareBridge {
this.startedOnce = true;
this.clearReconnectionTimeout();
this.connectionAttempts = 0;
this.getStatsLogData().then((stats) => {
logger.info({
logCode: 'screenshare_viewer_start_success',
extraInfo: {
role: this.broker?.role || this.role,
bridge: BRIDGE_NAME,
stats,
},
}, 'Screenshare viewer started successfully');
});
}
handleBrokerFailure(error) {
mapErrorCode(error);
const { errorMessage, errorCode } = error;
logger.error({
logCode: 'screenshare_broker_failure',
extraInfo: {
errorCode,
errorMessage,
role: this.broker.role,
started: this.broker.started,
reconnecting: this.reconnecting,
bridge: BRIDGE_NAME,
},
}, `Screenshare broker failure: ${errorMessage}`);
return this.getStatsLogData().then((stats) => {
logger.error({
logCode: 'screenshare_broker_failure',
extraInfo: {
errorCode,
errorMessage,
role: this.role,
started: this.broker?.started,
reconnecting: this.reconnecting,
bridge: BRIDGE_NAME,
stats,
},
}, `Screenshare broker failure: ${errorMessage}`);
notifyStreamStateChange('screenshare', 'failed');
// Screensharing was already successfully negotiated and error occurred during
// during call; schedule a reconnect
if (this._shouldReconnect()) {
// this.broker.started => whether the reconnect should happen immediately.
// If this session previously established connection (N-sessions back)
// and it failed abruptly, then the timeout is overridden to a intermediate value
// (BASE_RECONNECTION_TIMEOUT)
let overrideTimeout;
if (this.broker?.started) {
overrideTimeout = 0;
} else if (this.startedOnce) {
overrideTimeout = BridgeService.BASE_RECONNECTION_TIMEOUT;
notifyStreamStateChange('screenshare', 'failed');
// Screensharing was already successfully negotiated and an error occurred
// during the call; schedule a reconnect
if (this._shouldReconnect()) {
// this.broker.started => whether the reconnect should happen immediately.
// If this session previously established connection (N-sessions back)
// and it failed abruptly, then the timeout is overridden to a intermediate value
// (BASE_RECONNECTION_TIMEOUT)
let overrideTimeout;
if (this.broker?.started) {
overrideTimeout = 0;
} else if (this.startedOnce) {
overrideTimeout = BridgeService.BASE_RECONNECTION_TIMEOUT;
}
this.scheduleReconnect({ overrideTimeout });
}
this.scheduleReconnect({ overrideTimeout });
}
return error;
return error;
});
}
async view(options = {
@ -289,13 +383,20 @@ export default class KurentoScreenshareBridge {
}
handlePresenterStart() {
logger.info({
logCode: 'screenshare_presenter_start_success',
}, 'Screenshare presenter started succesfully');
this.clearReconnectionTimeout();
this.startedOnce = true;
this.reconnecting = false;
this.connectionAttempts = 0;
this.getStatsLogData().then((stats) => {
logger.info({
logCode: 'screenshare_presenter_start_success',
extraInfo: {
role: this.broker?.role || this.role,
bridge: BRIDGE_NAME,
stats,
},
}, 'Screenshare presenter started successfully');
});
}
handleEnded() {
@ -310,8 +411,8 @@ export default class KurentoScreenshareBridge {
this.hasAudio = BridgeService.streamHasAudioTrack(stream);
this.gdmStream = stream;
const onerror = (error) => {
const normalizedError = this.handleBrokerFailure(error);
const onerror = async (error) => {
const normalizedError = await this.handleBrokerFailure(error);
if (!this.broker.started) {
// Broker hasn't started - if there are retries left, try again.
if (this.maxConnectionAttemptsReached()) {

View File

@ -36,17 +36,21 @@ class ConnectionStatusButton extends PureComponent {
);
}
setModalIsOpen = (isOpen) => this.setState({ isModalOpen: isOpen });
setModalIsOpen = (isOpen) => this.setState({ isModalOpen: isOpen });
renderModal(isModalOpen) {
const { logMediaStats, monitoringInterval } = this.props;
return (
isModalOpen ?
<ConnectionStatusModalContainer
{...{
isModalOpen,
setModalIsOpen: this.setModalIsOpen
}}
/> : null
(isModalOpen || logMediaStats) ?
<ConnectionStatusModalContainer
{...{
isModalOpen,
setModalIsOpen: this.setModalIsOpen,
logMediaStats,
monitoringInterval,
}}
/> : null
)
}

View File

@ -12,5 +12,6 @@ export default withTracker(() => {
return {
connected,
stats: ConnectionStatusService.getStats(),
logMediaStats: ConnectionStatusService.LOG_MEDIA_STATS,
};
})(connectionStatusButtonContainer);

View File

@ -6,6 +6,7 @@ import Icon from '/imports/ui/components/connection-status/icon/component';
import Service from '../service';
import Styled from './styles';
import ConnectionStatusHelper from '../status-helper/container';
import logger from '/imports/startup/client/logger';
const NETWORK_MONITORING_INTERVAL_MS = 2000;
const MIN_TIMEOUT = 3000;
@ -138,6 +139,14 @@ const propTypes = {
intl: PropTypes.shape({
formatMessage: PropTypes.func.isRequired,
}).isRequired,
isModalOpen: PropTypes.bool.isRequired,
logMonitoringInterval: PropTypes.number,
logMediaStats: PropTypes.bool,
};
const defaultProps = {
logMediaStats: false,
logMonitoringInterval: 30000,
};
const isConnectionStatusEmpty = (connectionStatus) => {
@ -194,8 +203,18 @@ class ConnectionStatusComponent extends PureComponent {
this.startMonitoringNetwork();
}
componentDidUpdate(prevProps) {
const { isModalOpen } = this.props;
// If the modal changed open state, we should re-start network monitoring
// with the appropriate interval
if (prevProps.isModalOpen !== isModalOpen) {
this.startMonitoringNetwork();
}
}
componentWillUnmount() {
Meteor.clearInterval(this.rateInterval);
this.stopMonitoringNetwork();
}
handleSelectTab(tab) {
@ -210,13 +229,42 @@ class ConnectionStatusComponent extends PureComponent {
});
}
stopMonitoringNetwork() {
clearInterval(this.rateInterval);
this.rateInterval = null;
clearTimeout(this.copyNetworkDataTimeout);
this.copyNetworkDataTimeout = null;
}
shouldLogMediaStats() {
const { logMediaStats, isModalOpen } = this.props;
const { networkData } = this.state;
const { audio, video } = networkData;
return logMediaStats
&& !isModalOpen
&& (Object.keys(audio).length > 0 || Object.keys(video).length > 0);
}
/**
* Start monitoring the network data.
* @return {Promise} A Promise that resolves when the monitoring process has started.
*/
async startMonitoringNetwork() {
const {
isModalOpen,
logMonitoringInterval,
logMediaStats,
} = this.props;
const monitoringInterval = !isModalOpen
? logMonitoringInterval
: NETWORK_MONITORING_INTERVAL_MS;
if (this.rateInterval) this.stopMonitoringNetwork();
let previousData = await Service.getNetworkData();
this.rateInterval = Meteor.setInterval(async () => {
this.rateInterval = setInterval(async () => {
const data = await Service.getNetworkData();
const {
@ -239,7 +287,7 @@ class ConnectionStatusComponent extends PureComponent {
audioCurrentDownloadRate,
jitter,
packetsLost,
transportStats: data.audio.transportStats,
transportStats: data.audio?.transportStats || {},
};
const {
@ -251,6 +299,7 @@ class ConnectionStatusComponent extends PureComponent {
const video = {
videoCurrentUploadRate,
videoCurrentDownloadRate,
screenshareTransportStats: data.video?.screenshareStats?.transportStats || {},
};
const { user } = data;
@ -266,7 +315,17 @@ class ConnectionStatusComponent extends PureComponent {
networkData,
hasNetworkData: true,
});
}, NETWORK_MONITORING_INTERVAL_MS);
if (this.shouldLogMediaStats()) {
logger.info({
logCode: 'media_stats',
extraInfo: {
audio,
video,
},
}, 'Media stats');
}
}, monitoringInterval);
}
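// Illustration of the cadence picked above (this file's defaults): stats are sampled and,
// when shouldLogMediaStats() allows it, logged every logMonitoringInterval ms (30 s) while
// the modal is closed, and every NETWORK_MONITORING_INTERVAL_MS (2 s) while it is open.
const pickMonitoringIntervalSketch = (isModalOpen, logMonitoringInterval) => (
  isModalOpen ? NETWORK_MONITORING_INTERVAL_MS : logMonitoringInterval
);
// pickMonitoringIntervalSketch(false, 30000) === 30000; pickMonitoringIntervalSketch(true, 30000) === 2000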
/**
@ -524,6 +583,8 @@ class ConnectionStatusComponent extends PureComponent {
const { selectedTab } = this.state;
if (!isModalOpen) return null;
return (
<Styled.ConnectionStatusModal
priority="low"

View File

@ -7,4 +7,5 @@ const connectionStatusContainer = props => <ConnectionStatusComponent {...props}
export default withTracker(() => ({
connectionStatus: ConnectionStatusService.getConnectionStatus(),
logMonitoringInterval: ConnectionStatusService.LOG_MONITORING_INTERVAL,
}))(connectionStatusContainer);

View File

@ -9,10 +9,13 @@ import { makeCall } from '/imports/ui/services/api';
import AudioService from '/imports/ui/components/audio/service';
import VideoService from '/imports/ui/components/video-provider/service';
import ScreenshareService from '/imports/ui/components/screenshare/service';
import { getDataType, addExtraInboundNetworkParameters } from '/imports/utils/stats';
const STATS = Meteor.settings.public.stats;
const NOTIFICATION = STATS.notification;
const STATS_INTERVAL = STATS.interval;
const LOG_MEDIA_STATS = STATS.logMediaStats.enabled;
const LOG_MONITORING_INTERVAL = STATS.logMediaStats.interval;
const ROLE_MODERATOR = Meteor.settings.public.user.role_moderator;
const intlMessages = defineMessages({
@ -284,68 +287,6 @@ const notification = (level, intl) => {
if (intl) notify(intl.formatMessage(intlMessages.notification), level, 'warning');
};
/**
* Calculates the jitter buffer average.
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay
* @param {Object} inboundRtpData The RTCInboundRtpStreamStats object retrieved
* in getStats() call.
* @returns The jitter buffer average in ms
*/
const calculateJitterBufferAverage = (inboundRtpData) => {
if (!inboundRtpData) return 0;
const {
jitterBufferDelay,
jitterBufferEmittedCount,
} = inboundRtpData;
if (!jitterBufferDelay || !jitterBufferEmittedCount) return '--';
return Math.round((jitterBufferDelay / jitterBufferEmittedCount) * 1000);
};
/**
* Given the data returned from getStats(), returns an array containing all the
* the stats of the given type.
* For more information see:
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsType
* @param {Object} data - RTCStatsReport object returned from getStats() API
* @param {String} type - The string type corresponding to RTCStatsType object
* @returns {Array[Object]} An array containing all occurrences of the given
* type in the data Object.
*/
const getDataType = (data, type) => {
if (!data || typeof data !== 'object' || !type) return [];
return Object.values(data).filter((stat) => stat.type === type);
};
/**
* Returns a new Object containing extra parameters calculated from inbound
* data. The input data is also appended in the returned Object.
* @param {Object} currentData - The object returned from getStats / service's
* getNetworkData()
* @returns {Object} the currentData object with the extra inbound network
* added to it.
*/
const addExtraInboundNetworkParameters = (data) => {
if (!data) return data;
const inboundRtpData = getDataType(data, 'inbound-rtp')[0];
if (!inboundRtpData) return data;
const extraParameters = {
jitterBufferAverage: calculateJitterBufferAverage(inboundRtpData),
packetsLost: inboundRtpData.packetsLost,
};
return Object.assign(inboundRtpData, extraParameters);
};
/**
* Retrieves the inbound and outbound data using WebRTC getStats API, for audio.
* @returns An Object with format (property:type) :
@ -546,6 +487,8 @@ const calculateBitsPerSecondFromMultipleData = (currentData, previousData) => {
};
export default {
LOG_MEDIA_STATS,
LOG_MONITORING_INTERVAL,
isModerator,
getConnectionStatus,
getStats,

View File

@ -15,12 +15,6 @@ import NotesService from '/imports/ui/components/notes/service';
const VOLUME_CONTROL_ENABLED = Meteor.settings.public.kurento.screenshare.enableVolumeControl;
const SCREENSHARE_MEDIA_ELEMENT_NAME = 'screenshareVideo';
const DEFAULT_SCREENSHARE_STATS_TYPES = [
'outbound-rtp',
'inbound-rtp',
];
const CONTENT_TYPE_CAMERA = "camera";
const CONTENT_TYPE_SCREENSHARE = "screenshare";
@ -355,9 +349,6 @@ const dataSavingSetting = () => Settings.dataSaving.viewScreenshare;
* For more information see:
* - https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
* - https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
* @param {Array[String]} statsType - An array containing valid RTCStatsType
* values to include in the return object
*
* @returns {Object} The information about each active screen sharing peer.
* The returned format follows the format returned by video's service
@ -367,22 +358,7 @@ const dataSavingSetting = () => Settings.dataSaving.viewScreenshare;
* peerIdString: RTCStatsReport
* }
*/
const getStats = async (statsTypes = DEFAULT_SCREENSHARE_STATS_TYPES) => {
const screenshareStats = {};
const peer = KurentoBridge.getPeerConnection();
if (!peer) return null;
const peerStats = await peer.getStats();
peerStats.forEach((stat) => {
if (statsTypes.includes(stat.type)) {
screenshareStats[stat.type] = stat;
}
});
return { screenshareStats };
};
const getStats = () => KurentoBridge.getStats();
// This method may throw errors
const isMediaFlowing = (previousStats, currentStats) => {

View File

@ -8,7 +8,10 @@ import logger from '/imports/startup/client/logger';
import { notify } from '/imports/ui/services/notification';
import playAndRetry from '/imports/utils/mediaElementPlayRetry';
import iosWebviewAudioPolyfills from '/imports/utils/ios-webview-audio-polyfills';
import { monitorAudioConnection } from '/imports/utils/stats';
import {
monitorAudioConnection,
getRTCStatsLogMetadata,
} from '/imports/utils/stats';
import AudioErrors from './error-codes';
import { Meteor } from 'meteor/meteor';
import browserInfo from '/imports/utils/browserInfo';
@ -47,17 +50,6 @@ const BREAKOUT_AUDIO_TRANSFER_STATES = {
RETURNING: 'returning',
};
/**
* Audio status to be filtered in getStats()
*/
const FILTER_AUDIO_STATS = [
'outbound-rtp',
'inbound-rtp',
'candidate-pair',
'local-candidate',
'transport',
];
class AudioManager {
constructor() {
this._inputDevice = {
@ -539,15 +531,19 @@ class AudioManager {
if (!this.isEchoTest) {
this.notify(this.intl.formatMessage(this.messages.info.JOINED_AUDIO));
logger.info({
logCode: 'audio_joined',
extraInfo: {
secondsToActivateAudio,
inputDeviceId: this.inputDeviceId,
outputDeviceId: this.outputDeviceId,
isListenOnly: this.isListenOnly,
},
}, 'Audio Joined');
this.getStats().then((stats) => {
logger.info({
logCode: 'audio_joined',
extraInfo: {
secondsToActivateAudio,
inputDeviceId: this.inputDeviceId,
outputDeviceId: this.outputDeviceId,
isListenOnly: this.isListenOnly,
stats: getRTCStatsLogMetadata(stats),
clientSessionNumber: this.bridge.clientSessionNumber,
},
}, 'Audio Joined');
});
if (STATS.enabled) this.monitor();
this.audioEventHandler({
name: 'started',
@ -597,11 +593,15 @@ class AudioManager {
callStateCallback(response) {
return new Promise((resolve) => {
const { STARTED, ENDED, FAILED, RECONNECTING, AUTOPLAY_BLOCKED } =
CALL_STATES;
const { status, error, bridgeError, silenceNotifications, bridge } =
response;
const { STARTED, ENDED, FAILED, RECONNECTING, AUTOPLAY_BLOCKED } = CALL_STATES;
const {
status,
error,
bridgeError,
silenceNotifications,
bridge,
stats = {},
} = response;
if (status === STARTED) {
this.isReconnecting = false;
@ -613,8 +613,15 @@ class AudioManager {
breakoutMeetingId: '',
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
});
logger.info({ logCode: 'audio_ended' }, 'Audio ended without issue');
this.onAudioExit();
logger.info({
logCode: 'audio_ended',
extraInfo: {
inputDeviceId: this.inputDeviceId,
outputDeviceId: this.outputDeviceId,
isListenOnly: this.isListenOnly,
},
}, 'Audio ended without issue');
} else if (status === FAILED) {
this.isReconnecting = false;
this.setBreakoutAudioTransferStatus({
@ -635,6 +642,7 @@ class AudioManager {
inputDeviceId: this.inputDeviceId,
outputDeviceId: this.outputDeviceId,
isListenOnly: this.isListenOnly,
stats,
},
},
`Audio error - errorCode=${error}, cause=${bridgeError}`
@ -650,10 +658,16 @@ class AudioManager {
breakoutMeetingId: '',
status: BREAKOUT_AUDIO_TRANSFER_STATES.DISCONNECTED,
});
logger.info(
{ logCode: 'audio_reconnecting' },
'Attempting to reconnect audio'
);
logger.info({
logCode: 'audio_reconnecting',
extraInfo: {
bridge,
inputDeviceId: this.inputDeviceId,
outputDeviceId: this.outputDeviceId,
isListenOnly: this.isListenOnly,
stats,
},
}, 'Attempting to reconnect audio');
this.notify(
this.intl.formatMessage(this.messages.info.RECONNECTING_AUDIO),
true
@ -971,172 +985,12 @@ class AudioManager {
await this.bridge.updateAudioConstraints(constraints);
}
/**
* Get the info about candidate-pair that is being used by the current peer.
* For firefox, or any other browser that doesn't support iceTransport
* property of RTCDtlsTransport, we retrieve the selected local candidate
* by looking into stats returned from getStats() api. For other browsers,
* we should use getSelectedCandidatePairFromPeer instead, because it has
* relatedAddress and relatedPort information about local candidate.
*
* @param {Object} stats object returned by getStats() api
* @returns An Object of type RTCIceCandidatePairStats containing information
* about the candidate-pair being used by the peer.
*
* For firefox, we can use the 'selected' flag to find the candidate pair
* being used, while in chrome we can retrieved the selected pair
* by looking for the corresponding transport of the active peer.
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/selected#value
*/
static getSelectedCandidatePairFromStats(stats) {
if (!stats || typeof stats !== 'object') return null;
const transport =
Object.values(stats).find((stat) => stat.type === 'transport') || {};
return Object.values(stats).find(
(stat) =>
stat.type === 'candidate-pair' &&
stat.nominated &&
(stat.selected || stat.id === transport.selectedCandidatePairId)
);
}
/**
* Get the info about candidate-pair that is being used by the current peer.
* This function's return value (RTCIceCandidatePair object ) is different
* from getSelectedCandidatePairFromStats (RTCIceCandidatePairStats object).
* The information returned here contains the relatedAddress and relatedPort
* fields (only for candidates that are derived from another candidate, for
* host candidates, these fields are null). These field can be helpful for
* debugging network issues. For all the browsers that support iceTransport
* field of RTCDtlsTransport, we use this function as default to retrieve
* information about current selected-pair. For other browsers we retrieve it
* from getSelectedCandidatePairFromStats
*
* @returns {Object} An RTCIceCandidatePair represented the selected
* candidate-pair of the active peer.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidatepair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCDtlsTransport
*/
getSelectedCandidatePairFromPeer() {
if (!this.bridge) return null;
const peer = this.bridge.getPeerConnection();
if (!peer) return null;
let selectedPair = null;
const receivers = peer.getReceivers();
if (
receivers &&
receivers[0] &&
receivers[0].transport &&
receivers[0].transport.iceTransport &&
typeof receivers[0].transport.iceTransport.getSelectedCandidatePair === 'function'
) {
selectedPair = receivers[0].transport.iceTransport.getSelectedCandidatePair();
}
return selectedPair;
}
/**
* Gets the selected local-candidate information. For browsers that support
* iceTransport property (see getSelectedCandidatePairFromPeer) we get this
* info from peer, otherwise we retrieve this information from getStats() api
*
* @param {Object} [stats] The status object returned from getStats() api
* @returns {Object} An Object containing the information about the
* local-candidate. For browsers that support iceTransport
* property, the object's type is RCIceCandidate. A
* RTCIceCandidateStats is returned, otherwise.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidate
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
*
*/
getSelectedLocalCandidate(stats) {
let selectedPair = this.getSelectedCandidatePairFromPeer();
if (selectedPair) return selectedPair.local;
if (!stats) return null;
selectedPair = AudioManager.getSelectedCandidatePairFromStats(stats);
if (selectedPair) return stats[selectedPair.localCandidateId];
return null;
}
/**
* Gets the information about private/public ip address from peer
* stats. The information retrieved from selected pair from the current
* RTCIceTransport and returned in a new Object with format:
* {
* address: String,
* relatedAddress: String,
* port: Number,
* relatedPort: Number,
* candidateType: String,
* selectedLocalCandidate: Object,
* }
*
* If users isn't behind NAT, relatedAddress and relatedPort may be null.
*
* @returns An Object containing the information about private/public IP
* addresses and ports.
*
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
* and
* https://www.w3.org/TR/webrtc/#rtcicecandidatetype-enum
*/
async getInternalExternalIpAddresses(stats) {
let transports = {};
if (stats) {
const selectedLocalCandidate = this.getSelectedLocalCandidate(stats);
if (!selectedLocalCandidate) return transports;
const candidateType =
selectedLocalCandidate.candidateType || selectedLocalCandidate.type;
transports = {
isUsingTurn: candidateType === 'relay',
address: selectedLocalCandidate.address,
relatedAddress: selectedLocalCandidate.relatedAddress,
port: selectedLocalCandidate.port,
relatedPort: selectedLocalCandidate.relatedPort,
candidateType,
selectedLocalCandidate,
};
}
return transports;
}
/**
* Get stats about active audio peer.
* We filter the stats based on the FILTER_AUDIO_STATS constant.
* We also append to the returned object the information about the peer's
* transport. This transport information is retrieved by
* getInternalExternalIpAddressesFromPeer().
* getTransportStats().
*
* @returns An Object containing the stats about the active audio peer.
*
@ -1145,28 +999,23 @@ class AudioManager {
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
*/
async getStats() {
async getStats(stats) {
if (!this.bridge) return null;
const peer = this.bridge.getPeerConnection();
try {
const processedStats = await this.bridge.getStats(stats);
if (!peer) return null;
const peerStats = await peer.getStats();
const audioStats = {};
peerStats.forEach((stat) => {
if (FILTER_AUDIO_STATS.includes(stat.type)) {
audioStats[stat.id] = stat;
}
});
const transportStats = await this.getInternalExternalIpAddresses(
audioStats
);
return { transportStats, ...audioStats };
return processedStats;
} catch (error) {
logger.debug({
logCode: 'audiomanager_get_stats_failed',
extraInfo: {
errorName: error.name,
errorMessage: error.message,
},
}, `Failed to get audio stats: ${error.message}`);
return null;
}
}
}

View File

@ -190,6 +190,349 @@ const monitorAudioConnection = conn => {
});
};
/**
* Calculates the jitter buffer average.
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay
* @param {Object} inboundRtpData The RTCInboundRtpStreamStats object retrieved
* in getStats() call.
* @returns The jitter buffer average in ms
*/
const calculateJitterBufferAverage = (inboundRtpData) => {
if (!inboundRtpData) return 0;
const {
jitterBufferDelay,
jitterBufferEmittedCount,
} = inboundRtpData;
if (!jitterBufferDelay || !jitterBufferEmittedCount) return '--';
return Math.round((jitterBufferDelay / jitterBufferEmittedCount) * 1000);
};
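// Worked example with made-up numbers: the spec's cumulative jitterBufferDelay (seconds)
// divided by jitterBufferEmittedCount gives the per-sample delay, converted to ms here.
const exampleJitterAverage = calculateJitterBufferAverage({
  jitterBufferDelay: 4.5, // 4.5 s accumulated across all emitted samples
  jitterBufferEmittedCount: 900,
}); // Math.round((4.5 / 900) * 1000) === 5 (ms)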
/**
* Given the data returned from getStats(), returns an array containing all
* the stats of the given type.
* For more information see:
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsType
* @param {Object} data - RTCStatsReport object returned from getStats() API
* @param {String} type - The string type corresponding to RTCStatsType object
* @returns {Array[Object]} An array containing all occurrences of the given
* type in the data Object.
*/
const getDataType = (data, type) => {
if (!data || typeof data !== 'object' || !type) return [];
return Object.values(data).filter((stat) => stat.type === type);
};
/**
* Returns a new Object containing extra parameters calculated from inbound
* data. The input data is also appended in the returned Object.
* @param {Object} currentData - The object returned from getStats / service's
* getNetworkData()
* @returns {Object} the currentData object with the extra inbound network
* parameters added to it.
*/
const addExtraInboundNetworkParameters = (data) => {
if (!data) return data;
const inboundRtpData = getDataType(data, 'inbound-rtp')[0];
if (!inboundRtpData) return data;
const extraParameters = {
jitterBufferAverage: calculateJitterBufferAverage(inboundRtpData),
packetsLost: inboundRtpData.packetsLost,
};
return Object.assign(inboundRtpData, extraParameters);
};
/**
* Get the info about candidate-pair that is being used by the current peer.
* For Firefox, or any other browser that doesn't support the iceTransport
* property of RTCDtlsTransport, we retrieve the selected local candidate
* by looking into the stats returned from the getStats() API. For other browsers,
* we should use getSelectedCandidatePairFromPeer instead, because it has
* relatedAddress and relatedPort information about the local candidate.
*
* @param {Object} stats - The object returned by the getStats() API
* @returns An Object of type RTCIceCandidatePairStats containing information
* about the candidate-pair being used by the peer.
*
* For Firefox, we can use the 'selected' flag to find the candidate pair
* being used, while in Chrome we can retrieve the selected pair
* by looking for the corresponding transport of the active peer.
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/selected#value
*/
const getSelectedCandidatePairFromStats = (stats) => {
if (!stats || typeof stats !== 'object') return null;
const transport = Object.values(stats).find((stat) => stat.type === 'transport') || {};
return Object.values(stats).find((stat) => stat.type === 'candidate-pair'
&& stat.nominated
&& (stat.selected || stat.id === transport.selectedCandidatePairId));
};
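// Illustration with mock ids: in Chromium-based browsers candidate-pair entries carry no
// 'selected' flag, so the pair is matched through the transport's selectedCandidatePairId.
const examplePairLookupStats = {
  T01: { id: 'T01', type: 'transport', selectedCandidatePairId: 'CP01' },
  CP01: { id: 'CP01', type: 'candidate-pair', nominated: true, localCandidateId: 'LC01', remoteCandidateId: 'RC01' },
  CP02: { id: 'CP02', type: 'candidate-pair', nominated: false },
};
const exampleSelectedPair = getSelectedCandidatePairFromStats(examplePairLookupStats); // -> the CP01 entry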
/**
* Get the info about candidate-pair that is being used by the current peer.
* This function's return value (an RTCIceCandidatePair object) is different
* from getSelectedCandidatePairFromStats (an RTCIceCandidatePairStats object).
* The information returned here contains the relatedAddress and relatedPort
* fields (only for candidates that are derived from another candidate; for
* host candidates, these fields are null). These fields can be helpful for
* debugging network issues. For all browsers that support the iceTransport
* field of RTCDtlsTransport, we use this function by default to retrieve
* information about the current selected pair. For other browsers we retrieve
* it from getSelectedCandidatePairFromStats.
*
* @returns {Object} An RTCIceCandidatePair representing the selected
* candidate-pair of the active peer.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidatepair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCDtlsTransport
*/
const getSelectedCandidatePairFromPeer = (peer) => {
if (!peer) return null;
let selectedPair = null;
const receivers = peer.getReceivers();
if (receivers
&& receivers[0]
&& receivers[0]?.transport?.iceTransport
&& typeof receivers[0].transport.iceTransport.getSelectedCandidatePair === 'function') {
selectedPair = receivers[0].transport.iceTransport.getSelectedCandidatePair();
}
return selectedPair;
};
/**
* Gets the selected candidates (local and remote) information.
* For browsers that support iceTransport property (see
* getSelectedCandidatePairFromPeer) we get this info from peer, otherwise
* we retrieve this information from the getStats() API.
*
* @param {Object} An object {peer?, stats?} containing the peer connection
* object and/or the stats
* @returns {Object} An Object { local, remote } containing information about
* the selected candidates. For browsers that support the
* iceTransport property, the attributes are RTCIceCandidate objects;
* RTCIceCandidateStats objects are returned otherwise.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidate
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
*
*/
const getSelectedCandidates = ({ peer, stats }) => {
let selectedPair = getSelectedCandidatePairFromPeer(peer);
if (selectedPair) return selectedPair;
if (!stats) return null;
selectedPair = getSelectedCandidatePairFromStats(stats);
if (selectedPair) {
return {
local: stats[selectedPair?.localCandidateId],
remote: stats[selectedPair?.remoteCandidateId],
};
}
return null;
};
/**
* Gets the information about private/public IP addresses from the peer's
* stats. The information is retrieved from the selected pair of the current
* RTCIceTransport and returned in a new Object with the format:
* {
* isUsingTurn: Boolean,
* address: String,
* relatedAddress: String,
* port: Number,
* relatedPort: Number,
* protocol: String,
* candidateType: String,
* ufrag: String,
* remoteAddress: String,
* remotePort: Number,
* remoteCandidateType: String,
* remoteProtocol: String,
* remoteUfrag: String,
* dtlsRole: String,
* dtlsState: String,
* iceRole: String,
* iceState: String,
* selectedCandidatePairChanges: Number
* relayProtocol: String
* }
*
* If the user isn't behind NAT, relatedAddress and relatedPort may be null.
*
* @returns An Object containing the information about the peer's transport.
*
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
* and
* https://www.w3.org/TR/webrtc/#rtcicecandidatetype-enum
*/
const getTransportStats = async (peer, stats) => {
let transports = {};
if (stats) {
const selectedCandidates = getSelectedCandidates({ peer, stats }) || {};
const {
local: selectedLocalCandidate = {},
remote: selectedRemoteCandidate = {},
} = selectedCandidates;
const candidateType = selectedLocalCandidate?.candidateType || selectedLocalCandidate?.type;
const remoteCandidateType = selectedRemoteCandidate?.candidateType
|| selectedRemoteCandidate?.type;
const isUsingTurn = candidateType ? candidateType === 'relay' : null;
// 1 transport per peer connection - we can safely get the first one
const transportData = getDataType(stats, 'transport')[0];
transports = {
isUsingTurn,
address: selectedLocalCandidate?.address,
relatedAddress: selectedLocalCandidate?.relatedAddress,
port: selectedLocalCandidate?.port,
relatedPort: selectedLocalCandidate?.relatedPort,
protocol: selectedLocalCandidate?.protocol,
candidateType,
ufrag: selectedLocalCandidate?.usernameFragment,
remoteAddress: selectedRemoteCandidate?.address,
remotePort: selectedRemoteCandidate?.port,
remoteCandidateType,
remoteProtocol: selectedRemoteCandidate?.protocol,
remoteUfrag: selectedRemoteCandidate?.usernameFragment,
dtlsRole: transportData?.dtlsRole,
dtlsState: transportData?.dtlsState,
iceRole: transportData?.iceRole,
iceState: transportData?.iceState,
selectedCandidatePairChanges: transportData?.selectedCandidatePairChanges,
};
if (isUsingTurn) transports.relayProtocol = selectedLocalCandidate.relayProtocol;
}
return transports;
};
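// Illustration of the stats-only path (no peer available), using mock ids and
// documentation-range addresses; a relayed local candidate marks the session as TURN.
const exampleTransportInput = {
  T01: { id: 'T01', type: 'transport', selectedCandidatePairId: 'CP01', dtlsState: 'connected', iceState: 'connected' },
  CP01: { id: 'CP01', type: 'candidate-pair', nominated: true, localCandidateId: 'LC01', remoteCandidateId: 'RC01' },
  LC01: { id: 'LC01', type: 'local-candidate', candidateType: 'relay', address: '203.0.113.10', port: 3478, protocol: 'udp', relayProtocol: 'tls' },
  RC01: { id: 'RC01', type: 'remote-candidate', candidateType: 'host', address: '198.51.100.7', port: 40000, protocol: 'udp' },
};
getTransportStats(null, exampleTransportInput)
  .then((t) => console.log(t.isUsingTurn, t.relayProtocol, t.remoteAddress)); // true 'tls' '198.51.100.7'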
const buildInboundRtpData = (inbound) => {
if (!inbound) return {};
const inboundRtp = {
kind: inbound.kind,
jitterBufferAverage: inbound.jitterBufferAverage,
lastPacketReceivedTimestamp: inbound.lastPacketReceivedTimestamp,
packetsLost: inbound.packetsLost,
packetsReceived: inbound.packetsReceived,
packetsDiscarded: inbound.packetsDiscarded,
};
if (inbound.kind === 'audio') {
inboundRtp.totalAudioEnergy = inbound.totalAudioEnergy;
} else if (inbound.kind === 'video') {
inboundRtp.framesDecoded = inbound.framesDecoded;
inboundRtp.framesDropped = inbound.framesDropped;
inboundRtp.framesReceived = inbound.framesReceived;
inboundRtp.hugeFramesSent = inbound.hugeFramesSent;
inboundRtp.keyFramesDecoded = inbound.keyFramesDecoded;
inboundRtp.keyFramesReceived = inbound.keyFramesReceived;
inboundRtp.totalDecodeTime = inbound.totalDecodeTime;
inboundRtp.totalInterFrameDelay = inbound.totalInterFrameDelay;
inboundRtp.totalSquaredInterFrameDelay = inbound.totalSquaredInterFrameDelay;
}
return inboundRtp;
};
const buildOutboundRtpData = (outbound) => {
if (!outbound) return {};
const outboundRtp = {
kind: outbound.kind,
packetsSent: outbound.packetsSent,
nackCount: outbound.nackCount,
targetBitrate: outbound.targetBitrate,
totalPacketSendDelay: outbound.totalPacketSendDelay,
};
if (outbound.kind === 'audio') {
outboundRtp.totalAudioEnergy = outbound.totalAudioEnergy;
} else if (outbound.kind === 'video') {
outboundRtp.framesEncoded = outbound.framesEncoded;
outboundRtp.framesSent = outbound.framesSent;
outboundRtp.hugeFramesSent = outbound.hugeFramesSent;
outboundRtp.keyFramesEncoded = outbound.keyFramesEncoded;
outboundRtp.totalEncodeTime = outbound.totalEncodeTime;
outboundRtp.totalPacketSendDelay = outbound.totalPacketSendDelay;
outboundRtp.firCount = outbound.firCount;
outboundRtp.pliCount = outbound.pliCount;
outboundRtp.nackCount = outbound.nackCount;
outboundRtp.qpsFE = outbound.qpSum / outbound.framesEncoded;
}
return outboundRtp;
};
const getRTCStatsLogMetadata = (stats) => {
if (!stats) return {};
const { transportStats = {} } = stats;
addExtraInboundNetworkParameters(stats);
const selectedPair = getSelectedCandidatePairFromStats(stats);
const inbound = getDataType(stats, 'inbound-rtp')[0];
const outbound = getDataType(stats, 'outbound-rtp')[0];
return {
inboundRtp: buildInboundRtpData(inbound),
outbound: buildOutboundRtpData(outbound),
selectedPair: {
state: selectedPair?.state,
nominated: selectedPair?.nominated,
totalRoundTripTime: selectedPair?.totalRoundTripTime,
requestsSent: selectedPair?.requestsSent,
responsesReceived: selectedPair?.responsesReceived,
availableOutgoingBitrate: selectedPair?.availableOutgoingBitrate,
availableIncomingBitrate: selectedPair?.availableIncomingBitrate,
lastPacketSentTimestamp: selectedPair?.lastPacketSentTimestamp,
lastPacketReceivedTimestamp: selectedPair?.lastPacketReceivedTimestamp,
},
transport: transportStats,
};
};
export {
addExtraInboundNetworkParameters,
calculateJitterBufferAverage,
getDataType,
getTransportStats,
getSelectedCandidates,
getSelectedCandidatePairFromPeer,
getSelectedCandidatePairFromStats,
getRTCStatsLogMetadata,
monitorAudioConnection,
};
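// For illustration only (not exported): how a consumer might combine the helpers above to
// build the condensed "stats" payload that the bridges attach to client log entries.
// "peer" is assumed to be a connected RTCPeerConnection.
const buildMediaStatsLogPayloadSketch = async (peer) => {
  const report = await peer.getStats();
  const filtered = {};
  report.forEach((stat) => { filtered[stat.id] = stat; }); // keyed by id, as the bridges do
  const transportStats = await getTransportStats(peer, filtered);
  return getRTCStatsLogMetadata({ transportStats, ...filtered });
};
// e.g. logger.info({ logCode: 'media_stats', extraInfo: { stats: payload } }, 'Media stats');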

View File

@ -789,7 +789,16 @@ public:
enabled: true
interval: 10000
timeout: 30000
# Server-side stats event logging. Every time a connection status event update
# is received by Meteor, it'll be logged *by the server*.
log: true
# Client-side WebRTC stats logging. If enabled, the client will log periodic
# WebRTC stats information to the configured logging transports.
# Disabled by default. This log is verbose and may incur additional network
# usage if external logging targets are configured.
logMediaStats:
enabled: false
interval: 30000
notification:
warning: false
error: true