Merge pull request #13104 from antobinary/merge-aug30

chore: Merge 2.3.13 into develop
This commit is contained in:
Anton Georgiev 2021-08-30 14:14:29 -04:00 committed by GitHub
commit bc8b3ff691
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 1040 additions and 58 deletions

View File

@ -68,7 +68,9 @@ object Polls {
stoppedPoll match {
case None => {
for {
curPoll <- getRunningPollThatStartsWith("public", lm.polls)
// Assuming there's only one running poll at a time, fallback to the
// current running poll without indexing by a presentation page.
curPoll <- getRunningPoll(lm.polls)
} yield {
stopPoll(curPoll.id, lm.polls)
curPoll.id
@ -307,6 +309,12 @@ object Polls {
shape.toMap
}
/** Returns the first currently-running poll, if any, converted to a PollVO. */
def getRunningPoll(polls: Polls): Option[PollVO] = {
  polls.polls.values.find(_.isRunning()).map(_.toPollVO())
}
def getRunningPollThatStartsWith(pollId: String, polls: Polls): Option[PollVO] = {
for {
poll <- polls.polls.values find { poll => poll.id.startsWith(pollId) && poll.isRunning() }

View File

@ -87,6 +87,8 @@ public class ParamsProcessorUtil {
private boolean defaultMuteOnStart = false;
private boolean defaultAllowModsToUnmuteUsers = false;
private boolean defaultKeepEvents = false;
private Boolean useDefaultLogo;
private String defaultLogoURL;
private boolean defaultBreakoutRoomsEnabled;
private boolean defaultBreakoutRoomsRecord;
@ -565,6 +567,8 @@ public class ParamsProcessorUtil {
if (!StringUtils.isEmpty(params.get(ApiParams.LOGO))) {
meeting.setCustomLogoURL(params.get(ApiParams.LOGO));
} else if (this.getUseDefaultLogo()) {
meeting.setCustomLogoURL(this.getDefaultLogoURL());
}
if (!StringUtils.isEmpty(params.get(ApiParams.COPYRIGHT))) {
@ -614,6 +618,14 @@ public class ParamsProcessorUtil {
return defaultGuestWaitURL;
}
/**
 * Whether meetings without an explicit "logo" API parameter should fall
 * back to the configured default logo URL (see defaultLogoURL).
 * @return the configured flag; may be null if it was never set.
 */
public Boolean getUseDefaultLogo() {
return useDefaultLogo;
}
/**
 * URL of the logo applied to a meeting when getUseDefaultLogo() is true
 * and no custom "logo" parameter was supplied on create.
 * @return the configured default logo URL.
 */
public String getDefaultLogoURL() {
return defaultLogoURL;
}
/**
 * Whether API requests lacking an established session are accepted.
 * @return the configured flag.
 */
public Boolean getAllowRequestsWithoutSession() {
return allowRequestsWithoutSession;
}
@ -902,6 +914,14 @@ public class ParamsProcessorUtil {
this.defaultGuestWaitURL = url;
}
/**
 * Sets whether meetings should fall back to the default logo when no
 * custom "logo" API parameter is provided.
 * @param value the flag value (typically injected from configuration).
 */
public void setUseDefaultLogo(Boolean value) {
this.useDefaultLogo = value;
}
/**
 * Sets the default logo URL used when useDefaultLogo is enabled.
 * @param url the URL value (typically injected from configuration).
 */
public void setDefaultLogoURL(String url) {
this.defaultLogoURL = url;
}
/**
 * Sets whether API requests lacking an established session are accepted.
 * @param allowRequestsWithoutSession the flag value.
 */
public void setAllowRequestsWithoutSession(Boolean allowRequestsWithoutSession) {
this.allowRequestsWithoutSession = allowRequestsWithoutSession;
}

View File

@ -1,4 +1,4 @@
FROM openjdk:11-jre
FROM openjdk:11-jre-buster
ENV DEBIAN_FRONTEND noninteractive

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

View File

@ -69,6 +69,8 @@ export default class KurentoAudioBridge extends BaseAudioBridge {
}
getPeerConnection() {
if (!this.broker) return null;
const webRtcPeer = this.broker.webRtcPeer;
if (webRtcPeer) return webRtcPeer.peerConnection;
return null;

View File

@ -1424,6 +1424,8 @@ export default class SIPBridge extends BaseAudioBridge {
}
getPeerConnection() {
if (!this.activeSession) return null;
const { currentSession } = this.activeSession;
if (currentSession && currentSession.sessionDescriptionHandler) {
return currentSession.sessionDescriptionHandler.peerConnection;

View File

@ -54,6 +54,26 @@ export default class KurentoScreenshareBridge {
this._gdmStream = stream;
}
/**
* Get the RTCPeerConnection object related to the screensharing stream.
* @returns {Object} The RTCPeerConnection object related to the presenter/
* viewer peer. If there's no stream being shared, returns
* null.
*/
getPeerConnection() {
try {
let peerConnection = null;
if (this.broker && this.broker.webRtcPeer) {
peerConnection = this.broker.webRtcPeer.peerConnection;
}
return peerConnection;
} catch (error) {
return null;
}
}
outboundStreamReconnect() {
const currentRestartIntervalMs = this.restartIntervalMs;
const stream = this.gdmStream;

View File

@ -1,10 +1,12 @@
@import "/imports/ui/stylesheets/variables/_all";
@import '/imports/ui/stylesheets/variables/_all';
@import '/imports/ui/stylesheets/mixins/_indicators';
:root {
--navbar-height: 3.9375rem; // TODO: Change to NavBar real height
--actionsbar-height: 75px; // TODO: Change to ActionsBar real height
--bars-padding: calc(var(--lg-padding-x) - .45rem); // -.45 so user-list and chat title is aligned with the presentation title
--bars-padding: calc(
var(--lg-padding-x) - 0.45rem
); // -.45 so user-list and chat title is aligned with the presentation title
--userlist-handle-width: 5px; // 5px so user-list and chat resize handle render as the same size
--poll-pane-min-width: 20em;
--panel-margin-left: 0.1em;
@ -15,9 +17,9 @@
position: relative;
}
.layout{
.layout {
@extend %flex-column;
background-color: #06172A;
background-color: #06172a;
}
.navbar {
@ -59,7 +61,8 @@
}
}
.content, .noPanelContent {
.content,
.noPanelContent {
@extend %full-page;
order: 3;
@ -74,7 +77,7 @@
pointer-events: none;
:global(.animationsEnabled) & {
transition: opacity .3s;
transition: opacity 0.3s;
}
}
@ -92,11 +95,11 @@
}
}
.content{
margin:0 0 0 var(--panel-margin-left);
.content {
margin: 0 0 0 var(--panel-margin-left);
[dir="rtl"] & {
margin:0 var(--panel-margin-left) 0 0;
[dir='rtl'] & {
margin: 0 var(--panel-margin-left) 0 0;
}
}
@ -201,7 +204,7 @@
overflow: hidden;
z-index: 0;
[dir="rtl"] & {
[dir='rtl'] & {
margin: 0 var(--panel-margin-right) 0 0;
}
@ -213,7 +216,8 @@
.captionsWrapper {
height: auto;
bottom: 100px;
z-index: 2;
left: 20%;
z-index: 5;
}
.actionsbar {

View File

@ -136,4 +136,5 @@ export default {
.setBreakoutAudioTransferStatus(status),
getBreakoutAudioTransferStatus: () => AudioManager
.getBreakoutAudioTransferStatus(),
getStats: () => AudioManager.getStats(),
};

View File

@ -66,7 +66,7 @@ class BreakoutJoinConfirmation extends Component {
this.state = {
selectValue: props.breakout.breakoutId,
waiting: false,
waiting: true,
};
this.handleJoinBreakoutConfirmation = this.handleJoinBreakoutConfirmation.bind(this);
@ -77,19 +77,23 @@ class BreakoutJoinConfirmation extends Component {
componentDidMount() {
const {
isFreeJoin,
requestJoinURL,
getURL,
} = this.props;
const {
selectValue,
} = this.state;
if (isFreeJoin && !getURL(selectValue)) {
requestJoinURL(selectValue);
if (isFreeJoin) {
this.fetchJoinURL(selectValue);
} else {
this.setState({ waiting: false });
}
}
componentWillUnmount() {
// Stop the join-URL polling loop started by fetchJoinURL. `interval` is
// presumably a module-scoped handle (declared outside this view — TODO
// confirm); clearing it prevents setState after unmount.
if (interval) clearInterval(interval);
}
handleJoinBreakoutConfirmation() {
const {
getURL,
@ -129,37 +133,42 @@ class BreakoutJoinConfirmation extends Component {
mountModal(null);
}
async handleSelectChange(e) {
const { value } = e.target;
async fetchJoinURL(selectValue) {
const {
requestJoinURL,
getURL,
} = this.props;
this.setState({ selectValue: value });
if (!getURL(value)) {
requestJoinURL(value);
this.setState({ selectValue });
if (!getURL(selectValue)) {
requestJoinURL(selectValue);
this.setState({ waiting: true });
this.setState({
waiting: true,
})
await new Promise((resolve) => {
interval = setInterval(() => {
const temp = getURL(value);
if (temp !== "") {
const url = getURL(selectValue);
if (url !== "") {
resolve();
clearInterval(interval)
this.setState({
waiting: false,
})
clearInterval(interval);
this.setState({ waiting: false });
}
}, 1000)
})
} else {
this.setState({ waiting: false });
}
}
handleSelectChange(e) {
const { value } = e.target;
this.fetchJoinURL(value);
}
renderSelectMeeting() {
const { breakouts, intl } = this.props;
const { selectValue, waiting, } = this.state;

View File

@ -9,6 +9,8 @@ import Service from '../service';
import Modal from '/imports/ui/components/modal/simple/component';
import { styles } from './styles';
const NETWORK_MONITORING_INTERVAL_MS = 2000;
const intlMessages = defineMessages({
ariaTitle: {
id: 'app.connection-status.ariaTitle',
@ -30,6 +32,22 @@ const intlMessages = defineMessages({
id: 'app.connection-status.more',
description: 'More about conectivity issues',
},
audioLabel: {
id: 'app.settings.audioTab.label',
description: 'Audio label',
},
videoLabel: {
id: 'app.settings.videoTab.label',
description: 'Video label',
},
copy: {
id: 'app.connection-status.copy',
description: 'Copy network data',
},
copied: {
id: 'app.connection-status.copied',
description: 'Copied network data',
},
offline: {
id: 'app.connection-status.offline',
description: 'Offline user',
@ -54,6 +72,26 @@ const intlMessages = defineMessages({
id: 'app.switch.offLabel',
description: 'label for toggle switch off state',
},
no: {
id: 'app.connection-status.no',
description: 'No to is using turn',
},
yes: {
id: 'app.connection-status.yes',
description: 'Yes to is using turn',
},
usingTurn: {
id: 'app.connection-status.usingTurn',
description: 'User is using turn server',
},
jitter: {
id: 'app.connection-status.jitter',
description: 'Jitter buffer in ms',
},
lostPackets: {
id: 'app.connection-status.lostPackets',
description: 'Number of lost packets',
},
});
const propTypes = {
@ -80,9 +118,42 @@ class ConnectionStatusComponent extends PureComponent {
constructor(props) {
super(props);
const { intl } = this.props;
this.help = Service.getHelp();
this.state = { dataSaving: props.dataSaving };
this.state = {
dataSaving: props.dataSaving,
hasNetworkData: false,
networkData: {
user: {
},
audio: {
audioCurrentUploadRate: 0,
audioCurrentDownloadRate: 0,
jitter: 0,
packetsLost: 0,
transportStats: {},
},
video: {
videoCurrentUploadRate: 0,
videoCurrentDownloadRate: 0,
},
},
};
this.displaySettingsStatus = this.displaySettingsStatus.bind(this);
this.rateInterval = null;
this.audioLabel = (intl.formatMessage(intlMessages.audioLabel)).charAt(0);
this.videoLabel = (intl.formatMessage(intlMessages.videoLabel)).charAt(0);
}
async componentDidMount() {
// Begin polling WebRTC stats so the modal can show live network data.
this.startMonitoringNetwork();
}
componentWillUnmount() {
Meteor.clearInterval(this.rateInterval);
}
handleDataSavingChange(key) {
@ -91,6 +162,63 @@ class ConnectionStatusComponent extends PureComponent {
this.setState(dataSaving);
}
/**
 * Start monitoring the network data.
 * Takes an initial snapshot, then every NETWORK_MONITORING_INTERVAL_MS
 * computes audio/video rates as deltas against the previous snapshot and
 * stores the result in state (networkData / hasNetworkData).
 * @return {Promise} A Promise that resolves when the process started.
 */
async startMonitoringNetwork() {
// Baseline snapshot; rates are computed between consecutive snapshots.
let previousData = await Service.getNetworkData();
this.rateInterval = Meteor.setInterval(async () => {
const data = await Service.getNetworkData();
const {
outbound: audioCurrentUploadRate,
inbound: audioCurrentDownloadRate,
} = Service.calculateBitsPerSecond(data.audio, previousData.audio);
// Jitter / packet loss come from the audio inbound-rtp entry when present.
const jitter = data.audio['inbound-rtp']
? data.audio['inbound-rtp'].jitterBufferAverage
: 0;
const packetsLost = data.audio['inbound-rtp']
? data.audio['inbound-rtp'].packetsLost
: 0;
const audio = {
audioCurrentUploadRate,
audioCurrentDownloadRate,
jitter,
packetsLost,
transportStats: data.audio.transportStats,
};
// Video data aggregates multiple peers (cameras + screenshare).
const {
outbound: videoCurrentUploadRate,
inbound: videoCurrentDownloadRate,
} = Service.calculateBitsPerSecondFromMultipleData(data.video,
previousData.video);
const video = {
videoCurrentUploadRate,
videoCurrentDownloadRate,
};
const { user } = data;
const networkData = {
user,
audio,
video,
};
// The current snapshot becomes the baseline for the next tick.
previousData = data;
this.setState({
networkData,
hasNetworkData: true,
});
}, NETWORK_MONITORING_INTERVAL_MS);
}
renderEmpty() {
const { intl } = this.props;
@ -121,6 +249,35 @@ class ConnectionStatusComponent extends PureComponent {
);
}
/**
* Copy network data to clipboard
* @param {Object} e Event object from click event
* @return {Promise} A Promise that is resolved after data is copied.
*
*
*/
async copyNetworkData(e) {
const { intl } = this.props;
const {
networkData,
hasNetworkData,
} = this.state;
if (!hasNetworkData) return;
const { target: copyButton } = e;
copyButton.innerHTML = intl.formatMessage(intlMessages.copied);
const data = JSON.stringify(networkData, null, 2);
await navigator.clipboard.writeText(data);
this.copyNetworkDataTimeout = setTimeout(() => {
copyButton.innerHTML = intl.formatMessage(intlMessages.copy);
}, 1000);
}
renderConnections() {
const {
connectionStatus,
@ -250,6 +407,111 @@ class ConnectionStatusComponent extends PureComponent {
);
}
/**
* Render network data , containing information abount current upload and
* download rates
* @return {Object} The component to be renderized.
*/
renderNetworkData() {
const { enableNetworkStats } = Meteor.settings.public.app;
if (!enableNetworkStats) {
return null;
}
const {
audioLabel,
videoLabel,
} = this;
const { intl } = this.props;
const { networkData } = this.state;
const {
audioCurrentUploadRate,
audioCurrentDownloadRate,
jitter,
packetsLost,
transportStats,
} = networkData.audio;
const {
videoCurrentUploadRate,
videoCurrentDownloadRate,
} = networkData.video;
let isUsingTurn = '--';
if (transportStats) {
switch (transportStats.isUsingTurn) {
case true:
isUsingTurn = intl.formatMessage(intlMessages.yes);
break;
case false:
isUsingTurn = intl.formatMessage(intlMessages.no);
break;
default:
break;
}
}
return (
<div className={styles.networkDataContainer}>
<div className={styles.networkData}>
{`${audioLabel}: ${audioCurrentUploadRate} k`}
</div>
<div className={styles.networkData}>
{`${audioLabel}: ${audioCurrentDownloadRate} k`}
</div>
<div className={styles.networkData}>
{`${videoLabel}: ${videoCurrentUploadRate} k`}
</div>
<div className={styles.networkData}>
{`${videoLabel}: ${videoCurrentDownloadRate} k`}
</div>
<div className={styles.networkData}>
{`${intl.formatMessage(intlMessages.jitter)}: ${jitter} ms`}
</div>
<div className={styles.networkData}>
{`${intl.formatMessage(intlMessages.lostPackets)}: ${packetsLost}`}
</div>
<div className={styles.networkData}>
{`${intl.formatMessage(intlMessages.usingTurn)}: ${isUsingTurn}`}
</div>
</div>
);
}
/**
* Renders the clipboard's copy button, for network stats.
* @return {Object} - The component to be renderized
*/
renderCopyDataButton() {
const { enableCopyNetworkStatsButton } = Meteor.settings.public.app;
if (!enableCopyNetworkStatsButton) {
return null;
}
const { intl } = this.props;
const { hasNetworkData } = this.state;
return (
<div className={styles.copyContainer}>
<span
className={cx(styles.copy, !hasNetworkData ? styles.disabled : '')}
role="button"
onClick={this.copyNetworkData.bind(this)}
onKeyPress={this.copyNetworkData.bind(this)}
tabIndex={0}
>
{intl.formatMessage(intlMessages.copy)}
</span>
</div>
);
}
render() {
const {
closeModal,
@ -283,6 +545,8 @@ class ConnectionStatusComponent extends PureComponent {
)
}
</div>
{this.renderNetworkData()}
{this.renderCopyDataButton()}
{this.renderDataSaving()}
<div className={styles.content}>
<div className={styles.wrapper}>

View File

@ -42,7 +42,7 @@
.description {
text-align: center;
color: var(--color-gray);
margin-bottom: var(--jumbo-padding-y)
margin-bottom: var(--sm-padding-y)
}
.label {
@ -133,6 +133,38 @@
}
}
.networkDataContainer {
width: 100%;
display: flex;
background-color: var(--color-off-white);
}
.networkData {
float: left;
font-size: var(--font-size-small);
margin-left: var(--sm-padding-x);
}
.disabled {
cursor: not-allowed !important;
}
.copyContainer {
width: 100%;
}
.copy {
float: right;
text-decoration: underline;
cursor: pointer;
margin-right: var(--sm-padding-x);
[dir="rtl"] & {
margin-left: var(--sm-padding-x);
float: left;
}
}
.right {
display: flex;
width: 5rem;

View File

@ -8,6 +8,9 @@ import _ from 'lodash';
import { Session } from 'meteor/session';
import { notify } from '/imports/ui/services/notification';
import { makeCall } from '/imports/ui/services/api';
import AudioService from '/imports/ui/components/audio/service';
import VideoService from '/imports/ui/components/video-provider/service';
import ScreenshareService from '/imports/ui/components/screenshare/service';
const STATS = Meteor.settings.public.stats;
const NOTIFICATION = STATS.notification;
@ -305,6 +308,267 @@ const notification = (level, intl) => {
if (intl) notify(intl.formatMessage(intlMessages.notification), level, 'warning');
};
/**
 * Calculates the jitter buffer average, in milliseconds.
 * For more information see:
 * https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay
 * @param {Object} inboundRtpData The RTCInboundRtpStreamStats object
 *                                retrieved in a getStats() call.
 * @returns The jitter buffer average in ms, 0 when no data object is
 *          given, or '--' when the required fields are absent/zero.
 */
const calculateJitterBufferAverage = (inboundRtpData) => {
  if (!inboundRtpData) return 0;

  const { jitterBufferDelay, jitterBufferEmittedCount } = inboundRtpData;
  const hasSamples = Boolean(jitterBufferDelay) && Boolean(jitterBufferEmittedCount);

  return hasSamples
    ? Math.round((jitterBufferDelay / jitterBufferEmittedCount) * 1000)
    : '--';
};
/**
 * Given the data returned from getStats(), returns an array containing all
 * the stats entries of the given type.
 * For more information see:
 * https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
 * and
 * https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsType
 * @param {Object} data - RTCStatsReport object returned from getStats() API
 * @param {String} type - The string type corresponding to RTCStatsType object
 * @returns {Array[Object]} An array containing all occurrences of the given
 *                          type in the data Object (empty on bad input).
 */
const getDataType = (data, type) => {
  const isQueryable = Boolean(data) && typeof data === 'object' && Boolean(type);
  if (!isQueryable) return [];

  return Object.values(data).filter((stat) => stat.type === type);
};
/**
 * Computes extra parameters (jitterBufferAverage, packetsLost) from the
 * first 'inbound-rtp' entry of the given stats and merges them into that
 * entry IN PLACE.
 * NOTE(review): when an inbound-rtp entry exists, the return value is the
 * mutated inbound-rtp entry — not `data` itself. Callers in this file
 * ignore the return value and rely on the in-place mutation.
 * @param {Object} data - The object returned from getStats / service's
 *                        getNetworkData()
 * @returns {Object} `data` unchanged when there is no inbound-rtp entry,
 *                   otherwise the mutated inbound-rtp entry.
 */
const addExtraInboundNetworkParameters = (data) => {
  if (!data) return data;

  const [inboundRtpData] = getDataType(data, 'inbound-rtp');
  if (!inboundRtpData) return data;

  return Object.assign(inboundRtpData, {
    jitterBufferAverage: calculateJitterBufferAverage(inboundRtpData),
    packetsLost: inboundRtpData.packetsLost,
  });
};
/**
 * Retrieves the inbound and outbound data using WebRTC getStats API, for audio.
 * @returns An Object with format (property:type) :
 *   {
 *     transportStats: Object,
 *     inbound-rtp: RTCInboundRtpStreamStats,
 *     outbound-rtp: RTCOutboundRtpStreamStats,
 *   }
 * Returns an empty Object when no audio stats are available.
 * For more information see:
 * https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats
 * and
 * https://www.w3.org/TR/webrtc-stats/#dom-rtcoutboundrtpstreamstats
 */
const getAudioData = async () => {
const data = await AudioService.getStats();
if (!data) return {};
// Mutates data's inbound-rtp entry in place (return value is ignored).
addExtraInboundNetworkParameters(data);
return data;
};
/**
 * Retrieves the inbound and outbound data using WebRTC getStats API, for
 * video. The video stats contain the stats about all video peers (cameras)
 * and the screenshare peer merged into one single object, keyed by peer id.
 * @returns An Object containing video data for all video peers and the
 *          screenshare peer
 */
const getVideoData = async () => {
  const camerasData = (await VideoService.getStats()) || {};
  const screenshareData = (await ScreenshareService.getStats()) || {};
  // Screenshare entries are merged after (and may shadow) camera entries.
  return Object.assign({}, camerasData, screenshareData);
};
/**
 * Get the user, audio and video data from current active streams.
 * For audio, this will get information about the mic/listen-only stream.
 * @returns An Object of shape { user, audio, video } containing all this
 *          data.
 */
const getNetworkData = async () => {
const audio = await getAudioData();
const video = await getVideoData();
// Identification block attached to every sample so copied/exported stats
// can be traced back to a meeting/user (all fields come from Auth).
const user = {
time: new Date(),
username: Auth.username,
meeting_name: Auth.confname,
meeting_id: Auth.meetingID,
connection_id: Auth.connectionID,
user_id: Auth.userID,
extern_user_id: Auth.externUserID,
};
const fullData = {
user,
audio,
video,
};
return fullData;
};
/**
 * Calculates both upload and download rates using data retrieved from the
 * getStats API. For upload (outbound-rtp) we use both bytesSent and
 * timestamp fields. bytesSent contains the number of octets sent at the
 * given timestamp; more information can be found in:
 * https://www.w3.org/TR/webrtc-stats/#dom-rtcsentrtpstreamstats-bytessent
 *
 * timestamp is given in milliseconds; more information can be found in:
 * https://www.w3.org/TR/webrtc-stats/#webidl-1049090475
 * @param {Object} currentData - The object returned from getStats / service's
 *                               getNetworkData()
 * @param {Object} previousData - The same object as above, but representing
 *                                data collected in the past (previous call
 *                                of service's getNetworkData())
 * @returns An object of numbers, containing both outbound (upload) and
 *          inbound (download) rates (kbps).
 */
const calculateBitsPerSecond = (currentData, previousData) => {
  const result = {
    outbound: 0,
    inbound: 0,
  };

  if (!currentData || !previousData) return result;

  // kbps between two samples of the same stream: payload + header byte
  // delta over the timestamp delta (ms), converted bytes/ms -> kbits/s.
  // Missing header-byte fields are treated as 0, as in older stats dumps.
  const rateKbps = (current, previous, bytesField, headerField) => {
    const bytes = current[bytesField] + (current[headerField] || 0);
    const previousBytes = previous[bytesField] + (previous[headerField] || 0);
    const bytesPerMs = (bytes - previousBytes)
      / (current.timestamp - previous.timestamp);
    return Math.round((bytesPerMs * 8 * 1000) / 1024);
  };

  const [currentOutbound] = getDataType(currentData, 'outbound-rtp');
  const [previousOutbound] = getDataType(previousData, 'outbound-rtp');
  if (currentOutbound && previousOutbound) {
    result.outbound = rateKbps(currentOutbound, previousOutbound,
      'bytesSent', 'headerBytesSent');
  }

  const [currentInbound] = getDataType(currentData, 'inbound-rtp');
  const [previousInbound] = getDataType(previousData, 'inbound-rtp');
  if (currentInbound && previousInbound) {
    result.inbound = rateKbps(currentInbound, previousInbound,
      'bytesReceived', 'headerBytesReceived');
  }

  return result;
};
/**
 * Similar to calculateBitsPerSecond, but receives stats from multiple
 * peers. The total inbound/outbound is the sum over all peers present in
 * both the current and previous samples.
 * @param {Object} currentData - The Object returned from
 *                               getStats / service's getNetworkData()
 * @param {Object} previousData - The same object as above, but
 *                                representing data collected in the past
 *                                (previous call of service's
 *                                getNetworkData())
 */
const calculateBitsPerSecondFromMultipleData = (currentData, previousData) => {
  const totals = {
    outbound: 0,
    inbound: 0,
  };

  if (!currentData || !previousData) return totals;

  Object.entries(currentData).forEach(([peerId, peerData]) => {
    const previousPeerData = previousData[peerId];
    if (!previousPeerData) return; // peer wasn't present in the last sample

    const rates = calculateBitsPerSecond(peerData, previousPeerData);
    totals.outbound += rates.outbound;
    totals.inbound += rates.inbound;
  });

  return totals;
};
export default {
getConnectionStatus,
getStats,
@ -314,4 +578,7 @@ export default {
startRoundTripTime,
stopRoundTripTime,
updateDataSavingSettings,
getNetworkData,
calculateBitsPerSecond,
calculateBitsPerSecondFromMultipleData,
};

View File

@ -13,6 +13,14 @@ import MediaStreamUtils from '/imports/utils/media-stream-utils';
const SCREENSHARE_MEDIA_ELEMENT_NAME = 'screenshareVideo';
/**
* Screenshare status to be filtered in getStats()
*/
const FILTER_SCREENSHARE_STATS = [
'outbound-rtp',
'inbound-rtp',
];
let _isSharingScreen = false;
const _sharingScreenDep = {
value: false,
@ -135,6 +143,41 @@ const screenShareEndAlert = () => AudioService
const dataSavingSetting = () => Settings.dataSaving.viewScreenshare;
/**
 * Get stats about the active screenshare peer, reduced to the entry types
 * listed in FILTER_SCREENSHARE_STATS.
 *
 * For more information see:
 * https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
 * and
 * https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
 * @returns An Object containing the information about the active peer
 *          (currently one, for screenshare), or null when there is no
 *          active peer connection. The returned format follows the format
 *          returned by video's service getStats, which keys each report by
 *          a peer identifier — here the fixed key 'screenshareStats'.
 */
const getStats = async () => {
  const peer = KurentoBridge.getPeerConnection();
  if (!peer) return null;

  const screenshareStats = {};
  const report = await peer.getStats();
  report.forEach((stat) => {
    const isRelevant = FILTER_SCREENSHARE_STATS.includes(stat.type);
    if (isRelevant) screenshareStats[stat.type] = stat;
  });

  return { screenshareStats };
};
export {
SCREENSHARE_MEDIA_ELEMENT_NAME,
isVideoBroadcasting,
@ -148,4 +191,5 @@ export {
getMediaElement,
attachLocalPreviewStream,
isGloballyBroadcasting,
getStats,
};

View File

@ -1,15 +1,17 @@
.separator {
height: 1px;
background-color: var(--color-gray-lighter);
margin-top: calc(var(--line-height-computed) * .5);
margin-bottom: calc(var(--line-height-computed) * .5);
}
.branding {
padding: 0 var(--sm-padding-x);
padding: var(--sm-padding-x);
width: 100%;
& > img {
max-height: 2rem;
max-width: 100%;
display: block;
margin-left: auto;
margin-right: auto;
}
}

View File

@ -123,7 +123,7 @@ class VideoProvider extends Component {
this.wsQueue = [];
this.restartTimeout = {};
this.restartTimer = {};
this.webRtcPeers = {};
this.webRtcPeers = VideoService.getWebRtcPeers();
this.outboundIceQueues = {};
this.videoTags = {};

View File

@ -42,6 +42,11 @@ const {
defaultSorting: DEFAULT_SORTING,
} = Meteor.settings.public.kurento.cameraSortingModes;
const FILTER_VIDEO_STATS = [
'outbound-rtp',
'inbound-rtp',
];
const TOKEN = '_';
class VideoService {
@ -71,6 +76,7 @@ class VideoService {
}
this.updateNumberOfDevices();
}
this.webRtcPeers = {};
}
defineProperties(obj) {
@ -821,6 +827,79 @@ class VideoService {
if (this.deviceId == null) return;
return VideoPreviewService.getStream(this.deviceId);
}
/**
 * Getter for the webRtcPeers hash, which stores a reference for all
 * RTCPeerConnection wrapper objects.
 * @returns {Object} the live hash (not a copy) of peer references.
 */
getWebRtcPeers() {
return this.webRtcPeers;
}
/**
* Get all active video peers.
* @returns An Object containing the reference for all active peers peers
*/
getActivePeers() {
const videoData = this.getVideoStreams();
if (!videoData) return null;
const { streams: activeVideoStreams } = videoData;
if (!activeVideoStreams) return null;
const peers = this.getWebRtcPeers();
const activePeers = {};
activeVideoStreams.forEach((stream) => {
if (peers[stream.stream]) {
activePeers[stream.stream] = peers[stream.stream].peerConnection;
}
});
return activePeers;
}
/**
* Get stats about all active video peer.
* We filter the status based on FILTER_VIDEO_STATS constant.
*
* For more information see:
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
* @returns An Object containing the information about each active peer.
* The returned object follows the format:
* {
* peerId: RTCStatsReport
* }
*/
async getStats() {
const peers = this.getActivePeers();
if (!peers) return null;
const stats = {};
await Promise.all(
Object.keys(peers).map(async (peerId) => {
const peerStats = await peers[peerId].getStats();
const videoStats = {};
peerStats.forEach((stat) => {
if (FILTER_VIDEO_STATS.includes(stat.type)) {
videoStats[stat.type] = stat;
}
});
stats[peerId] = videoStats;
})
);
return stats;
}
}
const videoService = new VideoService();
@ -863,4 +942,6 @@ export default {
getUsersIdFromVideoStreams: () => videoService.getUsersIdFromVideoStreams(),
shouldRenderPaginationToggle: () => videoService.shouldRenderPaginationToggle(),
getPreloadedStream: () => videoService.getPreloadedStream(),
getWebRtcPeers: () => videoService.getWebRtcPeers(),
getStats: () => videoService.getStats(),
};

View File

@ -38,6 +38,17 @@ const BREAKOUT_AUDIO_TRANSFER_STATES = {
RETURNING: 'returning',
};
/**
* Audio status to be filtered in getStats()
*/
const FILTER_AUDIO_STATS = [
'outbound-rtp',
'inbound-rtp',
'candidate-pair',
'local-candidate',
'transport',
];
class AudioManager {
constructor() {
this._inputDevice = {
@ -749,6 +760,209 @@ class AudioManager {
async updateAudioConstraints(constraints) {
await this.bridge.updateAudioConstraints(constraints);
}
/**
* Helper for retrieving the current bridge being used by audio.
* @returns An Object representing the current bridge.
*/
getCurrentBridge() {
return this.isListenOnly ? this.listenOnlyBridge : this.bridge;
}
/**
* Get the info about candidate-pair that is being used by the current peer.
* For firefox, or any other browser that doesn't support iceTransport
* property of RTCDtlsTransport, we retrieve the selected local candidate
* by looking into stats returned from getStats() api. For other browsers,
* we should use getSelectedCandidatePairFromPeer instead, because it has
* relatedAddress and relatedPort information about local candidate.
*
* @param {Object} stats object returned by getStats() api
* @returns An Object of type RTCIceCandidatePairStats containing information
* about the candidate-pair being used by the peer.
*
* For firefox, we can use the 'selected' flag to find the candidate pair
* being used, while in chrome we can retrieved the selected pair
* by looking for the corresponding transport of the active peer.
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePairStats/selected#value
*/
static getSelectedCandidatePairFromStats(stats) {
if (!stats || typeof stats !== 'object') return null;
const transport = Object.values(stats).find((stat) => stat.type
=== 'transport') || {};
return Object.values(stats).find((stat) => stat.type === 'candidate-pair'
&& stat.nominated && (stat.selected
|| stat.id === transport.selectedCandidatePairId));
}
/**
* Get the info about candidate-pair that is being used by the current peer.
* This function's return value (RTCIceCandidatePair object ) is different
* from getSelectedCandidatePairFromStats (RTCIceCandidatePairStats object).
* The information returned here contains the relatedAddress and relatedPort
* fields (only for candidates that are derived from another candidate, for
* host candidates, these fields are null). These field can be helpful for
* debugging network issues. For all the browsers that support iceTransport
* field of RTCDtlsTransport, we use this function as default to retrieve
* information about current selected-pair. For other browsers we retrieve it
* from getSelectedCandidatePairFromStats
*
* @returns {Object} An RTCIceCandidatePair represented the selected
* candidate-pair of the active peer.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidatepair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidatePair
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCDtlsTransport
*/
getSelectedCandidatePairFromPeer() {
const bridge = this.getCurrentBridge();
if (!bridge) return null;
const peer = bridge.getPeerConnection();
if (!peer) return null;
let selectedPair = null;
const receivers = peer.getReceivers();
if (receivers && receivers[0] && receivers[0].transport
&& receivers[0].transport.iceTransport
&& receivers[0].transport.iceTransport) {
selectedPair = receivers[0].transport.iceTransport
.getSelectedCandidatePair();
}
return selectedPair;
}
/**
* Gets the selected local-candidate information. For browsers that support
* iceTransport property (see getSelectedCandidatePairFromPeer) we get this
* info from peer, otherwise we retrieve this information from getStats() api
*
* @param {Object} [stats] The status object returned from getStats() api
* @returns {Object} An Object containing the information about the
* local-candidate. For browsers that support iceTransport
* property, the object's type is RCIceCandidate. A
* RTCIceCandidateStats is returned, otherwise.
*
* For more info see:
* https://www.w3.org/TR/webrtc/#dom-rtcicecandidate
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
*
*/
getSelectedLocalCandidate(stats) {
let selectedPair = this.getSelectedCandidatePairFromPeer();
if (selectedPair) return selectedPair.local;
if (!stats) return null;
selectedPair = AudioManager.getSelectedCandidatePairFromStats(stats);
if (selectedPair) return stats[selectedPair.localCandidateId];
return null;
}
/**
* Gets the information about private/public ip address from peer
* stats. The information retrieved from selected pair from the current
* RTCIceTransport and returned in a new Object with format:
* {
* address: String,
* relatedAddress: String,
* port: Number,
* relatedPort: Number,
* candidateType: String,
* selectedLocalCandidate: Object,
* }
*
* If users isn't behind NAT, relatedAddress and relatedPort may be null.
*
* @returns An Object containing the information about private/public IP
* addresses and ports.
*
* For more information see:
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatepairstats
* and
* https://www.w3.org/TR/webrtc-stats/#dom-rtcicecandidatestats
* and
* https://www.w3.org/TR/webrtc/#rtcicecandidatetype-enum
*/
async getInternalExternalIpAddresses(stats) {
let transports = {};
if (stats) {
const selectedLocalCandidate = this.getSelectedLocalCandidate(stats);
if (!selectedLocalCandidate) return transports;
const candidateType = selectedLocalCandidate.candidateType
|| selectedLocalCandidate.type;
transports = {
isUsingTurn: (candidateType === 'relay'),
address: selectedLocalCandidate.address,
relatedAddress: selectedLocalCandidate.relatedAddress,
port: selectedLocalCandidate.port,
relatedPort: selectedLocalCandidate.relatedPort,
candidateType,
selectedLocalCandidate,
};
}
return transports;
}
/**
* Get stats about active audio peer.
* We filter the status based on FILTER_AUDIO_STATS constant.
* We also append to the returned object the information about peer's
* transport. This transport information is retrieved by
* getInternalExternalIpAddressesFromPeer().
*
* @returns An Object containing the status about the active audio peer.
*
* For more information see:
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats
* and
* https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport
*/
async getStats() {
const bridge = this.getCurrentBridge();
if (!bridge) return null;
const peer = bridge.getPeerConnection();
if (!peer) return null;
const peerStats = await peer.getStats();
const audioStats = {};
peerStats.forEach((stat) => {
if (FILTER_AUDIO_STATS.includes(stat.type)) {
audioStats[stat.id] = stat;
}
});
const transportStats = await this
.getInternalExternalIpAddresses(audioStats);
return { transportStats, ...audioStats };
}
}
// Module-level AudioManager instance — presumably shared as a singleton by
// importers of this module; confirm against the module's export statement.
const audioManager = new AudioManager();

View File

@ -3605,6 +3605,11 @@
"lru-cache": "^6.0.0"
}
},
"trim-newlines": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-4.0.2.tgz",
"integrity": "sha512-GJtWyq9InR/2HRiLZgpIKv+ufIKrVrvjQWEj7PxAXNc5dwbNJkqhAUoAGgzRmULAnoOM5EIpveYd3J2VeSAIew=="
},
"type-fest": {
"version": "0.18.1",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
@ -6071,11 +6076,6 @@
"punycode": "^2.1.1"
}
},
"trim-newlines": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
"integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw=="
},
"triple-beam": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz",

View File

@ -89,6 +89,12 @@ public:
# https://developer.mozilla.org/en-US/docs/Web/API/RTCDtlsTransport/iceTransport
# This is an EXPERIMENTAL setting and the default value is false
# experimentalUseKmsTrickleIceForMicrophone: false
#
# Shows stats about download and upload rates, audio jitter, lost packets
# and turn information
enableNetworkStats: false
# Enable the button to allow users to copy network stats to clipboard
enableCopyNetworkStatsButton: false
defaultSettings:
application:
animations: true

View File

@ -668,9 +668,16 @@
"app.connection-status.description": "View users' connection status",
"app.connection-status.empty": "There are currently no reported connection issues",
"app.connection-status.more": "more",
"app.connection-status.copy": "Copy network data",
"app.connection-status.copied": "Copied!",
"app.connection-status.jitter": "Jitter",
"app.connection-status.label": "Connection status",
"app.connection-status.no": "No",
"app.connection-status.notification": "Loss in your connection was detected",
"app.connection-status.offline": "offline",
"app.connection-status.lostPackets": "Lost packets",
"app.connection-status.usingTurn": "Using TURN",
"app.connection-status.yes": "Yes",
"app.activity-report.label": "Learning Dashboard",
"app.activity-report.description": "Open dashboard with users activities",
"app.activity-report.clickHereToOpen": "Open Learning Dashboard",

View File

@ -286,6 +286,9 @@ bigbluebutton.web.logoutURL=default
# successfully joining the meeting.
defaultHTML5ClientUrl=${bigbluebutton.web.serverURL}/html5client/join
useDefaultLogo=false
defaultLogoURL=${bigbluebutton.web.serverURL}/images/logo.png
# Allow requests without JSESSIONID to be handled (default = false)
allowRequestsWithoutSession=false

View File

@ -145,6 +145,8 @@ with BigBlueButton; if not, see <http://www.gnu.org/licenses/>.
<property name="defaultServerUrl" value="${bigbluebutton.web.serverURL}"/>
<property name="defaultNumDigitsForTelVoice" value="${defaultNumDigitsForTelVoice}"/>
<property name="defaultHTML5ClientUrl" value="${defaultHTML5ClientUrl}"/>
<property name="useDefaultLogo" value="${useDefaultLogo}"/>
<property name="defaultLogoURL" value="${defaultLogoURL}"/>
<property name="defaultGuestWaitURL" value="${defaultGuestWaitURL}"/>
<property name="allowRequestsWithoutSession" value="${allowRequestsWithoutSession}"/>
<property name="defaultMeetingDuration" value="${defaultMeetingDuration}"/>

View File

@ -23,7 +23,7 @@ module BigBlueButton
FFMPEG_AEVALSRC = "aevalsrc=s=48000:c=stereo:exprs=0|0"
FFMPEG_AFORMAT = "aresample=async=1000,aformat=sample_fmts=s16:sample_rates=48000:channel_layouts=stereo"
FFMPEG_WF_CODEC = 'libvorbis'
FFMPEG_WF_ARGS = ['-vn', '-c:a', FFMPEG_WF_CODEC, '-q:a', '2', '-f', 'ogg']
FFMPEG_WF_ARGS = ['-c:a', FFMPEG_WF_CODEC, '-q:a', '2', '-f', 'ogg']
WF_EXT = 'ogg'
def self.dump(edl)
@ -51,7 +51,7 @@ module BigBlueButton
ffmpeg_cmd += ['-filter_complex', "amix=inputs=#{inputs.length}"]
output = "#{output_basename}.#{WF_EXT}"
ffmpeg_cmd += [*FFMPEG_WF_ARGS, output]
ffmpeg_cmd += ['-vn', *FFMPEG_WF_ARGS, output]
BigBlueButton.logger.info "Running audio mixer..."
exitstatus = BigBlueButton.exec_ret(*ffmpeg_cmd)
@ -182,7 +182,7 @@ module BigBlueButton
ffmpeg_cmd << '-filter_complex_script' << filter_complex_script
output = "#{output_basename}.#{WF_EXT}"
ffmpeg_cmd += [*FFMPEG_WF_ARGS, output]
ffmpeg_cmd += ['-vn', *FFMPEG_WF_ARGS, output]
BigBlueButton.logger.info "Running audio processing..."
exitstatus = BigBlueButton.exec_ret(*ffmpeg_cmd)

View File

@ -24,7 +24,7 @@ module BigBlueButton
module EDL
module Video
FFMPEG_WF_CODEC = 'libx264'
FFMPEG_WF_ARGS = ['-an', '-codec', FFMPEG_WF_CODEC.to_s, '-preset', 'veryfast', '-crf', '30', '-force_key_frames', 'expr:gte(t,n_forced*10)', '-pix_fmt', 'yuv420p']
FFMPEG_WF_ARGS = ['-codec', FFMPEG_WF_CODEC.to_s, '-preset', 'veryfast', '-crf', '30', '-force_key_frames', 'expr:gte(t,n_forced*10)', '-pix_fmt', 'yuv420p']
WF_EXT = 'mp4'
def self.dump(edl)
@ -212,9 +212,7 @@ module BigBlueButton
newvideofile = File.join(File.dirname(output_basename), File.basename(videofile))
if !File.exist?(newvideofile)
ffmpeg_cmd = [*FFMPEG]
ffmpeg_cmd += ['-i', videofile, '-c', 'copy', newvideofile]
ffmpeg_cmd = [*FFMPEG, '-i', videofile, '-c', 'copy', newvideofile]
exitstatus = BigBlueButton.exec_ret(*ffmpeg_cmd)
raise "ffmpeg failed, exit code #{exitstatus}" if exitstatus != 0
end
@ -271,8 +269,7 @@ module BigBlueButton
end
end
ffmpeg_cmd = [*FFMPEG]
ffmpeg_cmd += ['-safe', '0', '-f', 'concat', '-i', concat_file , '-c', 'copy', render]
ffmpeg_cmd = [*FFMPEG, '-safe', '0', '-f', 'concat', '-i', concat_file , '-c', 'copy', render]
exitstatus = BigBlueButton.exec_ret(*ffmpeg_cmd)
raise "ffmpeg failed, exit code #{exitstatus}" if exitstatus != 0
@ -559,9 +556,7 @@ module BigBlueButton
ffmpeg_filter << ",trim=end=#{ms_to_s(duration)}"
ffmpeg_cmd = [*FFMPEG]
ffmpeg_cmd += ['-filter_complex', ffmpeg_filter, *FFMPEG_WF_ARGS, '-r', layout[:framerate].to_s, output]
ffmpeg_cmd = [*FFMPEG, '-filter_complex', ffmpeg_filter, '-an', *FFMPEG_WF_ARGS, '-r', layout[:framerate].to_s, output]
exitstatus = BigBlueButton.exec_ret(*ffmpeg_cmd)
raise "ffmpeg failed, exit code #{exitstatus}" if exitstatus != 0

View File

@ -58,7 +58,7 @@ module BigBlueButton
# and mixing it with deskshare audio
deskshare_dir = "#{archive_dir}/deskshare"
if BigBlueButton::Events.screenshare_has_audio?(events_xml, deskshare_dir)
if BigBlueButton::Events.screenshare_has_audio?(events, deskshare_dir)
BigBlueButton.logger.info("AudioProcessor.process: processing Deskshare audio...")
mixed_dir = "#{archive_dir}/mixed"

View File

@ -751,8 +751,7 @@ module BigBlueButton
end
# Check if any screenshare files has audio
def self.screenshare_has_audio?(events_xml, deskshare_dir)
events = Nokogiri::XML(File.open(events_xml))
def self.screenshare_has_audio?(events, deskshare_dir)
events.xpath('/recording/event[@eventname="StartWebRTCDesktopShareEvent"]').each do |event|
filename = event.at_xpath('filename').text
filename = "#{deskshare_dir}/#{File.basename(filename)}"