Merge pull request #7882 from prlanzarin/2.2-ap-ui

Add UI prompts when autoplay is blocked for video/screenshare/listen only
Merged by Anton Georgiev on 2019-08-14 15:24:16 -04:00 (committed via GitHub)
commit 5b9c835513
17 changed files with 406 additions and 75 deletions
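Every file below applies the same autoplay-handling pattern: call play() on the media element, treat a rejection named NotAllowedError as an autoplay block, announce it with a CustomEvent, and replay the queued elements once the user explicitly interacts with a prompt. A minimal standalone sketch of that flow, using illustrative names rather than the exact identifiers in the diff:

// Sketch only: the queue, event name and function names here are illustrative.
const blockedElements = [];

function tryToPlay(mediaElement, failedEventName) {
  mediaElement.play().catch((error) => {
    // NotAllowedError is how browsers report an autoplay block
    if (error.name === 'NotAllowedError') {
      blockedElements.push(mediaElement);
      window.dispatchEvent(new CustomEvent(failedEventName, { detail: { mediaElement } }));
    }
  });
}

// Runs in response to a user interaction (the click on the "allow" prompt),
// which is what lets play() succeed on the second attempt.
function replayBlockedElements() {
  while (blockedElements.length) {
    const mediaElement = blockedElements.shift();
    if (mediaElement) {
      mediaElement.play().catch(() => {
        // Ignore: the element may have been torn down in the meantime
      });
    }
  }
}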

View File

@ -17,6 +17,7 @@ export default class BaseAudioBridge {
ended: 'ended',
failed: 'failed',
reconnecting: 'reconnecting',
autoplayBlocked: 'autoplayBlocked',
};
}
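This new state is consumed further down in this diff by AudioManager.callStateCallback. A stripped-down illustration of that contract (the ui object and its methods are hypothetical stand-ins for what the real callback does):

// Illustrative only: how a consumer of the bridge callback can branch on the new state.
function onCallStateChange({ status }, ui) {
  if (status === 'autoplayBlocked') {
    ui.showAutoplayPrompt(); // hypothetical hook; the real code sets AudioManager.autoplayBlocked
  } else if (status === 'failed') {
    ui.showAudioError(); // hypothetical hook
  }
}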

View File

@ -69,13 +69,28 @@ export default class KurentoAudioBridge extends BaseAudioBridge {
audioTag.pause();
audioTag.srcObject = stream;
audioTag.muted = false;
audioTag.play().catch((e) => {
const tagFailedEvent = new CustomEvent('mediaTagPlayFailed', { detail: { mediaTag: audioTag } });
window.dispatchEvent(tagFailedEvent);
logger.warn({
logCode: 'sfuaudiobridge_play_error',
extraInfo: { error: e },
}, 'Could not play audio tag, emit mediaTagPlayFailed event');
audioTag.play()
.then(() => {
resolve(this.callback({ status: this.baseCallStates.started }));
})
.catch((error) => {
// NotAllowedError means playback was blocked by the autoplay policy; fire the autoplay handling event
if (error.name === 'NotAllowedError') {
const tagFailedEvent = new CustomEvent('audioPlayFailed', { detail: { mediaElement: audioTag } });
window.dispatchEvent(tagFailedEvent);
}
logger.warn({
logCode: 'sfuaudiobridge_play_maybe_error',
extraInfo: { error },
}, `Listen only media play failed due to ${error.name}`);
resolve(this.callback({
status: this.baseCallStates.autoplayBlocked,
}));
});
} else {
this.callback({
status: this.baseCallStates.failed,
error: this.baseErrorCodes.CONNECTION_ERROR,
});
}
@ -83,9 +98,6 @@ export default class KurentoAudioBridge extends BaseAudioBridge {
this.reconnectOngoing = false;
clearTimeout(this.reconnectTimeout);
}
resolve(this.callback({ status: this.baseCallStates.started }));
};
const onFail = (error) => {
@ -95,7 +107,7 @@ export default class KurentoAudioBridge extends BaseAudioBridge {
logger.error({
logCode: 'sfuaudiobridge_listen_only_error_reconnect',
extraInfo: { error },
}, `Listen only failed for an ongoing session, try to reconnect`);
}, 'Listen only failed for an ongoing session, try to reconnect');
window.kurentoExitAudio();
this.callback({ status: this.baseCallStates.reconnecting });
this.reconnectOngoing = true;
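The then/catch above can also be read as a small reusable helper. A sketch under the assumption that every listen-only bridge resolves its join promise through the same callback contract (the helper name is hypothetical; the statuses and the event name are the ones introduced in this commit):

// Hypothetical helper mirroring the bridge logic above.
function playListenOnlyTag(audioTag, callback, baseCallStates) {
  return audioTag.play()
    .then(() => callback({ status: baseCallStates.started }))
    .catch((error) => {
      // Only NotAllowedError maps to the autoplay prompt event, but the bridge
      // still reports autoplayBlocked so the caller stays in a recoverable state.
      if (error.name === 'NotAllowedError') {
        window.dispatchEvent(new CustomEvent('audioPlayFailed', {
          detail: { mediaElement: audioTag },
        }));
      }
      return callback({ status: baseCallStates.autoplayBlocked });
    });
}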

View File

@ -1,4 +1,3 @@
import Users from '/imports/api/users';
import Auth from '/imports/ui/services/auth';
import BridgeService from './service';
import { fetchWebRTCMappedStunTurnServers } from '/imports/utils/fetchStunTurnServers';
@ -18,8 +17,6 @@ const getUserId = () => Auth.userID;
const getMeetingId = () => Auth.meetingID;
const getUsername = () => Users.findOne({ userId: getUserId() }).name;
const getSessionToken = () => Auth.sessionToken;
export default class KurentoScreenshareBridge {
@ -38,13 +35,36 @@ export default class KurentoScreenshareBridge {
logger,
};
const onSuccess = () => {
const { webRtcPeer } = window.kurentoManager.kurentoVideo;
if (webRtcPeer) {
const screenshareTag = document.getElementById(SCREENSHARE_VIDEO_TAG);
const stream = webRtcPeer.getRemoteStream();
screenshareTag.muted = true;
screenshareTag.pause();
screenshareTag.srcObject = stream;
screenshareTag.play().catch((error) => {
// NotAllowedError means playback was blocked by the autoplay policy; fire the autoplay handling event
if (error.name === 'NotAllowedError') {
const tagFailedEvent = new CustomEvent('screensharePlayFailed',
{ detail: { mediaElement: screenshareTag } });
window.dispatchEvent(tagFailedEvent);
}
logger.warn({
logCode: 'sfuscreenshareview_play_maybe_error',
extraInfo: { error },
}, `Screenshare viewer media play failed due to ${error.name}`);
});
}
};
window.kurentoWatchVideo(
SCREENSHARE_VIDEO_TAG,
BridgeService.getConferenceBridge(),
getUserId(),
getMeetingId(),
null,
null,
onSuccess,
options,
);
}
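On the receiving end, a view that cares about these failures only needs to listen for the event and queue the element carried in detail. A bare sketch of that side; the React components further down do the same inside their lifecycle methods:

// Bare listener sketch for the event dispatched above.
const failedMediaElements = [];

function handlePlayElementFailed(e) {
  e.stopPropagation();
  failedMediaElements.push(e.detail.mediaElement);
  // A real component would also flip an autoplayBlocked flag here to render the overlay.
}

window.addEventListener('screensharePlayFailed', handlePlayElementFailed);
// ...and remove it on unmount or once the user has allowed playback:
// window.removeEventListener('screensharePlayFailed', handlePlayElementFailed);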

View File

@ -13,7 +13,7 @@ import AudioSettings from '../audio-settings/component';
import EchoTest from '../echo-test/component';
import Help from '../help/component';
import AudioDial from '../audio-dial/component';
import AudioAutoplayPrompt from '../autoplay/component';
const propTypes = {
intl: intlShape.isRequired,
@ -44,6 +44,8 @@ const propTypes = {
isIEOrEdge: PropTypes.bool.isRequired,
hasMediaDevices: PropTypes.bool.isRequired,
formattedTelVoice: PropTypes.string.isRequired,
autoplayBlocked: PropTypes.bool.isRequired,
handleAllowAutoplay: PropTypes.func.isRequired,
};
const defaultProps = {
@ -109,6 +111,10 @@ const intlMessages = defineMessages({
id: 'app.audioModal.ariaTitle',
description: 'aria label for modal title',
},
autoplayPromptTitle: {
id: 'app.audioModal.autoplayBlockedDesc',
description: 'Message for autoplay audio block',
},
});
class AudioModal extends Component {
@ -145,7 +151,12 @@ class AudioModal extends Component {
title: intlMessages.audioDialTitle,
component: () => this.renderAudioDial(),
},
autoplayBlocked: {
title: intlMessages.autoplayPromptTitle,
component: () => this.renderAutoplayOverlay(),
},
};
this.failedMediaElements = [];
}
componentDidMount() {
@ -169,6 +180,13 @@ class AudioModal extends Component {
}
}
componentDidUpdate(prevProps) {
const { autoplayBlocked, closeModal } = this.props;
if (autoplayBlocked !== prevProps.autoplayBlocked) {
autoplayBlocked ? this.setState({ content: 'autoplayBlocked' }) : closeModal();
}
}
componentWillUnmount() {
const {
isEchoTest,
@ -437,6 +455,15 @@ class AudioModal extends Component {
);
}
renderAutoplayOverlay() {
const { handleAllowAutoplay } = this.props;
return (
<AudioAutoplayPrompt
handleAllowAutoplay={handleAllowAutoplay}
/>
);
}
render() {
const {
intl,

View File

@ -55,7 +55,23 @@ export default lockContextContainer(withModalMounter(withTracker(({ mountModal,
throw error;
});
},
joinListenOnly: () => Service.joinListenOnly().then(() => mountModal(null)),
joinListenOnly: () => {
const call = new Promise((resolve) => {
Service.joinListenOnly().then(() => {
// Autoplay block wasn't triggered. Close the modal. If autoplay was
// blocked, that'll be handled in the modal component when the
// prop transitions to a state where it was handled OR the user opts
// to close the modal.
if (!Service.autoplayBlocked()) {
mountModal(null);
}
resolve();
});
});
return call.catch((error) => {
throw error;
});
},
leaveEchoTest: () => {
if (!Service.isEchoTest()) {
return Promise.resolve();
@ -85,5 +101,7 @@ export default lockContextContainer(withModalMounter(withTracker(({ mountModal,
isMobileNative: navigator.userAgent.toLowerCase().includes('bbbnative'),
isIEOrEdge: browser().name === 'edge' || browser().name === 'ie',
hasMediaDevices: deviceInfo.hasMediaDevices,
autoplayBlocked: Service.autoplayBlocked(),
handleAllowAutoplay: () => Service.handleAllowAutoplay(),
});
})(AudioModalContainer)));
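The Promise wrapper in joinListenOnly above can be flattened without changing the behaviour described in the comment; a sketch of the equivalent shape (Service and mountModal are the same objects the container receives, and the flattened form also lets rejections from Service.joinListenOnly reach the caller, which the wrapper above does not):

// Flatter equivalent of the wrapping above (sketch).
const joinListenOnly = (Service, mountModal) => Service.joinListenOnly().then(() => {
  // Close the modal only when autoplay was not blocked; otherwise the modal stays
  // mounted and switches to the autoplayBlocked prompt via its props.
  if (!Service.autoplayBlocked()) {
    mountModal(null);
  }
});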

View File

@ -0,0 +1,49 @@
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import Button from '/imports/ui/components/button/component';
import { defineMessages, intlShape, injectIntl } from 'react-intl';
import { styles } from './styles';
const intlMessages = defineMessages({
confirmLabel: {
id: 'app.audioModal.playAudio',
description: 'Play audio prompt for autoplay',
},
confirmAriaLabel: {
id: 'app.audioModal.playAudio.arialabel',
description: 'Provides better context for play audio prompt btn label',
},
});
const propTypes = {
handleAllowAutoplay: PropTypes.func.isRequired,
intl: intlShape.isRequired,
};
class AudioAutoplayPrompt extends PureComponent {
render() {
const {
intl,
handleAllowAutoplay,
} = this.props;
return (
<span className={styles.autoplayPrompt}>
<Button
className={styles.button}
label={intl.formatMessage(intlMessages.confirmLabel)}
aria-label={intl.formatMessage(intlMessages.confirmAriaLabel)}
icon="thumbs_up"
circle
color="success"
size="jumbo"
onClick={handleAllowAutoplay}
/>
</span>
);
}
}
export default injectIntl(AudioAutoplayPrompt);
AudioAutoplayPrompt.propTypes = propTypes;
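One detail worth noting: the click on this button is the user interaction browsers require before allowing play() with sound, which is why the queued elements are replayed in response to it rather than automatically. A reduced sketch of what a handleAllowAutoplay handler does (AudioManager.handleAllowAutoplay further down is the real implementation):

// Sketch: the preceding click is the user gesture that lets these play() calls succeed.
function handleAllowAutoplay(failedMediaElements) {
  while (failedMediaElements.length) {
    const mediaElement = failedMediaElements.shift();
    if (mediaElement) {
      mediaElement.play().catch(() => {
        // Ignore the error for now, mirroring the handlers below.
      });
    }
  }
}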

View File

@ -0,0 +1,18 @@
@import "/imports/ui/stylesheets/variables/_all";
.autoplayPrompt {
margin-top: auto;
margin-bottom: auto;
}
.button {
&:focus {
outline: none !important;
}
span:last-child {
color: black;
font-size: 1rem;
font-weight: 600;
}
}

View File

@ -81,4 +81,6 @@ export default {
error: () => AudioManager.error,
isUserModerator: () => Users.findOne({ userId: Auth.userID }).role === ROLE_MODERATOR,
currentUser,
autoplayBlocked: () => AudioManager.autoplayBlocked,
handleAllowAutoplay: () => AudioManager.handleAllowAutoplay(),
};

View File

@ -0,0 +1,53 @@
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import { defineMessages, injectIntl } from 'react-intl';
import Button from '/imports/ui/components/button/component';
import { styles } from './styles';
const propTypes = {
autoplayBlockedDesc: PropTypes.string.isRequired,
autoplayAllowLabel: PropTypes.string.isRequired,
handleAllowAutoplay: PropTypes.func.isRequired,
intl: PropTypes.objectOf(Object).isRequired,
};
const intlMessages = defineMessages({
autoplayAlertDesc: {
id: 'app.media.autoplayAlertDesc',
description: 'Description for the autoplay alert title',
},
});
class AutoplayOverlay extends PureComponent {
render() {
const {
intl,
handleAllowAutoplay,
autoplayBlockedDesc,
autoplayAllowLabel,
} = this.props;
return (
<div className={styles.autoplayOverlay}>
<div className={styles.title}>
{ intl.formatMessage(intlMessages.autoplayAlertDesc) }
</div>
<div className={styles.autoplayOverlayContent}>
<div className={styles.label}>
{autoplayBlockedDesc}
</div>
<Button
color="primary"
label={autoplayAllowLabel}
onClick={handleAllowAutoplay}
role="button"
size="lg"
/>
</div>
</div>
);
}
}
AutoplayOverlay.propTypes = propTypes;
export default injectIntl(AutoplayOverlay);

View File

@ -0,0 +1,31 @@
@import "/imports/ui/stylesheets/variables/_all";
.autoplayOverlayContent {
text-align: center;
margin-top: 8px;
}
.title {
display: block;
font-size: var(--font-size-large);
text-align: center;
}
.label {
display: block;
font-size: var(--font-size-base);
text-align: center;
margin-bottom: 12px;
}
.autoplayOverlay {
display: flex;
justify-content: center;
flex-direction: column;
background: rgba(0, 0, 0, 1);
height: 100%;
width: 100%;
color: var(--color-white);
font-size: var(--font-size-large);
border-radius: 5px;
position: absolute;
z-index: 9999;
text-align: center;
}

View File

@ -33,56 +33,6 @@ export default class Media extends Component {
constructor(props) {
super(props);
this.refContainer = React.createRef();
this.failedTags = [];
this.listeningToTagPlayFailed = false;
this.monitorMediaTagPlayFailures();
}
monitorMediaTagPlayFailures() {
const handleFailTagEvent = (e) => {
e.stopPropagation();
this.failedTags.push(e.detail.mediaTag);
if (!this.listeningToTagPlayFailed) {
this.listeningToTagPlayFailed = true;
// Monitor user action events so we can play and flush all the failed tags
// in the queue when the user performs one of them
window.addEventListener('click', flushFailedTags);
window.addEventListener('auxclick', flushFailedTags);
window.addEventListener('keydown', flushFailedTags);
window.addEventListener('touchstart', flushFailedTags);
}
};
const flushFailedTags = () => {
window.removeEventListener('click', flushFailedTags);
window.removeEventListener('auxclick', flushFailedTags);
window.removeEventListener('keydown', flushFailedTags);
window.removeEventListener('touchstart', flushFailedTags);
while (this.failedTags.length) {
const mediaTag = this.failedTags.shift();
if (mediaTag) {
mediaTag.play().catch((e) => {
// Ignore the error for now.
});
}
}
this.listeningToTagPlayFailed = false;
};
// Monitor tag play failure events, probably due to autoplay. The callback
// puts the failed tags in a queue which will be flushed on a user action
// by the listeners created @handleFailTagEvent. Once the queue is flushed, all
// user action listeners are removed since the autoplay restriction should be over.
// Every media tag in the app should have a then/catch handler and emit
// this event accordingly so we can try to circumvent autoplay without putting up
// a UI block/prompt.
// If a tag fails to play again for some odd reason, the listeners will be
// reattached (see this.listeningToTagPlayFailed) and flushFailedTags runs again
window.addEventListener('mediaTagPlayFailed', handleFailTagEvent);
}
componentWillUpdate() {

View File

@ -5,12 +5,19 @@ import _ from 'lodash';
import FullscreenService from '../fullscreen-button/service';
import FullscreenButtonContainer from '../fullscreen-button/container';
import { styles } from './styles';
import AutoplayOverlay from '../media/autoplay-overlay/component';
const intlMessages = defineMessages({
screenShareLabel: {
id: 'app.screenshare.screenShareLabel',
description: 'screen share area element label',
},
autoplayBlockedDesc: {
id: 'app.media.screenshare.autoplayBlockedDesc',
},
autoplayAllowLabel: {
id: 'app.media.screenshare.autoplayAllowLabel',
},
});
const ALLOW_FULLSCREEN = Meteor.settings.public.app.allowFullscreen;
@ -21,10 +28,14 @@ class ScreenshareComponent extends React.Component {
this.state = {
loaded: false,
isFullscreen: false,
autoplayBlocked: false,
};
this.onVideoLoad = this.onVideoLoad.bind(this);
this.onFullscreenChange = this.onFullscreenChange.bind(this);
this.handleAllowAutoplay = this.handleAllowAutoplay.bind(this);
this.handlePlayElementFailed = this.handlePlayElementFailed.bind(this);
this.failedMediaElements = [];
}
componentDidMount() {
@ -32,6 +43,7 @@ class ScreenshareComponent extends React.Component {
presenterScreenshareHasStarted();
this.screenshareContainer.addEventListener('fullscreenchange', this.onFullscreenChange);
window.addEventListener('screensharePlayFailed', this.handlePlayElementFailed);
}
componentWillReceiveProps(nextProps) {
@ -50,6 +62,7 @@ class ScreenshareComponent extends React.Component {
presenterScreenshareHasEnded();
unshareScreen();
this.screenshareContainer.removeEventListener('fullscreenchange', this.onFullscreenChange);
window.removeEventListener('screensharePlayFailed', this.handlePlayElementFailed);
}
onVideoLoad() {
@ -64,6 +77,32 @@ class ScreenshareComponent extends React.Component {
}
}
handleAllowAutoplay() {
const { autoplayBlocked } = this.state;
window.removeEventListener('screensharePlayFailed', this.handlePlayElementFailed);
while (this.failedMediaElements.length) {
const mediaElement = this.failedMediaElements.shift();
if (mediaElement) {
mediaElement.play().catch(() => {
// Ignore the error for now.
});
}
}
if (autoplayBlocked) { this.setState({ autoplayBlocked: false }); }
}
handlePlayElementFailed(e) {
const { mediaElement } = e.detail;
const { autoplayBlocked } = this.state;
e.stopPropagation();
this.failedMediaElements.push(mediaElement);
if (!autoplayBlocked) {
this.setState({ autoplayBlocked: true });
}
}
renderFullscreenButton() {
const { intl } = this.props;
const { isFullscreen } = this.state;
@ -82,7 +121,8 @@ class ScreenshareComponent extends React.Component {
}
render() {
const { loaded } = this.state;
const { loaded, autoplayBlocked } = this.state;
const { intl } = this.props;
return (
[!loaded
@ -93,6 +133,16 @@ class ScreenshareComponent extends React.Component {
/>
)
: null,
!autoplayBlocked
? null
: (
<AutoplayOverlay
key={_.uniqueId('screenshareAutoplayOverlay')}
autoplayBlockedDesc={intl.formatMessage(intlMessages.autoplayBlockedDesc)}
autoplayAllowLabel={intl.formatMessage(intlMessages.autoplayAllowLabel)}
handleAllowAutoplay={this.handleAllowAutoplay}
/>
),
(
<div
className={styles.screenshareContainer}

View File

@ -6,6 +6,7 @@ import _ from 'lodash';
import { styles } from './styles';
import VideoListItemContainer from './video-list-item/container';
import { withDraggableConsumer } from '../../media/webcam-draggable-overlay/context';
import AutoplayOverlay from '../../media/autoplay-overlay/component';
const propTypes = {
users: PropTypes.arrayOf(PropTypes.object).isRequired,
@ -30,6 +31,12 @@ const intlMessages = defineMessages({
unfocusDesc: {
id: 'app.videoDock.webcamUnfocusDesc',
},
autoplayBlockedDesc: {
id: 'app.videoDock.autoplayBlockedDesc',
},
autoplayAllowLabel: {
id: 'app.videoDock.autoplayAllowLabel',
},
});
const findOptimalGrid = (canvasWidth, canvasHeight, gutter, aspectRatio, numItems, columns = 1) => {
@ -66,17 +73,22 @@ class VideoList extends Component {
rows: 1,
filledArea: 0,
},
autoplayBlocked: false,
};
this.ticking = false;
this.grid = null;
this.canvas = null;
this.failedMediaElements = [];
this.handleCanvasResize = _.throttle(this.handleCanvasResize.bind(this), 66,
{
leading: true,
trailing: true,
});
this.setOptimalGrid = this.setOptimalGrid.bind(this);
this.handleAllowAutoplay = this.handleAllowAutoplay.bind(this);
this.handlePlayElementFailed = this.handlePlayElementFailed.bind(this);
this.autoplayWasHandled = false;
}
componentDidMount() {
@ -90,10 +102,12 @@ class VideoList extends Component {
this.handleCanvasResize();
window.addEventListener('resize', this.handleCanvasResize, false);
window.addEventListener('videoPlayFailed', this.handlePlayElementFailed);
}
componentWillUnmount() {
window.removeEventListener('resize', this.handleCanvasResize, false);
window.removeEventListener('videoPlayFailed', this.handlePlayElementFailed);
}
setOptimalGrid() {
@ -128,6 +142,33 @@ class VideoList extends Component {
});
}
handleAllowAutoplay() {
const { autoplayBlocked } = this.state;
this.autoplayWasHandled = true;
window.removeEventListener('videoPlayFailed', this.handlePlayElementFailed);
while (this.failedMediaElements.length) {
const mediaElement = this.failedMediaElements.shift();
if (mediaElement) {
mediaElement.play().catch(() => {
// Ignore the error for now.
});
}
}
if (autoplayBlocked) { this.setState({ autoplayBlocked: false }); }
}
handlePlayElementFailed(e) {
const { mediaElement } = e.detail;
const { autoplayBlocked } = this.state;
e.stopPropagation();
this.failedMediaElements.push(mediaElement);
if (!autoplayBlocked && !this.autoplayWasHandled) {
this.setState({ autoplayBlocked: true });
}
}
handleVideoFocus(id) {
const { focusedId } = this.state;
this.setState({
@ -196,8 +237,8 @@ class VideoList extends Component {
}
render() {
const { users } = this.props;
const { optimalGrid } = this.state;
const { users, intl } = this.props;
const { optimalGrid, autoplayBlocked } = this.state;
const canvasClassName = cx({
[styles.videoCanvas]: true,
@ -230,6 +271,13 @@ class VideoList extends Component {
{this.renderVideoList()}
</div>
)}
{ !autoplayBlocked ? null : (
<AutoplayOverlay
autoplayBlockedDesc={intl.formatMessage(intlMessages.autoplayBlockedDesc)}
autoplayAllowLabel={intl.formatMessage(intlMessages.autoplayAllowLabel)}
handleAllowAutoplay={this.handleAllowAutoplay}
/>
)}
</div>
);
}

View File

@ -65,12 +65,15 @@ class VideoListItem extends Component {
const playElement = (elem) => {
if (elem.paused) {
elem.play().catch((error) => {
const tagFailedEvent = new CustomEvent('mediaTagPlayFailed', { detail: { mediaTag: elem } });
window.dispatchEvent(tagFailedEvent);
// NotAllowedError means playback was blocked by the autoplay policy; fire the autoplay handling event
if (error.name === 'NotAllowedError') {
const tagFailedEvent = new CustomEvent('videoPlayFailed', { detail: { mediaTag: elem } });
window.dispatchEvent(tagFailedEvent);
}
logger.warn({
logCode: 'videolistitem_component_play_error',
logCode: 'videolistitem_component_play_maybe_error',
extraInfo: { error },
}, 'Could not play video tag, emit mediaTagPlayFailed event');
}, `Could not play video tag due to ${error.name}`);
});
}
};

View File

@ -19,6 +19,7 @@ const CALL_STATES = {
ENDED: 'ended',
FAILED: 'failed',
RECONNECTING: 'reconnecting',
AUTOPLAY_BLOCKED: 'autoplayBlocked',
};
class AudioManager {
@ -40,9 +41,12 @@ class AudioManager {
error: null,
outputDeviceId: null,
muteHandle: null,
autoplayBlocked: false,
});
this.useKurento = Meteor.settings.public.kurento.enableListenOnly;
this.failedMediaElements = [];
this.handlePlayElementFailed = this.handlePlayElementFailed.bind(this);
}
init(userData) {
@ -203,6 +207,8 @@ class AudioManager {
logger.info({ logCode: 'audiomanager_join_listenonly', extraInfo: { logType: 'user_action' } }, 'user requested to connect to audio conference as listen only');
window.addEventListener('audioPlayFailed', this.handlePlayElementFailed);
return this.onAudioJoining()
.then(() => Promise.race([
bridge.joinAudio(callOptions, this.callStateCallback.bind(this)),
@ -299,6 +305,8 @@ class AudioManager {
this.isConnecting = false;
this.isHangingUp = false;
this.isListenOnly = false;
this.autoplayBlocked = false;
this.failedMediaElements = [];
if (this.inputStream) {
window.defaultInputStream.forEach(track => track.stop());
@ -314,6 +322,7 @@ class AudioManager {
}
window.parent.postMessage({ response: 'notInAudio' }, '*');
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
}
callStateCallback(response) {
@ -323,6 +332,7 @@ class AudioManager {
ENDED,
FAILED,
RECONNECTING,
AUTOPLAY_BLOCKED,
} = CALL_STATES;
const {
@ -358,6 +368,10 @@ class AudioManager {
logger.info({ logCode: 'audio_reconnecting' }, 'Attempting to reconnect audio');
this.notify(this.intl.formatMessage(this.messages.info.RECONNECTING_AUDIO), true);
this.playHangUpSound();
} else if (status === AUTOPLAY_BLOCKED) {
this.autoplayBlocked = true;
this.onAudioJoin();
resolve(AUTOPLAY_BLOCKED);
}
});
}
@ -468,6 +482,29 @@ class AudioManager {
audioIcon,
);
}
handleAllowAutoplay() {
window.removeEventListener('audioPlayFailed', this.handlePlayElementFailed);
while (this.failedMediaElements.length) {
const mediaElement = this.failedMediaElements.shift();
if (mediaElement) {
mediaElement.play().catch(() => {
// Ignore the error for now.
});
}
}
this.autoplayBlocked = false;
}
handlePlayElementFailed(e) {
const { mediaElement } = e.detail;
e.stopPropagation();
this.failedMediaElements.push(mediaElement);
if (!this.autoplayBlocked) {
this.autoplayBlocked = true;
}
}
}
const audioManager = new AudioManager();

View File

@ -107,9 +107,12 @@
"app.userList.userOptions.enableNote": "Shared notes are now enabled",
"app.userList.userOptions.enableOnlyModeratorWebcam": "You can enable your webcam now, everyone will see you",
"app.media.label": "Media",
"app.media.autoplayAlertDesc": "Allow Access",
"app.media.screenshare.start": "Screenshare has started",
"app.media.screenshare.end": "Screenshare has ended",
"app.media.screenshare.safariNotSupported": "Screenshare is currently not supported by Safari. Please, use Firefox or Google Chrome.",
"app.media.screenshare.autoplayBlockedDesc": "We need your permission to show you the presenter's screen.",
"app.media.screenshare.autoplayAllowLabel": "View shared screen",
"app.meeting.ended": "This session has ended",
"app.meeting.meetingTimeRemaining": "Meeting time remaining: {0}",
"app.meeting.meetingTimeHasEnded": "Time ended. Meeting will close soon",
@ -392,6 +395,9 @@
"app.audioModal.audioDialTitle": "Join using your phone",
"app.audioDial.audioDialDescription": "Dial",
"app.audioDial.audioDialConfrenceText": "and enter the conference PIN number:",
"app.audioModal.autoplayBlockedDesc": "We need your permission to play audio.",
"app.audioModal.playAudio": "Play audio",
"app.audioModal.playAudio.arialabel" : "Play audio",
"app.audioDial.tipIndicator": "Tip",
"app.audioDial.tipMessage": "Press the '0' key on your phone to mute/unmute yourself.",
"app.audioModal.connecting": "Connecting",
@ -602,6 +608,8 @@
"app.videoDock.webcamFocusDesc": "Focus the selected webcam",
"app.videoDock.webcamUnfocusLabel": "Unfocus",
"app.videoDock.webcamUnfocusDesc": "Unfocus the selected webcam",
"app.videoDock.autoplayBlockedDesc": "We need your permission to show you other users' webcams.",
"app.videoDock.autoplayAllowLabel": "View webcams",
"app.invitation.title": "Breakout room invitation",
"app.invitation.confirm": "Invite",
"app.createBreakoutRoom.title": "Breakout Rooms",

View File

@ -301,6 +301,11 @@ Kurento.prototype.startResponse = function (message) {
extraInfo: { sfuResponse: message }
}, `Start request accepted for ${message.type}`);
this.webRtcPeer.processAnswer(message.sdpAnswer);
// audio calls get their success callback in a subsequent step (@webRTCAudioSuccess)
// due to legacy messaging which I don't intend to break now - prlanzarin
if (message.type === 'screenshare') {
this.onSuccess();
}
}
};
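Since the remoteVideo option is dropped from the viewer() call below, the screenshare bridge now attaches the remote stream itself inside the onSuccess callback (see the KurentoScreenshareBridge hunk earlier in this diff). A reduced sketch of what that callback does; the function name is hypothetical, while getRemoteStream() and the event name are the ones used in the bridge code:

// Reduced sketch of the screenshare onSuccess shown earlier in this diff.
function attachRemoteScreenshare(webRtcPeer, videoTag) {
  const stream = webRtcPeer.getRemoteStream();
  videoTag.muted = true; // even muted playback can be blocked by stricter autoplay settings
  videoTag.pause();
  videoTag.srcObject = stream;
  return videoTag.play().catch((error) => {
    if (error.name === 'NotAllowedError') {
      window.dispatchEvent(new CustomEvent('screensharePlayFailed', {
        detail: { mediaElement: videoTag },
      }));
    }
  });
}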
@ -470,7 +475,6 @@ Kurento.prototype.viewer = function () {
mediaConstraints: {
audio: false,
},
remoteVideo: document.getElementById(this.renderTag),
onicecandidate: (candidate) => {
this.onIceCandidate(candidate, this.RECV_ROLE);
},