Merge branch 'v2.0.x-release' of https://github.com/bigbluebutton/bigbluebutton into settings-lock-video-dock
commit 9967e0f66e
@@ -70,7 +70,7 @@ if (request.getParameterMap().isEmpty()) {
<td> </td>
<td style="text-align: right; ">Moderator Role:</td>
<td style="width: 5px; "> </td>
<td style="text-align: left "><input type=checkbox name=isModerator value="true"></td>
<td style="text-align: left "><input type=checkbox name=isModerator value="true" checked></td>
<tr>

<tr>

@@ -23,6 +23,7 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;

@@ -546,7 +547,7 @@ public class ParamsProcessorUtil {
int status = response.getStatusLine().getStatusCode();
if (status >= 200 && status < 300) {
HttpEntity entity = response.getEntity();
return entity != null ? EntityUtils.toString(entity) : null;
return entity != null ? EntityUtils.toString(entity, StandardCharsets.UTF_8) : null;
} else {
throw new ClientProtocolException("Unexpected response status: " + status);
}
@@ -418,18 +418,28 @@ window.getScreenConstraints = function(sendSource, callback) {
let chromeMediaSourceId = sendSource;
let screenConstraints = {video: {}};

// Limiting FPS to a range of 5-10 (5 ideal)
screenConstraints.video.frameRate = {ideal: 5, max: 10};

// Limiting max resolution to screen size
screenConstraints.video.height = {max: window.screen.height};
screenConstraints.video.width = {max: window.screen.width};

if(isChrome) {
getChromeScreenConstraints ((constraints) => {
if(!constraints){
if (!constraints) {
document.dispatchEvent(new Event("installChromeExtension"));
return;
}
extensionInstalled = true;

let sourceId = constraints.streamId;

kurentoManager.kurentoScreenshare.extensionInstalled = true;

// this statement gets 'sourceId' and sets 'chromeMediaSourceId'
screenConstraints.video.chromeMediaSource = { exact: [sendSource]};
screenConstraints.video.chromeMediaSourceId = sourceId;

console.log("getScreenConstraints for Chrome returns => ");
console.log(screenConstraints);
// now invoking native getUserMedia API

@@ -439,8 +449,6 @@ window.getScreenConstraints = function(sendSource, callback) {
}
else if (isFirefox) {
screenConstraints.video.mediaSource= "window";
screenConstraints.video.width= {max: "1280"};
screenConstraints.video.height = {max: "720"};

console.log("getScreenConstraints for Firefox returns => ");
console.log(screenConstraints);

@@ -449,8 +457,6 @@ window.getScreenConstraints = function(sendSource, callback) {
}
else if(isSafari) {
screenConstraints.video.mediaSource= "screen";
screenConstraints.video.width= {max: window.screen.width};
screenConstraints.video.height = {max: window.screen.vid_height};

console.log("getScreenConstraints for Safari returns => ");
console.log(screenConstraints);
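The constraints object assembled above is ultimately handed to the browser capture API through the callback. A rough, illustrative sketch only (assuming a Promise-based navigator.mediaDevices environment; startScreenCapture and the callback names are not part of this commit):

// Sketch: how a screenConstraints object like the one built above could be consumed.
function startScreenCapture(screenConstraints, onStream, onError) {
  navigator.mediaDevices.getUserMedia(screenConstraints)
    .then(stream => onStream(stream))  // capture stream is ready
    .catch(error => onError(error));   // e.g. permission denied or unsupported constraint
}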
@@ -295,7 +295,7 @@ stop_bigbluebutton () {
if [ -f /usr/lib/systemd/system/bbb-webhooks.service ]; then
WEBHOOKS=bbb-webhooks
fi
if [ -f /usr/lib/systemd/system/bbb-bbb-transcode-akka.service ]; then
if [ -f /usr/lib/systemd/system/bbb-transcode-akka.service ]; then
BBB_TRANSCODE_AKKA=bbb-transcode-akka
fi

@@ -351,7 +351,7 @@ start_bigbluebutton () {
if [ -f /usr/lib/systemd/system/bbb-webhooks.service ]; then
WEBHOOKS=bbb-webhooks
fi
if [ -f /usr/lib/systemd/system/bbb-bbb-transcode-akka.service ]; then
if [ -f /usr/lib/systemd/system/bbb-transcode-akka.service ]; then
BBB_TRANSCODE_AKKA=bbb-transcode-akka
fi
@@ -1,7 +1,7 @@
(function() {
function adjustVideos(tagId, centerVideos) {
const _minContentAspectRatio = 4 / 3.0;
function adjustVideos(tagId, centerVideos, moreThan4VideosClass, mediaContainerClass, overlayWrapperClass, presentationAreaDataId, screenshareVideoId) {
const _minContentAspectRatio = 16 / 9.0;

function calculateOccupiedArea(canvasWidth, canvasHeight, numColumns, numRows, numChildren) {
const obj = calculateCellDimensions(canvasWidth, canvasHeight, numColumns, numRows);

@@ -55,11 +55,40 @@
}

// http://stackoverflow.com/a/3437825/414642
const e = $("#" + tagId).parent();
const e = $("." + overlayWrapperClass);
const x = e.outerWidth() - 1;
const y = e.outerHeight() - 1;

const videos = $("#" + tagId + " video:visible");
const videos = $("#" + tagId + " > div:visible");
const isPortrait = ( $(document).width() < $(document).height() );

if (isPortrait) {
// If currently displaying a presentation
if ( $("#" + presentationAreaDataId).length ) {
e.css({
"margin-top": $('#' + presentationAreaDataId).offset().top - 221,
"width": "calc(100% - " + $('#' + presentationAreaDataId).offset().left + ")"
});
} else if ( $("#" + screenshareVideoId).length ) { // Or if currently displaying a screenshare
e.css({
"margin-top": $('#' + screenshareVideoId).offset().top - 221,
"width": "calc(100% - " + $('#' + screenshareVideoId).offset().left + ")"
});
}
} else {
e.css({
"width": "100%",
"margin-top": 0
});
}

if (videos.length > 4 && !isPortrait) {
e.addClass(moreThan4VideosClass);
$("." + mediaContainerClass).css("max-width", "calc(100% - 170px)");
} else {
e.removeClass(moreThan4VideosClass);
$("." + mediaContainerClass).css("max-width", "100%");
}

const best = findBestConfiguration(x, y, videos.length);

@@ -67,18 +96,10 @@
const row = Math.floor(i / best.numColumns);
const col = Math.floor(i % best.numColumns);

// Free width space remaining to the right and below of the videos
const remX = (x - best.width * best.numColumns);
const remY = (y - best.height * best.numRows);
const top = (row > 0 && videos.length <= 4 && !isPortrait) ? 1 : 0;
const left = (col > 0 && videos.length <= 4 && !isPortrait) ? 1 : 0;

// Center videos
const top = Math.floor(((best.height) * row) + remY / 2);
const left = Math.floor(((best.width) * col) + remX / 2);

const videoTop = `top: ${top}px;`;
const videoLeft = `left: ${left}px;`;

$(this).attr('style', videoTop + videoLeft);
$(this).attr('style', `margin-top: ${top}px; margin-left: ${left}px; width: ${best.width}px; height: ${best.height}px;`);
});

videos.attr('width', best.width);
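The layout math above relies on findBestConfiguration to pick a grid of rows and columns whose cells, at the minimum aspect ratio, fill as much of the container as possible. A standalone sketch of that kind of search (findBestGrid is an illustrative stand-in, not the function used by this file):

// Sketch: choose the column/row count that maximizes the area covered by
// equally sized cells of a fixed aspect ratio.
function findBestGrid(canvasWidth, canvasHeight, numChildren, aspectRatio = 16 / 9) {
  let best = null;
  for (let cols = 1; cols <= numChildren; cols += 1) {
    const rows = Math.ceil(numChildren / cols);
    // Largest cell that fits the canvas at the given aspect ratio
    const cellWidth = Math.min(canvasWidth / cols, (canvasHeight / rows) * aspectRatio);
    const cellHeight = cellWidth / aspectRatio;
    const occupied = cellWidth * cellHeight * numChildren;
    if (!best || occupied > best.occupied) {
      best = {
        numColumns: cols,
        numRows: rows,
        width: Math.floor(cellWidth),
        height: Math.floor(cellHeight),
        occupied,
      };
    }
  }
  return best;
}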
@@ -418,13 +418,28 @@ window.getScreenConstraints = function(sendSource, callback) {
let chromeMediaSourceId = sendSource;
let screenConstraints = {video: {}};

// Limiting FPS to a range of 5-10 (5 ideal)
screenConstraints.video.frameRate = {ideal: 5, max: 10};

// Limiting max resolution to screen size
screenConstraints.video.height = {max: window.screen.height};
screenConstraints.video.width = {max: window.screen.width};

if(isChrome) {
getChromeScreenConstraints ((constraints) => {
if (!constraints) {
document.dispatchEvent(new Event("installChromeExtension"));
return;
}

let sourceId = constraints.streamId;

kurentoManager.kurentoScreenshare.extensionInstalled = true;

// this statement gets 'sourceId' and sets 'chromeMediaSourceId'
screenConstraints.video.chromeMediaSource = { exact: [sendSource]};
screenConstraints.video.chromeMediaSourceId = sourceId;

console.log("getScreenConstraints for Chrome returns => ");
console.log(screenConstraints);
// now invoking native getUserMedia API

@@ -434,8 +449,6 @@ window.getScreenConstraints = function(sendSource, callback) {
}
else if (isFirefox) {
screenConstraints.video.mediaSource= "window";
screenConstraints.video.width= {max: "1280"};
screenConstraints.video.height = {max: "720"};

console.log("getScreenConstraints for Firefox returns => ");
console.log(screenConstraints);

@@ -444,8 +457,6 @@ window.getScreenConstraints = function(sendSource, callback) {
}
else if(isSafari) {
screenConstraints.video.mediaSource= "screen";
screenConstraints.video.width= {max: window.screen.width};
screenConstraints.video.height = {max: window.screen.vid_height};

console.log("getScreenConstraints for Safari returns => ");
console.log(screenConstraints);
@@ -3,8 +3,10 @@ import handleJoinVoiceUser from './handlers/joinVoiceUser';
import handleLeftVoiceUser from './handlers/leftVoiceUser';
import handleTalkingVoiceUser from './handlers/talkingVoiceUser';
import handleMutedVoiceUser from './handlers/mutedVoiceUser';
import handleGetVoiceUsers from './handlers/getVoiceUsers';

RedisPubSub.on('UserLeftVoiceConfToClientEvtMsg', handleLeftVoiceUser);
RedisPubSub.on('UserJoinedVoiceConfToClientEvtMsg', handleJoinVoiceUser);
RedisPubSub.on('UserTalkingVoiceEvtMsg', handleTalkingVoiceUser);
RedisPubSub.on('UserMutedVoiceEvtMsg', handleMutedVoiceUser);
RedisPubSub.on('GetVoiceUsersMeetingRespMsg', handleGetVoiceUsers);
@@ -0,0 +1,63 @@
import { check } from 'meteor/check';
import VoiceUsers from '/imports/api/voice-users/';
import Meetings from '/imports/api/meetings';
import addVoiceUser from '../modifiers/addVoiceUser';
import removeVoiceUser from '../modifiers/removeVoiceUser';
import updateVoiceUser from '../modifiers/updateVoiceUser';

export default function handleGetVoiceUsers({ body }, meetingId) {
const { users } = body;

check(meetingId, String);
check(users, Array);

const meeting = Meetings.findOne({ meetingId: meetingId });
const usersIds = users.map(m => m.intId);

const voiceUsersIdsToUpdate = VoiceUsers.find({
meetingId,
intId: { $in: usersIds },
}).fetch().map(m => m.intId);

let voiceUsersUpdated = [];
users.forEach(user => {
if(voiceUsersIdsToUpdate.indexOf(user.intId) >= 0) {
// user already exists, so update it
voiceUsersUpdated.push(updateVoiceUser(meetingId, {
intId: user.intId,
voiceUserId: user.voiceUserId,
talking: user.talking,
muted: user.muted,
voiceConf: meeting.voiceProp.voiceConf,
joined: true
}));
} else {
// user doesn't exist yet, so add it
addVoiceUser(meetingId, {
voiceUserId: user.voiceUserId,
intId: user.intId,
callerName: user.callerName,
callerNum: user.callerNum,
muted: user.muted,
talking: user.talking,
callingWith: user.callingWith,
listenOnly: user.listenOnly,
voiceConf: meeting.voiceProp.voiceConf,
joined: true
});
}
});

// remove voice users that are in Mongo but missing from the incoming list
const voiceUsersToRemove = VoiceUsers.find({
meetingId,
intId: { $nin: usersIds },
}).fetch();
voiceUsersToRemove.forEach(user => removeVoiceUser(meetingId, {
voiceConf: meeting.voiceProp.voiceConf,
voiceUserId: user.voiceUserId,
intId: user.intId
}));

return voiceUsersUpdated;
}
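For context, a handler like this is fed the decoded body of a GetVoiceUsersMeetingRespMsg. A hypothetical invocation (the payload shape is an assumption drawn from the fields the handler reads above, not from protocol documentation):

// Hypothetical example payload; field names mirror what the handler reads.
const samplePayload = {
  body: {
    users: [
      { intId: 'u1', voiceUserId: 'v1', callerName: 'Ada', callerNum: '1001',
        muted: false, talking: true, callingWith: 'webrtc', listenOnly: false },
    ],
  },
};

// Upserts 'u1' and removes any other voice users stored for this meeting.
handleGetVoiceUsers(samplePayload, 'meeting-123');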
@@ -11,6 +11,7 @@ export default function updateVoiceUser(meetingId, voiceUser) {
talking: Match.Maybe(Boolean),
muted: Match.Maybe(Boolean),
voiceConf: String,
joined: Match.Maybe(Boolean)
});

const { intId } = voiceUser;
@@ -1,23 +0,0 @@
import React from 'react';
import Button from '/imports/ui/components/button/component';
// import { styles } from '../styles.scss';

export default class JoinVideo extends React.Component {

handleClick() {
}

render() {
return (
<Button
onClick={this.handleClick}
label={'Cam Off'}
color={'primary'}
icon={'video_off'}
size={'lg'}
circle
style={{ visibility: 'hidden' }}
/>
);
}
}
@@ -15,7 +15,7 @@
display: block;
height: 0;
padding: 0;
padding-bottom: calc(100% * 9 / 16);
//padding-bottom: calc(100% * 9 / 16);
}

%ratio-item {
@@ -29,16 +29,37 @@
}

.overlayWrapper {
position: absolute;
width: 30%;
right: 2%;
bottom: 2%;
position: fixed;
width: 100%;
max-width: 170px;
height: 96px;
right: 8px;
bottom: 8px;

@include mq($portrait) {
position: absolute;
width: 100%;
max-width: none;
height: 96px;
top: 60px;
left: 0;
margin: 0 auto;
}

@include mq($large-up) {
flex-basis: 15%;
}
}

.moreThan4Videos {
top: 63px;
right: 0;
position: fixed;
height: calc(100% - 143px);
text-align: right;
}

.overlayRatio {
@extend %ratio;
}
@@ -158,6 +158,7 @@ export default class PresentationArea extends Component {
WebkitTransition: 'width 0.2s', /* Safari */
transition: 'width 0.2s',
}}
id="presentationAreaData"
>
<TransitionGroup>
<CSSTransition
@@ -281,7 +282,7 @@ export default class PresentationArea extends Component {

render() {
return (
<div className={styles.presentationContainer}>
<div className={styles.presentationContainer} id="presentationContainer">
<div
ref={(ref) => { this.refPresentationArea = ref; }}
className={styles.presentationArea}
@@ -7,7 +7,7 @@ export default class ScreenshareComponent extends React.Component {

render() {
return (
<video id="screenshareVideo" style={{ height: '100%', width: '100%' }} autoPlay playsInline />
<video id="screenshareVideo" style={{ maxHeight: '100%', width: '100%' }} autoPlay playsInline />
);
}
}
@@ -5,7 +5,9 @@ import VideoService from './service';
import { log } from '/imports/ui/services/api';
import { notify } from '/imports/ui/services/notification';
import { toast } from 'react-toastify';
import { styles as mediaStyles } from '/imports/ui/components/media/styles';
import Toast from '/imports/ui/components/toast/component';
import _ from 'lodash';

const intlMessages = defineMessages({
iceCandidateError: {
@@ -40,19 +42,38 @@ class VideoElement extends Component {
}

render() {
let cssClass;
if (this.props.shared || !this.props.localCamera) {
cssClass = styles.sharedWebcamVideoLocal;
} else {
cssClass = styles.sharedWebcamVideo;
}
return (
<video
id={`video-elem-${this.props.videoId}`}
width={320}
height={240}
autoPlay="autoPlay"
playsInline="playsInline"
/>
<div className={`${styles.videoContainer} ${cssClass}`} >
{ this.props.localCamera ?
<video id="shareWebcam" muted autoPlay playsInline />
:
<video id={`video-elem-${this.props.videoId}`} autoPlay playsInline />
}
<div className={styles.videoText}>
<div className={styles.userName}>{this.props.name}</div>
{/* <Button
label=""
className={styles.pauseButton}
icon={'unmute'}
size={'sm'}
circle
onClick={() => {}}
/> */}
</div>
</div>
);
}

componentDidMount() {
this.props.onMount(this.props.videoId, false);
if (typeof this.props.onMount === 'function' && !this.props.localCamera) {
this.props.onMount(this.props.videoId, false);
}
}
}
@@ -71,6 +92,7 @@ class VideoDock extends Component {
this.state = {
videos: {},
sharedWebcam: false,
userNames: {},
};

this.unshareWebcam = this.unshareWebcam.bind(this);
@@ -129,6 +151,7 @@ class VideoDock extends Component {
document.addEventListener('installChromeExtension', this.installChromeExtension.bind(this));

window.addEventListener('resize', this.adjustVideos);
window.addEventListener('orientationchange', this.adjustVideos);

ws.addEventListener('message', this.onWsMessage);
}
@@ -153,6 +176,7 @@ class VideoDock extends Component {
document.removeEventListener('exitVideo', this.unshareWebcam);
document.removeEventListener('installChromeExtension', this.installChromeExtension);
window.removeEventListener('resize', this.adjustVideos);
window.removeEventListener('orientationchange', this.adjustVideos);

this.ws.removeEventListener('message', this.onWsMessage);
this.ws.removeEventListener('open', this.onWsOpen);
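Note that removeEventListener only removes a listener when it receives the exact same function reference that was registered, so a bound copy created inline at addEventListener time will never match. A minimal sketch of the usual pattern of keeping one bound reference (names here are illustrative, not from this commit):

// Sketch only: bind once and reuse the same reference for add and remove.
class ExampleComponent {
  constructor() {
    this.handleInstallExtension = this.handleInstallExtension.bind(this);
  }
  componentDidMount() {
    document.addEventListener('installChromeExtension', this.handleInstallExtension);
  }
  componentWillUnmount() {
    document.removeEventListener('installChromeExtension', this.handleInstallExtension);
  }
  handleInstallExtension() { /* ... */ }
}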
@@ -176,7 +200,7 @@ class VideoDock extends Component {

adjustVideos() {
setTimeout(() => {
window.adjustVideos('webcamArea', true);
window.adjustVideos('webcamArea', true, mediaStyles.moreThan4Videos, mediaStyles.container, mediaStyles.overlayWrapper, 'presentationAreaData', 'screenshareVideo');
}, 0);
}
@@ -221,7 +245,7 @@ class VideoDock extends Component {
case 'iceCandidate':
const webRtcPeer = this.webRtcPeers[parsedMessage.cameraId];

if (!!webRtcPeer) {
if (webRtcPeer) {
if (webRtcPeer.didSDPAnswered) {
webRtcPeer.addIceCandidate(parsedMessage.candidate, (err) => {
if (err) {
@@ -245,10 +269,18 @@ class VideoDock extends Component {
}

start(id, shareWebcam) {
const { users } = this.props;
const that = this;
const { intl } = this.props;

console.log(`Starting video call for video: ${id} with ${shareWebcam}`);
const userNames = this.state.userNames;
users.forEach((user) => {
if (user.userId === id) {
userNames[id] = user.name;
}
});
this.setState({ userNames });

this.cameraTimeouts[id] = setTimeout(() => {
log('error', `Camera share has not succeeded in ${CAMERA_SHARE_FAILED_WAIT_TIME}`);
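The cameraTimeouts map above implements a watchdog: a timer is armed per camera id when the share starts and is expected to be cleared once the stream actually flows. A small sketch of that pattern in isolation (the names and the 15s constant are illustrative assumptions, not values from this commit):

// Sketch of a per-id watchdog timer, cleared when the share succeeds.
const cameraTimeouts = {};
const CAMERA_SHARE_FAILED_WAIT_TIME = 15000; // assumed value for illustration

function armCameraWatchdog(id, onTimeout) {
  cameraTimeouts[id] = setTimeout(() => onTimeout(id), CAMERA_SHARE_FAILED_WAIT_TIME);
}

function markCameraStarted(id) {
  clearTimeout(cameraTimeouts[id]); // cancel the pending failure handler
  delete cameraTimeouts[id];
}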
@@ -327,7 +359,7 @@ class VideoDock extends Component {

let peerObj;
if (shareWebcam) {
options.localVideo = this.refs.videoInput;
options.localVideo = document.getElementById('shareWebcam');
peerObj = kurentoUtils.WebRtcPeer.WebRtcPeerSendonly;
} else {
peerObj = kurentoUtils.WebRtcPeer.WebRtcPeerRecvonly;
@@ -427,14 +459,13 @@ class VideoDock extends Component {
}

destroyVideoTag(id) {
const videos = this.state.videos;
const { videos, userNames } = this.state;

delete videos[id];
this.setState({ videos });

if (id == this.myId) {
this.setState({ sharedWebcam: false });
}
this.setState({
videos: _.omit(videos, id),
userNames: _.omit(userNames, id),
sharedWebcam: id !== this.myId,
});
}

destroyWebRTCPeer(id) {
@@ -585,27 +616,13 @@ class VideoDock extends Component {
}

render() {
let cssClass;
if (this.state.sharedWebcam) {
cssClass = styles.sharedWebcamVideoLocal;
} else {
cssClass = styles.sharedWebcamVideo;
}

return (
<div className={styles.videoDock}>
<div id="webcamArea">
<div id="webcamArea" className={styles.webcamArea}>
{Object.keys(this.state.videos).map(id => (
<VideoElement videoId={id} key={id} onMount={this.initWebRTC.bind(this)} />
))}
<video
autoPlay="autoPlay"
playsInline="playsInline"
muted="muted"
id="shareWebcamVideo"
className={cssClass}
ref="videoInput"
/>
<VideoElement videoId={id} key={id} name={this.state.userNames[id]} localCamera={false} onMount={this.initWebRTC.bind(this)} />
))}
<VideoElement shared={this.state.sharedWebcam} name={this.state.userNames[this.myId]} localCamera />
</div>
</div>
);
@@ -616,22 +633,19 @@ class VideoDock extends Component {
const currentUsers = this.props.users || {};
const nextUsers = nextProps.users;

let users = {};
let present = {};

if (!currentUsers)
return false;
const users = {};
const present = {};

if (!currentUsers) { return false; }

// Map user objects to an object in the form {userId: has_stream}
currentUsers.forEach((user) => {
users[user.userId] = user.has_stream;
});

// Keep instances where the flag has changed or next user adds it
nextUsers.forEach((user) => {
let id = user.userId;
const id = user.userId;
// The case when a user exists and stream status has not changed
if (users[id] === user.has_stream) {
delete users[id];
@@ -647,7 +661,7 @@ class VideoDock extends Component {
const userIds = Object.keys(users);

for (let i = 0; i < userIds.length; i++) {
let id = userIds[i];
const id = userIds[i];

// If a userId is not present in nextUsers let's stop it
if (!present[id]) {
@@ -663,8 +677,7 @@ class VideoDock extends Component {
if (userId !== id) {
this.start(id, false);
}
}
else {
} else {
this.stop(id);
}
}
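The update logic above boils down to diffing two user lists on the has_stream flag: start streams that appear or turn on, stop the ones that disappear or turn off. A compact sketch of that diff on its own (diffStreams is a made-up name used purely for illustration):

// Sketch: compute which userIds need start() and which need stop(),
// mirroring the has_stream comparison done in shouldComponentUpdate.
function diffStreams(currentUsers, nextUsers) {
  const previous = {};
  currentUsers.forEach((u) => { previous[u.userId] = u.has_stream; });

  const toStart = [];
  const toStop = [];
  const present = {};

  nextUsers.forEach((u) => {
    present[u.userId] = true;
    if (previous[u.userId] !== u.has_stream) {
      (u.has_stream ? toStart : toStop).push(u.userId);
    }
  });

  // Users that vanished entirely also need their streams stopped.
  Object.keys(previous).forEach((id) => {
    if (!present[id] && previous[id]) toStop.push(id);
  });

  return { toStart, toStop };
}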
@@ -2,21 +2,87 @@

.videoDock {
position: absolute;
width: 100%;
top: 0;
right: 0;
bottom: 0;
left: 0;

background-size: cover;
background-position: center;
box-shadow: 0 0 5px rgba(0, 0, 0, .5);
border-radius: .2rem;
text-align: center;

@include mq($portrait) {
height: 96px;
}
}

.videoDock video {
height: auto;
width: auto;
max-height: 100%;
max-width: 100%;
display: block;
margin: 0 auto;
}

.webcamArea {
@include mq($portrait) {
position: absolute;
margin: 0 auto;
left: 0;
right: 0;
bottom: 0;
background: #04111F;
}
}

.sharedWebcamVideo {
display: none;
vertical-align: middle;
}

.sharedWebcamVideoLocal {
display: normal;
display: inline-block;
}

.videoContainer {
background-color: #000;
}

.videoText {
margin-top: -22px;
padding: 0 5px;
z-index: 1;
text-align: left;
}

.userName {
color: #FFFFFF;
background: #555;
padding: 3px 8px;
border-radius: 0.75rem;
font-size: 10px;
line-height: 13px;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
position: relative;
display: inline-block;
max-width: 100%;
}

.pauseButton {
float: right;

span:last-child {
display: none;
} // TODO fix it better

span:first-child {
background: #D8D8D8;
border: 1px solid #555;

i {
font-size: 11px; // TODO size should be set on button?
color: #555;
}
}
}
@@ -2,6 +2,7 @@ import React from 'react';
import PropTypes from 'prop-types';
import Button from '/imports/ui/components/button/component';
import { defineMessages, injectIntl, intlShape } from 'react-intl';
import { styles } from './styles';

const intlMessages = defineMessages({
joinVideo: {
@@ -29,32 +30,37 @@ class JoinVideoOptions extends React.Component {

if (isSharingVideo) {
return (
<Button
onClick={handleCloseVideo}
label={intl.formatMessage(intlMessages.leaveVideo)}
hideLabel
aria-label={intl.formatMessage(intlMessages.leaveVideo)}
color="danger"
icon="video"
size="lg"
circle
disabled={isLocked || isWaitingResponse}
/>
<span className={styles.container}>
<Button
onClick={handleCloseVideo}
label={intl.formatMessage(intlMessages.leaveVideo)}
hideLabel
aria-label={intl.formatMessage(intlMessages.leaveVideo)}
color="danger"
icon="video_off"
size="lg"
circle
disabled={isLocked || isWaitingResponse}
/>
</span>
);
}

return (
<Button
onClick={handleJoinVideo}
label={intl.formatMessage(intlMessages.joinVideo)}
hideLabel
aria-label={intl.formatMessage(intlMessages.joinVideo)}
color="primary"
icon="video_off"
size="lg"
circle
disabled={isLocked || isWaitingResponse || (!isSharingVideo && isConnected) || enableShare}
/>
<span className={styles.container}>
<Button
className={styles.button}
onClick={handleJoinVideo}
label={intl.formatMessage(intlMessages.joinVideo)}
hideLabel
aria-label={intl.formatMessage(intlMessages.joinVideo)}
color="primary"
icon="video"
size="lg"
circle
disabled={isLocked || isWaitingResponse || (!isSharingVideo && isConnected) || enableShare}
/>
</span>
);
}
}
@@ -0,0 +1,15 @@
.container {

span:first-child{
box-shadow: 0 2px 5px 0 rgb(0, 0, 0);
}

}

.button {

&:focus {
outline: none !important;
}

}
@@ -96,7 +96,8 @@ export default class PencilDrawComponent extends Component {
}

getCoordinates(annotation, slideWidth, slideHeight) {
if (annotation.points.length === 0) {
if ((!annotation || annotation.points.length === 0)
|| (annotation.status === 'DRAW_END' && !annotation.commands)) {
return undefined;
}
@@ -7,6 +7,7 @@ redisPort : "6379"
clientPort : "3008"
minVideoPort: 30000
maxVideoPort: 33000
mediaFlowTimeoutDuration: 5000
from-screenshare: "from-screenshare-sfu"
to-screenshare: "to-screenshare-sfu"
from-video: "from-video-sfu"
@@ -92,40 +92,53 @@ module BigBlueButton
audio = entry[:audio]
duration = entry[:next_timestamp] - entry[:timestamp]

if audio
# Check for and handle audio files with mismatched lengths (generated
# by buggy versions of freeswitch in old BigBlueButton)
if audio and entry[:original_duration] and
(audioinfo[audio[:filename]][:duration].to_f / entry[:original_duration]) < 0.997 and
((entry[:original_duration] - audioinfo[audio[:filename]][:duration]).to_f /
entry[:original_duration]).abs < 0.05
speed = audioinfo[audio[:filename]][:duration].to_f / entry[:original_duration]
BigBlueButton.logger.info " Using input #{audio[:filename]}"

BigBlueButton.logger.warn " Audio file length mismatch, adjusting speed to #{speed}"

# Have to calculate the start point after the atempo filter in this case,
# since it can affect the audio start time.
# Also reset the pts to start at 0, so the duration trim works correctly.
filter = "[#{input_index}] "

if entry[:original_duration] and
(audioinfo[audio[:filename]][:duration].to_f / entry[:original_duration]) < 0.999 and
((entry[:original_duration] - audioinfo[audio[:filename]][:duration]).to_f /
entry[:original_duration]).abs < 0.05
speed = audioinfo[audio[:filename]][:duration].to_f / entry[:original_duration]
BigBlueButton.logger.warn " Audio file length mismatch, adjusting speed to #{speed}"

# Have to calculate the start point after the atempo filter in this case,
# since it can affect the audio start time.
# Also reset the pts to start at 0, so the duration trim works correctly.
filter << "atempo=#{speed},atrim=start=#{ms_to_s(audio[:timestamp])},"
filter << "asetpts=PTS-STARTPTS,"

ffmpeg_inputs << {
:filename => audio[:filename],
:seek => 0
}
else
ffmpeg_inputs << {
:filename => audio[:filename],
:seek => audio[:timestamp]
}
end

filter << "atempo=#{speed},atrim=start=#{ms_to_s(audio[:timestamp])},"
filter << "asetpts=PTS-STARTPTS,"
filter << "#{FFMPEG_AFORMAT},apad,atrim=end=#{ms_to_s(duration)} [out#{output_index}]"
ffmpeg_filters << filter

ffmpeg_inputs << {
:filename => audio[:filename],
:seek => 0
}

input_index += 1
output_index += 1

# Normal audio input handling. Skip this input and generate silence
# if the seekpoint is past the end of the audio, which can happen
# if events are slightly misaligned and you get unlucky with a
# start/stop or chapter break.
elsif audio and audio[:timestamp] < audioinfo[audio[:filename]][:duration]
BigBlueButton.logger.info " Using input #{audio[:filename]}"

filter = "[#{input_index}] "
filter << "#{FFMPEG_AFORMAT},apad,atrim=end=#{ms_to_s(duration)} [out#{output_index}]"
ffmpeg_filters << filter

ffmpeg_inputs << {
:filename => audio[:filename],
:seek => audio[:timestamp]
}

input_index += 1
output_index += 1

else
BigBlueButton.logger.info " Generating silence"
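The mismatch check above is pure arithmetic: compare the actual file duration with the expected (original) duration, adjust only when the file is noticeably short yet within roughly 5% of the expected length, and then play it back at speed = actual / expected via atempo. A small restatement of that decision in isolation (JavaScript purely for illustration; thresholds copied from the Ruby above):

// Illustrative re-statement of the length-mismatch rule used above.
// Returns the atempo speed factor, or null if no adjustment is needed.
function audioSpeedAdjustment(actualDurationMs, originalDurationMs) {
  if (!originalDurationMs) return null;
  const ratio = actualDurationMs / originalDurationMs;
  const relativeError = Math.abs(originalDurationMs - actualDurationMs) / originalDurationMs;
  // Only correct files that run short (ratio < 0.999) by at most ~5%.
  if (ratio < 0.999 && relativeError < 0.05) {
    return ratio; // e.g. 59700 ms actual vs 60000 ms expected -> speed 0.995
  }
  return null;
}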
@@ -189,11 +189,11 @@ module BigBlueButton
videoinfo.keys.each do |videofile|
BigBlueButton.logger.debug " #{videofile}"
info = video_info(videofile)
BigBlueButton.logger.debug " width: #{info[:width]}, height: #{info[:height]}, duration: #{info[:duration]}"
if !info[:video]
BigBlueButton.logger.warn " This video file is corrupt! It will be removed from the output."
corrupt_videos << videofile
else
BigBlueButton.logger.debug " width: #{info[:width]}, height: #{info[:height]}, duration: #{info[:duration]}, start_time: #{info[:start_time]}"
if info[:video][:deskshare_timestamp_bug]
BigBlueButton.logger.debug(" has early 1.1 deskshare timestamp bug")
end
@@ -264,6 +264,14 @@ module BigBlueButton
# Convert the duration to milliseconds
info[:duration] = (info[:format][:duration].to_r * 1000).to_i

# Red5 writes video files with the first frame often having a pts
# much greater than 0.
# We can compensate for this during decoding if we know the
# timestamp offset, which ffprobe handily finds. Convert the units
# to ms.
info[:start_time] = (info[:format][:start_time].to_r * 1000).to_i
info[:video][:start_time] = (info[:video][:start_time].to_r * 1000).to_i

return info
end
{}
@@ -368,9 +376,10 @@ module BigBlueButton
ffmpeg_filter << "[#{layout_area[:name]}_in];"

area.each do |video|
this_videoinfo = videoinfo[video[:filename]]
BigBlueButton.logger.debug " tile location (#{tile_x}, #{tile_y})"
video_width = videoinfo[video[:filename]][:width]
video_height = videoinfo[video[:filename]][:height]
video_width = this_videoinfo[:width]
video_height = this_videoinfo[:height]
BigBlueButton.logger.debug " original size: #{video_width}x#{video_height}"

scale_width, scale_height = aspect_scale(video_width, video_height, tile_width, tile_height)
@@ -379,11 +388,13 @@ module BigBlueButton
offset_x, offset_y = pad_offset(scale_width, scale_height, tile_width, tile_height)
BigBlueButton.logger.debug " offset: left: #{offset_x}, top: #{offset_y}"

BigBlueButton.logger.debug " start timestamp: #{video[:timestamp]}"
BigBlueButton.logger.debug(" codec: #{videoinfo[video[:filename]][:video][:codec_name].inspect}")
BigBlueButton.logger.debug(" duration: #{videoinfo[video[:filename]][:duration]}, original duration: #{video[:original_duration]}")
BigBlueButton.logger.debug(" start timestamp: #{video[:timestamp]}")
seek_offset = this_videoinfo[:video][:start_time]
BigBlueButton.logger.debug(" seek offset: #{seek_offset}")
BigBlueButton.logger.debug(" codec: #{this_videoinfo[:video][:codec_name].inspect}")
BigBlueButton.logger.debug(" duration: #{this_videoinfo[:duration]}, original duration: #{video[:original_duration]}")

if videoinfo[video[:filename]][:video][:codec_name] == "flashsv2"
if this_videoinfo[:video][:codec_name] == "flashsv2"
# Desktop sharing videos in flashsv2 do not have regular
# keyframes, so seeking in them doesn't really work.
# To make processing more reliable, always decode them from the
@@ -403,8 +414,8 @@ module BigBlueButton
# actually be...) and scale the video length.
scale = nil
if !video[:original_duration].nil? and
videoinfo[video[:filename]][:video][:deskshare_timestamp_bug]
scale = video[:original_duration].to_f / videoinfo[video[:filename]][:duration]
this_videoinfo[:video][:deskshare_timestamp_bug]
scale = video[:original_duration].to_f / this_videoinfo[:duration]
# Rather than attempt to recalculate seek...
seek = 0
BigBlueButton.logger.debug(" Early 1.1 deskshare timestamp bug: scaling video length by #{scale}")
@@ -412,12 +423,28 @@ module BigBlueButton

pad_name = "#{layout_area[:name]}_x#{tile_x}_y#{tile_y}"

# Apply the video start time offset to seek to the correct point.
# Only actually apply the offset if we're already seeking so we
# don't start seeking in a file where we've overridden the seek
# behaviour.
if seek > 0
seek = seek + seek_offset
end
ffmpeg_filter << "movie=#{video[:filename]}:sp=#{ms_to_s(seek)}"
# Scale the video length for the deskshare timestamp workaround
if !scale.nil?
ffmpeg_filter << ",setpts=PTS*#{scale}"
end
# Subtract away the offset from the timestamps, so the trimming
# in the fps filter is accurate
ffmpeg_filter << ",setpts=PTS-#{ms_to_s(seek_offset)}/TB"
# fps filter fills in frames up to the desired start point, and
# cuts the video there
ffmpeg_filter << ",fps=#{FFMPEG_WF_FRAMERATE}:start_time=#{ms_to_s(video[:timestamp])}"
# Reset the timestamps to start at 0 so that everything is synced
# for the video tiling, and scale to the desired size.
ffmpeg_filter << ",setpts=PTS-STARTPTS,scale=#{scale_width}:#{scale_height}"
# And finally, pad the video to the desired aspect ratio
ffmpeg_filter << ",pad=w=#{tile_width}:h=#{tile_height}:x=#{offset_x}:y=#{offset_y}:color=white"
ffmpeg_filter << "[#{pad_name}];"
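Concretely, each pass through the block above emits one filtergraph segment per tile. As a purely illustrative worked example (every number, the filename, the 15 fps rate, and the "webcams" area name are made-up values, and ms_to_s is assumed to render milliseconds as seconds), a single tile might come out roughly as:

movie=video0.flv:sp=2.540,setpts=PTS-0.040/TB,fps=15:start_time=2.500,setpts=PTS-STARTPTS,scale=320:240,pad=w=320:h=240:x=0:y=0:color=white[webcams_x0_y0];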
@@ -428,6 +455,7 @@ module BigBlueButton
end
end

# Create the video rows
remaining = video_count
(0...tiles_v).each do |tile_y|
this_tiles_h = [tiles_h, remaining].min

@@ -443,6 +471,7 @@ module BigBlueButton
ffmpeg_filter << "[#{layout_area[:name]}_y#{tile_y}];"
end

# Stack the video rows
(0...tiles_v).each do |tile_y|
ffmpeg_filter << "[#{layout_area[:name]}_y#{tile_y}]"
end