- Make sure the timestamp is generated properly for the frames. The timestamp was
  incremented by 200ms per frame, causing the recorded file to drift on longer
  recordings.
Richard Alam 2016-09-14 17:22:37 +00:00
parent 2aefda592d
commit 45eb073dcd
3 changed files with 20 additions and 12 deletions
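
In outline, the fix stamps each grabbed frame with the elapsed wall-clock time since capture started and lets the recorder propagate that value into the packets it writes, instead of advancing the timestamp by a fixed 1000 / frameRate step (200ms) every frame. Below is a minimal sketch of such a capture loop; the wrapper class, the loop shape, and the sleep are illustrative assumptions, while the now - startTime arithmetic and the frame.timestamp assignment mirror the diff further down.

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;

// Hypothetical capture loop sketching the wall-clock timestamping this commit adopts.
public class WallClockTimestampSketch {
  void captureLoop(FFmpegFrameGrabber grabber, FFmpegFrameRecorder recorder,
                   double frameRate) throws Exception {
    long startTime = System.currentTimeMillis();
    long sleepFramerate = (long) (1000 / frameRate); // 200ms at a nominal 5 fps
    while (true) {
      long now = System.currentTimeMillis();
      Frame frame = grabber.grabImage();
      if (frame != null) {
        // Elapsed real time since capture began; a slow grab simply widens the
        // recorded gap between frames instead of compressing the timeline.
        frame.timestamp = now - startTime;
        recorder.record(frame);
      }
      Thread.sleep(sleepFramerate);
    }
  }
}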


@@ -16,6 +16,7 @@ import org.bigbluebutton.screenshare.client.ExitCode;
import org.bigbluebutton.screenshare.client.ScreenShareInfo;
import org.bigbluebutton.screenshare.client.net.NetworkConnectionListener;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
@@ -144,26 +145,27 @@ public class FfmpegScreenshare {
      frame = grabber.grabImage();
      if (frame != null) {
        try {
          long timestamp = now - startTime;
          // Override timestamp from system screen grabber. Otherwise, we will have skewed recorded file.
          // FfmpegFrameRecorder needs to propagate this timestamp into the avpacket sent to the server.
          // ralam - Sept. 14, 2016
          frame.timestamp = timestamp;
          //System.out.println("frame timestamp=[" + frame.timestamp + "] ");
          mainRecorder.record(frame);
        } catch (Exception e) {
          //System.out.println("CaptureScreen Exception 1");
          if (!ignoreDisconnect) {
            listener.networkConnectionException(ExitCode.INTERNAL_ERROR, null);
          }
        }
      }
    } catch (Exception e1) {
      listener.networkConnectionException(ExitCode.INTERNAL_ERROR, null);
    }
    long sleepFramerate = (long) (1000 / frameRate);
    long timestamp = now - startTime;
    // Do not override timestamp from system screen grabber. Otherwise, we will have skewed recorded file.
    // mainRecorder.setTimestamp(timestamp * 1000);
    // System.out.println("i=[" + i + "] timestamp=[" + timestamp + "]");
    //System.out.println("timestamp=[" + timestamp + "]");
    mainRecorder.setFrameNumber(frameNumber);
    // System.out.println("[ENCODER] encoded image " + frameNumber + " in " + (System.currentTimeMillis() - now));
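
To make the drift from the old fixed-increment scheme concrete, here is a toy calculation; all numbers are hypothetical, chosen only to match the 200ms step mentioned in the commit message.

public class DriftEstimate {
  public static void main(String[] args) {
    long fixedIncrementMs  = 200;          // old scheme: 1000 / frameRate at a nominal 5 fps
    long actualFrameTimeMs = 250;          // assumed real cost of one grab + encode cycle
    long frames            = 5 * 60 * 10;  // ten minutes' worth of frames at 5 fps

    long recordedMs = frames * fixedIncrementMs;   // 600,000 ms on the file's timeline
    long elapsedMs  = frames * actualFrameTimeMs;  // 750,000 ms of wall-clock time
    System.out.println("drift: " + (elapsedMs - recordedMs) / 1000 + " s"); // prints: drift: 150 s
  }
}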


@@ -688,7 +688,7 @@ public class FFmpegFrameRecorder extends FrameRecorder {
        if (oc != null) {
            try {
                /* flush all the buffers */
                while (video_st != null && ifmt_ctx == null && recordImage(0, 0, 0, 0, 0, AV_PIX_FMT_NONE, (Buffer[])null));
                while (video_st != null && ifmt_ctx == null && recordImage(0, 0, 0, 0, 0, AV_PIX_FMT_NONE, 0, (Buffer[])null));
                while (audio_st != null && ifmt_ctx == null && recordSamples(0, 0, (Buffer[])null));
                if (interleaved && video_st != null && audio_st != null) {
@@ -710,11 +710,11 @@ public class FFmpegFrameRecorder extends FrameRecorder {
    }
    public void record(Frame frame, int pixelFormat) throws Exception {
        if (frame == null || (frame.image == null && frame.samples == null)) {
            recordImage(0, 0, 0, 0, 0, pixelFormat, (Buffer[])null);
            recordImage(0, 0, 0, 0, 0, pixelFormat, frame.timestamp, (Buffer[])null);
        } else {
            if (frame.image != null) {
                frame.keyFrame = recordImage(frame.imageWidth, frame.imageHeight, frame.imageDepth,
                    frame.imageChannels, frame.imageStride, pixelFormat, frame.image);
                    frame.imageChannels, frame.imageStride, pixelFormat, frame.timestamp, frame.image);
            }
            if (frame.samples != null) {
                frame.keyFrame = recordSamples(frame.sampleRate, frame.audioChannels, frame.samples);
@@ -722,7 +722,8 @@ public class FFmpegFrameRecorder extends FrameRecorder {
        }
    }
    public boolean recordImage(int width, int height, int depth, int channels, int stride, int pixelFormat, Buffer ... image) throws Exception {
    public boolean recordImage(int width, int height, int depth, int channels, int stride,
            int pixelFormat, long frameTimestamp, Buffer ... image) throws Exception {
        if (video_st == null) {
            throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
        }
@@ -808,10 +809,15 @@ public class FFmpegFrameRecorder extends FrameRecorder {
            /* if zero size, it means the image was buffered */
            if (got_video_packet[0] != 0) {
                if (video_pkt.pts() != AV_NOPTS_VALUE) {
                    video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
                    // Override timestamp from system screen grabber. Otherwise, we will have skewed recorded file.
                    // FfmpegFrameRecorder needs to propagate this timestamp into the avpacket sent to the server.
                    // ralam - Sept. 14, 2016
                    video_pkt.pts(frameTimestamp);
                    //video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
                }
                if (video_pkt.dts() != AV_NOPTS_VALUE) {
                    video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
                    video_pkt.dts(frameTimestamp);
                    //video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
                }
                video_pkt.stream_index(video_st.index());
            } else {
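
For reference, the av_rescale_q() calls that this change comments out convert a packet timestamp from the codec time base into the stream time base. A standalone sketch of that conversion follows; the 1/1000 and 1/90000 time bases and the 5400 ms value are illustrative assumptions, not values taken from this recorder's configuration.

import static org.bytedeco.javacpp.avutil.av_rescale_q;
import org.bytedeco.javacpp.avutil.AVRational;

public class RescaleSketch {
  public static void main(String[] args) {
    // av_rescale_q(a, bq, cq) rescales the tick count a from time base bq to time base cq.
    AVRational msTimeBase     = new AVRational().num(1).den(1000);  // ticks are milliseconds
    AVRational streamTimeBase = new AVRational().num(1).den(90000); // e.g. a 90 kHz video stream
    long timestampMs = 5400;                                        // 5.4 s into the capture
    long pts = av_rescale_q(timestampMs, msTimeBase, streamTimeBase);
    System.out.println(pts); // 486000 ticks in the 90 kHz time base
  }
}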