- Make some improvements to desktop sharing:

  1. Convert the initial image that is sent to the server to grayscale.
  2. Add some debug logs on the applet to determine which step is taking
     a long time. As suspected, it is the network, especially when using HTTP.
  3. Fix a bug on the server side. It turns out we are not incrementing the
     timestamps at a constant interval like we do with the voice. Not doing so
     prevents the Flash Player from dropping frames when packets arrive late
     (see the timestamp sketch below).
Richard Alam 2012-05-10 14:10:01 -07:00
parent 76e4d807d3
commit a802ab7e5a
9 changed files with 75 additions and 46 deletions
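
For illustration only (not part of this commit): a minimal Java sketch of the timestamping change described in item 3, assuming a 50 ms frame interval to match the "timestamp += 50" increment in SessionSVC below. The class and method names are hypothetical.

// Hypothetical sketch of the timestamping fix: stamp frames at a constant
// interval instead of with a wall-clock delta, so the Flash Player can drop
// frames that arrive late rather than shifting playback.
public class FrameTimestamperSketch {
    private static final long FRAME_INTERVAL_MS = 50; // matches "timestamp += 50" in SessionSVC
    private final long startTimestamp = System.currentTimeMillis();
    private long timestamp = 0L;

    // Old approach: wall-clock delta; late frames drag subsequent timestamps with them.
    public int wallClockTimestamp() {
        return (int) (System.currentTimeMillis() - startTimestamp);
    }

    // New approach: constant increments, the same way the voice path stamps packets.
    public int nextConstantIntervalTimestamp() {
        timestamp += FRAME_INTERVAL_MS;
        return (int) timestamp;
    }
}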

View File

@ -47,6 +47,7 @@ class SessionSVC(sessionManager:SessionManagerSVC, room: String, screenDim: Dime
private var stop = true
private var mouseLoc:Point = new Point(100,100)
private var pendingGenKeyFrameRequest = false
private var timestamp = 0L;
/*
* Schedule to generate a key frame after 30seconds of a request.
@ -135,7 +136,8 @@ class SessionSVC(sessionManager:SessionManagerSVC, room: String, screenDim: Dime
sessionManager ! new RemoveSession(room)
} else {
if (blockManager != null) {
stream ! new UpdateStream(room, blockManager.generateFrame(keyframe))
timestamp += 50;
stream ! new UpdateStream(room, blockManager.generateFrame(keyframe), timestamp)
stream ! new UpdateStreamMouseLocation(room, mouseLoc)
}
}

View File

@ -28,6 +28,7 @@ import org.red5.server.api.{IContext, IScope}
import org.red5.server.api.so.ISharedObject
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.stream.{BroadcastScope, IBroadcastScope, IProviderService}
import org.red5.server.net.rtmp.message.Constants;
import org.apache.mina.core.buffer.IoBuffer
import java.util.ArrayList
import scala.actors.Actor
@ -110,7 +111,14 @@ class DeskshareStream(app: DeskshareApplication, name: String, val width: Int, v
}
val data: VideoData = new VideoData(buffer)
data.setTimestamp((System.currentTimeMillis() - startTimestamp).toInt)
data.setSourceType(Constants.SOURCE_TYPE_LIVE);
/*
* Use timestamp increments. This will force
* Flash Player to playback at proper timestamp. If we calculate timestamp using
* System.currentTimeMillis() - startTimestamp, the video has tendency to drift and
* introduce delay. See how we do the voice. (ralam may 10, 2012)
*/
data.setTimestamp(us.timestamp.toInt);
broadcastStream.dispatchEvent(data)
data.release()
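
For illustration, a Java rendering of the dispatch path above (the real code lives in DeskshareStream.scala). It assumes the broadcast stream implements Red5's IEventDispatcher and that the constant-interval timestamp is carried in with the frame, as the UpdateStream message does; the class and method names here are hypothetical.

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.server.api.event.IEventDispatcher;
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.net.rtmp.message.Constants;

// Illustrative sketch only, mirroring the hunk above.
public final class FrameDispatchSketch {
    public static void dispatchFrame(IEventDispatcher broadcastStream,
                                     byte[] frameBytes, long timestamp) {
        IoBuffer buffer = IoBuffer.allocate(frameBytes.length);
        buffer.put(frameBytes);
        buffer.flip();

        VideoData data = new VideoData(buffer);
        data.setSourceType(Constants.SOURCE_TYPE_LIVE);
        data.setTimestamp((int) timestamp); // constant increments prevent playback drift
        broadcastStream.dispatchEvent(data);
        data.release();
    }
}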

View File

@ -27,7 +27,7 @@ import java.awt.Point
object StopStream
object StartStream
class UpdateStream(val room: String, val videoData: Array[Byte])
class UpdateStream(val room: String, val videoData: Array[Byte], val timestamp: Long)
class UpdateStreamMouseLocation(val room: String, val loc: Point)
abstract class Stream extends Actor

View File

@ -46,7 +46,7 @@ class Block(val dim: Dimension, val position: Int) {
for (i: Int <- 0 until blankPixels.length) {
blankPixels(i) = 0xCECECE;
}
val encodedBlankPixels = ScreenVideoEncoder.encodePixels(blankPixels, dim.width, dim.height)
val encodedBlankPixels = ScreenVideoEncoder.encodePixels(blankPixels, dim.width, dim.height, false)
def update(videoData: Array[Byte], isKeyFrame: Boolean, seqNum: Int): Unit = {
firstBlockReceived = true;

View File

@ -25,6 +25,8 @@ import java.awt.Point;
import java.awt.image.BufferedImage;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.bigbluebutton.deskshare.client.net.EncodedBlockData;
import org.bigbluebutton.deskshare.common.PixelExtractException;
import org.bigbluebutton.deskshare.common.ScreenVideoEncoder;
@ -40,6 +42,7 @@ public final class Block {
private final Object pixelsLock = new Object();
private AtomicBoolean dirtyBlock = new AtomicBoolean(false);
private long lastSent = System.currentTimeMillis();
private AtomicLong sentCount = new AtomicLong();
Block(Dimension dim, int position, Point location) {
checksum = new BlockChecksum();
@ -86,6 +89,7 @@ public final class Block {
}
public void sent() {
sentCount.incrementAndGet();
dirtyBlock.set(false);
}
@ -96,7 +100,7 @@ public final class Block {
System.arraycopy(capturedPixels, 0, pixelsCopy, 0, capturedPixels.length);
}
byte[] encodedBlock = ScreenVideoEncoder.encodePixels(pixelsCopy, getWidth(), getHeight());
byte[] encodedBlock = ScreenVideoEncoder.encodePixels(pixelsCopy, getWidth(), getHeight(), (sentCount.longValue() > 5) /* send grayscale image */);
return new EncodedBlockData(position, encodedBlock);
}
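
A minimal standalone sketch of the heuristic used above: count how many times a block has been sent and switch that block to grayscale encoding once the count passes a threshold. The class name and threshold constant are illustrative; the threshold of 5 matches the check in the diff.

import java.util.concurrent.atomic.AtomicLong;

// Illustrative only: mirrors the sentCount-based grayscale decision above.
public final class GrayscaleDecisionSketch {
    private static final long GRAYSCALE_AFTER_SENDS = 5; // matches sentCount.longValue() > 5
    private final AtomicLong sentCount = new AtomicLong();

    // Called after the block has been delivered, as Block.sent() is above.
    public void sent() {
        sentCount.incrementAndGet();
    }

    // True once the block has been sent often enough to drop colour information.
    public boolean encodeAsGrayscale() {
        return sentCount.longValue() > GRAYSCALE_AFTER_SENDS;
    }
}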

View File

@ -90,6 +90,7 @@ public class NetworkHttpStreamSender implements Runnable {
* http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4382944
*
*/
long start = System.currentTimeMillis();
try {
url = new URL("http://" + host + SCREEN_CAPTURE__URL);
conn = url.openConnection();
@ -100,10 +101,13 @@ public class NetworkHttpStreamSender implements Runnable {
e.printStackTrace();
throw new ConnectionException("IOException while connecting to " + url.toString());
}
long end = System.currentTimeMillis();
System.out.println("Http[" + id + "] Open connection took [" + (end-start) + " ms]");
}
public void sendStartStreamMessage() {
try {
System.out.println("Http[" + id + "] Open connection. In sendStartStreamMessage");
openConnection();
sendCaptureStartEvent(screenDim, blockDim);
} catch (ConnectionException e) {
@ -116,17 +120,12 @@ public class NetworkHttpStreamSender implements Runnable {
ClientHttpRequest chr;
try {
chr = new ClientHttpRequest(conn);
chr.setParameter(ROOM, room);
chr.setParameter(ROOM, room);
chr.setParameter(SEQ_NUM, seqNumGenerator.getNext());
String screenInfo = Integer.toString(screen.getWidth())
+ "x" + Integer.toString(screen.getHeight());
chr.setParameter(SCREEN, screenInfo);
String blockInfo = Integer.toString(block.getWidth())
+ "x" + Integer.toString(block.getHeight());
String screenInfo = Integer.toString(screen.getWidth()) + "x" + Integer.toString(screen.getHeight());
chr.setParameter(SCREEN, screenInfo);
String blockInfo = Integer.toString(block.getWidth()) + "x" + Integer.toString(block.getHeight());
chr.setParameter(BLOCK, blockInfo);
chr.setParameter(EVENT, CaptureEvents.CAPTURE_START.getEvent());
chr.post();
} catch (IOException e) {
@ -138,13 +137,13 @@ public class NetworkHttpStreamSender implements Runnable {
public void disconnect() throws ConnectionException {
try {
System.out.println("Http[" + id + "] Open connection. In disconnect");
openConnection();
sendCaptureEndEvent();
} catch (ConnectionException e) {
e.printStackTrace();
notifyNetworkStreamListener(ExitCode.DESKSHARE_SERVICE_UNAVAILABLE);
throw e;
throw e;
} finally {
processBlocks = false;
}
@ -168,8 +167,8 @@ public class NetworkHttpStreamSender implements Runnable {
if (message.getMessageType() == Message.MessageType.BLOCK) {
long start = System.currentTimeMillis();
Integer[] changedBlocks = ((BlockMessage)message).getBlocks();
String blockSize = "Block length [";
String encodeTime = "Encode times [";
String blockSize = "Http[" + id + "] Block length [";
String encodeTime = "Http[" + id + "]Encode times [";
long encStart = 0;
long encEnd = 0;
int totalBytes = 0;
@ -191,7 +190,7 @@ public class NetworkHttpStreamSender implements Runnable {
retriever.blockSent((Integer)changedBlocks[i]);
}
long end = System.currentTimeMillis();
System.out.println("[HTTP Thread " + id + "] Sending " + changedBlocks.length + " blocks took " + (end - start) + " millis");
System.out.println("[HTTP " + id + "] Sending " + changedBlocks.length + " blocks took " + (end - start) + " millis");
} else if (message.getMessageType() == Message.MessageType.CURSOR) {
CursorMessage msg = (CursorMessage)message;
sendCursor(msg.getMouseLocation(), msg.getRoom());
@ -217,6 +216,7 @@ public class NetworkHttpStreamSender implements Runnable {
private void sendCursor(Point mouseLoc, String room) {
ClientHttpRequest chr;
try {
System.out.println("Http[" + id + "] Open connection. In sendCursor");
openConnection();
chr = new ClientHttpRequest(conn);
chr.setParameter(ROOM, room);
@ -233,8 +233,10 @@ public class NetworkHttpStreamSender implements Runnable {
}
private void sendBlockData(BlockVideoData blockData) {
long start = System.currentTimeMillis();
ClientHttpRequest chr;
try {
System.out.println("Http[" + id + "] Open connection. In sendBlockData");
openConnection();
chr = new ClientHttpRequest(conn);
chr.setParameter(ROOM, blockData.getRoom());
@ -250,5 +252,7 @@ public class NetworkHttpStreamSender implements Runnable {
} catch (ConnectionException e) {
System.out.println("ERROR: Failed to send block data.");
}
long end = System.currentTimeMillis();
System.out.println("[HTTP " + id + "] Sending " + blockData.getVideoData().length + " bytes took " + (end - start) + " ms");
}
}
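
The debug logs added above all follow the same pattern: record System.currentTimeMillis() around the network call and print the elapsed time with the sender id. For illustration, a small helper that wraps an operation this way; the helper name is hypothetical and is not part of this commit.

import java.util.concurrent.Callable;

// Illustrative timing helper mirroring the debug logs added above.
public final class TimingLogSketch {
    public static <T> T timed(int id, String label, Callable<T> operation) throws Exception {
        long start = System.currentTimeMillis();
        try {
            return operation.call();
        } finally {
            long end = System.currentTimeMillis();
            System.out.println("Http[" + id + "] " + label + " took [" + (end - start) + " ms]");
        }
    }
}

For example, the connection setup above could be wrapped as timed(id, "Open connection", () -> { openConnection(); return null; }).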

View File

@ -117,29 +117,37 @@ public class NetworkSocketStreamSender implements Runnable {
public void disconnect() throws ConnectionException {
System.out.println("Disconnecting socket stream");
if (!processMessages) return;
}
private void processNextMessageToSend(Message message) throws IOException {
if (message.getMessageType() == Message.MessageType.BLOCK) {
long start = System.currentTimeMillis();
ByteArrayOutputStream dataToSend = new ByteArrayOutputStream();
dataToSend.reset();
BlockStreamProtocolEncoder.encodeRoomAndSequenceNumber(room, seqNumGenerator.getNext(), dataToSend);
BlockStreamProtocolEncoder.encodeRoomAndSequenceNumber(room, seqNumGenerator.getNext(), dataToSend);
Integer[] changedBlocks = ((BlockMessage)message).getBlocks();
BlockStreamProtocolEncoder.numBlocksChanged(changedBlocks.length, dataToSend);
String blocksStr = "Encoding ";
String blockSize = "Block length [";
String encodeTime = "Encode times [";
long encStart = 0;
long encEnd = 0;
int totalBytes = 0;
long totalMillis = 0;
for (int i = 0; i < changedBlocks.length; i++) {
blocksStr += " " + (Integer)changedBlocks[i];
encStart = System.currentTimeMillis();
EncodedBlockData block = retriever.getBlockToSend((Integer)changedBlocks[i]);
totalBytes += block.getVideoData().length;
blockSize += block.getVideoData().length + ",";
encEnd = System.currentTimeMillis();
totalMillis += (encEnd - encStart);
encodeTime += (encEnd - encStart) + ",";
BlockVideoData bv = new BlockVideoData(room, block.getPosition(), block.getVideoData(), false /* should remove later */);
BlockStreamProtocolEncoder.encodeBlock(bv, dataToSend);
}
// System.out.println(blocksStr);
System.out.println(blockSize + "] total=" + totalBytes + " bytes");
System.out.println(encodeTime + "] total=" + totalMillis + " ms");
BlockStreamProtocolEncoder.encodeDelimiter(dataToSend);
sendHeader(BlockStreamProtocolEncoder.encodeHeaderAndLength(dataToSend));
@ -147,6 +155,8 @@ public class NetworkSocketStreamSender implements Runnable {
for (int i = 0; i< changedBlocks.length; i++) {
retriever.blockSent((Integer)changedBlocks[i]);
}
long end = System.currentTimeMillis();
System.out.println("[Socket Thread " + id + "] Sending " + changedBlocks.length + " blocks took " + (end - start) + " millis");
} else if (message.getMessageType() == Message.MessageType.CURSOR) {
CursorMessage msg = (CursorMessage)message;
sendCursor(msg.getMouseLocation(), msg.getRoom());

View File

@ -65,7 +65,7 @@ public class NetworkStreamSender implements NextBlockRetriever, NetworkStreamLis
this.blockDim = blockDim;
this.httpTunnel = httpTunnel;
numThreads = Runtime.getRuntime().availableProcessors() * 2;
numThreads = Runtime.getRuntime().availableProcessors() * 3;
System.out.println(NAME + "Starting up " + numThreads + " sender threads.");
executor = Executors.newFixedThreadPool(numThreads);
}

View File

@ -82,11 +82,11 @@ public final class ScreenVideoEncoder {
return pixels;
}
public static byte[] encodePixels(int pixels[], int width, int height) {
public static byte[] encodePixels(int pixels[], int width, int height, boolean grayscale) {
changePixelScanFromBottomLeftToTopRight(pixels, width, height);
byte[] bgrPixels = convertFromRGBtoBGR(pixels);
byte[] bgrPixels = convertFromRGBtoBGR(pixels, grayscale);
byte[] compressedPixels = compressUsingZlib(bgrPixels);
@ -163,8 +163,8 @@ public final class ScreenVideoEncoder {
* @param pixels
* @return pixels in BGR order
*/
private static byte[] convertFromRGBtoBGR(int[] pixels) {
long start = System.currentTimeMillis();
private static byte[] convertFromRGBtoBGR(int[] pixels, boolean grayscale) {
// long start = System.currentTimeMillis();
byte[] rgbPixels = new byte[pixels.length * 3];
int position = 0;
@ -173,19 +173,20 @@ public final class ScreenVideoEncoder {
byte green = (byte) ((pixels[i] >> 8) & 0xff);
byte blue = (byte) (pixels[i] & 0xff);
// Sequence should be BGR
rgbPixels[position++] = blue;
rgbPixels[position++] = green;
rgbPixels[position++] = red;
/*
* If we want to send grayscale images.
byte brightness = convertToGrayScale(red, green, blue);
// Sequence should be BGR
rgbPixels[position++] = brightness;
rgbPixels[position++] = brightness;
rgbPixels[position++] = brightness;
*/ }
if (grayscale) {
byte brightness = convertToGrayScale(red, green, blue);
// Sequence should be BGR
rgbPixels[position++] = brightness;
rgbPixels[position++] = brightness;
rgbPixels[position++] = brightness;
} else {
// Sequence should be BGR
rgbPixels[position++] = blue;
rgbPixels[position++] = green;
rgbPixels[position++] = red;
}
}
long end = System.currentTimeMillis();
// System.out.println("Extracting pixels[" + pixels.length + "] took " + (end-start) + " ms.");