Implement video frame pushing

This commit is contained in:
Tiago Jacobs 2022-03-29 15:59:52 -03:00
parent 7036a276ed
commit 2a2a07616d
3 changed files with 25 additions and 3 deletions

View File

@ -131,6 +131,7 @@ open class BBBSampleHandler : RPBroadcastSampleHandler {
switch sampleBufferType {
case RPSampleBufferType.video:
logger.trace("ReplayKit2 event - processSampleBuffer(video)")
self.screenBroadcaster?.pushVideoFrame(sampleBuffer: sampleBuffer)
break
case RPSampleBufferType.audioApp:
logger.trace("ReplayKit2 event - processSampleBuffer(audioApp)")

View File

@ -13,6 +13,7 @@ open class ScreenBroadcaster {
private var webRTCClient:WebRTCClient
private var appGroupName:String
private let encoder = JSONEncoder()
public var isConnected:Bool = false
init(appGroupName: String) {
self.appGroupName = appGroupName
@ -55,6 +56,20 @@ open class ScreenBroadcaster {
}
}
/// Wraps a ReplayKit video sample in an `RTCVideoFrame` and forwards it to the WebRTC client.
///
/// Frames are dropped (with a log line) while `isConnected` is false, and when the
/// sample buffer does not carry an image buffer — `CMSampleBufferGetImageBuffer` can
/// legitimately return `nil`, and force-unwrapping it would crash the whole
/// broadcast extension mid-stream.
/// - Parameter sampleBuffer: A video sample delivered by `processSampleBuffer`.
public func pushVideoFrame(sampleBuffer: CMSampleBuffer) -> Void {
    guard isConnected else {
        self.logger.info("Ignoring pushVideoFrame - not connected")
        return
    }
    guard let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        self.logger.info("Ignoring pushVideoFrame - sample buffer has no image buffer")
        return
    }
    self.logger.info("pushing video")
    // WebRTC expects the presentation timestamp in nanoseconds.
    let timeStampNs: Int64 = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
    let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
    let rtcVideoFrame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: ._0, timeStampNs: timeStampNs)
    self.webRTCClient.push(videoFrame: rtcVideoFrame)
    self.logger.info("video pushed")
}
}
extension ScreenBroadcaster: WebRTCClientDelegate {
@ -133,6 +148,8 @@ extension ScreenBroadcaster: WebRTCClientDelegate {
self.logger.error("peerConnection new signaling state -> UNKNOWN")
}
self.isConnected = true
BBBSharedData
.getUserDefaults(appGroupName: self.appGroupName)
.set(BBBSharedData.generatePayload(properties: [

View File

@ -96,11 +96,11 @@ open class WebRTCClient: NSObject {
// MARK: Media
/// Feeds a captured video frame into the local video source so it is sent to the peer.
///
/// Silently drops the frame when the capturer or source has not been created yet
/// (i.e. before `createVideoTrack()` ran). The previous `nil` check followed by
/// force-unwraps is replaced with a single `guard let` binding, and the stale
/// commented-out copy of the old private declaration is removed.
/// - Parameter videoFrame: The frame to deliver to the WebRTC video source.
public func push(videoFrame: RTCVideoFrame) {
    guard let capturer = videoCapturer, let source = videoSource else { return }
    source.capturer(capturer, didCapture: videoFrame)
    print("RTCVideoFrame pushed to server.")
}
/*private func configureAudioSession() {
self.rtcAudioSession.lockForConfiguration()
@ -134,9 +134,13 @@ open class WebRTCClient: NSObject {
}*/
private func createVideoTrack() -> RTCVideoTrack {
let targetWidth:Int32 = 600;
let targetHeight:Int32 = targetWidth * Int32(UIScreen.main.fixedCoordinateSpace.bounds.height / UIScreen.main.fixedCoordinateSpace.bounds.width)
videoSource = WebRTCClient.factory.videoSource(forScreenCast: true)
videoCapturer = RTCVideoCapturer(delegate: videoSource!)
videoSource!.adaptOutputFormat(toWidth: 600, height: 800, fps: 15)
videoSource!.adaptOutputFormat(toWidth: targetWidth, height: targetHeight, fps: 15)
let videoTrack = WebRTCClient.factory.videoTrack(with: videoSource!, trackId: "video0")
videoTrack.isEnabled = true
return videoTrack