Add full audio support

This commit is contained in:
Tiago Jacobs 2022-04-19 20:44:38 -03:00
parent e6824a10b2
commit e28b3ddeae
20 changed files with 634 additions and 37 deletions

View File

@ -1,12 +1,12 @@
PODS:
- bigbluebutton-mobile-sdk (0.1.11):
- bigbluebutton-mobile-sdk (0.1.16):
- bigbluebutton-mobile-sdk-common
- React-Core
- WebRTC-lib
- bigbluebutton-mobile-sdk-broadcast-upload-extension (0.1.11):
- bigbluebutton-mobile-sdk-broadcast-upload-extension (0.1.16):
- bigbluebutton-mobile-sdk-common
- WebRTC-lib
- bigbluebutton-mobile-sdk-common (0.1.11):
- bigbluebutton-mobile-sdk-common (0.1.16):
- WebRTC-lib
- boost-for-react-native (1.63.0)
- DoubleConversion (1.1.6)
@ -361,9 +361,9 @@ EXTERNAL SOURCES:
:path: "../node_modules/react-native/ReactCommon/yoga"
SPEC CHECKSUMS:
bigbluebutton-mobile-sdk: ed5b306bc5161dcc35cf9e870d91ab12c64810b6
bigbluebutton-mobile-sdk-broadcast-upload-extension: af7d83d6e0bd547876f25350b84bc94561ba14cf
bigbluebutton-mobile-sdk-common: 4c14fa0c920d9cd7d57ab2d1aebb8c0ebb0065c4
bigbluebutton-mobile-sdk: c22205a921c38e43d5c5ec2c8f47d890db5e189e
bigbluebutton-mobile-sdk-broadcast-upload-extension: 702331796aa06d3cac9fc202cb20888b3e3a57eb
bigbluebutton-mobile-sdk-common: 287c9acc965d00e72d71a0d2b020fa8e483e7a11
boost-for-react-native: 39c7adb57c4e60d6c5479dd8623128eb5b3f0f2c
DoubleConversion: cde416483dac037923206447da6e1454df403714
FBLazyVector: 3bb422f41b18121b71783a905c10e58606f7dc3e

View File

@ -15,7 +15,7 @@ open class BBBSampleHandler : RPBroadcastSampleHandler {
private var createOfferCallObserver:NSKeyValueObservation?;
private var setRemoteSDPCallObserver:NSKeyValueObservation?;
private var addScreenShareRemoteIceCandidateObserver:NSKeyValueObservation?;
private var screenBroadcaster:ScreenBroadcaster?;
private var screenBroadcaster:ScreenBroadcasterService?;
open func setAppGroupName(appGroupName:String) {
logger.info("Received appGroupName: \(appGroupName)")
@ -34,7 +34,7 @@ open class BBBSampleHandler : RPBroadcastSampleHandler {
logger.info("ReplayKit2 event - broadcastStarted - persisting information on UserDefaults")
userDefaults.set(BBBSharedData.generatePayload(), forKey: BBBSharedData.SharedData.broadcastStarted)
self.screenBroadcaster = ScreenBroadcaster(appGroupName: appGroupName)
self.screenBroadcaster = ScreenBroadcasterService(appGroupName: appGroupName)
// Listen for createOffer requests from the UI APP
logger.info("Configuring observer for createOffer")

View File

@ -7,10 +7,10 @@ import os
import bigbluebutton_mobile_sdk_common
import WebRTC
open class ScreenBroadcaster {
open class ScreenBroadcasterService {
// Logger (these messages are displayed in the console application)
private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "ScreenBroadcaster")
private var webRTCClient:WebRTCClient
private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "ScreenBroadcasterService")
private var webRTCClient:ScreenShareWebRTCClient
private var appGroupName:String
private let encoder = JSONEncoder()
public var isConnected:Bool = false
@ -18,7 +18,7 @@ open class ScreenBroadcaster {
init(appGroupName: String) {
self.appGroupName = appGroupName
webRTCClient = WebRTCClient(iceServers: ["stun:stun.l.google.com:19302",
webRTCClient = ScreenShareWebRTCClient(iceServers: ["stun:stun.l.google.com:19302",
"stun:stun1.l.google.com:19302",
"stun:stun2.l.google.com:19302",
"stun:stun3.l.google.com:19302",
@ -77,9 +77,9 @@ open class ScreenBroadcaster {
}
extension ScreenBroadcaster: WebRTCClientDelegate {
extension ScreenBroadcasterService: ScreenShareWebRTCClientDelegate {
public func webRTCClient(_ client: WebRTCClient, didDiscoverLocalCandidate rtcIceCandidate: RTCIceCandidate) {
public func webRTCClient(_ client: ScreenShareWebRTCClient, didDiscoverLocalCandidate rtcIceCandidate: RTCIceCandidate) {
do {
let iceCandidate = IceCandidate(from: rtcIceCandidate)
let iceCandidateAsJsonData = try self.encoder.encode(iceCandidate)
@ -96,7 +96,7 @@ extension ScreenBroadcaster: WebRTCClientDelegate {
}
}
public func webRTCClient(_ client: WebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState) {
public func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState) {
switch state {
case .connected:
self.logger.info("didChangeConnectionState -> connected")
@ -115,7 +115,7 @@ extension ScreenBroadcaster: WebRTCClientDelegate {
}
}
public func webRTCClient(_ client: WebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState) {
public func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState) {
switch state {
case .new:
self.logger.info("didChangeGatheringState -> new")
@ -128,7 +128,7 @@ extension ScreenBroadcaster: WebRTCClientDelegate {
}
}
public func webRTCClient(_ client: WebRTCClient, didChangeSignalingState state: RTCSignalingState) {
public func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeSignalingState state: RTCSignalingState) {
var stateString = ""
switch(state) {
case .haveLocalOffer:

View File

@ -0,0 +1,275 @@
//
// AudioWebRTCClient.swift
//
// Created by Tiago Daniel Jacobs on 20/04/22.
import Foundation
import WebRTC
import os
public protocol AudioWebRTCClientDelegate: AnyObject {
func webRTCClient(_ client: AudioWebRTCClient, didDiscoverLocalCandidate candidate: RTCIceCandidate)
func webRTCClient(_ client: AudioWebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState)
func webRTCClient(_ client: AudioWebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState)
func webRTCClient(_ client: AudioWebRTCClient, didChangeSignalingState state: RTCSignalingState)
}
/// Audio-only WebRTC client: builds an RTCPeerConnection that sends the local
/// microphone track and receives remote audio, reporting signaling/ICE changes
/// to its `AudioWebRTCClientDelegate`.
open class AudioWebRTCClient: NSObject {
    // Logger (these messages are displayed in the console application)
    private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "AudioWebRTCClient")
    // Flipped to true by the delegate callback once gathering reaches .complete.
    private var iceGatheringComplete:Bool = false
    // The `RTCPeerConnectionFactory` is in charge of creating new RTCPeerConnection instances.
    // A new RTCPeerConnection should be created every new call, but the factory is shared.
    private static let factory: RTCPeerConnectionFactory = {
        RTCInitializeSSL()
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
        videoEncoderFactory.preferredCodec = RTCVideoCodecInfo(name: kRTCVideoCodecVp8Name)
        return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    }()
    public weak var delegate: AudioWebRTCClientDelegate?
    private let peerConnection: RTCPeerConnection
    private let rtcAudioSession = RTCAudioSession.sharedInstance()
    // Serial queue used by speakerOn()/speakerOff() to reconfigure the audio session.
    private let audioQueue = DispatchQueue(label: "audio")
    // Audio-only offer: receive audio, explicitly no video.
    private let mediaConstrains = [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueTrue,
                                   kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueFalse]
    // NOTE(review): the video members below are never assigned anywhere in this
    // class (there is no createVideoTrack here), so push(videoFrame:) always
    // returns early. They look like leftovers copied from the screen-share
    // client — confirm and consider removing.
    private var videoSource: RTCVideoSource?
    private var videoCapturer: RTCVideoCapturer?
    private var localVideoTrack: RTCVideoTrack?
    private var isRatioDefined:Bool=false
    @available(*, unavailable)
    override init() {
        fatalError("WebRTCClient:init is unavailable")
    }
    /// Creates the peer connection, adds the local audio track, and configures
    /// the shared audio session for voice chat.
    /// - Parameter iceServers: STUN/TURN URL strings used for candidate gathering.
    public required init(iceServers: [String]) {
        let config = RTCConfiguration()
        config.iceServers = [RTCIceServer(urlStrings: iceServers)]
        // Unified plan is preferred over plan B
        config.sdpSemantics = .unifiedPlan
        // gatherContinually will let WebRTC to listen to any network changes and send any new candidates to the other client
        // gatherOnce will get candidates only on beginning (this is how BBB expect to have it for now, so we use this one)
        config.continualGatheringPolicy = .gatherOnce
        // Define media constraints. DtlsSrtpKeyAgreement is required to be true to be able to connect with web browsers.
        let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                              optionalConstraints: ["DtlsSrtpKeyAgreement":kRTCMediaConstraintsValueTrue])
        guard let peerConnection = AudioWebRTCClient.factory.peerConnection(with: config, constraints: constraints, delegate: nil) else {
            fatalError("Could not create new RTCPeerConnection")
        }
        self.peerConnection = peerConnection
        super.init()
        createMediaSenders()
        configureAudioSession()
        self.peerConnection.delegate = self
    }
    // MARK: Signaling
    /// Creates an SDP offer, sets it as the local description, and returns it.
    public func offer() async throws -> RTCSessionDescription {
        let constrains = RTCMediaConstraints(mandatoryConstraints: self.mediaConstrains, optionalConstraints: nil)
        let sdp = try await self.peerConnection.offer(for: constrains)
        try await self.peerConnection.setLocalDescription(sdp)
        return sdp
    }
    /// Applies the remote SDP answer to the peer connection.
    public func setRemoteSDP(remoteSDP: String) async throws {
        let rtcSessionDescription = RTCSessionDescription(type: RTCSdpType.answer, sdp: remoteSDP)
        try await self.peerConnection.setRemoteDescription(rtcSessionDescription)
    }
    // public func setRemoteCandidate(remoteIceCandidate: IceCandidate) async throws {
    //    let rtcRemoteCandidate = RTCIceCandidate(sdp: remoteIceCandidate.candidate, sdpMLineIndex: remoteIceCandidate.sdpMLineIndex, sdpMid: remoteIceCandidate.sdpMid)
    //    try await self.peerConnection.add(rtcRemoteCandidate)
    // }
    /// Adds a remote ICE candidate; errors are reported through `completion`.
    func set(remoteCandidate: RTCIceCandidate, completion: @escaping (Error?) -> ()) {
        self.peerConnection.add(remoteCandidate, completionHandler: completion)
    }
    // MARK: Media
    /// Forwards a captured frame to the video source.
    /// NOTE(review): videoCapturer/videoSource are never set in this class, so
    /// the guard always fails and this method currently does nothing.
    public func push(videoFrame: RTCVideoFrame) {
        guard videoCapturer != nil, videoSource != nil else { return }
        videoSource!.capturer(videoCapturer!, didCapture: videoFrame)
        print("RTCVideoFrame pushed to server.")
    }
    /// Puts the shared RTCAudioSession into play-and-record / voice-chat mode.
    private func configureAudioSession() {
        self.rtcAudioSession.lockForConfiguration()
        do {
            try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
            try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
        } catch let error {
            debugPrint("Error changing AVAudioSession category: \(error)")
        }
        self.rtcAudioSession.unlockForConfiguration()
    }
    /// Attaches the local audio track to the peer connection.
    private func createMediaSenders() {
        let streamId = "stream"
        // Audio
        let audioTrack = self.createAudioTrack()
        self.peerConnection.add(audioTrack, streamIds: [streamId])
    }
    /// Builds the microphone audio track (track id "audio0") from the shared factory.
    private func createAudioTrack() -> RTCAudioTrack {
        let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
        let audioSource = AudioWebRTCClient.factory.audioSource(with: audioConstrains)
        let audioTrack = AudioWebRTCClient.factory.audioTrack(with: audioSource, trackId: "audio0")
        return audioTrack
    }
}
// MARK: RTCPeerConnectionDelegate Methods
extension AudioWebRTCClient: RTCPeerConnectionDelegate {
    /// Forwards signaling-state changes to the SDK delegate.
    public func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
        self.logger.info("peerConnection new signaling state: \(stateChanged.rawValue)")
        self.delegate?.webRTCClient(self, didChangeSignalingState: stateChanged)
    }
    public func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
        self.logger.info("peerConnection did add stream \(stream.streamId)")
    }
    public func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
        self.logger.info("peerConnection did remove stream \(stream.streamId)")
    }
    public func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
        self.logger.info("peerConnection should negotiate")
    }
    /// Forwards ICE connection-state changes to the SDK delegate.
    public func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        self.logger.info("peerConnection new connection state: \(newState.rawValue)")
        self.delegate?.webRTCClient(self, didChangeIceConnectionState: newState)
    }
    /// Forwards gathering-state changes and records completion so that
    /// `isIceGatheringComplete()` can be polled by FullAudioService.
    public func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
        self.logger.info("peerConnection new gathering state: \(newState.rawValue)")
        self.delegate?.webRTCClient(self, didChangeIceGatheringState: newState)
        if(newState == .complete) {
            self.logger.info("peerConnection new gathering state is COMPLETE")
            self.iceGatheringComplete = true
        } else if(newState == .gathering) {
            self.logger.info("peerConnection new gathering state is GATHERING")
        } else if(newState == .new) {
            self.logger.info("peerConnection new gathering state is NEW")
        }
    }
    /// True once ICE gathering has reached the .complete state.
    /// NOTE(review): not a delegate method — consider moving it into the main
    /// class body rather than this RTCPeerConnectionDelegate extension.
    public func isIceGatheringComplete() -> Bool {
        return iceGatheringComplete;
    }
    /// Forwards newly discovered local candidates to the SDK delegate.
    public func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
        self.logger.info("peerConnection discovered new candidate")
        self.delegate?.webRTCClient(self, didDiscoverLocalCandidate: candidate)
    }
    public func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
        self.logger.info("peerConnection did remove candidate(s)")
    }
    public func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
        self.logger.info("peerConnection did open data channel")
    }
}
extension AudioWebRTCClient {
    /// Enables or disables every local sender track of the given media type
    /// (used by the audio mute/unmute and video show/hide helpers).
    private func setTrackEnabled<T: RTCMediaStreamTrack>(_ type: T.Type, isEnabled: Bool) {
        for transceiver in peerConnection.transceivers {
            guard let track = transceiver.sender.track as? T else { continue }
            track.isEnabled = isEnabled
        }
    }
}
// MARK: - Video control
extension AudioWebRTCClient {
    // NOTE(review): this audio client never adds a video track (see
    // createMediaSenders), so these toggles have no track to act on — they
    // appear to be leftovers from the screen-share client. Confirm and remove.
    /// Disables all local video sender tracks.
    func hideVideo() {
        self.setVideoEnabled(false)
    }
    /// Enables all local video sender tracks.
    func showVideo() {
        self.setVideoEnabled(true)
    }
    private func setVideoEnabled(_ isEnabled: Bool) {
        setTrackEnabled(RTCVideoTrack.self, isEnabled: isEnabled)
    }
}
// MARK:- Audio control
extension AudioWebRTCClient {
    /// Disables the local microphone track (remote side stops receiving audio).
    func muteAudio() {
        self.setAudioEnabled(false)
    }
    /// Re-enables the local microphone track.
    func unmuteAudio() {
        self.setAudioEnabled(true)
    }
    // Fallback to the default playing device: headphones/bluetooth/ear speaker
    func speakerOff() {
        // Reconfiguration is done off the caller's thread on the serial audio queue.
        self.audioQueue.async { [weak self] in
            guard let self = self else {
                return
            }
            self.rtcAudioSession.lockForConfiguration()
            do {
                try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
                // .none removes the speaker override set by speakerOn().
                try self.rtcAudioSession.overrideOutputAudioPort(.none)
            } catch let error {
                debugPrint("Error setting AVAudioSession category: \(error)")
            }
            self.rtcAudioSession.unlockForConfiguration()
        }
    }
    // Force speaker
    func speakerOn() {
        self.audioQueue.async { [weak self] in
            guard let self = self else {
                return
            }
            self.rtcAudioSession.lockForConfiguration()
            do {
                try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
                try self.rtcAudioSession.overrideOutputAudioPort(.speaker)
                try self.rtcAudioSession.setActive(true)
            } catch let error {
                debugPrint("Couldn't force audio to speaker: \(error)")
            }
            self.rtcAudioSession.unlockForConfiguration()
        }
    }
    /// Toggles every local audio sender track.
    private func setAudioEnabled(_ isEnabled: Bool) {
        setTrackEnabled(RTCAudioTrack.self, isEnabled: isEnabled)
    }
}
// NOTE(review): nothing in this class creates or registers a data channel, so
// these callbacks are currently never invoked — confirm whether the
// conformance is needed.
extension AudioWebRTCClient: RTCDataChannelDelegate {
    public func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
        debugPrint("dataChannel did change state: \(dataChannel.readyState)")
    }
    public func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) {
        debugPrint("dataChannel did receive message with buffer: \(buffer)")
    }
}

View File

@ -9,14 +9,14 @@ import Foundation
import WebRTC
import os
public protocol WebRTCClientDelegate: AnyObject {
func webRTCClient(_ client: WebRTCClient, didDiscoverLocalCandidate candidate: RTCIceCandidate)
func webRTCClient(_ client: WebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState)
func webRTCClient(_ client: WebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState)
func webRTCClient(_ client: WebRTCClient, didChangeSignalingState state: RTCSignalingState)
public protocol ScreenShareWebRTCClientDelegate: AnyObject {
func webRTCClient(_ client: ScreenShareWebRTCClient, didDiscoverLocalCandidate candidate: RTCIceCandidate)
func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState)
func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState)
func webRTCClient(_ client: ScreenShareWebRTCClient, didChangeSignalingState state: RTCSignalingState)
}
open class WebRTCClient: NSObject {
open class ScreenShareWebRTCClient: NSObject {
private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "WebRTCClient")
// The `RTCPeerConnectionFactory` is in charge of creating new RTCPeerConnection instances.
@ -29,7 +29,7 @@ open class WebRTCClient: NSObject {
return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
}()
public weak var delegate: WebRTCClientDelegate?
public weak var delegate: ScreenShareWebRTCClientDelegate?
private let peerConnection: RTCPeerConnection
private let rtcAudioSession = RTCAudioSession.sharedInstance()
private let audioQueue = DispatchQueue(label: "audio")
@ -60,7 +60,7 @@ open class WebRTCClient: NSObject {
let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
optionalConstraints: ["DtlsSrtpKeyAgreement":kRTCMediaConstraintsValueTrue])
guard let peerConnection = WebRTCClient.factory.peerConnection(with: config, constraints: constraints, delegate: nil) else {
guard let peerConnection = ScreenShareWebRTCClient.factory.peerConnection(with: config, constraints: constraints, delegate: nil) else {
fatalError("Could not create new RTCPeerConnection")
}
@ -135,9 +135,9 @@ open class WebRTCClient: NSObject {
}*/
private func createVideoTrack() -> RTCVideoTrack {
videoSource = WebRTCClient.factory.videoSource(forScreenCast: true)
videoSource = ScreenShareWebRTCClient.factory.videoSource(forScreenCast: true)
videoCapturer = RTCVideoCapturer(delegate: videoSource!)
let videoTrack = WebRTCClient.factory.videoTrack(with: videoSource!, trackId: "video0")
let videoTrack = ScreenShareWebRTCClient.factory.videoTrack(with: videoSource!, trackId: "video0")
videoTrack.isEnabled = true
return videoTrack
}
@ -157,7 +157,7 @@ open class WebRTCClient: NSObject {
// MARK: RTCPeerConnectionDelegate Methods
extension WebRTCClient: RTCPeerConnectionDelegate {
extension ScreenShareWebRTCClient: RTCPeerConnectionDelegate {
public func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
self.logger.info("peerConnection new signaling state: \(stateChanged.rawValue)")
@ -209,7 +209,7 @@ extension WebRTCClient: RTCPeerConnectionDelegate {
}
}
extension WebRTCClient {
extension ScreenShareWebRTCClient {
private func setTrackEnabled<T: RTCMediaStreamTrack>(_ type: T.Type, isEnabled: Bool) {
peerConnection.transceivers
.compactMap { return $0.sender.track as? T }
@ -219,7 +219,7 @@ extension WebRTCClient {
// MARK: - Video control
extension WebRTCClient {
extension ScreenShareWebRTCClient {
func hideVideo() {
self.setVideoEnabled(false)
}
@ -233,7 +233,7 @@ extension WebRTCClient {
// MARK:- Audio control
extension WebRTCClient {
extension ScreenShareWebRTCClient {
func muteAudio() {
self.setAudioEnabled(false)
}
@ -284,7 +284,7 @@ extension WebRTCClient {
}
}
extension WebRTCClient: RTCDataChannelDelegate {
extension ScreenShareWebRTCClient: RTCDataChannelDelegate {
public func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
debugPrint("dataChannel did change state: \(dataChannel.readyState)")
}

View File

@ -0,0 +1,8 @@
//
// WebRTCClientDelegate.swift
// bigbluebutton-mobile-sdk
//
// Created by Tiago Daniel Jacobs on 20/04/22.
//
import Foundation

View File

@ -21,6 +21,7 @@ open class BigBlueButtonSDK: NSObject {
private static var observer4: NSKeyValueObservation?
private static var observer5: NSKeyValueObservation?
private static var observer6: NSKeyValueObservation?
private static var observer7: NSKeyValueObservation?
public static func initialize(broadcastExtensionBundleId:String, appGroupName:String) {
self.broadcastExtensionBundleId = broadcastExtensionBundleId
@ -84,6 +85,12 @@ open class BigBlueButtonSDK: NSObject {
logger.info("Detected a change in userDefaults for key addScreenShareRemoteIceCandidateCompleted")
ReactNativeEventEmitter.emitter.sendEvent(withName: ReactNativeEventEmitter.EVENT.onAddScreenShareRemoteIceCandidateCompleted.rawValue, body: nil)
}
// broadcastFinished
observer7 = userDefaults?.observe(\.broadcastFinished, options: [.new]) { (defaults, change) in
logger.info("Detected a change in userDefaults for key broadcastFinished")
ReactNativeEventEmitter.emitter.sendEvent(withName: ReactNativeEventEmitter.EVENT.onBroadcastFinished.rawValue, body: nil)
}
}
public static func getBroadcastExtensionBundleId() -> String {

View File

@ -0,0 +1,136 @@
//
// FullAudioService.swift
// bigbluebutton-mobile-sdk
//
// Created by Tiago Daniel Jacobs on 20/04/22.
//
import os
import bigbluebutton_mobile_sdk_common
import WebRTC
/// Drives the full-audio WebRTC session: creates the SDP offer (waiting for
/// ICE gathering so candidates are embedded in the SDP) and applies the
/// remote answer.
open class FullAudioService {
    // Logger (these messages are displayed in the console application)
    private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "FullAudioService")
    // Created by createOffer(); nil until the first offer is requested.
    private var webRTCClient:AudioWebRTCClient?
    // Used by the delegate extension to serialize ICE candidates as JSON.
    private let encoder = JSONEncoder()
    /// Creates the audio peer connection and returns a local SDP offer.
    ///
    /// The offer is re-created once per second so that candidates gathered so
    /// far are included; we return as soon as gathering completes, or after 5
    /// iterations (~5 seconds), whichever comes first.
    /// - Returns: The SDP offer string, or `nil` on failure.
    public func createOffer() async -> String? {
        do{
            let client = AudioWebRTCClient(iceServers: ["stun:stun.l.google.com:19302",
                                                        "stun:stun1.l.google.com:19302",
                                                        "stun:stun2.l.google.com:19302",
                                                        "stun:stun3.l.google.com:19302",
                                                        "stun:stun4.l.google.com:19302"])
            client.delegate = self
            webRTCClient = client
            var createOfferIterations = 0
            while(true) {
                createOfferIterations += 1;
                let rtcSessionDescription = try await client.offer()
                // Immediately connect when ice gathering is complete or after 5 iterations (5 seconds)
                if(client.isIceGatheringComplete()) {
                    logger.debug("Ice gathering complete!");
                    return rtcSessionDescription.sdp
                } else if ( createOfferIterations > 5 ) {
                    logger.debug("Ice iterations exceeded, sending what we have");
                    return rtcSessionDescription.sdp
                } else {
                    logger.debug("Ice gathering not yet complete, waiting 1s");
                    try await Task.sleep(nanoseconds: UInt64(1 * Double(NSEC_PER_SEC)))
                }
            }
        } catch {
            // Include the underlying error so failures are diagnosable from the console.
            logger.error("Error on webRTCClient.offer: \(error.localizedDescription)")
            return nil
        }
    }
    /// Applies the remote SDP answer to the active peer connection.
    /// - Returns: `true` on success; `false` if `createOffer()` has not been
    ///   called yet or the SDP could not be applied.
    public func setRemoteSDP(remoteSDP:String) async -> Bool {
        // Guard instead of force-unwrapping: calling this before createOffer()
        // previously crashed on `webRTCClient!`.
        guard let client = webRTCClient else {
            logger.error("setRemoteSDP called before createOffer — no active peer connection")
            return false
        }
        do {
            try await client.setRemoteSDP(remoteSDP: remoteSDP)
            return true
        }
        catch {
            logger.error("Error on setRemoteSDP: \(error.localizedDescription)")
            return false
        }
    }
}
extension FullAudioService: AudioWebRTCClientDelegate {
    /// Serializes each discovered local candidate to JSON.
    /// NOTE(review): the candidate is only printed, never forwarded to the UI
    /// app / signaling layer — presumably acceptable because gathered
    /// candidates end up embedded in the re-created offer; confirm.
    public func webRTCClient(_ client: AudioWebRTCClient, didDiscoverLocalCandidate rtcIceCandidate: RTCIceCandidate) {
        do {
            let iceCandidate = IceCandidate(from: rtcIceCandidate)
            let iceCandidateAsJsonData = try self.encoder.encode(iceCandidate)
            let iceCandidateAsJsonString = String(decoding: iceCandidateAsJsonData, as: UTF8.self)
            print("---- ICE CANDIDATE \(iceCandidateAsJsonString) ")
        } catch {
            self.logger.error("Error handling ICE candidate")
        }
    }
    /// Logs ICE connection-state transitions.
    public func webRTCClient(_ client: AudioWebRTCClient, didChangeIceConnectionState state: RTCIceConnectionState) {
        switch state {
        case .connected:
            self.logger.info("didChangeConnectionState -> connected")
        case .completed:
            self.logger.info("didChangeConnectionState -> completed")
        case .disconnected:
            self.logger.info("didChangeConnectionState -> disconnected")
        case .failed:
            self.logger.info("didChangeConnectionState -> failed")
        case .closed:
            self.logger.info("didChangeConnectionState -> closed")
        case .new, .checking, .count:
            break
        @unknown default:
            print("Unknown connection state.")
        }
    }
    /// Logs ICE gathering-state transitions.
    public func webRTCClient(_ client: AudioWebRTCClient, didChangeIceGatheringState state: RTCIceGatheringState) {
        switch state {
        case .new:
            self.logger.info("didChangeGatheringState -> new")
        case .gathering:
            self.logger.info("didChangeGatheringState -> gathering")
        case .complete:
            self.logger.info("didChangeGatheringState -> complete")
        @unknown default:
            self.logger.error("Unknown gathering state: \(state.rawValue)")
        }
    }
    /// Logs signaling-state transitions.
    /// NOTE(review): `stateString` is assigned but never read — dead code
    /// (likely copied from ScreenBroadcasterService, which emits the string).
    public func webRTCClient(_ client: AudioWebRTCClient, didChangeSignalingState state: RTCSignalingState) {
        var stateString = ""
        switch(state) {
        case .haveLocalOffer:
            self.logger.info("peerConnection new signaling state -> haveLocalOffer")
            stateString = "have-local-offer"
        case .haveLocalPrAnswer:
            self.logger.info("peerConnection new signaling state -> haveLocalPrAnswer")
            stateString = "have-local-pranswer"
        case .haveRemoteOffer:
            self.logger.info("peerConnection new signaling state -> haveRemoteOffer")
            stateString = "have-remote-offer"
        case .haveRemotePrAnswer:
            self.logger.info("peerConnection new signaling state -> haveRemotePrAnswer")
            stateString = "have-remote-pranswer"
        case .stable:
            self.logger.info("peerConnection new signaling state -> stable")
            stateString = "stable"
        case .closed:
            self.logger.info("peerConnection new signaling state -> closed")
            stateString = "closed"
        default:
            self.logger.error("peerConnection new signaling state -> UNKNOWN")
        }
    }
}

View File

@ -0,0 +1,15 @@
//
// FullAudioServiceManager.m
//
// Created by Tiago Daniel Jacobs on 20/04/22.
//
#import <Foundation/Foundation.h>
#import "React/RCTBridgeModule.h"
// Exposes the Swift FullAudioServiceManager class to React Native under the
// JS module name "BBBN_FullAudioService".
@interface RCT_EXTERN_REMAP_MODULE(BBBN_FullAudioService, FullAudioServiceManager, NSObject)
// Creates the SDP offer; result is delivered via the onFullAudioOfferCreated event.
RCT_EXTERN_METHOD(createFullAudioOffer)
// Applies the remote SDP answer; completion signaled via onSetFullAudioRemoteSDPCompleted.
RCT_EXTERN_METHOD(setFullAudioRemoteSDP: (NSString *)remoteSDP)
// NOTE(review): the Swift side logs "NOT IMPLEMENTED" for this method.
RCT_EXTERN_METHOD(addFullAudioRemoteIceCandidate: (NSString *)remoteCandidate)
@end

View File

@ -0,0 +1,49 @@
//
// FullAudioServiceManager.swift
//
// Created by Tiago Daniel Jacobs on 20/04/22.
//
import Foundation
import os
import bigbluebutton_mobile_sdk_common
import AVFAudio
/// React Native bridge for full-audio: receives calls from JS (see
/// FullAudioServiceManager.m) and reports results back through
/// ReactNativeEventEmitter events.
@objc(FullAudioServiceManager)
class FullAudioServiceManager: NSObject {
    // Logger (these messages are displayed in the console application)
    private var logger = os.Logger(subsystem: "BigBlueButtonMobileSDK", category: "FullAudioServiceManager")
    // NOTE(review): `audioSession` and `player` are not used anywhere in this
    // class as shown; `player` is an implicitly-unwrapped AVAudioPlayer that is
    // never assigned — confirm they are needed or remove them.
    var audioSession = AVAudioSession.sharedInstance()
    var player: AVAudioPlayer!
    var fullAudioService: FullAudioService = FullAudioService( )
    // React native exposed method: creates the audio SDP offer and emits it
    // back to JS via the onFullAudioOfferCreated event.
    @objc func createFullAudioOffer() -> Void {
        logger.info("createFullAudioOffer")
        Task.init {
            let optionalSdp = await self.fullAudioService.createOffer()
            if(optionalSdp != nil){
                let sdp = optionalSdp!
                self.logger.info("Got SDP back from fullAudioService: \(sdp)")
                ReactNativeEventEmitter.emitter.sendEvent(withName: ReactNativeEventEmitter.EVENT.onFullAudioOfferCreated.rawValue, body: sdp)
            }
        }
    }
    // React native exposed method: applies the remote SDP answer, then signals
    // completion via onSetFullAudioRemoteSDPCompleted (no success flag is sent).
    @objc func setFullAudioRemoteSDP(_ remoteSDP:String) -> Void {
        logger.info("setFullAudioRemoteSDP call arrived on swift: \(remoteSDP)")
        Task.init {
            let setRemoteSDPAnswer = await self.fullAudioService.setRemoteSDP(remoteSDP: remoteSDP);
            self.logger.info("Got \(setRemoteSDPAnswer) back from setRemoteSDP")
            ReactNativeEventEmitter.emitter.sendEvent(withName: ReactNativeEventEmitter.EVENT.onSetFullAudioRemoteSDPCompleted.rawValue, body: nil)
        }
    }
    // React native exposed method — intentionally a stub for now (logged below).
    @objc func addFullAudioRemoteIceCandidate(_ remoteCandidate:String) -> Void {
        logger.info("!! NOT IMPLEMENTED !! addFullAudioRemoteIceCandidate call arrived on swift: \(remoteCandidate)")
    }
}

View File

@ -23,6 +23,8 @@ open class ReactNativeEventEmitter: RCTEventEmitter {
case onScreenShareLocalIceCandidate = "onScreenShareLocalIceCandidate"
case onScreenShareSignalingStateChange = "onScreenShareSignalingStateChange"
case onAddScreenShareRemoteIceCandidateCompleted = "onAddScreenShareRemoteIceCandidateCompleted"
case onFullAudioOfferCreated = "onFullAudioOfferCreated"
case onSetFullAudioRemoteSDPCompleted = "onSetFullAudioRemoteSDPCompleted"
}
override init() {

View File

@ -1,6 +1,6 @@
{
"name": "bigbluebutton-mobile-sdk",
"version": "0.1.15",
"version": "0.1.16",
"description": "This repository contains BigBlueButton react-native component, that's used in our [sample implementation](https://github.com/bigbluebutton/bigbluebutton-mobile).",
"main": "lib/commonjs/index",
"module": "lib/module/index",

View File

@ -0,0 +1,12 @@
import type { MutableRefObject } from 'react';
import nativeEmitter from '../native-messaging/emitter';
// Notifies the page inside the WebView when the native broadcast ends.
export function setupListener(_webViewRef: MutableRefObject<any>) {
  // Forward the native onBroadcastFinished event into the WebView.
  nativeEmitter.addListener('onBroadcastFinished', () => {
    console.log(`Broadcast finished`);
    // Guarded call: the page may not have registered the callback yet.
    _webViewRef.current.injectJavaScript(
      `window.bbbMobileScreenShareBroadcastFinishedCallback && window.bbbMobileScreenShareBroadcastFinishedCallback();`
    );
  });
}

View File

@ -12,7 +12,7 @@ export function setupListener(_webViewRef: MutableRefObject<any>) {
}
const event = { candidate: iceCandidate };
_webViewRef.current.injectJavaScript(
`window.bbbMobileScreenShareIceCandidateCallback(${JSON.stringify(
`window.bbbMobileScreenShareIceCandidateCallback && window.bbbMobileScreenShareIceCandidateCallback(${JSON.stringify(
event
)});`
);

View File

@ -6,7 +6,7 @@ export function setupListener(_webViewRef: MutableRefObject<any>) {
nativeEmitter.addListener('onScreenShareSignalingStateChange', (newState) => {
console.log(`Temos um novo state: ${newState}`);
_webViewRef.current.injectJavaScript(
`window.bbbMobileScreenShareSignalingStateChangeCallback(${JSON.stringify(
`window.bbbMobileScreenShareSignalingStateChangeCallback && window.bbbMobileScreenShareSignalingStateChangeCallback(${JSON.stringify(
newState
)});`
);

View File

@ -5,6 +5,7 @@ import { WebView } from 'react-native-webview';
import { handleWebviewMessage } from './webview/message-handler';
import * as onScreenShareLocalIceCandidate from './events/onScreenShareLocalIceCandidate';
import * as onScreenShareSignalingStateChange from './events/onScreenShareSignalingStateChange';
import * as onBroadcastFinished from './events/onBroadcastFinished';
type BigbluebuttonMobileSdkProps = {
url: string;
@ -30,6 +31,7 @@ export const BigBlueButtonMobile = ({
useEffect(() => {
onScreenShareLocalIceCandidate.setupListener(webViewRef);
onScreenShareSignalingStateChange.setupListener(webViewRef);
onBroadcastFinished.setupListener(webViewRef);
}, [webViewRef]);
return (

View File

@ -0,0 +1,32 @@
import { createFullAudioOffer as nativeCreateFullAudioOffer } from '../native-components/BBBN_FullAudioService';
import nativeEmitter from '../native-messaging/emitter';
// Reference to the resolver of the last pending call.
// NOTE(review): a single module-level resolver means overlapping calls clobber
// each other — only the most recent caller's promise is ever resolved.
let resolve = (a: string) => {
  console.log(
    `default resolve function called, this should never happen: ${a}`
  );
};
// Resolve the pending promise when the native side delivers the SDP offer.
nativeEmitter.addListener('onFullAudioOfferCreated', (sdp) => {
  resolve(sdp);
});
// Entry point of this method: resolves with the SDP offer string produced by
// the native AudioWebRTCClient.
function createFullAudioOffer() {
  return new Promise((res, rej) => {
    // store the resolver for later call (when event is received)
    resolve = res;
    try {
      console.log(`>nativeCreateFullAudioOffer`);
      // call native swift method that creates the full-audio SDP offer
      nativeCreateFullAudioOffer();
    } catch (e) {
      rej(`Call to nativeCreateFullAudioOffer failed`);
    }
  });
}
export default createFullAudioOffer;

View File

@ -0,0 +1,32 @@
import { setFullAudioRemoteSDP as nativeSetFullAudioRemoteSDP } from '../native-components/BBBN_FullAudioService';
import nativeEmitter from '../native-messaging/emitter';
// Reference to the resolver of the last pending call.
// NOTE(review): a single module-level resolver means overlapping calls clobber
// each other — only the most recent caller's promise is ever resolved.
let resolve = (value: unknown) => {
  console.log(
    `default resolve function called, this should never happen: ${value}`
  );
};
// Resolve the pending promise when the native side confirms the remote SDP was applied.
nativeEmitter.addListener('onSetFullAudioRemoteSDPCompleted', () => {
  resolve(undefined);
});
// Entry point of this method: applies the remote SDP answer on the native side.
function setFullAudioRemoteSDP(remoteSdp: string) {
  return new Promise((res, rej) => {
    // store the resolver for later call (when event is received)
    resolve = res;
    try {
      console.log(`>nativeSetFullAudioRemoteSDP ${remoteSdp}`);
      // call native swift method that sets the full-audio remote SDP
      nativeSetFullAudioRemoteSDP(remoteSdp);
    } catch (e) {
      rej(`Call to nativeSetFullAudioRemoteSDP failed`);
    }
  });
}
export default setFullAudioRemoteSDP;

View File

@ -0,0 +1,19 @@
import { NativeModules } from 'react-native';
// Typed-ish wrappers around the native BBBN_FullAudioService module
// (declared on the iOS side in FullAudioServiceManager.m).
const FullAudioService = NativeModules.BBBN_FullAudioService;
// export function initializeFullAudio() {
//  FullAudioService.initializeFullAudio();
// }
// Asks native code to create the audio SDP offer (fire-and-forget; the result
// arrives via the onFullAudioOfferCreated event).
export function createFullAudioOffer() {
  FullAudioService.createFullAudioOffer();
}
// Sends the remote SDP answer to native code (completion arrives via the
// onSetFullAudioRemoteSDPCompleted event).
export function setFullAudioRemoteSDP(remoteSDP: string) {
  FullAudioService.setFullAudioRemoteSDP(remoteSDP);
}
// export function addFullAudioRemoteIceCandidate(remoteCandidateJson: string) {
//  FullAudioService.addFullAudioRemoteIceCandidate(remoteCandidateJson);
// }

View File

@ -3,7 +3,9 @@ import type { WebView, WebViewMessageEvent } from 'react-native-webview';
import initializeScreenShare from '../methods/initializeScreenShare';
import createScreenShareOffer from '../methods/createScreenShareOffer';
import setScreenShareRemoteSDP from '../methods/setScreenShareRemoteSDP';
import setFullAudioRemoteSDP from '../methods/setFullAudioRemoteSDP';
import addScreenShareRemoteIceCandidate from '../methods/addScreenShareRemoteIceCandidate';
import createFullAudioOffer from '../methods/createFullAudioOffer';
function observePromiseResult(
webViewRef: MutableRefObject<WebView>,
@ -42,12 +44,18 @@ export function handleWebviewMessage(
case 'initializeScreenShare':
promise = initializeScreenShare();
break;
case 'createOffer':
case 'createFullAudioOffer':
promise = createFullAudioOffer();
break;
case 'createScreenShareOffer':
promise = createScreenShareOffer();
break;
case 'setRemoteDescription':
case 'setScreenShareRemoteSDP':
promise = setScreenShareRemoteSDP(data?.arguments[0].sdp);
break;
case 'setFullAudioRemoteSDP':
promise = setFullAudioRemoteSDP(data?.arguments[0].sdp);
break;
case 'addRemoteIceCandidate':
promise = addScreenShareRemoteIceCandidate(
JSON.stringify(data?.arguments[0])