//
//  Created by Avently on 09.02.2023.
//  Copyright (c) 2023 SimpleX Chat. All rights reserved.
//

import WebRTC
import LZString
import SwiftUI
import SimpleXChat

final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDelegate, RTCFrameDecryptorDelegate {
    private static let factory: RTCPeerConnectionFactory = {
        RTCInitializeSSL()
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
        videoEncoderFactory.preferredCodec = RTCVideoCodecInfo(name: kRTCVp8CodecName)
        return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    }()

    private static let ivTagBytes: Int = 28
    private static let enableEncryption: Bool = true
    private var chat_ctrl = getChatCtrl()

    struct Call {
        var connection: RTCPeerConnection
        var iceCandidates: IceCandidates
        var localCamera: RTCVideoCapturer?
        var localAudioTrack: RTCAudioTrack?
        var localVideoTrack: RTCVideoTrack?
        var remoteAudioTrack: RTCAudioTrack?
        var remoteVideoTrack: RTCVideoTrack?
        var remoteScreenAudioTrack: RTCAudioTrack?
        var remoteScreenVideoTrack: RTCVideoTrack?
        var device: AVCaptureDevice.Position
        var aesKey: String?
        var frameEncryptor: RTCFrameEncryptor?
        var frameDecryptor: RTCFrameDecryptor?
        var peerHasOldVersion: Bool
    }

    struct NotConnectedCall {
        var audioTrack: RTCAudioTrack?
        var localCameraAndTrack: (RTCVideoCapturer, RTCVideoTrack)?
        var device: AVCaptureDevice.Position = .front
    }

    actor IceCandidates {
        private var candidates: [RTCIceCandidate] = []

        func getAndClear() async -> [RTCIceCandidate] {
            let cs = candidates
            candidates = []
            return cs
        }

        func append(_ c: RTCIceCandidate) async {
            candidates.append(c)
        }
    }

    private let rtcAudioSession = RTCAudioSession.sharedInstance()
    private let audioQueue = DispatchQueue(label: "chat.simplex.app.audio")
    private var sendCallResponse: (WVAPIMessage) async -> Void
    var activeCall: Call?
    var notConnectedCall: NotConnectedCall?
    private var localRendererAspectRatio: Binding<CGFloat?>
    var cameraRenderers: [RTCVideoRenderer] = []
    var screenRenderers: [RTCVideoRenderer] = []

    @available(*, unavailable)
    override init() {
        fatalError("Unimplemented")
    }

    required init(_ sendCallResponse: @escaping (WVAPIMessage) async -> Void, _ localRendererAspectRatio: Binding<CGFloat?>) {
        self.sendCallResponse = sendCallResponse
        self.localRendererAspectRatio = localRendererAspectRatio
        rtcAudioSession.useManualAudio = CallController.useCallKit()
        rtcAudioSession.isAudioEnabled = !CallController.useCallKit()
        logger.debug("WebRTCClient: rtcAudioSession has manual audio \(self.rtcAudioSession.useManualAudio) and audio enabled \(self.rtcAudioSession.isAudioEnabled)")
        super.init()
    }

    let defaultIceServers: [WebRTC.RTCIceServer] = [
        WebRTC.RTCIceServer(urlStrings: ["stuns:stun.simplex.im:443"]),
        // WebRTC.RTCIceServer(urlStrings: ["turns:turn.simplex.im:443?transport=udp"], username: "private2", credential: "Hxuq2QxUjnhj96Zq2r4HjqHRj"),
        WebRTC.RTCIceServer(urlStrings: ["turns:turn.simplex.im:443?transport=tcp"], username: "private2", credential: "Hxuq2QxUjnhj96Zq2r4HjqHRj"),
    ]
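
    /// Builds the state for a new WebRTC call: creates the peer connection, reuses local tracks
    /// prepared for a not-yet-connected call (or creates new ones), and, when an AES key is
    /// provided, prepares per-frame encryptor/decryptor instances sized for the IV/tag overhead.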
    func initializeCall(_ iceServers: [WebRTC.RTCIceServer]?, _ mediaType: CallMediaType, _ aesKey: String?, _ relay: Bool?) -> Call {
        let connection = createPeerConnection(iceServers ?? getWebRTCIceServers() ?? defaultIceServers, relay)
        connection.delegate = self
        let device = notConnectedCall?.device ?? .front
        var localCamera: RTCVideoCapturer? = nil
        var localAudioTrack: RTCAudioTrack? = nil
        var localVideoTrack: RTCVideoTrack? = nil
        if let localCameraAndTrack = notConnectedCall?.localCameraAndTrack {
            (localCamera, localVideoTrack) = localCameraAndTrack
        } else if notConnectedCall == nil && mediaType == .video {
            (localCamera, localVideoTrack) = createVideoTrackAndStartCapture(device)
        }
        if let audioTrack = notConnectedCall?.audioTrack {
            localAudioTrack = audioTrack
        } else if notConnectedCall == nil {
            localAudioTrack = createAudioTrack()
        }
        notConnectedCall?.localCameraAndTrack = nil
        notConnectedCall?.audioTrack = nil

        var frameEncryptor: RTCFrameEncryptor? = nil
        var frameDecryptor: RTCFrameDecryptor? = nil
        if aesKey != nil {
            let encryptor = RTCFrameEncryptor.init(sizeChange: Int32(WebRTCClient.ivTagBytes))
            encryptor.delegate = self
            frameEncryptor = encryptor
            let decryptor = RTCFrameDecryptor.init(sizeChange: -Int32(WebRTCClient.ivTagBytes))
            decryptor.delegate = self
            frameDecryptor = decryptor
        }
        return Call(
            connection: connection,
            iceCandidates: IceCandidates(),
            localCamera: localCamera,
            localAudioTrack: localAudioTrack,
            localVideoTrack: localVideoTrack,
            device: device,
            aesKey: aesKey,
            frameEncryptor: frameEncryptor,
            frameDecryptor: frameDecryptor,
            peerHasOldVersion: false
        )
    }
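
    /// Connection configuration: unified-plan SDP semantics, continual ICE gathering, and
    /// relay-only ICE transport when the user chose to always route calls via a TURN relay.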
    func createPeerConnection(_ iceServers: [WebRTC.RTCIceServer], _ relay: Bool?) -> RTCPeerConnection {
        let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                              optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])

        guard let connection = WebRTCClient.factory.peerConnection(
            with: getCallConfig(iceServers, relay),
            constraints: constraints, delegate: nil
        )
        else {
            fatalError("Unable to create RTCPeerConnection")
        }
        return connection
    }

    func getCallConfig(_ iceServers: [WebRTC.RTCIceServer], _ relay: Bool?) -> RTCConfiguration {
        let config = RTCConfiguration()
        config.iceServers = iceServers
        config.sdpSemantics = .unifiedPlan
        config.continualGatheringPolicy = .gatherContinually
        config.iceTransportPolicy = relay == true ? .relay : .all
        // Allow waiting 30 seconds before `failing` the connection if the answer from the remote side is not received in time
        config.iceInactiveTimeout = 30_000
        return config
    }

    func addIceCandidates(_ connection: RTCPeerConnection, _ remoteIceCandidates: [RTCIceCandidate]) {
        remoteIceCandidates.forEach { candidate in
            connection.add(candidate.toWebRTCCandidate()) { error in
                if let error = error {
                    logger.error("Adding candidate error \(error)")
                }
            }
        }
    }
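
    /// Handles a signaling command for the call. Each `WCallCommand` case corresponds to one step
    /// of call setup (capabilities -> offer -> answer -> ice -> media/end); the response is sent
    /// back asynchronously via `sendCallResponse`.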
    func sendCallCommand(command: WCallCommand) async {
        var resp: WCallResponse? = nil
        let pc = activeCall?.connection
        switch command {
        case let .capabilities(media): // outgoing
            let localCameraAndTrack: (RTCVideoCapturer, RTCVideoTrack)? = media == .video
            ? createVideoTrackAndStartCapture(.front)
            : nil
            notConnectedCall = NotConnectedCall(audioTrack: createAudioTrack(), localCameraAndTrack: localCameraAndTrack, device: .front)
            resp = .capabilities(capabilities: CallCapabilities(encryption: WebRTCClient.enableEncryption))
        case let .start(media: media, aesKey, iceServers, relay): // incoming
            logger.debug("starting incoming call - create webrtc session")
            if activeCall != nil { endCall() }
            let encryption = WebRTCClient.enableEncryption
            let call = initializeCall(iceServers?.toWebRTCIceServers(), media, encryption ? aesKey : nil, relay)
            activeCall = call
            setupLocalTracks(true, call)
            let (offer, error) = await call.connection.offer()
            if let offer = offer {
                setupEncryptionForLocalTracks(call)
                resp = .offer(
                    offer: compressToBase64(input: encodeJSON(CustomRTCSessionDescription(type: offer.type.toSdpType(), sdp: offer.sdp))),
                    iceCandidates: compressToBase64(input: encodeJSON(await self.getInitialIceCandidates())),
                    capabilities: CallCapabilities(encryption: encryption)
                )
                self.waitForMoreIceCandidates()
            } else {
                resp = .error(message: "offer error: \(error?.localizedDescription ?? "unknown error")")
            }
        case let .offer(offer, iceCandidates, media, aesKey, iceServers, relay): // outgoing
            if activeCall != nil {
                resp = .error(message: "accept: call already started")
            } else if !WebRTCClient.enableEncryption && aesKey != nil {
                resp = .error(message: "accept: encryption is not supported")
            } else if let offer: CustomRTCSessionDescription = decodeJSON(decompressFromBase64(input: offer)),
                      let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)) {
                let call = initializeCall(iceServers?.toWebRTCIceServers(), media, WebRTCClient.enableEncryption ? aesKey : nil, relay)
                activeCall = call
                let pc = call.connection
                if let type = offer.type, let sdp = offer.sdp {
                    if (try? await pc.setRemoteDescription(RTCSessionDescription(type: type.toWebRTCSdpType(), sdp: sdp))) != nil {
                        setupLocalTracks(false, call)
                        setupEncryptionForLocalTracks(call)
                        pc.transceivers.forEach { transceiver in
                            transceiver.setDirection(.sendRecv, error: nil)
                        }
                        await adaptToOldVersion(pc.transceivers.count <= 2)
                        let (answer, error) = await pc.answer()
                        if let answer = answer {
                            self.addIceCandidates(pc, remoteIceCandidates)
                            resp = .answer(
                                answer: compressToBase64(input: encodeJSON(CustomRTCSessionDescription(type: answer.type.toSdpType(), sdp: answer.sdp))),
                                iceCandidates: compressToBase64(input: encodeJSON(await self.getInitialIceCandidates()))
                            )
                            self.waitForMoreIceCandidates()
                        } else {
                            resp = .error(message: "answer error: \(error?.localizedDescription ?? "unknown error")")
                        }
                    } else {
                        resp = .error(message: "accept: remote description is not set")
                    }
                }
            }
        case let .answer(answer, iceCandidates): // incoming
            if pc == nil {
                resp = .error(message: "answer: call not started")
            } else if pc?.localDescription == nil {
                resp = .error(message: "answer: local description is not set")
            } else if pc?.remoteDescription != nil {
                resp = .error(message: "answer: remote description already set")
            } else if let answer: CustomRTCSessionDescription = decodeJSON(decompressFromBase64(input: answer)),
                      let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)),
                      let type = answer.type, let sdp = answer.sdp,
                      let pc = pc {
                if (try? await pc.setRemoteDescription(RTCSessionDescription(type: type.toWebRTCSdpType(), sdp: sdp))) != nil {
                    var currentDirection: RTCRtpTransceiverDirection = .sendOnly
                    pc.transceivers[2].currentDirection(&currentDirection)
                    await adaptToOldVersion(currentDirection == .sendOnly)
                    addIceCandidates(pc, remoteIceCandidates)
                    resp = .ok
                } else {
                    resp = .error(message: "answer: remote description is not set")
                }
            }
        case let .ice(iceCandidates):
            if let pc = pc,
               let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)) {
                addIceCandidates(pc, remoteIceCandidates)
                resp = .ok
            } else {
                resp = .error(message: "ice: call not started")
            }
        case let .media(source, enable):
            if activeCall == nil {
                resp = .error(message: "media: call not started")
            } else {
                await enableMedia(source, enable)
                resp = .ok
            }
        case .end:
            // TODO: possibly, endCall should be called before returning .ok
            await sendCallResponse(.init(corrId: nil, resp: .ok, command: command))
            endCall()
        }

        if let resp = resp {
            await sendCallResponse(.init(corrId: nil, resp: resp, command: command))
        }
    }
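
    /// ICE candidates are sent in batches: the initial batch is collected for up to 750 ms and
    /// included with the offer/answer, then `waitForMoreIceCandidates` keeps draining the queue
    /// for up to 12 seconds, sending the rest as separate `.ice` responses.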
    func getInitialIceCandidates() async -> [RTCIceCandidate] {
        await untilIceComplete(timeoutMs: 750, stepMs: 150) {}
        let candidates = await activeCall?.iceCandidates.getAndClear() ?? []
        logger.debug("WebRTCClient: sending initial ice candidates: \(candidates.count)")
        return candidates
    }

    func waitForMoreIceCandidates() {
        Task {
            await untilIceComplete(timeoutMs: 12000, stepMs: 1500) {
                let candidates = await self.activeCall?.iceCandidates.getAndClear() ?? []
                if candidates.count > 0 {
                    logger.debug("WebRTCClient: sending more ice candidates: \(candidates.count)")
                    await self.sendIceCandidates(candidates)
                }
            }
        }
    }

    func sendIceCandidates(_ candidates: [RTCIceCandidate]) async {
        await self.sendCallResponse(.init(
            corrId: nil,
            resp: .ice(iceCandidates: compressToBase64(input: encodeJSON(candidates))),
            command: nil)
        )
    }
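
    /// Polls inbound-rtp statistics about once per second; if `bytesReceived` for a transceiver
    /// stops growing for ~3 seconds the peer is considered muted, and transitions are reported
    /// via `onMediaMuteUnmute`.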
    func setupMuteUnmuteListener(_ transceiver: RTCRtpTransceiver, _ track: RTCMediaStreamTrack) {
        // logger.log("Setting up mute/unmute listener in the call without encryption for mid = \(transceiver.mid)")
        Task {
            var lastBytesReceived: Int64 = 0
            // muted initially
            var mutedSeconds = 4
            while let call = self.activeCall, transceiver.receiver.track?.readyState == .live {
                let stats: RTCStatisticsReport = await call.connection.statistics(for: transceiver.receiver)
                let stat = stats.statistics.values.first(where: { stat in stat.type == "inbound-rtp" })
                if let stat {
                    // logger.debug("Stat \(stat.debugDescription)")
                    let bytes = stat.values["bytesReceived"] as! Int64
                    if bytes <= lastBytesReceived {
                        mutedSeconds += 1
                        if mutedSeconds == 3 {
                            await MainActor.run {
                                self.onMediaMuteUnmute(transceiver.mid, true)
                            }
                        }
                    } else {
                        if mutedSeconds >= 3 {
                            await MainActor.run {
                                self.onMediaMuteUnmute(transceiver.mid, false)
                            }
                        }
                        lastBytesReceived = bytes
                        mutedSeconds = 0
                    }
                }
                try? await Task.sleep(nanoseconds: 1000_000000)
            }
        }
    }

    @MainActor
    func onMediaMuteUnmute(_ transceiverMid: String?, _ mute: Bool) {
        guard let activeCall = ChatModel.shared.activeCall else { return }
        let source = mediaSourceFromTransceiverMid(transceiverMid)
        logger.log("Mute/unmute \(source.rawValue) track = \(mute) with mid = \(transceiverMid ?? "nil")")
        if source == .mic && activeCall.peerMediaSources.mic == mute {
            activeCall.peerMediaSources.mic = !mute
        } else if (source == .camera && activeCall.peerMediaSources.camera == mute) {
            activeCall.peerMediaSources.camera = !mute
        } else if (source == .screenAudio && activeCall.peerMediaSources.screenAudio == mute) {
            activeCall.peerMediaSources.screenAudio = !mute
        } else if (source == .screenVideo && activeCall.peerMediaSources.screenVideo == mute) {
            activeCall.peerMediaSources.screenVideo = !mute
        }
    }

    @MainActor
    func enableMedia(_ source: CallMediaSource, _ enable: Bool) {
        logger.debug("WebRTCClient: enabling media \(source.rawValue) \(enable)")
        source == .camera ? setCameraEnabled(enable) : setAudioEnabled(enable)
    }

    @MainActor
    func adaptToOldVersion(_ peerHasOldVersion: Bool) {
        activeCall?.peerHasOldVersion = peerHasOldVersion
        if peerHasOldVersion {
            logger.debug("The peer has an old version. Remote audio track is nil = \(self.activeCall?.remoteAudioTrack == nil), video = \(self.activeCall?.remoteVideoTrack == nil)")
            onMediaMuteUnmute("0", false)
            if activeCall?.remoteVideoTrack != nil {
                onMediaMuteUnmute("1", false)
            }
            if ChatModel.shared.activeCall?.localMediaSources.camera == true && ChatModel.shared.activeCall?.peerMediaSources.camera == false {
                logger.debug("Stopping video track for the old version")
                activeCall?.connection.senders[1].track = nil
                ChatModel.shared.activeCall?.localMediaSources.camera = false
                (activeCall?.localCamera as? RTCCameraVideoCapturer)?.stopCapture()
                activeCall?.localCamera = nil
                activeCall?.localVideoTrack = nil
            }
        }
    }

    func addLocalRenderer(_ renderer: RTCEAGLVideoView) {
        if let activeCall {
            if let track = activeCall.localVideoTrack {
                track.add(renderer)
            }
        } else if let notConnectedCall {
            if let track = notConnectedCall.localCameraAndTrack?.1 {
                track.add(renderer)
            }
        }
        // To get width and height of a frame, see videoView(videoView:, didChangeVideoSize)
        renderer.delegate = self
    }

    func removeLocalRenderer(_ renderer: RTCEAGLVideoView) {
        if let activeCall {
            if let track = activeCall.localVideoTrack {
                track.remove(renderer)
            }
        } else if let notConnectedCall {
            if let track = notConnectedCall.localCameraAndTrack?.1 {
                track.remove(renderer)
            }
        }
        renderer.delegate = nil
    }

    func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) {
        guard size.height > 0 else { return }
        localRendererAspectRatio.wrappedValue = size.width / size.height
    }
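
    /// Attaches local tracks to the connection. For incoming calls four transceivers are created
    /// in a fixed order (mic, camera, screen audio, screen video), so their mids are "0"..."3" and
    /// `mediaSourceFromTransceiverMid` can map them back. For peers on the old protocol version
    /// only two transceivers are used and tracks are added to the `micCamera` stream.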
    func setupLocalTracks(_ incomingCall: Bool, _ call: Call) {
        let pc = call.connection
        let transceivers = call.connection.transceivers
        let audioTrack = call.localAudioTrack
        let videoTrack = call.localVideoTrack

        if incomingCall {
            let micCameraInit = RTCRtpTransceiverInit()
            // streamIds required for old versions which add tracks from stream, not from track property
            micCameraInit.streamIds = ["micCamera"]

            let screenAudioVideoInit = RTCRtpTransceiverInit()
            screenAudioVideoInit.streamIds = ["screenAudioVideo"]

            // incoming call, no transceivers yet. But they should be added in order: mic, camera, screen audio, screen video
            // mid = 0, mic
            if let audioTrack {
                pc.addTransceiver(with: audioTrack, init: micCameraInit)
            } else {
                pc.addTransceiver(of: .audio, init: micCameraInit)
            }
            // mid = 1, camera
            if let videoTrack {
                pc.addTransceiver(with: videoTrack, init: micCameraInit)
            } else {
                pc.addTransceiver(of: .video, init: micCameraInit)
            }
            // mid = 2, screenAudio
            pc.addTransceiver(of: .audio, init: screenAudioVideoInit)
            // mid = 3, screenVideo
            pc.addTransceiver(of: .video, init: screenAudioVideoInit)
        } else {
            // new version
            if transceivers.count > 2 {
                // Outgoing call. All transceivers are ready. Don't addTrack() because it will create new transceivers, replace existing (nil) tracks
                transceivers
                    .first(where: { elem in mediaSourceFromTransceiverMid(elem.mid) == .mic })?
                    .sender.track = audioTrack
                transceivers
                    .first(where: { elem in mediaSourceFromTransceiverMid(elem.mid) == .camera })?
                    .sender.track = videoTrack
            } else {
                // old version, only two transceivers
                if let audioTrack {
                    pc.add(audioTrack, streamIds: ["micCamera"])
                } else {
                    // it's important to have any track in order to be able to turn it on again (currently it's off)
                    let sender = pc.add(createAudioTrack(), streamIds: ["micCamera"])
                    sender?.track = nil
                }
                if let videoTrack {
                    pc.add(videoTrack, streamIds: ["micCamera"])
                } else {
                    // it's important to have any track in order to be able to turn it on again (currently it's off)
                    let localVideoSource = WebRTCClient.factory.videoSource()
                    let localVideoTrack = WebRTCClient.factory.videoTrack(with: localVideoSource, trackId: "video0")
                    let sender = pc.add(localVideoTrack, streamIds: ["micCamera"])
                    sender?.track = nil
                }
            }
        }
    }

    func mediaSourceFromTransceiverMid(_ mid: String?) -> CallMediaSource {
        switch mid {
        case "0":
            return .mic
        case "1":
            return .camera
        case "2":
            return .screenAudio
        case "3":
            return .screenVideo
        default:
            return .unknown
        }
    }

    // Should be called after local description is set
    func setupEncryptionForLocalTracks(_ call: Call) {
        if let encryptor = call.frameEncryptor {
            call.connection.senders.forEach { $0.setRtcFrameEncryptor(encryptor) }
        }
    }
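
    /// Frame encryption callbacks (RTCFrameEncryptorDelegate / RTCFrameDecryptorDelegate) for
    /// end-to-end encrypted calls. The first bytes of each frame are left in the clear
    /// (1 byte for audio, 10 for video key frames, 3 for other video frames); the remainder is
    /// processed in place by chat_encrypt_media/chat_decrypt_media, with `ivTagBytes` accounting
    /// for the IV and authentication tag added by encryption.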
    func frameDecryptor(_ decryptor: RTCFrameDecryptor, mediaType: RTCRtpMediaType, withFrame encrypted: Data) -> Data? {
        guard encrypted.count > 0 else { return nil }
        if var key: [CChar] = activeCall?.aesKey?.cString(using: .utf8),
           let pointer: UnsafeMutableRawPointer = malloc(encrypted.count) {
            memcpy(pointer, (encrypted as NSData).bytes, encrypted.count)
            let isKeyFrame = encrypted[0] & 1 == 0
            let clearTextBytesSize = mediaType.rawValue == 0 ? 1 : isKeyFrame ? 10 : 3
            logCrypto("decrypt", chat_decrypt_media(&key, pointer.advanced(by: clearTextBytesSize), Int32(encrypted.count - clearTextBytesSize)))
            return Data(bytes: pointer, count: encrypted.count - WebRTCClient.ivTagBytes)
        } else {
            return nil
        }
    }

    func frameEncryptor(_ encryptor: RTCFrameEncryptor, mediaType: RTCRtpMediaType, withFrame unencrypted: Data) -> Data? {
        guard unencrypted.count > 0 else { return nil }
        if var key: [CChar] = activeCall?.aesKey?.cString(using: .utf8),
           let pointer: UnsafeMutableRawPointer = malloc(unencrypted.count + WebRTCClient.ivTagBytes) {
            memcpy(pointer, (unencrypted as NSData).bytes, unencrypted.count)
            let isKeyFrame = unencrypted[0] & 1 == 0
            let clearTextBytesSize = mediaType.rawValue == 0 ? 1 : isKeyFrame ? 10 : 3
            logCrypto("encrypt", chat_encrypt_media(chat_ctrl, &key, pointer.advanced(by: clearTextBytesSize), Int32(unencrypted.count + WebRTCClient.ivTagBytes - clearTextBytesSize)))
            return Data(bytes: pointer, count: unencrypted.count + WebRTCClient.ivTagBytes)
        } else {
            return nil
        }
    }

    private func logCrypto(_ op: String, _ r: UnsafeMutablePointer<CChar>?) {
        if let r = r {
            let err = fromCString(r)
            if err != "" {
                logger.error("\(op) error: \(err)")
//            } else {
//                logger.debug("\(op) ok")
            }
        }
    }

    func addRemoteCameraRenderer(_ renderer: RTCVideoRenderer) {
        if activeCall?.remoteVideoTrack != nil {
            activeCall?.remoteVideoTrack?.add(renderer)
        } else {
            cameraRenderers.append(renderer)
        }
    }

    func removeRemoteCameraRenderer(_ renderer: RTCVideoRenderer) {
        if activeCall?.remoteVideoTrack != nil {
            activeCall?.remoteVideoTrack?.remove(renderer)
        } else {
            cameraRenderers.removeAll(where: { $0.isEqual(renderer) })
        }
    }

    func addRemoteScreenRenderer(_ renderer: RTCVideoRenderer) {
        if activeCall?.remoteScreenVideoTrack != nil {
            activeCall?.remoteScreenVideoTrack?.add(renderer)
        } else {
            screenRenderers.append(renderer)
        }
    }

    func removeRemoteScreenRenderer(_ renderer: RTCVideoRenderer) {
        if activeCall?.remoteScreenVideoTrack != nil {
            activeCall?.remoteScreenVideoTrack?.remove(renderer)
        } else {
            screenRenderers.removeAll(where: { $0.isEqual(renderer) })
        }
    }
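
    /// Starts local camera capture. In the simulator a file capturer replays `sounds/video.mp4`;
    /// on a device it picks a format with 1280px height when available (otherwise 480-1280,
    /// then larger) and caps the frame rate at 24 fps.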
    func startCaptureLocalVideo(_ device: AVCaptureDevice.Position?, _ capturer: RTCVideoCapturer?) {
        #if targetEnvironment(simulator)
        guard
            let capturer = (activeCall?.localCamera ?? notConnectedCall?.localCameraAndTrack?.0) as? RTCFileVideoCapturer
        else {
            logger.error("Unable to work with a file capturer")
            return
        }
        capturer.stopCapture()
        // Drag a video file named `video.mp4` into the `sounds` directory of the project from any other path in the filesystem
        capturer.startCapturing(fromFileNamed: "sounds/video.mp4")
        #else
        guard
            let capturer = capturer as? RTCCameraVideoCapturer,
            let camera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == device })
        else {
            logger.error("Unable to find a camera or local track")
            return
        }

        let supported = RTCCameraVideoCapturer.supportedFormats(for: camera)
        let height: (AVCaptureDevice.Format) -> Int32 = { (format: AVCaptureDevice.Format) in CMVideoFormatDescriptionGetDimensions(format.formatDescription).height }
        let format = supported.first(where: { height($0) == 1280 })
            ?? supported.first(where: { height($0) >= 480 && height($0) < 1280 })
            ?? supported.first(where: { height($0) > 1280 })
        guard
            let format = format,
            let fps = format.videoSupportedFrameRateRanges.max(by: { $0.maxFrameRate < $1.maxFrameRate })
        else {
            logger.error("Unable to find any format for camera or to choose FPS")
            return
        }
        logger.debug("Format for camera is \(format.description)")

        capturer.stopCapture()
        capturer.startCapture(with: camera,
                              format: format,
                              fps: Int(min(24, fps.maxFrameRate)))
        #endif
    }

    private func createAudioTrack() -> RTCAudioTrack {
        let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
        let audioSource = WebRTCClient.factory.audioSource(with: audioConstrains)
        let audioTrack = WebRTCClient.factory.audioTrack(with: audioSource, trackId: "audio0")
        return audioTrack
    }

    private func createVideoTrackAndStartCapture(_ device: AVCaptureDevice.Position) -> (RTCVideoCapturer, RTCVideoTrack) {
        let localVideoSource = WebRTCClient.factory.videoSource()
        #if targetEnvironment(simulator)
        let localCamera = RTCFileVideoCapturer(delegate: localVideoSource)
        #else
        let localCamera = RTCCameraVideoCapturer(delegate: localVideoSource)
        #endif
        let localVideoTrack = WebRTCClient.factory.videoTrack(with: localVideoSource, trackId: "video0")
        startCaptureLocalVideo(device, localCamera)
        return (localCamera, localVideoTrack)
    }
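
    /// Tears down the call: stops capture, closes the connection, detaches delegates and restores
    /// the audio session. On iOS 15 teardown runs on a background queue to work around
    /// `connection.close()` getting locked up.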
    func endCall() {
        if #available(iOS 16.0, *) {
            _endCall()
        } else {
            // Fixes `connection.close()` getting locked up in iOS 15
            DispatchQueue.global(qos: .utility).async { self._endCall() }
        }
    }

    private func _endCall() {
        (notConnectedCall?.localCameraAndTrack?.0 as? RTCCameraVideoCapturer)?.stopCapture()
        guard let call = activeCall else { return }
        logger.debug("WebRTCClient: ending the call")
        call.connection.close()
        call.connection.delegate = nil
        call.frameEncryptor?.delegate = nil
        call.frameDecryptor?.delegate = nil
        (call.localCamera as? RTCCameraVideoCapturer)?.stopCapture()
        audioSessionToDefaults()
        activeCall = nil
    }

    func untilIceComplete(timeoutMs: UInt64, stepMs: UInt64, action: @escaping () async -> Void) async {
        var t: UInt64 = 0
        repeat {
            _ = try? await Task.sleep(nanoseconds: stepMs * 1000000)
            t += stepMs
            await action()
        } while t < timeoutMs && activeCall?.connection.iceGatheringState != .complete
    }
}
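
// Async wrappers over the callback-based WebRTC offer/answer API: each call creates the SDP,
// sets it as the local description, and resolves to either the session description or the error.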
extension WebRTC.RTCPeerConnection {
    func mediaConstraints() -> RTCMediaConstraints {
        RTCMediaConstraints(
            mandatoryConstraints: [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueTrue,
                                   kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue],
            optionalConstraints: nil)
    }

    func offer() async -> (RTCSessionDescription?, Error?) {
        await withCheckedContinuation { cont in
            offer(for: mediaConstraints()) { (sdp, error) in
                self.processSDP(cont, sdp, error)
            }
        }
    }

    func answer() async -> (RTCSessionDescription?, Error?) {
        await withCheckedContinuation { cont in
            answer(for: mediaConstraints()) { (sdp, error) in
                self.processSDP(cont, sdp, error)
            }
        }
    }

    private func processSDP(_ cont: CheckedContinuation<(RTCSessionDescription?, Error?), Never>, _ sdp: RTCSessionDescription?, _ error: Error?) {
        if let sdp = sdp {
            self.setLocalDescription(sdp, completionHandler: { (error) in
                if let error = error {
                    cont.resume(returning: (nil, error))
                } else {
                    cont.resume(returning: (sdp, nil))
                }
            })
        } else {
            cont.resume(returning: (nil, error))
        }
    }
}

extension WebRTCClient: RTCPeerConnectionDelegate {
    func peerConnection(_ connection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
        logger.debug("Connection new signaling state: \(stateChanged.rawValue)")
    }

    func peerConnection(_ connection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
        logger.debug("Connection did add stream")
    }

    func peerConnection(_ connection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
        logger.debug("Connection did remove stream")
    }

    func peerConnectionShouldNegotiate(_ connection: RTCPeerConnection) {
        logger.debug("Connection should negotiate")
    }
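
    // When a transceiver starts receiving, store the remote track for its media source (derived
    // from the mid), attach any renderers registered before the track arrived, and start the
    // stats-based mute/unmute listener for it.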
    func peerConnection(_ peerConnection: RTCPeerConnection, didStartReceivingOn transceiver: RTCRtpTransceiver) {
        if let track = transceiver.receiver.track {
            DispatchQueue.main.async {
                // Doesn't work for outgoing video call (audio in video call works ok still, same as incoming call)
                // if let decryptor = self.activeCall?.frameDecryptor {
                //     transceiver.receiver.setRtcFrameDecryptor(decryptor)
                // }
                let source = self.mediaSourceFromTransceiverMid(transceiver.mid)
                switch source {
                case .mic: self.activeCall?.remoteAudioTrack = track as? RTCAudioTrack
                case .camera:
                    self.activeCall?.remoteVideoTrack = track as? RTCVideoTrack
                    self.cameraRenderers.forEach({ renderer in
                        self.activeCall?.remoteVideoTrack?.add(renderer)
                    })
                    self.cameraRenderers.removeAll()
                case .screenAudio: self.activeCall?.remoteScreenAudioTrack = track as? RTCAudioTrack
                case .screenVideo:
                    self.activeCall?.remoteScreenVideoTrack = track as? RTCVideoTrack
                    self.screenRenderers.forEach({ renderer in
                        self.activeCall?.remoteScreenVideoTrack?.add(renderer)
                    })
                    self.screenRenderers.removeAll()
                case .unknown: ()
                }
            }
            self.setupMuteUnmuteListener(transceiver, track)
        }
    }

    func peerConnection(_ connection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        debugPrint("Connection new connection state: \(newState.toString() ?? "" + newState.rawValue.description) \(connection.receivers)")

        guard let connectionStateString = newState.toString(),
              let iceConnectionStateString = connection.iceConnectionState.toString(),
              let iceGatheringStateString = connection.iceGatheringState.toString(),
              let signalingStateString = connection.signalingState.toString()
        else {
            return
        }
        Task {
            await sendCallResponse(.init(
                corrId: nil,
                resp: .connection(state: ConnectionState(
                    connectionState: connectionStateString,
                    iceConnectionState: iceConnectionStateString,
                    iceGatheringState: iceGatheringStateString,
                    signalingState: signalingStateString)
                ),
                command: nil)
            )

            switch newState {
            case .checking:
                if let frameDecryptor = activeCall?.frameDecryptor {
                    connection.receivers.forEach { $0.setRtcFrameDecryptor(frameDecryptor) }
                }
                let enableSpeaker: Bool = ChatModel.shared.activeCall?.localMediaSources.hasVideo == true
                setSpeakerEnabledAndConfigureSession(enableSpeaker)
            case .connected: sendConnectedEvent(connection)
            case .disconnected, .failed: endCall()
            default: ()
            }
        }
    }

    func peerConnection(_ connection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
        logger.debug("connection new gathering state: \(newState.toString() ?? "" + newState.rawValue.description)")
    }

    func peerConnection(_ connection: RTCPeerConnection, didGenerate candidate: WebRTC.RTCIceCandidate) {
        // logger.debug("Connection generated candidate \(candidate.debugDescription)")
        Task {
            await self.activeCall?.iceCandidates.append(candidate.toCandidate(nil, nil))
        }
    }

    func peerConnection(_ connection: RTCPeerConnection, didRemove candidates: [WebRTC.RTCIceCandidate]) {
        logger.debug("Connection did remove candidates")
    }

    func peerConnection(_ connection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {}

    func peerConnection(_ connection: RTCPeerConnection,
                        didChangeLocalCandidate local: WebRTC.RTCIceCandidate,
                        remoteCandidate remote: WebRTC.RTCIceCandidate,
                        lastReceivedMs lastDataReceivedMs: Int32,
                        changeReason reason: String) {
        // logger.debug("Connection changed candidate \(reason) \(remote.debugDescription) \(remote.description)")
    }
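
    // Reports the succeeded ICE candidate pair (candidate types and protocol) from the
    // connection statistics once the call reaches the connected state.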
    func sendConnectedEvent(_ connection: WebRTC.RTCPeerConnection) {
        connection.statistics { (stats: RTCStatisticsReport) in
            stats.statistics.values.forEach { stat in
                // logger.debug("Stat \(stat.debugDescription)")
                if stat.type == "candidate-pair", stat.values["state"] as? String == "succeeded",
                   let localId = stat.values["localCandidateId"] as? String,
                   let remoteId = stat.values["remoteCandidateId"] as? String,
                   let localStats = stats.statistics[localId],
                   let remoteStats = stats.statistics[remoteId]
                {
                    Task {
                        await self.sendCallResponse(.init(
                            corrId: nil,
                            resp: .connected(connectionInfo: ConnectionInfo(
                                localCandidate: RTCIceCandidate(
                                    candidateType: RTCIceCandidateType.init(rawValue: localStats.values["candidateType"] as! String),
                                    protocol: localStats.values["protocol"] as? String,
                                    sdpMid: nil,
                                    sdpMLineIndex: nil,
                                    candidate: ""
                                ),
                                remoteCandidate: RTCIceCandidate(
                                    candidateType: RTCIceCandidateType.init(rawValue: remoteStats.values["candidateType"] as! String),
                                    protocol: remoteStats.values["protocol"] as? String,
                                    sdpMid: nil,
                                    sdpMLineIndex: nil,
                                    candidate: ""))),
                            command: nil)
                        )
                    }
                }
            }
        }
    }
}

extension WebRTCClient {
    static func isAuthorized(for type: AVMediaType) async -> Bool {
        let status = AVCaptureDevice.authorizationStatus(for: type)
        var isAuthorized = status == .authorized
        if status == .notDetermined {
            isAuthorized = await AVCaptureDevice.requestAccess(for: type)
        }
        return isAuthorized
    }

    static func showUnauthorizedAlert(for type: AVMediaType) {
        if type == .audio {
            AlertManager.shared.showAlert(Alert(
                title: Text("No permission to record speech"),
                message: Text("To record speech please grant permission to use Microphone."),
                primaryButton: .default(Text("Open Settings")) {
                    DispatchQueue.main.async {
                        UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, options: [:], completionHandler: nil)
                    }
                },
                secondaryButton: .cancel()
            ))
        } else if type == .video {
            AlertManager.shared.showAlert(Alert(
                title: Text("No permission to record video"),
                message: Text("To record video please grant permission to use Camera."),
                primaryButton: .default(Text("Open Settings")) {
                    DispatchQueue.main.async {
                        UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, options: [:], completionHandler: nil)
                    }
                },
                secondaryButton: .cancel()
            ))
        }
    }
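
    // Configures the shared RTCAudioSession on a dedicated queue: .videoChat mode with speaker
    // output when enabled, .voiceChat otherwise; a connected external input device (unless
    // explicitly skipped) takes priority over the speaker override.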
    func setSpeakerEnabledAndConfigureSession(_ enabled: Bool, skipExternalDevice: Bool = false) {
        logger.debug("WebRTCClient: configuring session with speaker enabled \(enabled)")
        audioQueue.async { [weak self] in
            guard let self = self else { return }
            self.rtcAudioSession.lockForConfiguration()
            defer {
                self.rtcAudioSession.unlockForConfiguration()
            }
            do {
                let hasExternalAudioDevice = self.rtcAudioSession.session.hasExternalAudioDevice()
                if enabled {
                    try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue, with: [.defaultToSpeaker, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
                    try self.rtcAudioSession.setMode(AVAudioSession.Mode.videoChat.rawValue)
                    if hasExternalAudioDevice && !skipExternalDevice, let preferred = self.rtcAudioSession.session.preferredInputDevice() {
                        try self.rtcAudioSession.setPreferredInput(preferred)
                    } else {
                        try self.rtcAudioSession.overrideOutputAudioPort(.speaker)
                    }
                } else {
                    try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue, with: [.allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
                    try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
                    try self.rtcAudioSession.overrideOutputAudioPort(.none)
                }
                if hasExternalAudioDevice && !skipExternalDevice {
                    logger.debug("WebRTCClient: configuring session with external device available, skip configuring speaker")
                }
                try self.rtcAudioSession.setActive(true)
                logger.debug("WebRTCClient: configuring session with speaker enabled \(enabled) success")
            } catch let error {
                logger.debug("Error configuring AVAudioSession: \(error)")
            }
        }
    }

    func audioSessionToDefaults() {
        logger.debug("WebRTCClient: audioSession to defaults")
        audioQueue.async { [weak self] in
            guard let self = self else { return }
            self.rtcAudioSession.lockForConfiguration()
            defer {
                self.rtcAudioSession.unlockForConfiguration()
            }
            do {
                try self.rtcAudioSession.setCategory(AVAudioSession.Category.ambient.rawValue)
                try self.rtcAudioSession.setMode(AVAudioSession.Mode.default.rawValue)
                try self.rtcAudioSession.overrideOutputAudioPort(.none)
                try self.rtcAudioSession.setActive(false)
                logger.debug("WebRTCClient: audioSession to defaults success")
            } catch let error {
                logger.debug("Error configuring AVAudioSession with defaults: \(error)")
            }
        }
    }
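
    // Local mic/camera toggles: tracks are created or dropped on demand and attached to the
    // matching transceiver sender of the active call (or stored on the not-yet-connected call);
    // the resulting state is mirrored into ChatModel.shared.activeCall.localMediaSources.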
    @MainActor
    func setAudioEnabled(_ enabled: Bool) {
        if activeCall != nil {
            activeCall?.localAudioTrack = enabled ? createAudioTrack() : nil
            activeCall?.connection.transceivers.first(where: { t in mediaSourceFromTransceiverMid(t.mid) == .mic })?.sender.track = activeCall?.localAudioTrack
        } else if notConnectedCall != nil {
            notConnectedCall?.audioTrack = enabled ? createAudioTrack() : nil
        }
        ChatModel.shared.activeCall?.localMediaSources.mic = enabled
    }

    @MainActor
    func setCameraEnabled(_ enabled: Bool) {
        if let call = activeCall {
            if enabled {
                if call.localVideoTrack == nil {
                    let device = activeCall?.device ?? notConnectedCall?.device ?? .front
                    let (camera, track) = createVideoTrackAndStartCapture(device)
                    activeCall?.localCamera = camera
                    activeCall?.localVideoTrack = track
                }
            } else {
                (call.localCamera as? RTCCameraVideoCapturer)?.stopCapture()
                activeCall?.localCamera = nil
                activeCall?.localVideoTrack = nil
            }
            call.connection.transceivers
                .first(where: { t in mediaSourceFromTransceiverMid(t.mid) == .camera })?
                .sender.track = activeCall?.localVideoTrack
            ChatModel.shared.activeCall?.localMediaSources.camera = activeCall?.localVideoTrack != nil
        } else if let call = notConnectedCall {
            if enabled {
                let device = activeCall?.device ?? notConnectedCall?.device ?? .front
                notConnectedCall?.localCameraAndTrack = createVideoTrackAndStartCapture(device)
            } else {
                (call.localCameraAndTrack?.0 as? RTCCameraVideoCapturer)?.stopCapture()
                notConnectedCall?.localCameraAndTrack = nil
            }
            ChatModel.shared.activeCall?.localMediaSources.camera = notConnectedCall?.localCameraAndTrack != nil
        }
    }

    func flipCamera() {
        let device = activeCall?.device ?? notConnectedCall?.device
        if activeCall != nil {
            activeCall?.device = device == .front ? .back : .front
        } else {
            notConnectedCall?.device = device == .front ? .back : .front
        }
        startCaptureLocalVideo(
            activeCall?.device ?? notConnectedCall?.device,
            (activeCall?.localCamera ?? notConnectedCall?.localCameraAndTrack?.0) as? RTCCameraVideoCapturer
        )
    }
}

extension AVAudioSession {
    func hasExternalAudioDevice() -> Bool {
        availableInputs?.allSatisfy({ $0.portType == .builtInMic }) != true
    }

    func preferredInputDevice() -> AVAudioSessionPortDescription? {
        // logger.debug("Preferred input device: \(String(describing: self.availableInputs?.filter({ $0.portType != .builtInMic })))")
        return availableInputs?.filter({ $0.portType != .builtInMic }).last
    }
}

struct CustomRTCSessionDescription: Codable {
    public var type: RTCSdpType?
    public var sdp: String?
}

enum RTCSdpType: String, Codable {
    case answer
    case offer
    case pranswer
    case rollback
}

extension RTCIceCandidate {
    func toWebRTCCandidate() -> WebRTC.RTCIceCandidate {
        WebRTC.RTCIceCandidate(
            sdp: candidate,
            sdpMLineIndex: Int32(sdpMLineIndex ?? 0),
            sdpMid: sdpMid
        )
    }
}

extension WebRTC.RTCIceCandidate {
    func toCandidate(_ candidateType: RTCIceCandidateType?, _ protocol: String?) -> RTCIceCandidate {
        RTCIceCandidate(
            candidateType: candidateType,
            protocol: `protocol`,
            sdpMid: sdpMid,
            sdpMLineIndex: Int(sdpMLineIndex),
            candidate: sdp
        )
    }
}

extension [RTCIceServer] {
    func toWebRTCIceServers() -> [WebRTC.RTCIceServer] {
        self.map {
            WebRTC.RTCIceServer(
                urlStrings: $0.urls,
                username: $0.username,
                credential: $0.credential
            ) }
    }
}

extension RTCSdpType {
    func toWebRTCSdpType() -> WebRTC.RTCSdpType {
        switch self {
        case .answer: return WebRTC.RTCSdpType.answer
        case .offer: return WebRTC.RTCSdpType.offer
        case .pranswer: return WebRTC.RTCSdpType.prAnswer
        case .rollback: return WebRTC.RTCSdpType.rollback
        }
    }
}

extension WebRTC.RTCSdpType {
    func toSdpType() -> RTCSdpType {
        switch self {
        case .answer: return RTCSdpType.answer
        case .offer: return RTCSdpType.offer
        case .prAnswer: return RTCSdpType.pranswer
        case .rollback: return RTCSdpType.rollback
        default: return RTCSdpType.answer // should never be here
        }
    }
}

extension RTCPeerConnectionState {
    func toString() -> String? {
        switch self {
        case .new: return "new"
        case .connecting: return "connecting"
        case .connected: return "connected"
        case .failed: return "failed"
        case .disconnected: return "disconnected"
        case .closed: return "closed"
        default: return nil // unknown
        }
    }
}

extension RTCIceConnectionState {
    func toString() -> String? {
        switch self {
        case .new: return "new"
        case .checking: return "checking"
        case .connected: return "connected"
        case .completed: return "completed"
        case .failed: return "failed"
        case .disconnected: return "disconnected"
        case .closed: return "closed"
        default: return nil // unknown or unused on the other side
        }
    }
}

extension RTCIceGatheringState {
    func toString() -> String? {
        switch self {
        case .new: return "new"
        case .gathering: return "gathering"
        case .complete: return "complete"
        default: return nil // unknown
        }
    }
}

extension RTCSignalingState {
    func toString() -> String? {
        switch self {
        case .stable: return "stable"
        case .haveLocalOffer: return "have-local-offer"
        case .haveLocalPrAnswer: return "have-local-pranswer"
        case .haveRemoteOffer: return "have-remote-offer"
        case .haveRemotePrAnswer: return "have-remote-pranswer"
        case .closed: return "closed"
        default: return nil // unknown
        }
    }
}