ios: native WebRTC (#1933)

* ios: native WebRTC

* add video showing

* make async functions work better with the main thread

* wrapped code in main actor, just in case

* small change

* a little better

* enable relay

* removed unused code

* allow switching calls

* testing

* enable encryption

* testing more

* another test

* one more test

* fix remote unencrypted video

* deleted unused code related to PixelBuffer

* added MediaEncryption playground

* better playground

* better playground

* fixes

* use new encryption api

* media encryption works

* small changes

* added lib dependency

* use commit reference for lib instead of version

* video format, PIP size

* remove sample.js

---------

Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
Authored by Stanislav Dmitrenko on 2023-03-02 16:17:01 +03:00, committed by GitHub
parent 01acbb970a
commit 54020250dc
15 changed files with 1046 additions and 279 deletions


@@ -0,0 +1,78 @@
import UIKit
import SimpleXChat
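// Standalone playground check for native call media encryption:
// it builds a fake 20-byte frame, encrypts it in place with chat_encrypt_media
// (appending the 28-byte IV+tag), decrypts it back with chat_decrypt_media,
// and checks that the encrypted buffer differs from the source while the
// decrypted buffer matches it.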
hs_init(0, nil)
let totalBytes = 20
let ivTagBytes = 28
var base: UnsafeMutableRawPointer = malloc(totalBytes)
let assume = base.assumingMemoryBound(to: UInt8.self)
assume[0] = 0 // key frame
for i in 1..<totalBytes {
assume[i] = UInt8(i)
}
let unencrypted = Data(bytesNoCopy: base, count: totalBytes, deallocator: .none) // Data (not NSData) so the bytes can be subscripted below
let aesKey = "PI-bV-FTgRqZM_lsDH9T21a0yRVMsvLFmvilJ9Ssk3g="
if var key: [CChar] = aesKey.cString(using: .utf8),
let pointer: UnsafeMutableRawPointer = malloc(unencrypted.count + ivTagBytes) {
debugPrint("AesKey \(aesKey), cString \(key)")
memcpy(pointer, (unencrypted as NSData).bytes, unencrypted.count)
let source_ = Data(bytes: pointer, count: unencrypted.count)
//let raw: UInt8 = (unencrypted[0] as UInt8) | ((unencrypted[1] as UInt8) << 8) | ((unencrypted[2] as UInt8) << 16)
let isKeyFrame = unencrypted[0] & 1 == 0
debugPrint("Is key frame \(isKeyFrame)")
let clearTextBytesSize = isKeyFrame ? 10 : 3
for i in 0..<48 {
debugPrint("Before \(i) \(unencrypted[i])")
}
if let res = chat_encrypt_media(&key, pointer.advanced(by: clearTextBytesSize), Int32(unencrypted.count + ivTagBytes - clearTextBytesSize)) {
printError("encrypt", res)
}
for i in 0..<48 {
debugPrint("After \(i) \(pointer.assumingMemoryBound(to: UInt8.self)[i])")
}
let res_ = Data(bytes: pointer, count: unencrypted.count + ivTagBytes)
print(source_ == res_)
// let encryptedBytes = [1, 1, 2, 3, 4, 5, 6, 7, 8, 9,
// 250, 245, 192, 217, 164, 251, 23, 40, 36, 214,
// 84, 55, 114, 237, 153, 113, 182, 123, 214, 189,
// 35, 196, 148, 164, 235, 195, 122, 157, 141, 235,
// 5, 92, 44, 35, 37, 244, 90, 254]
// var base1: UnsafeMutableRawPointer = malloc(totalBytes + ivTagBytes)
//
// let assume1 = base1.assumingMemoryBound(to: UInt8.self)
// for i in 0..<(totalBytes + ivTagBytes) {
// assume1[i] = UInt8(encryptedBytes[i])
// }
// let encrypted = NSData(bytesNoCopy: base1, length: totalBytes + ivTagBytes)
// memcpy(pointer, (encrypted as NSData).bytes, encrypted.count)
// for i in 0..<48 {
// debugPrint("Before decrypt \(i) \(pointer.assumingMemoryBound(to: UInt8.self)[i])")
// }
if let res = chat_decrypt_media(&key, pointer.advanced(by: clearTextBytesSize), Int32(unencrypted.count + ivTagBytes - clearTextBytesSize)) {
printError("decrypt", res)
}
let decrypted_ = Data(bytes: pointer, count: unencrypted.count)
for i in 0..<48 {
debugPrint("After decrypt \(i) \(pointer.assumingMemoryBound(to: UInt8.self)[i])")
}
print(source_ == decrypted_)
}
func printError(_ op: String, _ res: UnsafeMutablePointer<CChar>) {
let err = fromCString(res)
if err == "" {
print("\(op) ok")
} else {
print("\(op) error: \(err)")
}
}


@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<playground version='5.0' target-platform='ios' buildActiveScheme='true' importAppTypes='true'>
<timeline fileName='timeline.xctimeline'/>
</playground>


@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:Encryption.playground">
</FileRef>
</Workspace>


@@ -1224,7 +1224,6 @@ func processReceivedMsg(_ res: ChatResponse) async {
                    offer: offer.rtcSession,
                    iceCandidates: offer.rtcIceCandidates,
                    media: callType.media, aesKey: sharedKey,
-                    useWorker: true,
                    iceServers: iceServers,
                    relay: useRelay
                )


@@ -13,37 +13,60 @@ import SimpleXChat

struct ActiveCallView: View {
    @EnvironmentObject var m: ChatModel
    @ObservedObject var call: Call
-    @State private var rtcWebView: WKWebView? = nil
-    @State private var webViewMsg: WVAPIMessage? = nil
+    @State private var client: WebRTCClient? = nil
+    @State private var activeCall: WebRTCClient.Call? = nil
+    @State private var localRendererAspectRatio: CGFloat? = nil

    var body: some View {
        ZStack(alignment: .bottom) {
-            WebRTCView(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg)
-                .onAppear() { sendCommandToWebView() }
-                .onChange(of: m.callCommand) { _ in sendCommandToWebView() }
-                .onChange(of: rtcWebView) { _ in sendCommandToWebView() }
-                .onChange(of: webViewMsg) { _ in processWebViewMessage() }
-                .background(.black)
-            if let call = m.activeCall, let webView = rtcWebView {
-                ActiveCallOverlay(call: call, webView: webView)
+            if let client = client, [call.peerMedia, call.localMedia].contains(.video), activeCall != nil {
+                GeometryReader { g in
+                    let width = g.size.width * 0.3
+                    ZStack(alignment: .topTrailing) {
+                        CallViewRemote(client: client, activeCall: $activeCall)
+                        CallViewLocal(client: client, activeCall: $activeCall, localRendererAspectRatio: $localRendererAspectRatio)
+                            .cornerRadius(10)
+                            .frame(width: width, height: width / (localRendererAspectRatio ?? 1))
+                            .padding([.top, .trailing], 17)
+                    }
+                }
+            }
+            if let call = m.activeCall, let client = client {
+                ActiveCallOverlay(call: call, client: client)
            }
        }
+        .onAppear {
+            if client == nil {
+                client = WebRTCClient($activeCall, { msg in await MainActor.run { processRtcMessage(msg: msg) } }, $localRendererAspectRatio)
+                sendCommandToClient()
+            }
+        }
+        .onDisappear {
+            client?.endCall()
+        }
+        .onChange(of: m.callCommand) { _ in sendCommandToClient()}
+        .background(.black)
        .preferredColorScheme(.dark)
    }

-    private func sendCommandToWebView() {
-        if m.activeCall != nil,
-            let wv = rtcWebView,
+    private func sendCommandToClient() {
+        if call == m.activeCall,
+            m.activeCall != nil,
+            let client = client,
            let cmd = m.callCommand {
            m.callCommand = nil
-            sendCallCommand(wv, cmd)
+            logger.debug("sendCallCommand: \(cmd.cmdType)")
+            Task {
+                await client.sendCallCommand(command: cmd)
+            }
        }
    }

-    private func processWebViewMessage() {
-        if let msg = webViewMsg,
-            let call = m.activeCall,
-            let webView = rtcWebView {
+    @MainActor
+    private func processRtcMessage(msg: WVAPIMessage) {
+        if call == m.activeCall,
+            let call = m.activeCall,
+            let client = client {
            logger.debug("ActiveCallView: response \(msg.resp.respType)")
            switch msg.resp {
            case let .capabilities(capabilities):
@@ -54,7 +77,7 @@ struct ActiveCallView: View {
                    } catch {
                        logger.error("apiSendCallInvitation \(responseError(error))")
                    }
-                    DispatchQueue.main.async {
+                    await MainActor.run {
                        call.callState = .invitationSent
                        call.localCapabilities = capabilities
                    }
@@ -67,7 +90,7 @@ struct ActiveCallView: View {
                    } catch {
                        logger.error("apiSendCallOffer \(responseError(error))")
                    }
-                    DispatchQueue.main.async {
+                    await MainActor.run {
                        call.callState = .offerSent
                        call.localCapabilities = capabilities
                    }
@@ -79,7 +102,7 @@ struct ActiveCallView: View {
                    } catch {
                        logger.error("apiSendCallAnswer \(responseError(error))")
                    }
-                    DispatchQueue.main.async {
+                    await MainActor.run {
                        call.callState = .negotiated
                    }
                }
@@ -98,13 +121,10 @@ struct ActiveCallView: View {
//                        CallController.shared.reportOutgoingCall(call: call, connectedAt: nil)
//                    }
                    call.callState = .connected
-                    // CallKit doesn't work well with WKWebView
-                    // This is a hack to enable microphone in WKWebView after CallKit takes over it
-                    if CallController.useCallKit {
-                        DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
-                            m.callCommand = .camera(camera: call.localCamera)
-                        }
-                    }
+                }
+                if state.connectionState == "closed" {
+                    closeCallView(client)
+                    m.activeCall = nil
                }
                Task {
                    do {
@@ -117,7 +137,7 @@ struct ActiveCallView: View {
                call.callState = .connected
                call.connectionInfo = connectionInfo
            case .ended:
-                closeCallView(webView)
+                closeCallView(client)
                call.callState = .ended
                if let uuid = call.callkitUUID {
                    CallController.shared.endCall(callUUID: uuid)
@@ -126,17 +146,8 @@ struct ActiveCallView: View {
                switch msg.command {
                case .answer:
                    call.callState = .negotiated
-                case let .camera(camera):
-                    call.localCamera = camera
-                    Task {
-                        // This disables microphone if it was disabled before flipping the camera
-                        await webView.setMicrophoneCaptureState(call.audioEnabled ? .active : .muted)
-                        // This compensates for the bug on some devices when remote video does not appear
-                        // await webView.setCameraCaptureState(.muted)
-                        // await webView.setCameraCaptureState(call.videoEnabled ? .active : .muted)
-                    }
                case .end:
-                    closeCallView(webView)
+                    closeCallView(client)
                    m.activeCall = nil
                default: ()
                }
@@ -148,11 +159,9 @@ struct ActiveCallView: View {
        }
    }

-    private func closeCallView(_ webView: WKWebView) {
-        m.showCallView = false
-        Task {
-            await webView.setMicrophoneCaptureState(.muted)
-            await webView.setCameraCaptureState(.muted)
+    private func closeCallView(_ client: WebRTCClient) {
+        if m.activeCall != nil {
+            m.showCallView = false
        }
    }
}
@@ -160,7 +169,7 @@ struct ActiveCallView: View {
struct ActiveCallOverlay: View {
    @EnvironmentObject var chatModel: ChatModel
    @ObservedObject var call: Call
-    var webView: WKWebView
+    var client: WebRTCClient

    var body: some View {
        VStack {
@@ -210,6 +219,8 @@ struct ActiveCallOverlay: View {
                        toggleAudioButton()
                            .frame(maxWidth: .infinity, alignment: .leading)
                        endCallButton()
+                        toggleSpeakerButton()
+                            .frame(maxWidth: .infinity, alignment: .trailing)
                    }
                    .padding(.bottom, 60)
                    .padding(.horizontal, 48)
@@ -254,7 +265,7 @@ struct ActiveCallOverlay: View {
    private func toggleAudioButton() -> some View {
        controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
            Task {
-                await webView.setMicrophoneCaptureState(call.audioEnabled ? .muted : .active)
+                client.setAudioEnabled(!call.audioEnabled)
                DispatchQueue.main.async {
                    call.audioEnabled = !call.audioEnabled
                }
@@ -262,10 +273,21 @@ struct ActiveCallOverlay: View {
            }
        }

+    private func toggleSpeakerButton() -> some View {
+        controlButton(call, call.speakerEnabled ? "speaker.fill" : "speaker.slash") {
+            Task {
+                client.setSpeakerEnabledAndConfigureSession(!call.speakerEnabled)
+                DispatchQueue.main.async {
+                    call.speakerEnabled = !call.speakerEnabled
+                }
+            }
+        }
+    }
+
    private func toggleVideoButton() -> some View {
        controlButton(call, call.videoEnabled ? "video.fill" : "video.slash") {
            Task {
-                await webView.setCameraCaptureState(call.videoEnabled ? .muted : .active)
+                client.setVideoEnabled(!call.videoEnabled)
                DispatchQueue.main.async {
                    call.videoEnabled = !call.videoEnabled
                }
@@ -274,19 +296,10 @@ struct ActiveCallOverlay: View {
    }

    @ViewBuilder private func flipCameraButton() -> some View {
-        let cmd = WCallCommand.camera(camera: call.localCamera == .user ? .environment : .user)
        controlButton(call, "arrow.triangle.2.circlepath") {
-            if call.audioEnabled {
-                chatModel.callCommand = cmd
-            } else {
            Task {
-                // Microphone has to be enabled before flipping the camera to avoid prompt for user permission when getUserMedia is called in webview
-                await webView.setMicrophoneCaptureState(.active)
-                DispatchQueue.main.async {
-                    chatModel.callCommand = cmd
-                }
+                client.flipCamera()
            }
-            }
        }
    }

@@ -315,9 +328,9 @@ struct ActiveCallOverlay: View {
struct ActiveCallOverlay_Previews: PreviewProvider {
    static var previews: some View {
        Group{
-            ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .video), webView: WKWebView())
+            ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .video), client: WebRTCClient(Binding.constant(nil), { _ in }, Binding.constant(nil)))
                .background(.black)
-            ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .audio), webView: WKWebView())
+            ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .audio), client: WebRTCClient(Binding.constant(nil), { _ in }, Binding.constant(nil)))
                .background(.black)
        }
    }


@@ -12,7 +12,9 @@ import SimpleXChat

class CallManager {
    func newOutgoingCall(_ contact: Contact, _ media: CallMediaType) -> UUID {
        let uuid = UUID()
-        ChatModel.shared.activeCall = Call(direction: .outgoing, contact: contact, callkitUUID: uuid, callState: .waitCapabilities, localMedia: media)
+        let call = Call(direction: .outgoing, contact: contact, callkitUUID: uuid, callState: .waitCapabilities, localMedia: media)
+        call.speakerEnabled = media == .video
+        ChatModel.shared.activeCall = call
        return uuid
    }

@@ -20,7 +22,7 @@ class CallManager {
        let m = ChatModel.shared
        if let call = m.activeCall, call.callkitUUID == callUUID {
            m.showCallView = true
-            m.callCommand = .capabilities(media: call.localMedia, useWorker: true)
+            m.callCommand = .capabilities(media: call.localMedia)
            return true
        }
        return false
@@ -37,7 +39,7 @@ class CallManager {
    func answerIncomingCall(invitation: RcvCallInvitation) {
        let m = ChatModel.shared
        m.callInvitations.removeValue(forKey: invitation.contact.id)
-        m.activeCall = Call(
+        let call = Call(
            direction: .incoming,
            contact: invitation.contact,
            callkitUUID: invitation.callkitUUID,
@@ -45,6 +47,8 @@ class CallManager {
            localMedia: invitation.callType.media,
            sharedKey: invitation.sharedKey
        )
+        call.speakerEnabled = invitation.callType.media == .video
+        m.activeCall = call
        m.showCallView = true
        let useRelay = UserDefaults.standard.bool(forKey: DEFAULT_WEBRTC_POLICY_RELAY)
        let iceServers = getIceServers()
@@ -53,7 +57,6 @@ class CallManager {
        m.callCommand = .start(
            media: invitation.callType.media,
            aesKey: invitation.sharedKey,
-            useWorker: true,
            iceServers: iceServers,
            relay: useRelay
        )


@@ -0,0 +1,75 @@
//
// Created by Avently on 09.02.2023.
// Copyright (c) 2023 SimpleX Chat. All rights reserved.
//
import SwiftUI
import WebRTC
import SimpleXChat
struct CallViewRemote: UIViewRepresentable {
var client: WebRTCClient
var activeCall: Binding<WebRTCClient.Call?>
init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>) {
self.client = client
self.activeCall = activeCall
}
func makeUIView(context: Context) -> UIView {
let view = UIView()
if let call = activeCall.wrappedValue {
let remoteRenderer = RTCMTLVideoView(frame: view.frame)
remoteRenderer.videoContentMode = .scaleAspectFill
client.addRemoteRenderer(call, remoteRenderer)
addSubviewAndResize(remoteRenderer, into: view)
}
return view
}
func updateUIView(_ view: UIView, context: Context) {
logger.debug("CallView.updateUIView remote")
}
}
struct CallViewLocal: UIViewRepresentable {
var client: WebRTCClient
var activeCall: Binding<WebRTCClient.Call?>
var localRendererAspectRatio: Binding<CGFloat?>
init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, localRendererAspectRatio: Binding<CGFloat?>) {
self.client = client
self.activeCall = activeCall
self.localRendererAspectRatio = localRendererAspectRatio
}
func makeUIView(context: Context) -> UIView {
let view = UIView()
if let call = activeCall.wrappedValue {
let localRenderer = RTCEAGLVideoView(frame: .zero)
client.addLocalRenderer(call, localRenderer)
client.startCaptureLocalVideo(call)
addSubviewAndResize(localRenderer, into: view)
}
return view
}
func updateUIView(_ view: UIView, context: Context) {
logger.debug("CallView.updateUIView local")
}
}
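// Pins the renderer view to all edges of its container with visual-format constraints so it resizes with the SwiftUI frame.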
private func addSubviewAndResize(_ view: UIView, into containerView: UIView) {
containerView.addSubview(view)
view.translatesAutoresizingMaskIntoConstraints = false
containerView.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "H:|[view]|",
options: [],
metrics: nil,
views: ["view": view]))
containerView.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "V:|[view]|",
options: [],
metrics: nil,
views: ["view": view]))
containerView.layoutIfNeeded()
}


@@ -9,6 +9,7 @@
import Foundation
import SwiftUI
import SimpleXChat
+import WebRTC

class Call: ObservableObject, Equatable {
    static func == (lhs: Call, rhs: Call) -> Bool {
@@ -24,8 +25,8 @@ class Call: ObservableObject, Equatable {
    @Published var peerMedia: CallMediaType?
    @Published var sharedKey: String?
    @Published var audioEnabled = true
+    @Published var speakerEnabled = false
    @Published var videoEnabled: Bool
-    @Published var localCamera = VideoCamera.user
    @Published var connectionInfo: ConnectionInfo?

    init(
@@ -103,21 +104,18 @@
}

enum WCallCommand: Equatable, Encodable, Decodable {
-    case capabilities(media: CallMediaType, useWorker: Bool? = nil)
-    case start(media: CallMediaType, aesKey: String? = nil, useWorker: Bool? = nil, iceServers: [RTCIceServer]? = nil, relay: Bool? = nil)
-    case offer(offer: String, iceCandidates: String, media: CallMediaType, aesKey: String? = nil, useWorker: Bool? = nil, iceServers: [RTCIceServer]? = nil, relay: Bool? = nil)
+    case capabilities(media: CallMediaType)
+    case start(media: CallMediaType, aesKey: String? = nil, iceServers: [RTCIceServer]? = nil, relay: Bool? = nil)
+    case offer(offer: String, iceCandidates: String, media: CallMediaType, aesKey: String? = nil, iceServers: [RTCIceServer]? = nil, relay: Bool? = nil)
    case answer(answer: String, iceCandidates: String)
    case ice(iceCandidates: String)
    case media(media: CallMediaType, enable: Bool)
-    case camera(camera: VideoCamera)
    case end

    enum CodingKeys: String, CodingKey {
        case type
        case media
-        case camera
        case aesKey
-        case useWorker
        case offer
        case answer
        case iceCandidates
@@ -135,7 +133,6 @@ enum WCallCommand: Equatable, Encodable, Decodable {
        case .answer: return "answer"
        case .ice: return "ice"
        case .media: return "media"
-        case .camera: return "camera"
        case .end: return "end"
        }
    }
@@ -144,24 +141,21 @@ enum WCallCommand: Equatable, Encodable, Decodable {
    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        switch self {
-        case let .capabilities(media, useWorker):
+        case let .capabilities(media):
            try container.encode("capabilities", forKey: .type)
            try container.encode(media, forKey: .media)
-            try container.encode(useWorker, forKey: .useWorker)
-        case let .start(media, aesKey, useWorker, iceServers, relay):
+        case let .start(media, aesKey, iceServers, relay):
            try container.encode("start", forKey: .type)
            try container.encode(media, forKey: .media)
            try container.encode(aesKey, forKey: .aesKey)
-            try container.encode(useWorker, forKey: .useWorker)
            try container.encode(iceServers, forKey: .iceServers)
            try container.encode(relay, forKey: .relay)
-        case let .offer(offer, iceCandidates, media, aesKey, useWorker, iceServers, relay):
+        case let .offer(offer, iceCandidates, media, aesKey, iceServers, relay):
            try container.encode("offer", forKey: .type)
            try container.encode(offer, forKey: .offer)
            try container.encode(iceCandidates, forKey: .iceCandidates)
            try container.encode(media, forKey: .media)
            try container.encode(aesKey, forKey: .aesKey)
-            try container.encode(useWorker, forKey: .useWorker)
            try container.encode(iceServers, forKey: .iceServers)
            try container.encode(relay, forKey: .relay)
        case let .answer(answer, iceCandidates):
@@ -175,9 +169,6 @@ enum WCallCommand: Equatable, Encodable, Decodable {
            try container.encode("media", forKey: .type)
            try container.encode(media, forKey: .media)
            try container.encode(enable, forKey: .enable)
-        case let .camera(camera):
-            try container.encode("camera", forKey: .type)
-            try container.encode(camera, forKey: .camera)
        case .end:
            try container.encode("end", forKey: .type)
        }
@@ -189,24 +180,21 @@ enum WCallCommand: Equatable, Encodable, Decodable {
        switch type {
        case "capabilities":
            let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
-            let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
-            self = .capabilities(media: media, useWorker: useWorker)
+            self = .capabilities(media: media)
        case "start":
            let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
            let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
-            let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
            let iceServers = try container.decode(([RTCIceServer]?).self, forKey: .iceServers)
            let relay = try container.decode((Bool?).self, forKey: .relay)
-            self = .start(media: media, aesKey: aesKey, useWorker: useWorker, iceServers: iceServers, relay: relay)
+            self = .start(media: media, aesKey: aesKey, iceServers: iceServers, relay: relay)
        case "offer":
            let offer = try container.decode(String.self, forKey: CodingKeys.offer)
            let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
            let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
            let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
-            let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
            let iceServers = try container.decode(([RTCIceServer]?).self, forKey: .iceServers)
            let relay = try container.decode((Bool?).self, forKey: .relay)
-            self = .offer(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey, useWorker: useWorker, iceServers: iceServers, relay: relay)
+            self = .offer(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey, iceServers: iceServers, relay: relay)
        case "answer":
            let answer = try container.decode(String.self, forKey: CodingKeys.answer)
            let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
@@ -218,9 +206,6 @@ enum WCallCommand: Equatable, Encodable, Decodable {
            let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
            let enable = try container.decode(Bool.self, forKey: CodingKeys.enable)
            self = .media(media: media, enable: enable)
-        case "camera":
-            let camera = try container.decode(VideoCamera.self, forKey: CodingKeys.camera)
-            self = .camera(camera: camera)
        case "end":
            self = .end
        default:
@@ -393,6 +378,9 @@ struct RTCIceCandidate: Codable, Equatable {
    var candidateType: RTCIceCandidateType?
    var `protocol`: String?
    var relayProtocol: String?
+    var sdpMid: String?
+    var sdpMLineIndex: Int?
+    var candidate: String
}

// https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate/type
@@ -456,3 +444,10 @@ func getIceServers() -> [RTCIceServer]? {
        }
    }
    return nil
}
+
+func getWebRTCIceServers() -> [WebRTC.RTCIceServer]? {
+    if let servers = UserDefaults.standard.stringArray(forKey: DEFAULT_WEBRTC_ICE_SERVERS) {
+        return parseRTCIceServers(servers)?.toWebRTCIceServers()
+    }
+    return nil
+}


@@ -0,0 +1,714 @@
//
// Created by Avently on 09.02.2023.
// Copyright (c) 2023 SimpleX Chat. All rights reserved.
//
import WebRTC
import LZString
import SwiftUI
import SimpleXChat
final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDelegate, RTCFrameDecryptorDelegate {
private static let factory: RTCPeerConnectionFactory = {
RTCInitializeSSL()
let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
}()
private static let ivTagBytes: Int = 28
private static let enableEncryption: Bool = true
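// All state for one native WebRTC call: the peer connection, gathered ICE candidates,
// local/remote video tracks, the camera in use, and the optional AES key with its
// frame encryptor/decryptor.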
struct Call {
var connection: RTCPeerConnection
var iceCandidates: [RTCIceCandidate]
var localMedia: CallMediaType
var localCamera: RTCVideoCapturer?
var localVideoSource: RTCVideoSource?
var localStream: RTCVideoTrack?
var remoteStream: RTCVideoTrack?
var device: AVCaptureDevice.Position = .front
var aesKey: String?
var frameEncryptor: RTCFrameEncryptor?
var frameDecryptor: RTCFrameDecryptor?
}
private let rtcAudioSession = RTCAudioSession.sharedInstance()
private let audioQueue = DispatchQueue(label: "audio")
private var sendCallResponse: (WVAPIMessage) async -> Void
private var activeCall: Binding<Call?>
private var localRendererAspectRatio: Binding<CGFloat?>
@available(*, unavailable)
override init() {
fatalError("Unimplemented")
}
required init(_ activeCall: Binding<Call?>, _ sendCallResponse: @escaping (WVAPIMessage) async -> Void, _ localRendererAspectRatio: Binding<CGFloat?>) {
self.sendCallResponse = sendCallResponse
self.activeCall = activeCall
self.localRendererAspectRatio = localRendererAspectRatio
super.init()
}
let defaultIceServers: [WebRTC.RTCIceServer] = [
WebRTC.RTCIceServer(urlStrings: ["stun:stun.simplex.im:443"]),
WebRTC.RTCIceServer(urlStrings: ["turn:turn.simplex.im:443?transport=udp"], username: "private", credential: "yleob6AVkiNI87hpR94Z"),
WebRTC.RTCIceServer(urlStrings: ["turn:turn.simplex.im:443?transport=tcp"], username: "private", credential: "yleob6AVkiNI87hpR94Z"),
]
func initializeCall(_ iceServers: [WebRTC.RTCIceServer]?, _ remoteIceCandidates: [RTCIceCandidate], _ mediaType: CallMediaType, _ aesKey: String?, _ relay: Bool?) -> Call {
let connection = createPeerConnection(iceServers ?? getWebRTCIceServers() ?? defaultIceServers, relay)
connection.delegate = self
createAudioSender(connection)
var localStream: RTCVideoTrack? = nil
var remoteStream: RTCVideoTrack? = nil
var localCamera: RTCVideoCapturer? = nil
var localVideoSource: RTCVideoSource? = nil
if mediaType == .video {
(localStream, remoteStream, localCamera, localVideoSource) = createVideoSender(connection)
}
var frameEncryptor: RTCFrameEncryptor? = nil
var frameDecryptor: RTCFrameDecryptor? = nil
if aesKey != nil {
let encryptor = RTCFrameEncryptor.init(sizeChange: Int32(WebRTCClient.ivTagBytes))
encryptor.delegate = self
frameEncryptor = encryptor
connection.senders.forEach { $0.setRtcFrameEncryptor(encryptor) }
let decryptor = RTCFrameDecryptor.init(sizeChange: -Int32(WebRTCClient.ivTagBytes))
decryptor.delegate = self
frameDecryptor = decryptor
// Has no video receiver in outgoing call if applied here, see [peerConnection(_ connection: RTCPeerConnection, didChange newState]
// connection.receivers.forEach { $0.setRtcFrameDecryptor(decryptor) }
}
return Call(
connection: connection,
iceCandidates: remoteIceCandidates,
localMedia: mediaType,
localCamera: localCamera,
localVideoSource: localVideoSource,
localStream: localStream,
remoteStream: remoteStream,
aesKey: aesKey,
frameEncryptor: frameEncryptor,
frameDecryptor: frameDecryptor
)
}
func createPeerConnection(_ iceServers: [WebRTC.RTCIceServer], _ relay: Bool?) -> RTCPeerConnection {
let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
guard let connection = WebRTCClient.factory.peerConnection(
with: getCallConfig(iceServers, relay),
constraints: constraints, delegate: nil
)
else {
fatalError("Unable to create RTCPeerConnection")
}
return connection
}
func getCallConfig(_ iceServers: [WebRTC.RTCIceServer], _ relay: Bool?) -> RTCConfiguration {
let config = RTCConfiguration()
config.iceServers = iceServers
config.sdpSemantics = .unifiedPlan
config.continualGatheringPolicy = .gatherContinually
config.iceTransportPolicy = relay == true ? .relay : .all
// Allows to wait 30 sec before `failing` connection if the answer from remote side is not received in time
config.iceInactiveTimeout = 30_000
return config
}
func addIceCandidates(_ connection: RTCPeerConnection, _ remoteIceCandidates: [RTCIceCandidate]) {
remoteIceCandidates.forEach { candidate in
connection.add(candidate.toWebRTCCandidate()) { error in
if let error = error {
logger.error("Adding candidate error \(error)")
}
}
}
}
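// Entry point for commands coming from ChatModel.callCommand: creates or answers the
// WebRTC session, applies remote ICE candidates, toggles media, or ends the call,
// reporting the result back through sendCallResponse.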
func sendCallCommand(command: WCallCommand) async {
var resp: WCallResponse? = nil
let pc = activeCall.wrappedValue?.connection
switch command {
case .capabilities:
resp = .capabilities(capabilities: CallCapabilities(encryption: WebRTCClient.enableEncryption))
case let .start(media: media, aesKey, iceServers, relay):
logger.debug("starting incoming call - create webrtc session")
if activeCall.wrappedValue != nil { endCall() }
let encryption = WebRTCClient.enableEncryption
let call = initializeCall(iceServers?.toWebRTCIceServers(), [], media, encryption ? aesKey : nil, relay)
activeCall.wrappedValue = call
call.connection.offer { answer in
Task {
let gotCandidates = await self.waitWithTimeout(10_000, stepMs: 1000, until: { self.activeCall.wrappedValue?.iceCandidates.count ?? 0 > 0 })
if gotCandidates {
await self.sendCallResponse(.init(
corrId: nil,
resp: .offer(
offer: compressToBase64(input: encodeJSON(CustomRTCSessionDescription(type: answer.type.toSdpType(), sdp: answer.sdp))),
iceCandidates: compressToBase64(input: encodeJSON(self.activeCall.wrappedValue?.iceCandidates ?? [])),
capabilities: CallCapabilities(encryption: encryption)
),
command: command)
)
} else {
self.endCall()
}
}
}
case let .offer(offer, iceCandidates, media, aesKey, iceServers, relay):
if activeCall.wrappedValue != nil {
resp = .error(message: "accept: call already started")
} else if !WebRTCClient.enableEncryption && aesKey != nil {
resp = .error(message: "accept: encryption is not supported")
} else if let offer: CustomRTCSessionDescription = decodeJSON(decompressFromBase64(input: offer)),
let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)) {
let call = initializeCall(iceServers?.toWebRTCIceServers(), remoteIceCandidates, media, WebRTCClient.enableEncryption ? aesKey : nil, relay)
activeCall.wrappedValue = call
let pc = call.connection
if let type = offer.type, let sdp = offer.sdp {
if (try? await pc.setRemoteDescription(RTCSessionDescription(type: type.toWebRTCSdpType(), sdp: sdp))) != nil {
pc.answer { answer in
self.addIceCandidates(pc, remoteIceCandidates)
// Task {
// try? await Task.sleep(nanoseconds: 32_000 * 1000000)
Task {
await self.sendCallResponse(.init(
corrId: nil,
resp: .answer(
answer: compressToBase64(input: encodeJSON(CustomRTCSessionDescription(type: answer.type.toSdpType(), sdp: answer.sdp))),
iceCandidates: compressToBase64(input: encodeJSON(call.iceCandidates))
),
command: command)
)
}
// }
}
} else {
resp = .error(message: "accept: remote description is not set")
}
}
}
case let .answer(answer, iceCandidates):
if pc == nil {
resp = .error(message: "answer: call not started")
} else if pc?.localDescription == nil {
resp = .error(message: "answer: local description is not set")
} else if pc?.remoteDescription != nil {
resp = .error(message: "answer: remote description already set")
} else if let answer: CustomRTCSessionDescription = decodeJSON(decompressFromBase64(input: answer)),
let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)),
let type = answer.type, let sdp = answer.sdp,
let pc = pc {
if (try? await pc.setRemoteDescription(RTCSessionDescription(type: type.toWebRTCSdpType(), sdp: sdp))) != nil {
addIceCandidates(pc, remoteIceCandidates)
resp = .ok
} else {
resp = .error(message: "answer: remote description is not set")
}
}
case let .ice(iceCandidates):
if let pc = pc,
let remoteIceCandidates: [RTCIceCandidate] = decodeJSON(decompressFromBase64(input: iceCandidates)) {
addIceCandidates(pc, remoteIceCandidates)
resp = .ok
} else {
resp = .error(message: "ice: call not started")
}
case let .media(media, enable):
if activeCall.wrappedValue == nil {
resp = .error(message: "media: call not started")
} else if activeCall.wrappedValue?.localMedia == .audio && media == .video {
resp = .error(message: "media: no video")
} else {
enableMedia(media, enable)
resp = .ok
}
case .end:
await sendCallResponse(.init(corrId: nil, resp: .ok, command: command))
endCall()
}
if let resp = resp {
await sendCallResponse(.init(corrId: nil, resp: resp, command: command))
}
}
func enableMedia(_ media: CallMediaType, _ enable: Bool) {
media == .video ? setVideoEnabled(enable) : setAudioEnabled(enable)
}
func addLocalRenderer(_ activeCall: Call, _ renderer: RTCEAGLVideoView) {
activeCall.localStream?.add(renderer)
// To get width and height of a frame, see videoView(videoView:, didChangeVideoSize)
renderer.delegate = self
}
func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) {
guard size.height > 0 else { return }
localRendererAspectRatio.wrappedValue = size.width / size.height
}
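// RTCFrameDecryptorDelegate: decrypts each received frame in place, leaving the
// clear-text header untouched and dropping the trailing IV and tag (ivTagBytes).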
func frameDecryptor(_ decryptor: RTCFrameDecryptor, mediaType: RTCRtpMediaType, withFrame encrypted: Data) -> Data? {
guard encrypted.count > 0 else { return nil }
if var key: [CChar] = activeCall.wrappedValue?.aesKey?.cString(using: .utf8),
let pointer: UnsafeMutableRawPointer = malloc(encrypted.count) {
memcpy(pointer, (encrypted as NSData).bytes, encrypted.count)
let isKeyFrame = encrypted[0] & 1 == 0
let clearTextBytesSize = mediaType.rawValue == 0 ? 1 : isKeyFrame ? 10 : 3
logCrypto("decrypt", chat_decrypt_media(&key, pointer.advanced(by: clearTextBytesSize), Int32(encrypted.count - clearTextBytesSize)))
return Data(bytes: pointer, count: encrypted.count - WebRTCClient.ivTagBytes)
} else {
return nil
}
}
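// RTCFrameEncryptorDelegate: copies the outgoing frame into a larger buffer and encrypts
// everything after the clear-text header (1 byte for audio frames, 10 for video key frames,
// 3 for video delta frames), appending the IV and tag.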
func frameEncryptor(_ encryptor: RTCFrameEncryptor, mediaType: RTCRtpMediaType, withFrame unencrypted: Data) -> Data? {
guard unencrypted.count > 0 else { return nil }
if var key: [CChar] = activeCall.wrappedValue?.aesKey?.cString(using: .utf8),
let pointer: UnsafeMutableRawPointer = malloc(unencrypted.count + WebRTCClient.ivTagBytes) {
memcpy(pointer, (unencrypted as NSData).bytes, unencrypted.count)
let isKeyFrame = unencrypted[0] & 1 == 0
let clearTextBytesSize = mediaType.rawValue == 0 ? 1 : isKeyFrame ? 10 : 3
logCrypto("encrypt", chat_encrypt_media(&key, pointer.advanced(by: clearTextBytesSize), Int32(unencrypted.count + WebRTCClient.ivTagBytes - clearTextBytesSize)))
return Data(bytes: pointer, count: unencrypted.count + WebRTCClient.ivTagBytes)
} else {
return nil
}
}
private func logCrypto(_ op: String, _ r: UnsafeMutablePointer<CChar>?) {
if let r = r {
let err = fromCString(r)
if err != "" {
logger.error("\(op) error: \(err)")
// } else {
// logger.debug("\(op) ok")
}
}
}
func addRemoteRenderer(_ activeCall: Call, _ renderer: RTCVideoRenderer) {
activeCall.remoteStream?.add(renderer)
}
func startCaptureLocalVideo(_ activeCall: Call) {
guard
let capturer = activeCall.localCamera as? RTCCameraVideoCapturer,
let camera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == activeCall.device })
else {
logger.error("Unable to find a camera")
return
}
let supported = RTCCameraVideoCapturer.supportedFormats(for: camera)
let height: (AVCaptureDevice.Format) -> Int32 = { (format: AVCaptureDevice.Format) in CMVideoFormatDescriptionGetDimensions(format.formatDescription).height }
let format = supported.first(where: { height($0) == 1280 })
?? supported.first(where: { height($0) >= 480 && height($0) < 1280 })
?? supported.first(where: { height($0) > 1280 })
guard
let format = format,
let fps = format.videoSupportedFrameRateRanges.max(by: { $0.maxFrameRate < $1.maxFrameRate })
else {
logger.error("Unable to find any format for camera or to choose FPS")
return
}
logger.debug("Format for camera is \(format.description)")
capturer.stopCapture()
capturer.startCapture(with: camera,
format: format,
fps: Int(min(24, fps.maxFrameRate)))
}
private func createAudioSender(_ connection: RTCPeerConnection) {
let streamId = "stream"
let audioTrack = createAudioTrack()
connection.add(audioTrack, streamIds: [streamId])
}
private func createVideoSender(_ connection: RTCPeerConnection) -> (RTCVideoTrack?, RTCVideoTrack?, RTCVideoCapturer?, RTCVideoSource?) {
let streamId = "stream"
let (localVideoTrack, localCamera, localVideoSource) = createVideoTrack()
connection.add(localVideoTrack, streamIds: [streamId])
return (localVideoTrack, connection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack, localCamera, localVideoSource)
}
private func createAudioTrack() -> RTCAudioTrack {
let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
let audioSource = WebRTCClient.factory.audioSource(with: audioConstrains)
let audioTrack = WebRTCClient.factory.audioTrack(with: audioSource, trackId: "audio0")
return audioTrack
}
private func createVideoTrack() -> (RTCVideoTrack, RTCVideoCapturer, RTCVideoSource) {
let localVideoSource = WebRTCClient.factory.videoSource()
#if targetEnvironment(simulator)
let localCamera = RTCFileVideoCapturer(delegate: localVideoSource)
#else
let localCamera = RTCCameraVideoCapturer(delegate: localVideoSource)
#endif
let localVideoTrack = WebRTCClient.factory.videoTrack(with: localVideoSource, trackId: "video0")
return (localVideoTrack, localCamera, localVideoSource)
}
func endCall() {
guard let call = activeCall.wrappedValue else { return }
activeCall.wrappedValue = nil
call.connection.close()
call.connection.delegate = nil
call.frameEncryptor?.delegate = nil
call.frameDecryptor?.delegate = nil
audioSessionToDefaults()
}
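// Polls `success` every `stepMs` milliseconds until it returns true or `timeoutMs` elapses;
// used above to wait for local ICE candidates before sending the offer.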
func waitWithTimeout(_ timeoutMs: UInt64, stepMs: UInt64, until success: () -> Bool) async -> Bool {
let startedAt = DispatchTime.now()
while !success() && startedAt.uptimeNanoseconds + timeoutMs * 1000000 > DispatchTime.now().uptimeNanoseconds {
guard let _ = try? await Task.sleep(nanoseconds: stepMs * 1000000) else { break }
}
return success()
}
}
extension WebRTC.RTCPeerConnection {
func mediaConstraints() -> RTCMediaConstraints {
RTCMediaConstraints(
mandatoryConstraints: [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueTrue,
kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue],
optionalConstraints: nil)
}
func offer(_ completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {
offer(for: mediaConstraints()) { (sdp, error) in
guard let sdp = sdp else {
return
}
self.setLocalDescription(sdp, completionHandler: { (error) in
completion(sdp)
})
}
}
func answer(_ completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {
answer(for: mediaConstraints()) { (sdp, error) in
guard let sdp = sdp else {
return
}
self.setLocalDescription(sdp, completionHandler: { (error) in
completion(sdp)
})
}
}
}
extension WebRTCClient: RTCPeerConnectionDelegate {
func peerConnection(_ connection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
logger.debug("Connection new signaling state: \(stateChanged.rawValue)")
}
func peerConnection(_ connection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
logger.debug("Connection did add stream")
}
func peerConnection(_ connection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
logger.debug("Connection did remove stream")
}
func peerConnectionShouldNegotiate(_ connection: RTCPeerConnection) {
logger.debug("Connection should negotiate")
}
func peerConnection(_ connection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
debugPrint("Connection new connection state: \(newState.toString() ?? "" + newState.rawValue.description) \(connection.receivers)")
guard let call = activeCall.wrappedValue,
let connectionStateString = newState.toString(),
let iceConnectionStateString = connection.iceConnectionState.toString(),
let iceGatheringStateString = connection.iceGatheringState.toString(),
let signalingStateString = connection.signalingState.toString()
else {
return
}
Task {
await sendCallResponse(.init(
corrId: nil,
resp: .connection(state: ConnectionState(
connectionState: connectionStateString,
iceConnectionState: iceConnectionStateString,
iceGatheringState: iceGatheringStateString,
signalingState: signalingStateString)
),
command: nil)
)
switch newState {
case .checking:
if let frameDecryptor = activeCall.wrappedValue?.frameDecryptor {
connection.receivers.forEach { $0.setRtcFrameDecryptor(frameDecryptor) }
}
let enableSpeaker: Bool
switch call.localMedia {
case .video: enableSpeaker = true
default: enableSpeaker = false
}
setSpeakerEnabledAndConfigureSession(enableSpeaker)
case .disconnected, .failed: endCall()
default: do {}
}
}
}
func peerConnection(_ connection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
logger.debug("connection new gathering state: \(newState.toString() ?? "" + newState.rawValue.description)")
}
func peerConnection(_ connection: RTCPeerConnection, didGenerate candidate: WebRTC.RTCIceCandidate) {
// logger.debug("Connection generated candidate \(candidate.debugDescription)")
activeCall.wrappedValue?.iceCandidates.append(candidate.toCandidate(nil, nil, nil))
}
func peerConnection(_ connection: RTCPeerConnection, didRemove candidates: [WebRTC.RTCIceCandidate]) {
logger.debug("Connection did remove candidates")
}
func peerConnection(_ connection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {}
func peerConnection(_ connection: RTCPeerConnection,
didChangeLocalCandidate local: WebRTC.RTCIceCandidate,
remoteCandidate remote: WebRTC.RTCIceCandidate,
lastReceivedMs lastDataReceivedMs: Int32,
changeReason reason: String) {
// logger.debug("Connection changed candidate \(reason) \(remote.debugDescription) \(remote.description)")
sendConnectedEvent(connection, local: local, remote: remote)
}
func sendConnectedEvent(_ connection: WebRTC.RTCPeerConnection, local: WebRTC.RTCIceCandidate, remote: WebRTC.RTCIceCandidate) {
connection.statistics { (stats: RTCStatisticsReport) in
stats.statistics.values.forEach { stat in
// logger.debug("Stat \(stat.debugDescription)")
if stat.type == "candidate-pair", stat.values["state"] as? String == "succeeded",
let localId = stat.values["localCandidateId"] as? String,
let remoteId = stat.values["remoteCandidateId"] as? String,
let localStats = stats.statistics[localId],
let remoteStats = stats.statistics[remoteId],
local.sdp.contains("\((localStats.values["ip"] as? String ?? "--")) \((localStats.values["port"] as? String ?? "--"))") &&
remote.sdp.contains("\((remoteStats.values["ip"] as? String ?? "--")) \((remoteStats.values["port"] as? String ?? "--"))")
{
Task {
await self.sendCallResponse(.init(
corrId: nil,
resp: .connected(connectionInfo: ConnectionInfo(
localCandidate: local.toCandidate(
RTCIceCandidateType.init(rawValue: localStats.values["candidateType"] as! String),
localStats.values["protocol"] as? String,
localStats.values["relayProtocol"] as? String
),
remoteCandidate: remote.toCandidate(
RTCIceCandidateType.init(rawValue: remoteStats.values["candidateType"] as! String),
remoteStats.values["protocol"] as? String,
remoteStats.values["relayProtocol"] as? String
))),
command: nil)
)
}
}
}
}
}
}
extension WebRTCClient {
func setAudioEnabled(_ enabled: Bool) {
setTrackEnabled(RTCAudioTrack.self, enabled)
}
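// Routes call audio to the loudspeaker or the earpiece by reconfiguring the shared
// RTCAudioSession on the dedicated audio queue.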
func setSpeakerEnabledAndConfigureSession( _ enabled: Bool) {
audioQueue.async { [weak self] in
guard let self = self else { return }
self.rtcAudioSession.lockForConfiguration()
defer {
self.rtcAudioSession.unlockForConfiguration()
}
do {
try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
try self.rtcAudioSession.overrideOutputAudioPort(enabled ? .speaker : .none)
try self.rtcAudioSession.setActive(true)
} catch let error {
logger.debug("Error configuring AVAudioSession: \(error)")
}
}
}
func audioSessionToDefaults() {
audioQueue.async { [weak self] in
guard let self = self else { return }
self.rtcAudioSession.lockForConfiguration()
defer {
self.rtcAudioSession.unlockForConfiguration()
}
do {
try self.rtcAudioSession.setCategory(AVAudioSession.Category.ambient.rawValue)
try self.rtcAudioSession.setMode(AVAudioSession.Mode.default.rawValue)
try self.rtcAudioSession.overrideOutputAudioPort(.none)
try self.rtcAudioSession.setActive(false)
} catch let error {
logger.debug("Error configuring AVAudioSession: \(error)")
}
}
}
func setVideoEnabled(_ enabled: Bool) {
setTrackEnabled(RTCVideoTrack.self, enabled)
}
func flipCamera() {
switch activeCall.wrappedValue?.device {
case .front: activeCall.wrappedValue?.device = .back
case .back: activeCall.wrappedValue?.device = .front
default: ()
}
if let call = activeCall.wrappedValue {
startCaptureLocalVideo(call)
}
}
private func setTrackEnabled<T: RTCMediaStreamTrack>(_ type: T.Type, _ enabled: Bool) {
activeCall.wrappedValue?.connection.transceivers
.compactMap { $0.sender.track as? T }
.forEach { $0.isEnabled = enabled }
}
}
struct CustomRTCSessionDescription: Codable {
public var type: RTCSdpType?
public var sdp: String?
}
enum RTCSdpType: String, Codable {
case answer
case offer
case pranswer
case rollback
}
extension RTCIceCandidate {
func toWebRTCCandidate() -> WebRTC.RTCIceCandidate {
WebRTC.RTCIceCandidate(
sdp: candidate,
sdpMLineIndex: Int32(sdpMLineIndex ?? 0),
sdpMid: sdpMid
)
}
}
extension WebRTC.RTCIceCandidate {
func toCandidate(_ candidateType: RTCIceCandidateType?, _ protocol: String?, _ relayProtocol: String?) -> RTCIceCandidate {
RTCIceCandidate(
candidateType: candidateType,
protocol: `protocol`,
relayProtocol: relayProtocol,
sdpMid: sdpMid,
sdpMLineIndex: Int(sdpMLineIndex),
candidate: sdp
)
}
}
extension [RTCIceServer] {
func toWebRTCIceServers() -> [WebRTC.RTCIceServer] {
self.map {
WebRTC.RTCIceServer(
urlStrings: $0.urls,
username: $0.username,
credential: $0.credential
) }
}
}
extension RTCSdpType {
func toWebRTCSdpType() -> WebRTC.RTCSdpType {
switch self {
case .answer: return WebRTC.RTCSdpType.answer
case .offer: return WebRTC.RTCSdpType.offer
case .pranswer: return WebRTC.RTCSdpType.prAnswer
case .rollback: return WebRTC.RTCSdpType.rollback
}
}
}
extension WebRTC.RTCSdpType {
func toSdpType() -> RTCSdpType {
switch self {
case .answer: return RTCSdpType.answer
case .offer: return RTCSdpType.offer
case .prAnswer: return RTCSdpType.pranswer
case .rollback: return RTCSdpType.rollback
default: return RTCSdpType.answer // should never be here
}
}
}
extension RTCPeerConnectionState {
func toString() -> String? {
switch self {
case .new: return "new"
case .connecting: return "connecting"
case .connected: return "connected"
case .failed: return "failed"
case .disconnected: return "disconnected"
case .closed: return "closed"
default: return nil // unknown
}
}
}
extension RTCIceConnectionState {
func toString() -> String? {
switch self {
case .new: return "new"
case .checking: return "checking"
case .connected: return "connected"
case .completed: return "completed"
case .failed: return "failed"
case .disconnected: return "disconnected"
case .closed: return "closed"
default: return nil // unknown or unused on the other side
}
}
}
extension RTCIceGatheringState {
func toString() -> String? {
switch self {
case .new: return "new"
case .gathering: return "gathering"
case .complete: return "complete"
default: return nil // unknown
}
}
}
extension RTCSignalingState {
func toString() -> String? {
switch self {
case .stable: return "stable"
case .haveLocalOffer: return "have-local-offer"
case .haveLocalPrAnswer: return "have-local-pranswer"
case .haveRemoteOffer: return "have-remote-offer"
case .haveRemotePrAnswer: return "have-remote-pranswer"
case .closed: return "closed"
default: return nil // unknown
}
}
}


@@ -1,183 +0,0 @@
//
// WebRTCView.swift
// SimpleX (iOS)
//
// Created by Ian Davies on 29/04/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import SwiftUI
import WebKit
import SimpleXChat
class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler, WKUIDelegate {
var rtcWebView: Binding<WKWebView?>
var webViewMsg: Binding<WVAPIMessage?>
internal init(rtcWebView: Binding<WKWebView?>, webViewMsg: Binding<WVAPIMessage?>) {
self.rtcWebView = rtcWebView
self.webViewMsg = webViewMsg
}
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
webView.allowsBackForwardNavigationGestures = false
self.rtcWebView.wrappedValue = webView
ChatModel.shared.callWebView = webView
}
func webView(_ webView: WKWebView, decideMediaCapturePermissionsFor origin : WKSecurityOrigin, initiatedBy frame: WKFrameInfo, type: WKMediaCaptureType) async -> WKPermissionDecision {
print("webView", #function)
return .grant
}
// receive message from WKWebView
func userContentController(
_ userContentController: WKUserContentController,
didReceive message: WKScriptMessage
) {
logger.debug("WebRTCCoordinator.userContentController")
switch message.name {
case "webrtc":
if let msgStr = message.body as? String,
let msg: WVAPIMessage = decodeJSON(msgStr) {
// this is the binding that communicates messages from webview to swift view
webViewMsg.wrappedValue = msg
if case .invalid = msg.resp {
logger.error("WebRTCCoordinator.userContentController: invalid message \(String(describing: message.body))")
}
} else {
logger.error("WebRTCCoordinator.userContentController: message parsing error \(String(describing: message.body))")
}
case "logger":
if let msgStr = message.body as? String {
logger.error("WebRTCCoordinator console.log: \(msgStr)")
} else {
logger.error("WebRTCCoordinator console.log: \(String(describing: message.body))")
}
default:
logger.error("WebRTCCoordinator.userContentController: invalid message.name \(message.name)")
}
}
}
struct WebRTCView: UIViewRepresentable {
@State private var coordinator: WebRTCCoordinator?
@Binding var rtcWebView: WKWebView?
@Binding var webViewMsg: WVAPIMessage?
func makeCoordinator() -> WebRTCCoordinator {
WebRTCCoordinator(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg)
}
func makeUIView(context: Context) -> WKWebView {
let wkCoordinator = makeCoordinator()
DispatchQueue.main.async { coordinator = wkCoordinator }
let wkController = WKUserContentController()
let cfg = WKWebViewConfiguration()
cfg.userContentController = wkController
cfg.mediaTypesRequiringUserActionForPlayback = []
cfg.allowsInlineMediaPlayback = true
let addScript = { (handler: String, source: String) in
let script = WKUserScript(source: source, injectionTime: .atDocumentEnd, forMainFrameOnly: false)
wkController.addUserScript(script)
wkController.add(wkCoordinator, name: handler)
}
addScript("webrtc", "sendMessageToNative = (msg) => webkit.messageHandlers.webrtc.postMessage(JSON.stringify(msg))")
addScript("logger", "console.log = (arg) => webkit.messageHandlers.logger.postMessage(JSON.stringify(arg))")
let wkWebView = WKWebView(frame: .zero, configuration: cfg)
wkWebView.navigationDelegate = wkCoordinator
guard let path: String = Bundle.main.path(forResource: "call", ofType: "html", inDirectory: "www") else {
logger.error("WebRTCView.makeUIView call.html not found")
return wkWebView
}
let localHTMLUrl = URL(fileURLWithPath: path, isDirectory: false)
wkWebView.loadFileURL(localHTMLUrl, allowingReadAccessTo: localHTMLUrl)
return wkWebView
}
func updateUIView(_ webView: WKWebView, context: Context) {
logger.debug("WebRTCView.updateUIView")
}
}
func sendCallCommand(_ webView: WKWebView, _ command: WCallCommand) {
logger.debug("sendCallCommand: \(command.cmdType)")
let apiCmd = encodeJSON(WVAPICall(command: command))
let js = "processCommand(\(apiCmd))"
webView.evaluateJavaScript(js)
}
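// Example usage (hypothetical sketch; `webView` is the already loaded call web view):
// sendCallCommand(webView, .capabilities(useWorker: true))
// sendCallCommand(webView, .start(media: .video))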
//struct CallViewDebug: View {
// @State private var commandStr = ""
// @State private var rtcWebView: WKWebView? = nil
// @State private var webViewMsg: WVAPIMessage? = nil
// @FocusState private var keyboardVisible: Bool
//
// var body: some View {
// VStack(spacing: 30) {
// WebRTCView(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg).frame(maxHeight: 260)
// .onChange(of: webViewMsg) { _ in
// if let resp = webViewMsg {
// commandStr = encodeJSON(resp)
// }
// }
// TextEditor(text: $commandStr)
// .focused($keyboardVisible)
// .disableAutocorrection(true)
// .textInputAutocapitalization(.never)
// .padding(.horizontal, 5)
// .padding(.top, 2)
// .frame(height: 112)
// .overlay(
// RoundedRectangle(cornerRadius: 10)
// .strokeBorder(.secondary, lineWidth: 0.3, antialiased: true)
// )
// HStack(spacing: 20) {
// Button("Copy") {
// UIPasteboard.general.string = commandStr
// }
// Button("Paste") {
// commandStr = UIPasteboard.general.string ?? ""
// }
// Button("Clear") {
// commandStr = ""
// }
// Button("Send") {
// if let wv = rtcWebView,
// let command: WCallCommand = decodeJSON(commandStr) {
// sendCallCommand(wv, command)
// }
// }
// }
// HStack(spacing: 20) {
// Button("Capabilities") {
// if let wv = rtcWebView {
// sendCallCommand(wv, .capabilities(useWorker: true))
// }
// }
// Button("Start") {
// if let wv = rtcWebView {
// sendCallCommand(wv, .start(media: .video))
// }
// }
// Button("Accept") {
//
// }
// Button("Answer") {
//
// }
// Button("ICE") {
//
// }
// Button("End") {
//
// }
// }
// }
// }
//}


@@ -7,11 +7,12 @@
 	objects = {
 /* Begin PBXBuildFile section */
+		184152CEF68D2336FC2EBCB0 /* CallViewRenderers.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415B08031E8FB0F7FC27F9 /* CallViewRenderers.swift */; };
 		1841538E296606C74533367C /* UserPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415835CBD939A9ABDC108A /* UserPicker.swift */; };
 		1841560FD1CD447955474C1D /* UserProfilesView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415845648CA4F5A8BCA272 /* UserProfilesView.swift */; };
 		1841594C978674A7B42EF0C0 /* AnimatedImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1841511920742C6E152E469F /* AnimatedImageView.swift */; };
 		18415B0585EB5A9A0A7CA8CD /* PressedButtonStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415A7F0F189D87DEFEABCA /* PressedButtonStyle.swift */; };
-		3C714777281C081000CB4D4B /* WebRTCView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3C714776281C081000CB4D4B /* WebRTCView.swift */; };
+		18415C6C56DBCEC2CBBD2F11 /* WebRTCClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415323A4082FC92887F906 /* WebRTCClient.swift */; };
 		3C71477A281C0F6800CB4D4B /* www in Resources */ = {isa = PBXBuildFile; fileRef = 3C714779281C0F6800CB4D4B /* www */; };
 		3C8C548928133C84000A3EC7 /* PasteToConnectView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */; };
 		3CDBCF4227FAE51000354CDD /* ComposeLinkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */; };
@@ -164,8 +165,10 @@
 		64AA1C6C27F3537400AC7277 /* DeletedItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 64AA1C6B27F3537400AC7277 /* DeletedItemView.swift */; };
 		64E972072881BB22008DBC02 /* CIGroupInvitationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 64E972062881BB22008DBC02 /* CIGroupInvitationView.swift */; };
 		64F1CC3B28B39D8600CD1FB1 /* IncognitoHelp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 64F1CC3A28B39D8600CD1FB1 /* IncognitoHelp.swift */; };
+		D7197A1829AE89660055C05A /* WebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = D7197A1729AE89660055C05A /* WebRTC */; };
 		D72A9088294BD7A70047C86D /* NativeTextEditor.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A9087294BD7A70047C86D /* NativeTextEditor.swift */; };
 		D77B92DC2952372200A5A1CC /* SwiftyGif in Frameworks */ = {isa = PBXBuildFile; productRef = D77B92DB2952372200A5A1CC /* SwiftyGif */; };
+		D7F0E33929964E7E0068AF69 /* LZString in Frameworks */ = {isa = PBXBuildFile; productRef = D7F0E33829964E7E0068AF69 /* LZString */; };
 /* End PBXBuildFile section */

 /* Begin PBXContainerItemProxy section */
@@ -226,10 +229,11 @@
 /* Begin PBXFileReference section */
 		1841511920742C6E152E469F /* AnimatedImageView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AnimatedImageView.swift; sourceTree = "<group>"; };
+		18415323A4082FC92887F906 /* WebRTCClient.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WebRTCClient.swift; sourceTree = "<group>"; };
 		18415835CBD939A9ABDC108A /* UserPicker.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UserPicker.swift; sourceTree = "<group>"; };
 		18415845648CA4F5A8BCA272 /* UserProfilesView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UserProfilesView.swift; sourceTree = "<group>"; };
 		18415A7F0F189D87DEFEABCA /* PressedButtonStyle.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PressedButtonStyle.swift; sourceTree = "<group>"; };
-		3C714776281C081000CB4D4B /* WebRTCView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCView.swift; sourceTree = "<group>"; };
+		18415B08031E8FB0F7FC27F9 /* CallViewRenderers.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallViewRenderers.swift; sourceTree = "<group>"; };
 		3C714779281C0F6800CB4D4B /* www */ = {isa = PBXFileReference; lastKnownFileType = folder; name = www; path = ../android/app/src/main/assets/www; sourceTree = "<group>"; };
 		3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PasteToConnectView.swift; sourceTree = "<group>"; };
 		3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ComposeLinkView.swift; sourceTree = "<group>"; };
@@ -407,6 +411,7 @@
 		64E972062881BB22008DBC02 /* CIGroupInvitationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CIGroupInvitationView.swift; sourceTree = "<group>"; };
 		64F1CC3A28B39D8600CD1FB1 /* IncognitoHelp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IncognitoHelp.swift; sourceTree = "<group>"; };
 		D72A9087294BD7A70047C86D /* NativeTextEditor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativeTextEditor.swift; sourceTree = "<group>"; };
+		D7AA2C3429A936B400737B40 /* MediaEncryption.playground */ = {isa = PBXFileReference; lastKnownFileType = file.playground; name = MediaEncryption.playground; path = Shared/MediaEncryption.playground; sourceTree = SOURCE_ROOT; xcLanguageSpecificationIdentifier = xcode.lang.swift; };
 /* End PBXFileReference section */

 /* Begin PBXFrameworksBuildPhase section */
@@ -415,7 +420,9 @@
 			buildActionMask = 2147483647;
 			files = (
 				5CE2BA702845308900EC33A6 /* SimpleXChat.framework in Frameworks */,
+				D7197A1829AE89660055C05A /* WebRTC in Frameworks */,
 				D77B92DC2952372200A5A1CC /* SwiftyGif in Frameworks */,
+				D7F0E33929964E7E0068AF69 /* LZString in Frameworks */,
 				646BB38C283BEEB9001CE359 /* LocalAuthentication.framework in Frameworks */,
 				5C8F01CD27A6F0D8007D2C8D /* CodeScanner in Frameworks */,
 			);
@ -456,13 +463,14 @@
3C714775281C080100CB4D4B /* Call */ = { 3C714775281C080100CB4D4B /* Call */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
3C714776281C081000CB4D4B /* WebRTCView.swift */,
5C9D13A2282187BB00AB8B43 /* WebRTC.swift */, 5C9D13A2282187BB00AB8B43 /* WebRTC.swift */,
5C5E5D3A2824468B00B0488A /* ActiveCallView.swift */, 5C5E5D3A2824468B00B0488A /* ActiveCallView.swift */,
5C029EA9283942EA004A9677 /* CallController.swift */, 5C029EA9283942EA004A9677 /* CallController.swift */,
5C55A91E283AD0E400C4E99E /* CallManager.swift */, 5C55A91E283AD0E400C4E99E /* CallManager.swift */,
5C55A920283CCCB700C4E99E /* IncomingCallView.swift */, 5C55A920283CCCB700C4E99E /* IncomingCallView.swift */,
5C55A922283CEDE600C4E99E /* SoundPlayer.swift */, 5C55A922283CEDE600C4E99E /* SoundPlayer.swift */,
18415323A4082FC92887F906 /* WebRTCClient.swift */,
18415B08031E8FB0F7FC27F9 /* CallViewRenderers.swift */,
); );
path = Call; path = Call;
sourceTree = "<group>"; sourceTree = "<group>";
@@ -516,6 +524,7 @@
 		5C764E7A279C71D4000C6508 /* Frameworks */ = {
 			isa = PBXGroup;
 			children = (
+				5C646BB429AA0E970005A88F /* WebRTC.xcframework */,
 				646BB38B283BEEB9001CE359 /* LocalAuthentication.framework */,
 				5CDCAD6028187D7900503DA2 /* libz.tbd */,
 				5CDCAD5E28187D4A00503DA2 /* libiconv.tbd */,
@@ -585,6 +594,7 @@
 				5C764E87279CBC8E000C6508 /* Model */,
 				5C2E260D27A30E2400F70299 /* Views */,
 				5CA059C5279559F40002BEB4 /* Assets.xcassets */,
+				D7AA2C3429A936B400737B40 /* MediaEncryption.playground */,
 				5C13730C2815740A00F43030 /* DebugJSON.playground */,
 			);
 			path = Shared;
@@ -814,6 +824,8 @@
 			packageProductDependencies = (
 				5C8F01CC27A6F0D8007D2C8D /* CodeScanner */,
 				D77B92DB2952372200A5A1CC /* SwiftyGif */,
+				D7F0E33829964E7E0068AF69 /* LZString */,
+				D7197A1729AE89660055C05A /* WebRTC */,
 			);
 			productName = "SimpleX (iOS)";
 			productReference = 5CA059CA279559F40002BEB4 /* SimpleX.app */;
@@ -922,6 +934,8 @@
 			packageReferences = (
 				5C8F01CB27A6F0D8007D2C8D /* XCRemoteSwiftPackageReference "CodeScanner" */,
 				D77B92DA2952372200A5A1CC /* XCRemoteSwiftPackageReference "SwiftyGif" */,
+				D7F0E33729964E7D0068AF69 /* XCRemoteSwiftPackageReference "lzstring-swift" */,
+				D7197A1629AE89660055C05A /* XCRemoteSwiftPackageReference "WebRTC" */,
 			);
 			productRefGroup = 5CA059CB279559F40002BEB4 /* Products */;
 			projectDirPath = "";
@@ -1077,7 +1091,6 @@
 				5CC1C99527A6CF7F000D9FF6 /* ShareSheet.swift in Sources */,
 				5C5E5D3B2824468B00B0488A /* ActiveCallView.swift in Sources */,
 				5C2E260727A2941F00F70299 /* SimpleXAPI.swift in Sources */,
-				3C714777281C081000CB4D4B /* WebRTCView.swift in Sources */,
 				6440CA00288857A10062C672 /* CIEventView.swift in Sources */,
 				5CB0BA92282713FD00B3292C /* CreateProfile.swift in Sources */,
 				5C5F2B7027EBC704006A9D5F /* ProfileImage.swift in Sources */,
@@ -1099,6 +1112,8 @@
 				1841538E296606C74533367C /* UserPicker.swift in Sources */,
 				18415B0585EB5A9A0A7CA8CD /* PressedButtonStyle.swift in Sources */,
 				1841560FD1CD447955474C1D /* UserProfilesView.swift in Sources */,
+				18415C6C56DBCEC2CBBD2F11 /* WebRTCClient.swift in Sources */,
+				184152CEF68D2336FC2EBCB0 /* CallViewRenderers.swift in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
@@ -1676,6 +1691,14 @@
 				minimumVersion = 2.0.0;
 			};
 		};
+		D7197A1629AE89660055C05A /* XCRemoteSwiftPackageReference "WebRTC" */ = {
+			isa = XCRemoteSwiftPackageReference;
+			repositoryURL = "https://github.com/simplex-chat/WebRTC.git";
+			requirement = {
+				kind = revision;
+				revision = 34bedc50f9c58dccf4967ea59c7e6a47d620803b;
+			};
+		};
 		D77B92DA2952372200A5A1CC /* XCRemoteSwiftPackageReference "SwiftyGif" */ = {
 			isa = XCRemoteSwiftPackageReference;
 			repositoryURL = "https://github.com/kirualex/SwiftyGif";
@@ -1684,6 +1707,14 @@
 				kind = branch;
 			};
 		};
+		D7F0E33729964E7D0068AF69 /* XCRemoteSwiftPackageReference "lzstring-swift" */ = {
+			isa = XCRemoteSwiftPackageReference;
+			repositoryURL = "https://github.com/Ibrahimhass/lzstring-swift";
+			requirement = {
+				kind = revision;
+				revision = 7f62f21de5b18582a950e1753b775cc614722407;
+			};
+		};
 /* End XCRemoteSwiftPackageReference section */

 /* Begin XCSwiftPackageProductDependency section */
@ -1692,11 +1723,21 @@
package = 5C8F01CB27A6F0D8007D2C8D /* XCRemoteSwiftPackageReference "CodeScanner" */; package = 5C8F01CB27A6F0D8007D2C8D /* XCRemoteSwiftPackageReference "CodeScanner" */;
productName = CodeScanner; productName = CodeScanner;
}; };
D7197A1729AE89660055C05A /* WebRTC */ = {
isa = XCSwiftPackageProductDependency;
package = D7197A1629AE89660055C05A /* XCRemoteSwiftPackageReference "WebRTC" */;
productName = WebRTC;
};
D77B92DB2952372200A5A1CC /* SwiftyGif */ = { D77B92DB2952372200A5A1CC /* SwiftyGif */ = {
isa = XCSwiftPackageProductDependency; isa = XCSwiftPackageProductDependency;
package = D77B92DA2952372200A5A1CC /* XCRemoteSwiftPackageReference "SwiftyGif" */; package = D77B92DA2952372200A5A1CC /* XCRemoteSwiftPackageReference "SwiftyGif" */;
productName = SwiftyGif; productName = SwiftyGif;
}; };
D7F0E33829964E7E0068AF69 /* LZString */ = {
isa = XCSwiftPackageProductDependency;
package = D7F0E33729964E7D0068AF69 /* XCRemoteSwiftPackageReference "lzstring-swift" */;
productName = LZString;
};
/* End XCSwiftPackageProductDependency section */ /* End XCSwiftPackageProductDependency section */
}; };
rootObject = 5CA059BE279559F40002BEB4 /* Project object */; rootObject = 5CA059BE279559F40002BEB4 /* Project object */;


@@ -9,6 +9,14 @@
         "version" : "2.1.1"
       }
     },
+    {
+      "identity" : "lzstring-swift",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/Ibrahimhass/lzstring-swift",
+      "state" : {
+        "revision" : "7f62f21de5b18582a950e1753b775cc614722407"
+      }
+    },
     {
       "identity" : "swiftygif",
       "kind" : "remoteSourceControl",
@@ -17,6 +25,14 @@
         "branch" : "master",
         "revision" : "5e8619335d394901379c9add5c4c1c2f420b3800"
       }
+    },
+    {
+      "identity" : "webrtc",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/simplex-chat/WebRTC.git",
+      "state" : {
+        "revision" : "34bedc50f9c58dccf4967ea59c7e6a47d620803b"
+      }
     }
   ],
   "version" : 2


@@ -109,7 +109,7 @@ struct ParsedServerAddress: Decodable {
     var parseError: String
 }
 
-private func fromCString(_ c: UnsafeMutablePointer<CChar>) -> String {
+public func fromCString(_ c: UnsafeMutablePointer<CChar>) -> String {
     let s = String.init(cString: c)
     free(c)
     return s


@@ -83,6 +83,10 @@ public enum VideoCamera: String, Codable, Equatable {
 
 public struct CallCapabilities: Codable, Equatable {
     public var encryption: Bool
+
+    public init(encryption: Bool) {
+        self.encryption = encryption
+    }
 }
 
 public enum WebRTCCallStatus: String, Encodable {


@@ -29,6 +29,7 @@
   "devDependencies": {
     "@types/lz-string": "^1.3.34",
     "husky": "^7.0.4",
+    "isomorphic-webcrypto": "^2.3.8",
    "lint-staged": "^12.4.1",
     "prettier": "^2.6.2",
     "typescript": "^4.6.4"
@@ -39,4 +40,4 @@
   "dependencies": {
     "lz-string": "^1.4.4"
   }
-}
+}