Mirror of https://github.com/simplex-chat/simplex-chat.git (synced 2025-06-28 12:19:54 +00:00)
webrtc: call overlays for ios/android, support for flipping camera (#669)
parent 5cddf8e2d3
commit 6cc4323571
14 changed files with 636 additions and 219 deletions
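In essence, the camera-flip support added here stops the current local tracks, requests a new stream with the opposite facingMode ("user" or "environment"), and swaps the new tracks into the existing RTCRtpSenders with replaceTrack, so the call does not need renegotiation (see replaceCamera, replaceTracks and callMediaConstraints in the WebRTC sources below). A condensed sketch of that flow — the flipCamera helper name and the simplified VideoCamera union are illustrative, not part of the commit:

type VideoCamera = "user" | "environment"

// Stop the old local tracks, request a new stream facing the chosen camera,
// then swap each new track into the matching sender of the existing connection.
async function flipCamera(pc: RTCPeerConnection, localStream: MediaStream, camera: VideoCamera): Promise<MediaStream> {
  for (const t of localStream.getTracks()) t.stop()
  const newStream = await navigator.mediaDevices.getUserMedia({audio: true, video: {facingMode: camera}})
  for (const track of newStream.getTracks()) {
    const sender = pc.getSenders().find((s) => s.track?.kind === track.kind)
    if (sender) await sender.replaceTrack(track)
  }
  return newStream
}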
@@ -5,8 +5,19 @@
<script src="./lz-string.min.js"></script>
</head>
<body>
<video id="remote-video-stream" autoplay playsinline></video>
<video id="local-video-stream" muted autoplay playsinline></video>
<video
id="remote-video-stream"
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-video-stream"
muted
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
</body>
<footer>
<script src="./call.js"></script>

@ -6,6 +6,11 @@ var CallMediaType;
|
|||
CallMediaType["Audio"] = "audio";
|
||||
CallMediaType["Video"] = "video";
|
||||
})(CallMediaType || (CallMediaType = {}));
|
||||
var VideoCamera;
|
||||
(function (VideoCamera) {
|
||||
VideoCamera["User"] = "user";
|
||||
VideoCamera["Environment"] = "environment";
|
||||
})(VideoCamera || (VideoCamera = {}));
|
||||
// for debugging
|
||||
// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
|
||||
var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
|
||||
|
@ -16,8 +21,8 @@ var TransformOperation;
|
|||
TransformOperation["Encrypt"] = "encrypt";
|
||||
TransformOperation["Decrypt"] = "decrypt";
|
||||
})(TransformOperation || (TransformOperation = {}));
|
||||
let activeCall;
|
||||
const processCommand = (function () {
|
||||
let activeCall;
|
||||
const defaultIceServers = [
|
||||
{ urls: ["stun:stun.simplex.chat:5349"] },
|
||||
{ urls: ["turn:turn.simplex.chat:5349"], username: "private", credential: "yleob6AVkiNI87hpR94Z" },
|
||||
|
@ -31,19 +36,14 @@ const processCommand = (function () {
|
|||
iceTransportPolicy: relay ? "relay" : "all",
|
||||
},
|
||||
iceCandidates: {
|
||||
delay: 2000,
|
||||
delay: 3000,
|
||||
extrasInterval: 2000,
|
||||
extrasTimeout: 8000,
|
||||
},
|
||||
};
|
||||
}
|
||||
async function initializeCall(config, mediaType, aesKey, useWorker) {
|
||||
const conn = new RTCPeerConnection(config.peerConnectionConfig);
|
||||
const remoteStream = new MediaStream();
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType));
|
||||
await setUpMediaStreams(conn, localStream, remoteStream, aesKey, useWorker);
|
||||
conn.addEventListener("connectionstatechange", connectionStateChange);
|
||||
const iceCandidates = new Promise((resolve, _) => {
|
||||
function getIceCandidates(conn, config) {
|
||||
return new Promise((resolve, _) => {
|
||||
let candidates = [];
|
||||
let resolved = false;
|
||||
let extrasInterval;
|
||||
|
@ -91,28 +91,36 @@ const processCommand = (function () {
|
|||
sendMessageToNative({ resp: { type: "ice", iceCandidates } });
|
||||
}
|
||||
});
|
||||
return { connection: conn, iceCandidates, localMedia: mediaType, localStream };
|
||||
}
|
||||
async function initializeCall(config, mediaType, aesKey, useWorker) {
|
||||
const pc = new RTCPeerConnection(config.peerConnectionConfig);
|
||||
const remoteStream = new MediaStream();
|
||||
const localCamera = VideoCamera.User;
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType, localCamera));
|
||||
const iceCandidates = getIceCandidates(pc, config);
|
||||
const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker };
|
||||
await setupMediaStreams(call);
|
||||
pc.addEventListener("connectionstatechange", connectionStateChange);
|
||||
return call;
|
||||
async function connectionStateChange() {
|
||||
sendMessageToNative({
|
||||
resp: {
|
||||
type: "connection",
|
||||
state: {
|
||||
connectionState: conn.connectionState,
|
||||
iceConnectionState: conn.iceConnectionState,
|
||||
iceGatheringState: conn.iceGatheringState,
|
||||
signalingState: conn.signalingState,
|
||||
connectionState: pc.connectionState,
|
||||
iceConnectionState: pc.iceConnectionState,
|
||||
iceGatheringState: pc.iceGatheringState,
|
||||
signalingState: pc.signalingState,
|
||||
},
|
||||
},
|
||||
});
|
||||
if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
|
||||
conn.removeEventListener("connectionstatechange", connectionStateChange);
|
||||
conn.close();
|
||||
activeCall = undefined;
|
||||
resetVideoElements();
|
||||
if (pc.connectionState == "disconnected" || pc.connectionState == "failed") {
|
||||
pc.removeEventListener("connectionstatechange", connectionStateChange);
|
||||
endCall();
|
||||
setTimeout(() => sendMessageToNative({ resp: { type: "ended" } }), 0);
|
||||
}
|
||||
else if (conn.connectionState == "connected") {
|
||||
const stats = (await conn.getStats());
|
||||
else if (pc.connectionState == "connected") {
|
||||
const stats = (await pc.getStats());
|
||||
for (const stat of stats.values()) {
|
||||
const { type, state } = stat;
|
||||
if (type === "candidate-pair" && state === "succeeded") {
|
||||
|
@ -148,40 +156,37 @@ const processCommand = (function () {
|
|||
const encryption = supportsInsertableStreams(command.useWorker);
|
||||
resp = { type: "capabilities", capabilities: { encryption } };
|
||||
break;
|
||||
case "start":
|
||||
case "start": {
|
||||
console.log("starting call");
|
||||
if (activeCall) {
|
||||
// TODO cancel current call
|
||||
resp = { type: "error", message: "start: call already started" };
|
||||
}
|
||||
else {
|
||||
const { media, useWorker, iceServers, relay } = command;
|
||||
const encryption = supportsInsertableStreams(useWorker);
|
||||
const aesKey = encryption ? command.aesKey : undefined;
|
||||
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker);
|
||||
const pc = activeCall.connection;
|
||||
const offer = await pc.createOffer();
|
||||
await pc.setLocalDescription(offer);
|
||||
// for debugging, returning the command for callee to use
|
||||
// resp = {
|
||||
// type: "offer",
|
||||
// offer: serialize(offer),
|
||||
// iceCandidates: await activeCall.iceCandidates,
|
||||
// capabilities: {encryption},
|
||||
// media,
|
||||
// iceServers,
|
||||
// relay,
|
||||
// aesKey,
|
||||
// useWorker,
|
||||
// }
|
||||
resp = {
|
||||
type: "offer",
|
||||
offer: serialize(offer),
|
||||
iceCandidates: await activeCall.iceCandidates,
|
||||
capabilities: { encryption },
|
||||
};
|
||||
}
|
||||
if (activeCall)
|
||||
endCall();
|
||||
const { media, useWorker, iceServers, relay } = command;
|
||||
const encryption = supportsInsertableStreams(useWorker);
|
||||
const aesKey = encryption ? command.aesKey : undefined;
|
||||
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker);
|
||||
const pc = activeCall.connection;
|
||||
const offer = await pc.createOffer();
|
||||
await pc.setLocalDescription(offer);
|
||||
// for debugging, returning the command for callee to use
|
||||
// resp = {
|
||||
// type: "offer",
|
||||
// offer: serialize(offer),
|
||||
// iceCandidates: await activeCall.iceCandidates,
|
||||
// capabilities: {encryption},
|
||||
// media,
|
||||
// iceServers,
|
||||
// relay,
|
||||
// aesKey,
|
||||
// useWorker,
|
||||
// }
|
||||
resp = {
|
||||
type: "offer",
|
||||
offer: serialize(offer),
|
||||
iceCandidates: await activeCall.iceCandidates,
|
||||
capabilities: { encryption },
|
||||
};
|
||||
break;
|
||||
}
|
||||
case "offer":
|
||||
if (activeCall) {
|
||||
resp = { type: "error", message: "accept: call already started" };
|
||||
|
@ -247,17 +252,29 @@ const processCommand = (function () {
|
|||
resp = { type: "ok" };
|
||||
}
|
||||
break;
|
||||
case "end":
|
||||
if (pc) {
|
||||
pc.close();
|
||||
activeCall = undefined;
|
||||
resetVideoElements();
|
||||
resp = { type: "ok" };
|
||||
case "camera":
|
||||
if (!activeCall || !pc) {
|
||||
resp = { type: "error", message: "camera: call not started" };
|
||||
}
|
||||
else if (activeCall.localMedia == CallMediaType.Audio) {
|
||||
resp = { type: "error", message: "camera: no video" };
|
||||
}
|
||||
else {
|
||||
resp = { type: "error", message: "end: call not started" };
|
||||
try {
|
||||
if (command.camera != activeCall.localCamera) {
|
||||
await replaceCamera(activeCall, command.camera);
|
||||
}
|
||||
resp = { type: "ok" };
|
||||
}
|
||||
catch (e) {
|
||||
resp = { type: "error", message: `camera: ${e.message}` };
|
||||
}
|
||||
}
|
||||
break;
|
||||
case "end":
|
||||
endCall();
|
||||
resp = { type: "ok" };
|
||||
break;
|
||||
default:
|
||||
resp = { type: "error", message: "unknown command" };
|
||||
break;
|
||||
|
@ -270,46 +287,77 @@ const processCommand = (function () {
|
|||
sendMessageToNative(apiResp);
|
||||
return apiResp;
|
||||
}
|
||||
function endCall() {
|
||||
var _a;
|
||||
try {
|
||||
(_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection) === null || _a === void 0 ? void 0 : _a.close();
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
activeCall = undefined;
|
||||
resetVideoElements();
|
||||
}
|
||||
function addIceCandidates(conn, iceCandidates) {
|
||||
for (const c of iceCandidates) {
|
||||
conn.addIceCandidate(new RTCIceCandidate(c));
|
||||
}
|
||||
}
|
||||
async function setUpMediaStreams(pc, localStream, remoteStream, aesKey, useWorker) {
|
||||
var _a;
|
||||
async function setupMediaStreams(call) {
|
||||
const videos = getVideoElements();
|
||||
if (!videos)
|
||||
throw Error("no video elements");
|
||||
let key;
|
||||
let worker;
|
||||
if (aesKey) {
|
||||
key = await callCrypto.decodeAesKey(aesKey);
|
||||
if (useWorker) {
|
||||
await setupEncryptionWorker(call);
|
||||
setupLocalStream(call);
|
||||
setupRemoteStream(call);
|
||||
setupCodecPreferences(call);
|
||||
// setupVideoElement(videos.local)
|
||||
// setupVideoElement(videos.remote)
|
||||
videos.local.srcObject = call.localStream;
|
||||
videos.remote.srcObject = call.remoteStream;
|
||||
}
|
||||
async function setupEncryptionWorker(call) {
|
||||
if (call.aesKey) {
|
||||
if (!call.key)
|
||||
call.key = await callCrypto.decodeAesKey(call.aesKey);
|
||||
if (call.useWorker && !call.worker) {
|
||||
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`;
|
||||
worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
|
||||
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
|
||||
}
|
||||
}
|
||||
}
|
||||
function setupLocalStream(call) {
|
||||
const videos = getVideoElements();
|
||||
if (!videos)
|
||||
throw Error("no video elements");
|
||||
const pc = call.connection;
|
||||
let { localStream } = call;
|
||||
for (const track of localStream.getTracks()) {
|
||||
pc.addTrack(track, localStream);
|
||||
}
|
||||
if (aesKey && key) {
|
||||
if (call.aesKey && call.key) {
|
||||
console.log("set up encryption for sending");
|
||||
for (const sender of pc.getSenders()) {
|
||||
setupPeerTransform(TransformOperation.Encrypt, sender, worker, aesKey, key);
|
||||
setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key);
|
||||
}
|
||||
}
|
||||
}
|
||||
function setupRemoteStream(call) {
|
||||
// Pull tracks from the remote stream as they arrive and add them to remoteStream, which is shown in the remote video element
|
||||
const pc = call.connection;
|
||||
pc.ontrack = (event) => {
|
||||
if (aesKey && key) {
|
||||
if (call.aesKey && call.key) {
|
||||
console.log("set up decryption for receiving");
|
||||
setupPeerTransform(TransformOperation.Decrypt, event.receiver, worker, aesKey, key);
|
||||
setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key);
|
||||
}
|
||||
for (const stream of event.streams) {
|
||||
for (const track of stream.getTracks()) {
|
||||
remoteStream.addTrack(track);
|
||||
call.remoteStream.addTrack(track);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
function setupCodecPreferences(call) {
|
||||
// We assume VP8 encoding in the decode/encode stages to get the initial
|
||||
// bytes to pass as plaintext so we enforce that here.
|
||||
// VP8 is supported by all browsers that support WebRTC.
|
||||
|
@ -322,6 +370,7 @@ const processCommand = (function () {
|
|||
// which is 10 bytes for key frames and 3 bytes for delta frames.
|
||||
// For opus (where encodedFrame.type is not set) this is the TOC byte from
|
||||
// https://tools.ietf.org/html/rfc6716#section-3.1
|
||||
var _a;
|
||||
const capabilities = RTCRtpSender.getCapabilities("video");
|
||||
if (capabilities) {
|
||||
const { codecs } = capabilities;
|
||||
|
@ -329,16 +378,33 @@ const processCommand = (function () {
|
|||
const selectedCodec = codecs[selectedCodecIndex];
|
||||
codecs.splice(selectedCodecIndex, 1);
|
||||
codecs.unshift(selectedCodec);
|
||||
for (const t of pc.getTransceivers()) {
|
||||
for (const t of call.connection.getTransceivers()) {
|
||||
if (((_a = t.sender.track) === null || _a === void 0 ? void 0 : _a.kind) === "video") {
|
||||
t.setCodecPreferences(codecs);
|
||||
}
|
||||
}
|
||||
}
|
||||
// setupVideoElement(videos.local)
|
||||
// setupVideoElement(videos.remote)
|
||||
}
|
||||
async function replaceCamera(call, camera) {
|
||||
const videos = getVideoElements();
|
||||
if (!videos)
|
||||
throw Error("no video elements");
|
||||
const pc = call.connection;
|
||||
for (const t of call.localStream.getTracks())
|
||||
t.stop();
|
||||
call.localCamera = camera;
|
||||
const constraints = callMediaConstraints(call.localMedia, camera);
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(constraints);
|
||||
replaceTracks(pc, localStream.getVideoTracks());
|
||||
replaceTracks(pc, localStream.getAudioTracks());
|
||||
call.localStream = localStream;
|
||||
videos.local.srcObject = localStream;
|
||||
videos.remote.srcObject = remoteStream;
|
||||
}
|
||||
function replaceTracks(pc, tracks) {
|
||||
const sender = pc.getSenders().find((s) => { var _a; return ((_a = s.track) === null || _a === void 0 ? void 0 : _a.kind) === tracks[0].kind; });
|
||||
if (sender)
|
||||
for (const t of tracks)
|
||||
sender.replaceTrack(t);
|
||||
}
|
||||
function setupPeerTransform(operation, peer, worker, aesKey, key) {
|
||||
if (worker && "RTCRtpScriptTransform" in window) {
|
||||
|
@ -361,7 +427,7 @@ const processCommand = (function () {
|
|||
console.log(`no ${operation}`);
|
||||
}
|
||||
}
|
||||
function callMediaConstraints(mediaType) {
|
||||
function callMediaConstraints(mediaType, facingMode) {
|
||||
switch (mediaType) {
|
||||
case CallMediaType.Audio:
|
||||
return { audio: true, video: false };
|
||||
|
@ -376,6 +442,7 @@ const processCommand = (function () {
|
|||
max: 1280,
|
||||
},
|
||||
aspectRatio: 1.33,
|
||||
facingMode,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
|
|
@@ -20,6 +20,7 @@ body {
max-width: 30%;
object-fit: cover;
margin: 16px;
margin-bottom: 20%;
border-radius: 16px;
bottom: 0;
right: 0;

@ -3,18 +3,31 @@ package chat.simplex.app.views.call
|
|||
import android.Manifest
|
||||
import android.content.ClipData
|
||||
import android.content.ClipboardManager
|
||||
import android.os.Build
|
||||
import android.service.controls.templates.ControlButton
|
||||
import android.util.Log
|
||||
import android.view.ViewGroup
|
||||
import android.webkit.*
|
||||
import androidx.activity.compose.BackHandler
|
||||
import androidx.annotation.RequiresApi
|
||||
import androidx.annotation.StringRes
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.layout.*
|
||||
import androidx.compose.foundation.magnifier
|
||||
import androidx.compose.material.*
|
||||
import androidx.compose.material.icons.Icons
|
||||
import androidx.compose.material.icons.filled.*
|
||||
import androidx.compose.material.icons.outlined.*
|
||||
import androidx.compose.runtime.*
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.graphics.vector.ImageVector
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.platform.LocalLifecycleOwner
|
||||
import androidx.compose.ui.res.stringResource
|
||||
import androidx.compose.ui.text.capitalize
|
||||
import androidx.compose.ui.tooling.preview.Preview
|
||||
import androidx.compose.ui.unit.dp
|
||||
import androidx.compose.ui.viewinterop.AndroidView
|
||||
import androidx.core.content.ContextCompat
|
||||
|
@ -22,11 +35,12 @@ import androidx.lifecycle.Lifecycle
|
|||
import androidx.lifecycle.LifecycleEventObserver
|
||||
import androidx.webkit.WebViewAssetLoader
|
||||
import androidx.webkit.WebViewClientCompat
|
||||
import chat.simplex.app.R
|
||||
import chat.simplex.app.TAG
|
||||
import chat.simplex.app.model.ChatModel
|
||||
import chat.simplex.app.model.json
|
||||
import chat.simplex.app.views.helpers.TextEditor
|
||||
import chat.simplex.app.views.helpers.withApi
|
||||
import chat.simplex.app.model.*
|
||||
import chat.simplex.app.ui.theme.SimpleXTheme
|
||||
import chat.simplex.app.views.chat.ChatInfoLayout
|
||||
import chat.simplex.app.views.helpers.*
|
||||
import com.google.accompanist.permissions.rememberMultiplePermissionsState
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.serialization.decodeFromString
|
||||
|
@ -67,7 +81,7 @@ fun ActiveCallView(chatModel: ChatModel) {
|
|||
}
|
||||
is WCallResponse.Connection ->
|
||||
try {
|
||||
val callStatus = WebRTCCallStatus.valueOf(r.state.connectionState)
|
||||
val callStatus = json.decodeFromString<WebRTCCallStatus>("\"${r.state.connectionState}\"")
|
||||
if (callStatus == WebRTCCallStatus.Connected) {
|
||||
chatModel.activeCall.value = call.copy(callState = CallState.Connected)
|
||||
}
|
||||
|
@ -86,6 +100,7 @@ fun ActiveCallView(chatModel: ChatModel) {
|
|||
CallMediaType.Audio -> chatModel.activeCall.value = call.copy(audioEnabled = cmd.enable)
|
||||
}
|
||||
}
|
||||
is WCallCommand.Camera -> chatModel.activeCall.value = call.copy(localCamera = cmd.camera)
|
||||
is WCallCommand.End -> endCall()
|
||||
else -> {}
|
||||
}
|
||||
|
@ -95,13 +110,108 @@ fun ActiveCallView(chatModel: ChatModel) {
|
|||
}
|
||||
}
|
||||
}
|
||||
ActiveCallOverlay()
|
||||
val call = chatModel.activeCall.value
|
||||
if (call != null) ActiveCallOverlay(call, chatModel, endCall)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
fun ActiveCallOverlay() {
|
||||
private fun ActiveCallOverlay(call: Call, chatModel: ChatModel, endCall: () -> Unit) {
|
||||
ActiveCallOverlayLayout(
|
||||
call = call,
|
||||
dismiss = {
|
||||
chatModel.callCommand.value = WCallCommand.End
|
||||
withApi {
|
||||
chatModel.controller.apiEndCall(call.contact)
|
||||
endCall()
|
||||
}
|
||||
},
|
||||
toggleAudio = { chatModel.callCommand.value = WCallCommand.Media(CallMediaType.Audio, enable = !call.audioEnabled) },
|
||||
toggleVideo = { chatModel.callCommand.value = WCallCommand.Media(CallMediaType.Video, enable = !call.videoEnabled) },
|
||||
flipCamera = { chatModel.callCommand.value = WCallCommand.Camera(call.localCamera.flipped) }
|
||||
)
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun ActiveCallOverlayLayout(
|
||||
call: Call,
|
||||
dismiss: () -> Unit,
|
||||
toggleAudio: () -> Unit,
|
||||
toggleVideo: () -> Unit,
|
||||
flipCamera: () -> Unit
|
||||
) {
|
||||
Column(Modifier.padding(16.dp)) {
|
||||
when (call.peerMedia ?: call.localMedia) {
|
||||
CallMediaType.Video -> {
|
||||
CallInfoView(call, alignment = Alignment.Start)
|
||||
Spacer(Modifier.fillMaxHeight().weight(1f))
|
||||
Row(Modifier.fillMaxWidth().padding(horizontal = 6.dp), horizontalArrangement = Arrangement.SpaceBetween, verticalAlignment = Alignment.CenterVertically) {
|
||||
ToggleAudioButton(call, toggleAudio)
|
||||
Spacer(Modifier.size(40.dp))
|
||||
IconButton(onClick = dismiss) {
|
||||
Icon(Icons.Filled.CallEnd, stringResource(R.string.icon_descr_hang_up), tint = Color.Red, modifier = Modifier.size(64.dp))
|
||||
}
|
||||
ControlButton(call, Icons.Filled.FlipCameraAndroid, R.string.icon_descr_flip_camera, flipCamera)
|
||||
if (call.videoEnabled) {
|
||||
ControlButton(call, Icons.Filled.Videocam, R.string.icon_descr_video_off, toggleVideo)
|
||||
} else {
|
||||
ControlButton(call, Icons.Outlined.VideocamOff, R.string.icon_descr_video_on, toggleVideo)
|
||||
}
|
||||
}
|
||||
}
|
||||
CallMediaType.Audio -> {
|
||||
Spacer(Modifier.fillMaxHeight().weight(1f))
|
||||
Column(
|
||||
Modifier.fillMaxWidth(),
|
||||
horizontalAlignment = Alignment.CenterHorizontally,
|
||||
verticalArrangement = Arrangement.Center
|
||||
) {
|
||||
ProfileImage(size = 192.dp, image = call.contact.profile.image)
|
||||
CallInfoView(call, alignment = Alignment.CenterHorizontally)
|
||||
}
|
||||
Spacer(Modifier.fillMaxHeight().weight(1f))
|
||||
Box(Modifier.fillMaxWidth().padding(bottom = 48.dp), contentAlignment = Alignment.CenterStart) {
|
||||
Box(Modifier.fillMaxWidth(), contentAlignment = Alignment.Center) {
|
||||
IconButton(onClick = dismiss) {
|
||||
Icon(Icons.Filled.CallEnd, stringResource(R.string.icon_descr_hang_up), tint = Color.Red, modifier = Modifier.size(64.dp))
|
||||
}
|
||||
}
|
||||
Box(Modifier.padding(start = 32.dp)) {
|
||||
ToggleAudioButton(call, toggleAudio)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun ControlButton(call: Call, icon: ImageVector, @StringRes iconText: Int, action: () -> Unit) {
|
||||
if (call.hasMedia) {
|
||||
IconButton(onClick = action) {
|
||||
Icon(icon, stringResource(iconText), tint = Color.White, modifier = Modifier.size(40.dp))
|
||||
}
|
||||
} else {
|
||||
Spacer(Modifier.size(40.dp))
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun ToggleAudioButton(call: Call, toggleAudio: () -> Unit) {
|
||||
if (call.audioEnabled) {
|
||||
ControlButton(call, Icons.Outlined.Mic, R.string.icon_descr_audio_off, toggleAudio)
|
||||
} else {
|
||||
ControlButton(call, Icons.Outlined.MicOff, R.string.icon_descr_audio_on, toggleAudio)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun CallInfoView(call: Call, alignment: Alignment.Horizontal) {
|
||||
Column(horizontalAlignment = alignment) {
|
||||
Text(call.contact.chatViewName, color = Color.White, style = MaterialTheme.typography.body2)
|
||||
Text(call.callState.text, color = Color.White, style = MaterialTheme.typography.body2)
|
||||
Text(call.encryptionStatus, color = Color.White, style = MaterialTheme.typography.body2)
|
||||
}
|
||||
}
|
||||
|
||||
//@Composable
|
||||
|
@ -270,3 +380,41 @@ private class LocalContentWebViewClient(private val assetLoader: WebViewAssetLoa
|
|||
return assetLoader.shouldInterceptRequest(request.url)
|
||||
}
|
||||
}
|
||||
|
||||
@Preview
|
||||
@Composable
|
||||
fun PreviewActiveCallOverlayVideo() {
|
||||
SimpleXTheme {
|
||||
ActiveCallOverlayLayout(
|
||||
call = Call(
|
||||
contact = Contact.sampleData,
|
||||
callState = CallState.Negotiated,
|
||||
localMedia = CallMediaType.Video,
|
||||
peerMedia = CallMediaType.Video
|
||||
),
|
||||
dismiss = {},
|
||||
toggleAudio = {},
|
||||
toggleVideo = {},
|
||||
flipCamera = {}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Preview
|
||||
@Composable
|
||||
fun PreviewActiveCallOverlayAudio() {
|
||||
SimpleXTheme {
|
||||
ActiveCallOverlayLayout(
|
||||
call = Call(
|
||||
contact = Contact.sampleData,
|
||||
callState = CallState.Negotiated,
|
||||
localMedia = CallMediaType.Audio,
|
||||
peerMedia = CallMediaType.Audio
|
||||
),
|
||||
dismiss = {},
|
||||
toggleAudio = {},
|
||||
toggleVideo = {},
|
||||
flipCamera = {}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,5 +1,7 @@
package chat.simplex.app.views.call

import androidx.compose.runtime.Composable
import androidx.compose.ui.res.stringResource
import chat.simplex.app.R
import chat.simplex.app.model.Contact
import chat.simplex.app.views.helpers.generalGetString

@@ -15,9 +17,20 @@ data class Call(
val sharedKey: String? = null,
val audioEnabled: Boolean = true,
val videoEnabled: Boolean = localMedia == CallMediaType.Video,
var localCamera: VideoCamera = VideoCamera.User,
val connectionInfo: ConnectionInfo? = null
) {
val encrypted: Boolean get() = (localCapabilities?.encryption ?: false) && sharedKey != null
val encrypted: Boolean get() = localEncrypted && sharedKey != null
val localEncrypted: Boolean get() = localCapabilities?.encryption ?: false

val encryptionStatus: String @Composable get() = when(callState) {
CallState.WaitCapabilities -> ""
CallState.InvitationSent -> stringResource(if (localEncrypted) R.string.status_e2e_encrypted else R.string.status_no_e2e_encryption)
CallState.InvitationReceived -> stringResource(if (sharedKey == null) R.string.status_contact_has_no_e2e_encryption else R.string.status_contact_has_e2e_encryption)
else -> stringResource(if (!localEncrypted) R.string.status_no_e2e_encryption else if (sharedKey == null) R.string.status_contact_has_no_e2e_encryption else R.string.status_e2e_encrypted)
}

val hasMedia: Boolean get() = callState == CallState.OfferSent || callState == CallState.Negotiated || callState == CallState.Connected
}

enum class CallState {

@@ -29,14 +42,14 @@ enum class CallState {
Negotiated,
Connected;

val text: String get() = when(this) {
WaitCapabilities -> generalGetString(R.string.callstate_starting)
InvitationSent -> generalGetString(R.string.callstate_waiting_for_answer)
InvitationReceived -> generalGetString(R.string.callstate_starting)
OfferSent -> generalGetString(R.string.callstate_waiting_for_confirmation)
OfferReceived -> generalGetString(R.string.callstate_received_answer)
Negotiated -> generalGetString(R.string.callstate_connecting)
Connected -> generalGetString(R.string.callstate_connected)
val text: String @Composable get() = when(this) {
WaitCapabilities -> stringResource(R.string.callstate_starting)
InvitationSent -> stringResource(R.string.callstate_waiting_for_answer)
InvitationReceived -> stringResource(R.string.callstate_starting)
OfferSent -> stringResource(R.string.callstate_waiting_for_confirmation)
OfferReceived -> stringResource(R.string.callstate_received_answer)
Negotiated -> stringResource(R.string.callstate_connecting)
Connected -> stringResource(R.string.callstate_connected)
}
}

@@ -51,6 +64,7 @@ sealed class WCallCommand {
@Serializable @SerialName("answer") class Answer (val answer: String, val iceCandidates: String): WCallCommand()
@Serializable @SerialName("ice") class Ice(val iceCandidates: String): WCallCommand()
@Serializable @SerialName("media") class Media(val media: CallMediaType, val enable: Boolean): WCallCommand()
@Serializable @SerialName("camera") class Camera(val camera: VideoCamera): WCallCommand()
@Serializable @SerialName("end") object End: WCallCommand()
}

@@ -100,6 +114,7 @@ enum class RTCIceCandidateType {
@Serializable
enum class WebRTCCallStatus {
@SerialName("connected") Connected,
@SerialName("connecting") Connecting,
@SerialName("disconnected") Disconnected,
@SerialName("failed") Failed
}

@@ -110,6 +125,13 @@ enum class CallMediaType {
@SerialName("audio") Audio
}

@Serializable
enum class VideoCamera {
@SerialName("user") User,
@SerialName("environment") Environment;
val flipped: VideoCamera get() = if (this == User) Environment else User
}

@Serializable
class ConnectionState(
val connectionState: String,

@@ -350,4 +350,16 @@
<string name="if_you_accept_this_call_your_ip_address_visible">If you accept this call, your IP address might be visible to your contact, unless you connect via relay.</string>
<string name="answer">Answer</string>
<string name="call_already_ended">Call already ended!</string>

<!-- Call overlay -->
<string name="status_e2e_encrypted">e2e encrypted</string>
<string name="status_no_e2e_encryption">no e2e encryption</string>
<string name="status_contact_has_e2e_encryption">contact has e2e encryption</string>
<string name="status_contact_has_no_e2e_encryption">contact has no e2e encryption</string>
<string name="icon_descr_hang_up">Hang up</string>
<string name="icon_descr_video_off">Video off</string>
<string name="icon_descr_video_on">Video on</string>
<string name="icon_descr_audio_off">Audio off</string>
<string name="icon_descr_audio_on">Audio on</string>
<string name="icon_descr_flip_camera">Flip camera</string>
</resources>

@@ -351,4 +351,16 @@
<string name="if_you_accept_this_call_your_ip_address_visible">If you accept this call, your IP address might be visible to your contact, unless you connect via relay.</string>
<string name="answer">Answer</string>
<string name="call_already_ended">Call already ended!</string>

<!-- Call overlay -->
<string name="status_e2e_encrypted">e2e encrypted</string>
<string name="status_no_e2e_encryption">no e2e encryption</string>
<string name="status_contact_has_e2e_encryption">contact has e2e encryption</string>
<string name="status_contact_has_no_e2e_encryption">contact has no e2e encryption</string>
<string name="icon_descr_hang_up">Hang up</string>
<string name="icon_descr_video_off">Video off</string>
<string name="icon_descr_video_on">Video on</string>
<string name="icon_descr_audio_off">Audio off</string>
<string name="icon_descr_audio_on">Audio on</string>
<string name="icon_descr_flip_camera">Flip camera</string>
</resources>

@@ -55,12 +55,18 @@ enum CallMediaType: String, Codable, Equatable {
case audio = "audio"
}

enum VideoCamera: String, Codable, Equatable {
case user = "user"
case environment = "environment"
}

struct CallCapabilities: Codable, Equatable {
var encryption: Bool
}

enum WebRTCCallStatus: String, Encodable {
case connected = "connected"
case connecting = "connecting"
case disconnected = "disconnected"
case failed = "failed"
}

@ -78,6 +78,8 @@ struct ActiveCallView: View {
|
|||
case .video: m.activeCall = call.copy(videoEnabled: enable)
|
||||
case .audio: m.activeCall = call.copy(audioEnabled: enable)
|
||||
}
|
||||
case let .camera(camera):
|
||||
m.activeCall = call.copy(localCamera: camera)
|
||||
case .end:
|
||||
m.activeCall = nil
|
||||
m.activeCallInvitation = nil
|
||||
|
@ -109,6 +111,31 @@ struct ActiveCallOverlay: View {
|
|||
.foregroundColor(.white)
|
||||
.opacity(0.8)
|
||||
.padding()
|
||||
|
||||
Spacer()
|
||||
|
||||
HStack {
|
||||
controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
|
||||
chatModel.callCommand = .media(media: .audio, enable: !call.audioEnabled)
|
||||
}
|
||||
Spacer()
|
||||
Color.clear.frame(width: 40, height: 40)
|
||||
Spacer()
|
||||
callButton("phone.down.fill", size: 60) { dismiss() }
|
||||
.foregroundColor(.red)
|
||||
Spacer()
|
||||
controlButton(call, "arrow.triangle.2.circlepath") {
|
||||
chatModel.callCommand = .camera(camera: call.localCamera == .user ? .environment : .user)
|
||||
}
|
||||
Spacer()
|
||||
controlButton(call, call.videoEnabled ? "video.fill" : "video.slash") {
|
||||
chatModel.callCommand = .media(media: .video, enable: !call.videoEnabled)
|
||||
}
|
||||
}
|
||||
.padding(.horizontal, 20)
|
||||
.padding(.bottom, 16)
|
||||
.frame(maxWidth: .infinity, alignment: .center)
|
||||
|
||||
case .audio:
|
||||
VStack {
|
||||
ProfileImage(imageStr: call.contact.profile.image)
|
||||
|
@ -120,30 +147,20 @@ struct ActiveCallOverlay: View {
|
|||
.opacity(0.8)
|
||||
.padding()
|
||||
.frame(maxHeight: .infinity)
|
||||
}
|
||||
Spacer()
|
||||
ZStack(alignment: .bottom) {
|
||||
VStack(alignment: .leading) {
|
||||
if call.localMedia == .video {
|
||||
callButton(call.videoEnabled ? "video.fill" : "video.slash", size: 48) {
|
||||
chatModel.callCommand = .media(media: .video, enable: !call.videoEnabled)
|
||||
}
|
||||
.foregroundColor(.white)
|
||||
.opacity(0.85)
|
||||
}
|
||||
callButton(call.audioEnabled ? "mic.fill" : "mic.slash", size: 48) {
|
||||
|
||||
Spacer()
|
||||
|
||||
ZStack(alignment: .bottom) {
|
||||
controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
|
||||
chatModel.callCommand = .media(media: .audio, enable: !call.audioEnabled)
|
||||
}
|
||||
.foregroundColor(.white)
|
||||
.opacity(0.85)
|
||||
.frame(maxWidth: .infinity, alignment: .leading)
|
||||
.padding(.top)
|
||||
callButton("phone.down.fill", size: 60) { dismiss() }
|
||||
.foregroundColor(.red)
|
||||
}
|
||||
callButton("phone.down.fill", size: 60) { dismiss() }
|
||||
.foregroundColor(.red)
|
||||
.padding(.bottom, 60)
|
||||
.padding(.horizontal, 48)
|
||||
}
|
||||
.padding(.bottom, 60)
|
||||
.padding(.horizontal, 48)
|
||||
}
|
||||
}
|
||||
.frame(maxWidth: .infinity)
|
||||
|
@ -155,18 +172,26 @@ struct ActiveCallOverlay: View {
|
|||
.lineLimit(1)
|
||||
.font(.title)
|
||||
.frame(maxWidth: .infinity, alignment: alignment)
|
||||
let status = call.callState == .connected
|
||||
? call.encrypted
|
||||
? "end-to-end encrypted"
|
||||
: "no end-to-end encryption"
|
||||
: call.callState.text
|
||||
Text(status)
|
||||
.font(.subheadline)
|
||||
.frame(maxWidth: .infinity, alignment: alignment)
|
||||
Group {
|
||||
Text(call.callState.text)
|
||||
Text(call.encryptionStatus)
|
||||
}
|
||||
.font(.subheadline)
|
||||
.frame(maxWidth: .infinity, alignment: alignment)
|
||||
}
|
||||
}
|
||||
|
||||
private func callButton(_ imageName: String, size: CGFloat, perform: @escaping () -> Void) -> some View {
|
||||
@ViewBuilder private func controlButton(_ call: Call, _ imageName: String, _ perform: @escaping () -> Void) -> some View {
|
||||
if call.hasMedia {
|
||||
callButton(imageName, size: 40, perform)
|
||||
.foregroundColor(.white)
|
||||
.opacity(0.85)
|
||||
} else {
|
||||
Color.clear.frame(width: 40, height: 40)
|
||||
}
|
||||
}
|
||||
|
||||
private func callButton(_ imageName: String, size: CGFloat, _ perform: @escaping () -> Void) -> some View {
|
||||
Button {
|
||||
perform()
|
||||
} label: {
|
||||
|
|
|
@ -22,6 +22,7 @@ class Call: Equatable {
|
|||
var sharedKey: String?
|
||||
var audioEnabled: Bool
|
||||
var videoEnabled: Bool
|
||||
var localCamera: VideoCamera
|
||||
var connectionInfo: ConnectionInfo?
|
||||
|
||||
init(
|
||||
|
@ -33,6 +34,7 @@ class Call: Equatable {
|
|||
sharedKey: String? = nil,
|
||||
audioEnabled: Bool? = nil,
|
||||
videoEnabled: Bool? = nil,
|
||||
localCamera: VideoCamera = .user,
|
||||
connectionInfo: ConnectionInfo? = nil
|
||||
) {
|
||||
self.contact = contact
|
||||
|
@ -43,6 +45,7 @@ class Call: Equatable {
|
|||
self.sharedKey = sharedKey
|
||||
self.audioEnabled = audioEnabled ?? true
|
||||
self.videoEnabled = videoEnabled ?? (localMedia == .video)
|
||||
self.localCamera = localCamera
|
||||
self.connectionInfo = connectionInfo
|
||||
}
|
||||
|
||||
|
@ -55,6 +58,7 @@ class Call: Equatable {
|
|||
sharedKey: String? = nil,
|
||||
audioEnabled: Bool? = nil,
|
||||
videoEnabled: Bool? = nil,
|
||||
localCamera: VideoCamera? = nil,
|
||||
connectionInfo: ConnectionInfo? = nil
|
||||
) -> Call {
|
||||
Call (
|
||||
|
@ -66,11 +70,24 @@ class Call: Equatable {
|
|||
sharedKey: sharedKey ?? self.sharedKey,
|
||||
audioEnabled: audioEnabled ?? self.audioEnabled,
|
||||
videoEnabled: videoEnabled ?? self.videoEnabled,
|
||||
localCamera: localCamera ?? self.localCamera,
|
||||
connectionInfo: connectionInfo ?? self.connectionInfo
|
||||
)
|
||||
}
|
||||
|
||||
var encrypted: Bool { get { (localCapabilities?.encryption ?? false) && sharedKey != nil } }
|
||||
var encrypted: Bool { get { localEncrypted && sharedKey != nil } }
|
||||
var localEncrypted: Bool { get { localCapabilities?.encryption ?? false } }
|
||||
var encryptionStatus: LocalizedStringKey {
|
||||
get {
|
||||
switch callState {
|
||||
case .waitCapabilities: return ""
|
||||
case .invitationSent: return localEncrypted ? "e2e encrypted" : "no e2e encryption"
|
||||
case .invitationReceived: return sharedKey == nil ? "contact has no e2e encryption" : "contact has e2e encryption"
|
||||
default: return !localEncrypted ? "no e2e encryption" : sharedKey == nil ? "contact has no e2e encryption" : "e2e encrypted"
|
||||
}
|
||||
}
|
||||
}
|
||||
var hasMedia: Bool { get { callState == .offerSent || callState == .negotiated || callState == .connected } }
|
||||
}
|
||||
|
||||
enum CallState {
|
||||
|
@ -113,11 +130,13 @@ enum WCallCommand: Equatable, Encodable, Decodable {
|
|||
case answer(answer: String, iceCandidates: String)
|
||||
case ice(iceCandidates: String)
|
||||
case media(media: CallMediaType, enable: Bool)
|
||||
case camera(camera: VideoCamera)
|
||||
case end
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case type
|
||||
case media
|
||||
case camera
|
||||
case aesKey
|
||||
case useWorker
|
||||
case offer
|
||||
|
@ -137,6 +156,7 @@ enum WCallCommand: Equatable, Encodable, Decodable {
|
|||
case .answer: return "answer"
|
||||
case .ice: return "ice"
|
||||
case .media: return "media"
|
||||
case .camera: return "camera"
|
||||
case .end: return "end"
|
||||
}
|
||||
}
|
||||
|
@ -175,6 +195,9 @@ enum WCallCommand: Equatable, Encodable, Decodable {
|
|||
try container.encode("media", forKey: .type)
|
||||
try container.encode(media, forKey: .media)
|
||||
try container.encode(enable, forKey: .enable)
|
||||
case let .camera(camera):
|
||||
try container.encode("camera", forKey: .type)
|
||||
try container.encode(camera, forKey: .camera)
|
||||
case .end:
|
||||
try container.encode("end", forKey: .type)
|
||||
}
|
||||
|
@ -214,6 +237,9 @@ enum WCallCommand: Equatable, Encodable, Decodable {
|
|||
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
|
||||
let enable = try container.decode(Bool.self, forKey: CodingKeys.enable)
|
||||
self = .media(media: media, enable: enable)
|
||||
case "camera":
|
||||
let camera = try container.decode(VideoCamera.self, forKey: CodingKeys.camera)
|
||||
self = .camera(camera: camera)
|
||||
case "end":
|
||||
self = .end
|
||||
default:
|
||||
|
|
|
@@ -1,6 +1,6 @@
{
"name": "@simplex-chat/webrtc",
"version": "0.0.3",
"version": "0.0.4",
"description": "WebRTC call in browser and webview for SimpleX Chat clients",
"main": "dist/call.js",
"types": "dist/call.d.ts",

@@ -5,8 +5,19 @@
<script src="./lz-string.min.js"></script>
</head>
<body>
<video id="remote-video-stream" autoplay playsinline></video>
<video id="local-video-stream" muted autoplay playsinline></video>
<video
id="remote-video-stream"
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-video-stream"
muted
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
</body>
<footer>
<script src="./call.js"></script>

@ -7,7 +7,15 @@ interface WVApiMessage {
|
|||
command?: WCallCommand
|
||||
}
|
||||
|
||||
type WCallCommand = WCCapabilities | WCStartCall | WCAcceptOffer | WCallAnswer | WCallIceCandidates | WCEnableMedia | WCEndCall
|
||||
type WCallCommand =
|
||||
| WCCapabilities
|
||||
| WCStartCall
|
||||
| WCAcceptOffer
|
||||
| WCallAnswer
|
||||
| WCallIceCandidates
|
||||
| WCEnableMedia
|
||||
| WCToggleCamera
|
||||
| WCEndCall
|
||||
|
||||
type WCallResponse =
|
||||
| WRCapabilities
|
||||
|
@ -21,7 +29,7 @@ type WCallResponse =
|
|||
| WRError
|
||||
| WCAcceptOffer
|
||||
|
||||
type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "end"
|
||||
type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "camera" | "end"
|
||||
|
||||
type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "connected" | "ended" | "ok" | "error"
|
||||
|
||||
|
@ -30,6 +38,11 @@ enum CallMediaType {
|
|||
Video = "video",
|
||||
}
|
||||
|
||||
enum VideoCamera {
|
||||
User = "user",
|
||||
Environment = "environment",
|
||||
}
|
||||
|
||||
interface IWCallCommand {
|
||||
type: WCallCommandTag
|
||||
}
|
||||
|
@ -91,6 +104,11 @@ interface WCEnableMedia extends IWCallCommand {
|
|||
enable: boolean
|
||||
}
|
||||
|
||||
interface WCToggleCamera extends IWCallCommand {
|
||||
type: "camera"
|
||||
camera: VideoCamera
|
||||
}
|
||||
|
||||
interface WRCapabilities extends IWCallResponse {
|
||||
type: "capabilities"
|
||||
capabilities: CallCapabilities
|
||||
|
@ -158,6 +176,21 @@ interface WVAPICall {
|
|||
command: WCallCommand
|
||||
}
|
||||
|
||||
interface Call {
|
||||
connection: RTCPeerConnection
|
||||
iceCandidates: Promise<string> // JSON strings for RTCIceCandidate
|
||||
localMedia: CallMediaType
|
||||
localCamera: VideoCamera
|
||||
localStream: MediaStream
|
||||
remoteStream: MediaStream
|
||||
aesKey?: string
|
||||
useWorker?: boolean
|
||||
worker?: Worker
|
||||
key?: CryptoKey
|
||||
}
|
||||
|
||||
let activeCall: Call | undefined
|
||||
|
||||
const processCommand = (function () {
|
||||
type RTCRtpSenderWithEncryption = RTCRtpSender & {
|
||||
createEncodedStreams: () => TransformStream
|
||||
|
@ -173,13 +206,6 @@ const processCommand = (function () {
|
|||
encodedInsertableStreams: boolean
|
||||
}
|
||||
|
||||
interface Call {
|
||||
connection: RTCPeerConnection
|
||||
iceCandidates: Promise<string> // JSON strings for RTCIceCandidate
|
||||
localMedia: CallMediaType
|
||||
localStream: MediaStream
|
||||
}
|
||||
|
||||
interface CallConfig {
|
||||
peerConnectionConfig: RTCConfigurationWithEncryption
|
||||
iceCandidates: {
|
||||
|
@ -189,8 +215,6 @@ const processCommand = (function () {
|
|||
}
|
||||
}
|
||||
|
||||
let activeCall: Call | undefined
|
||||
|
||||
const defaultIceServers: RTCIceServer[] = [
|
||||
{urls: ["stun:stun.simplex.chat:5349"]},
|
||||
{urls: ["turn:turn.simplex.chat:5349"], username: "private", credential: "yleob6AVkiNI87hpR94Z"},
|
||||
|
@ -205,20 +229,15 @@ const processCommand = (function () {
|
|||
iceTransportPolicy: relay ? "relay" : "all",
|
||||
},
|
||||
iceCandidates: {
|
||||
delay: 2000,
|
||||
delay: 3000,
|
||||
extrasInterval: 2000,
|
||||
extrasTimeout: 8000,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string, useWorker?: boolean): Promise<Call> {
|
||||
const conn = new RTCPeerConnection(config.peerConnectionConfig)
|
||||
const remoteStream = new MediaStream()
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType))
|
||||
await setUpMediaStreams(conn, localStream, remoteStream, aesKey, useWorker)
|
||||
conn.addEventListener("connectionstatechange", connectionStateChange)
|
||||
const iceCandidates = new Promise<string>((resolve, _) => {
|
||||
function getIceCandidates(conn: RTCPeerConnection, config: CallConfig) {
|
||||
return new Promise<string>((resolve, _) => {
|
||||
let candidates: RTCIceCandidate[] = []
|
||||
let resolved = false
|
||||
let extrasInterval: number | undefined
|
||||
|
@ -264,29 +283,37 @@ const processCommand = (function () {
|
|||
sendMessageToNative({resp: {type: "ice", iceCandidates}})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return {connection: conn, iceCandidates, localMedia: mediaType, localStream}
|
||||
async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string, useWorker?: boolean): Promise<Call> {
|
||||
const pc = new RTCPeerConnection(config.peerConnectionConfig)
|
||||
const remoteStream = new MediaStream()
|
||||
const localCamera = VideoCamera.User
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType, localCamera))
|
||||
const iceCandidates = getIceCandidates(pc, config)
|
||||
const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker}
|
||||
await setupMediaStreams(call)
|
||||
pc.addEventListener("connectionstatechange", connectionStateChange)
|
||||
return call
|
||||
|
||||
async function connectionStateChange() {
|
||||
sendMessageToNative({
|
||||
resp: {
|
||||
type: "connection",
|
||||
state: {
|
||||
connectionState: conn.connectionState,
|
||||
iceConnectionState: conn.iceConnectionState,
|
||||
iceGatheringState: conn.iceGatheringState,
|
||||
signalingState: conn.signalingState,
|
||||
connectionState: pc.connectionState,
|
||||
iceConnectionState: pc.iceConnectionState,
|
||||
iceGatheringState: pc.iceGatheringState,
|
||||
signalingState: pc.signalingState,
|
||||
},
|
||||
},
|
||||
})
|
||||
if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
|
||||
conn.removeEventListener("connectionstatechange", connectionStateChange)
|
||||
conn.close()
|
||||
activeCall = undefined
|
||||
resetVideoElements()
|
||||
if (pc.connectionState == "disconnected" || pc.connectionState == "failed") {
|
||||
pc.removeEventListener("connectionstatechange", connectionStateChange)
|
||||
endCall()
|
||||
setTimeout(() => sendMessageToNative({resp: {type: "ended"}}), 0)
|
||||
} else if (conn.connectionState == "connected") {
|
||||
const stats = (await conn.getStats()) as Map<string, any>
|
||||
} else if (pc.connectionState == "connected") {
|
||||
const stats = (await pc.getStats()) as Map<string, any>
|
||||
for (const stat of stats.values()) {
|
||||
const {type, state} = stat
|
||||
if (type === "candidate-pair" && state === "succeeded") {
|
||||
|
@ -325,39 +352,36 @@ const processCommand = (function () {
|
|||
const encryption = supportsInsertableStreams(command.useWorker)
|
||||
resp = {type: "capabilities", capabilities: {encryption}}
|
||||
break
|
||||
case "start":
|
||||
case "start": {
|
||||
console.log("starting call")
|
||||
if (activeCall) {
|
||||
// TODO cancel current call
|
||||
resp = {type: "error", message: "start: call already started"}
|
||||
} else {
|
||||
const {media, useWorker, iceServers, relay} = command
|
||||
const encryption = supportsInsertableStreams(useWorker)
|
||||
const aesKey = encryption ? command.aesKey : undefined
|
||||
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker)
|
||||
const pc = activeCall.connection
|
||||
const offer = await pc.createOffer()
|
||||
await pc.setLocalDescription(offer)
|
||||
// for debugging, returning the command for callee to use
|
||||
// resp = {
|
||||
// type: "offer",
|
||||
// offer: serialize(offer),
|
||||
// iceCandidates: await activeCall.iceCandidates,
|
||||
// capabilities: {encryption},
|
||||
// media,
|
||||
// iceServers,
|
||||
// relay,
|
||||
// aesKey,
|
||||
// useWorker,
|
||||
// }
|
||||
resp = {
|
||||
type: "offer",
|
||||
offer: serialize(offer),
|
||||
iceCandidates: await activeCall.iceCandidates,
|
||||
capabilities: {encryption},
|
||||
}
|
||||
if (activeCall) endCall()
|
||||
const {media, useWorker, iceServers, relay} = command
|
||||
const encryption = supportsInsertableStreams(useWorker)
|
||||
const aesKey = encryption ? command.aesKey : undefined
|
||||
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker)
|
||||
const pc = activeCall.connection
|
||||
const offer = await pc.createOffer()
|
||||
await pc.setLocalDescription(offer)
|
||||
// for debugging, returning the command for callee to use
|
||||
// resp = {
|
||||
// type: "offer",
|
||||
// offer: serialize(offer),
|
||||
// iceCandidates: await activeCall.iceCandidates,
|
||||
// capabilities: {encryption},
|
||||
// media,
|
||||
// iceServers,
|
||||
// relay,
|
||||
// aesKey,
|
||||
// useWorker,
|
||||
// }
|
||||
resp = {
|
||||
type: "offer",
|
||||
offer: serialize(offer),
|
||||
iceCandidates: await activeCall.iceCandidates,
|
||||
capabilities: {encryption},
|
||||
}
|
||||
break
|
||||
}
|
||||
case "offer":
|
||||
if (activeCall) {
|
||||
resp = {type: "error", message: "accept: call already started"}
|
||||
|
@ -415,16 +439,26 @@ const processCommand = (function () {
|
|||
resp = {type: "ok"}
|
||||
}
|
||||
break
|
||||
case "end":
|
||||
if (pc) {
|
||||
pc.close()
|
||||
activeCall = undefined
|
||||
resetVideoElements()
|
||||
resp = {type: "ok"}
|
||||
case "camera":
|
||||
if (!activeCall || !pc) {
|
||||
resp = {type: "error", message: "camera: call not started"}
|
||||
} else if (activeCall.localMedia == CallMediaType.Audio) {
|
||||
resp = {type: "error", message: "camera: no video"}
|
||||
} else {
|
||||
resp = {type: "error", message: "end: call not started"}
|
||||
try {
|
||||
if (command.camera != activeCall.localCamera) {
|
||||
await replaceCamera(activeCall, command.camera)
|
||||
}
|
||||
resp = {type: "ok"}
|
||||
} catch (e) {
|
||||
resp = {type: "error", message: `camera: ${(e as Error).message}`}
|
||||
}
|
||||
}
|
||||
break
|
||||
case "end":
|
||||
endCall()
|
||||
resp = {type: "ok"}
|
||||
break
|
||||
default:
|
||||
resp = {type: "error", message: "unknown command"}
|
||||
break
|
||||
|
@ -437,55 +471,80 @@ const processCommand = (function () {
|
|||
return apiResp
|
||||
}
|
||||
|
||||
function endCall() {
|
||||
try {
|
||||
activeCall?.connection?.close()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
activeCall = undefined
|
||||
resetVideoElements()
|
||||
}
|
||||
|
||||
function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidateInit[]) {
|
||||
for (const c of iceCandidates) {
|
||||
conn.addIceCandidate(new RTCIceCandidate(c))
|
||||
}
|
||||
}
|
||||
|
||||
async function setUpMediaStreams(
|
||||
pc: RTCPeerConnection,
|
||||
localStream: MediaStream,
|
||||
remoteStream: MediaStream,
|
||||
aesKey?: string,
|
||||
useWorker?: boolean
|
||||
): Promise<void> {
|
||||
async function setupMediaStreams(call: Call): Promise<void> {
|
||||
const videos = getVideoElements()
|
||||
if (!videos) throw Error("no video elements")
|
||||
await setupEncryptionWorker(call)
|
||||
setupLocalStream(call)
|
||||
setupRemoteStream(call)
|
||||
setupCodecPreferences(call)
|
||||
// setupVideoElement(videos.local)
|
||||
// setupVideoElement(videos.remote)
|
||||
videos.local.srcObject = call.localStream
|
||||
videos.remote.srcObject = call.remoteStream
|
||||
}
|
||||
|
||||
let key: CryptoKey | undefined
|
||||
let worker: Worker | undefined
|
||||
if (aesKey) {
|
||||
key = await callCrypto.decodeAesKey(aesKey)
|
||||
if (useWorker) {
|
||||
async function setupEncryptionWorker(call: Call) {
|
||||
if (call.aesKey) {
|
||||
if (!call.key) call.key = await callCrypto.decodeAesKey(call.aesKey)
|
||||
if (call.useWorker && !call.worker) {
|
||||
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`
|
||||
worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
|
||||
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function setupLocalStream(call: Call) {
|
||||
const videos = getVideoElements()
|
||||
if (!videos) throw Error("no video elements")
|
||||
const pc = call.connection
|
||||
let {localStream} = call
|
||||
|
||||
for (const track of localStream.getTracks()) {
|
||||
pc.addTrack(track, localStream)
|
||||
}
|
||||
|
||||
if (aesKey && key) {
|
||||
if (call.aesKey && call.key) {
|
||||
console.log("set up encryption for sending")
|
||||
for (const sender of pc.getSenders() as RTCRtpSenderWithEncryption[]) {
|
||||
setupPeerTransform(TransformOperation.Encrypt, sender, worker, aesKey, key)
|
||||
setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function setupRemoteStream(call: Call) {
|
||||
// Pull tracks from the remote stream as they arrive and add them to remoteStream, which is shown in the remote video element
|
||||
const pc = call.connection
|
||||
pc.ontrack = (event) => {
|
||||
if (aesKey && key) {
|
||||
if (call.aesKey && call.key) {
|
||||
console.log("set up decryption for receiving")
|
||||
setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, worker, aesKey, key)
|
||||
setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, call.worker, call.aesKey, call.key)
|
||||
}
|
||||
for (const stream of event.streams) {
|
||||
for (const track of stream.getTracks()) {
|
||||
remoteStream.addTrack(track)
|
||||
call.remoteStream.addTrack(track)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function setupCodecPreferences(call: Call) {
|
||||
// We assume VP8 encoding in the decode/encode stages to get the initial
|
||||
// bytes to pass as plaintext so we enforce that here.
|
||||
// VP8 is supported by all browsers that support WebRTC.
|
||||
|
@ -506,16 +565,31 @@ const processCommand = (function () {
|
|||
const selectedCodec = codecs[selectedCodecIndex]
|
||||
codecs.splice(selectedCodecIndex, 1)
|
||||
codecs.unshift(selectedCodec)
|
||||
for (const t of pc.getTransceivers()) {
|
||||
for (const t of call.connection.getTransceivers()) {
|
||||
if (t.sender.track?.kind === "video") {
|
||||
t.setCodecPreferences(codecs)
|
||||
}
|
||||
}
|
||||
}
|
||||
// setupVideoElement(videos.local)
|
||||
// setupVideoElement(videos.remote)
|
||||
}
|
||||
|
||||
async function replaceCamera(call: Call, camera: VideoCamera): Promise<void> {
|
||||
const videos = getVideoElements()
|
||||
if (!videos) throw Error("no video elements")
|
||||
const pc = call.connection
|
||||
for (const t of call.localStream.getTracks()) t.stop()
|
||||
call.localCamera = camera
|
||||
const constraints = callMediaConstraints(call.localMedia, camera)
|
||||
const localStream = await navigator.mediaDevices.getUserMedia(constraints)
|
||||
replaceTracks(pc, localStream.getVideoTracks())
|
||||
replaceTracks(pc, localStream.getAudioTracks())
|
||||
call.localStream = localStream
|
||||
videos.local.srcObject = localStream
|
||||
videos.remote.srcObject = remoteStream
|
||||
}
|
||||
|
||||
function replaceTracks(pc: RTCPeerConnection, tracks: MediaStreamTrack[]) {
|
||||
const sender = pc.getSenders().find((s) => s.track?.kind === tracks[0].kind)
|
||||
if (sender) for (const t of tracks) sender.replaceTrack(t)
|
||||
}
|
||||
|
||||
function setupPeerTransform(
|
||||
|
@ -543,7 +617,7 @@ const processCommand = (function () {
|
|||
}
|
||||
}
|
||||
|
||||
function callMediaConstraints(mediaType: CallMediaType): MediaStreamConstraints {
|
||||
function callMediaConstraints(mediaType: CallMediaType, facingMode: VideoCamera): MediaStreamConstraints {
|
||||
switch (mediaType) {
|
||||
case CallMediaType.Audio:
|
||||
return {audio: true, video: false}
|
||||
|
@ -558,6 +632,7 @@ const processCommand = (function () {
|
|||
max: 1280,
|
||||
},
|
||||
aspectRatio: 1.33,
|
||||
facingMode,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -20,6 +20,7 @@ body {
max-width: 30%;
object-fit: cover;
margin: 16px;
margin-bottom: 20%;
border-radius: 16px;
bottom: 0;
right: 0;