desktop: screen sharing in video calls (#3310)

* desktop: screen sharing

* use async function

* fit/fill of the video

* disconnect camera button from screen share

* enable video on audio call

* temp

* Revert "temp"

This reverts commit 8f8a2f7f88.

* Revert "enable video on audio call"

This reverts commit 120068d09a.

* different logic

---------

Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
Stanislav Dmitrenko 2023-11-05 00:59:07 +08:00 committed by GitHub
parent 4816150b99
commit 10cbb13c26
13 changed files with 243 additions and 28 deletions
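
The core desktop mechanism added here: screen capture comes from `navigator.mediaDevices.getDisplayMedia`, and the captured video track replaces the outgoing camera track on the existing `RTCPeerConnection`, so the call keeps running without renegotiation. A minimal TypeScript sketch of that pattern follows; the frame rate mirrors the diff, while `pc`, `localVideo`, the helper name `toggleScreenShareSketch` and the direct use of `RTCRtpSender.replaceTrack` are illustrative assumptions, a simplification of the `toggleScreenShare`/`replaceMedia` helpers in the diff below.

// Illustrative sketch only: swap the outgoing video between camera and screen
// capture without renegotiating the call. `pc` (the RTCPeerConnection) and
// `localVideo` (the local preview element) are assumed to exist in the caller.
async function toggleScreenShareSketch(
  pc: RTCPeerConnection,
  localVideo: HTMLVideoElement,
  currentlySharing: boolean
): Promise<boolean> {
  const sharing = !currentlySharing
  // getDisplayMedia prompts the user to pick a screen or window; getUserMedia returns the camera.
  const stream = sharing
    ? await navigator.mediaDevices.getDisplayMedia({video: {frameRate: 24}, audio: true})
    : await navigator.mediaDevices.getUserMedia({video: true, audio: true})
  const videoTrack = stream.getVideoTracks()[0]
  // Replace the track on the existing video sender; no new SDP offer/answer is required.
  const sender = pc.getSenders().find((s) => s.track?.kind == "video")
  await sender?.replaceTrack(videoTrack)
  localVideo.srcObject = stream
  return sharing
}

In the commit itself this logic lives in `toggleScreenShare` and `replaceMedia` (see the call.ts hunks below), which additionally stop the old tracks and preserve the camera and microphone enabled state.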


@@ -589,18 +589,30 @@ fun ChatInfoToolbar(
   if (chat.chatInfo is ChatInfo.Direct && chat.chatInfo.contact.allowsFeature(ChatFeature.Calls)) {
     if (activeCall == null) {
       barButtons.add {
-        IconButton(
-          {
-            showMenu.value = false
-            startCall(CallMediaType.Audio)
-          },
-          enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
-        ) {
-          Icon(
-            painterResource(MR.images.ic_call_500),
-            stringResource(MR.strings.icon_descr_more_button),
-            tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
-          )
+        if (appPlatform.isAndroid) {
+          IconButton({
+            showMenu.value = false
+            startCall(CallMediaType.Audio)
+          }, enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
+          ) {
+            Icon(
+              painterResource(MR.images.ic_call_500),
+              stringResource(MR.strings.icon_descr_audio_call).capitalize(Locale.current),
+              tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
+            )
+          }
+        } else {
+          IconButton({
+            showMenu.value = false
+            startCall(CallMediaType.Video)
+          }, enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
+          ) {
+            Icon(
+              painterResource(MR.images.ic_videocam),
+              stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current),
+              tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
+            )
+          }
         }
       }
     } else if (activeCall?.contact?.id == chat.id) {
@@ -634,10 +646,17 @@ fun ChatInfoToolbar(
       }
       if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active && activeCall == null) {
         menuItems.add {
-          ItemAction(stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current), painterResource(MR.images.ic_videocam), onClick = {
-            showMenu.value = false
-            startCall(CallMediaType.Video)
-          })
+          if (appPlatform.isAndroid) {
+            ItemAction(stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current), painterResource(MR.images.ic_videocam), onClick = {
+              showMenu.value = false
+              startCall(CallMediaType.Video)
+            })
+          } else {
+            ItemAction(stringResource(MR.strings.icon_descr_audio_call).capitalize(Locale.current), painterResource(MR.images.ic_call_500), onClick = {
+              showMenu.value = false
+              startCall(CallMediaType.Audio)
+            })
+          }
         }
       }
     } else if (chat.chatInfo is ChatInfo.Group && chat.chatInfo.groupInfo.canAddMembers) {


@@ -11,6 +11,7 @@
       autoplay
       playsinline
       poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
+      onclick="javascript:toggleRemoteVideoFitFill()"
     ></video>
     <video
       id="local-video-stream"


@@ -14,6 +14,7 @@ var VideoCamera;
 // for debugging
 // var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
 var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
+var toggleScreenShare = async () => { };
 // Global object with cryptrographic/encoding functions
 const callCrypto = callCryptoFunction();
 var TransformOperation;
@@ -24,6 +25,7 @@ var TransformOperation;
 let activeCall;
 let answerTimeout = 30000;
 var useWorker = false;
+var isDesktop = false;
 var localizedState = "";
 var localizedDescription = "";
 const processCommand = (function () {
@@ -106,8 +108,24 @@ const processCommand = (function () {
         const remoteStream = new MediaStream();
         const localCamera = VideoCamera.User;
         const localStream = await getLocalMediaStream(mediaType, localCamera);
+        if (isDesktop) {
+            localStream
+                .getTracks()
+                .filter((elem) => elem.kind == "video")
+                .forEach((elem) => (elem.enabled = false));
+        }
         const iceCandidates = getIceCandidates(pc, config);
-        const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey };
+        const call = {
+            connection: pc,
+            iceCandidates,
+            localMedia: mediaType,
+            localCamera,
+            localStream,
+            remoteStream,
+            aesKey,
+            screenShareEnabled: false,
+            cameraEnabled: true,
+        };
         await setupMediaStreams(call);
         let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
         pc.addEventListener("connectionstatechange", connectionStateChange);
@@ -430,12 +448,31 @@ const processCommand = (function () {
         if (!videos)
             throw Error("no video elements");
         const pc = call.connection;
+        const oldAudioTracks = call.localStream.getAudioTracks();
+        const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled);
+        let localStream;
+        try {
+            localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera);
+        }
+        catch (e) {
+            if (call.screenShareEnabled) {
+                call.screenShareEnabled = false;
+            }
+            return;
+        }
         for (const t of call.localStream.getTracks())
             t.stop();
         call.localCamera = camera;
-        const localStream = await getLocalMediaStream(call.localMedia, camera);
-        replaceTracks(pc, localStream.getVideoTracks());
-        replaceTracks(pc, localStream.getAudioTracks());
+        const audioTracks = localStream.getAudioTracks();
+        const videoTracks = localStream.getVideoTracks();
+        if (!audioWasEnabled && oldAudioTracks.length > 0) {
+            audioTracks.forEach((elem) => (elem.enabled = false));
+        }
+        if (!call.cameraEnabled && !call.screenShareEnabled) {
+            videoTracks.forEach((elem) => (elem.enabled = false));
+        }
+        replaceTracks(pc, audioTracks);
+        replaceTracks(pc, videoTracks);
         call.localStream = localStream;
         videos.local.srcObject = localStream;
     }
@@ -472,6 +509,21 @@ const processCommand = (function () {
         const constraints = callMediaConstraints(mediaType, facingMode);
         return navigator.mediaDevices.getUserMedia(constraints);
     }
+    function getLocalScreenCaptureStream() {
+        const constraints /* DisplayMediaStreamConstraints */ = {
+            video: {
+                frameRate: 24,
+                //width: {
+                //min: 480,
+                //ideal: 720,
+                //max: 1280,
+                //},
+                //aspectRatio: 1.33,
+            },
+            audio: true,
+        };
+        return navigator.mediaDevices.getDisplayMedia(constraints);
+    }
     function callMediaConstraints(mediaType, facingMode) {
         switch (mediaType) {
             case CallMediaType.Audio:
@@ -526,9 +578,23 @@ const processCommand = (function () {
         const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
         for (const t of tracks)
             t.enabled = enable;
+        if (media == CallMediaType.Video && activeCall) {
+            activeCall.cameraEnabled = enable;
+        }
     }
+    toggleScreenShare = async function () {
+        const call = activeCall;
+        if (!call)
+            return;
+        call.screenShareEnabled = !call.screenShareEnabled;
+        await replaceMedia(call, call.localCamera);
+    };
     return processCommand;
 })();
+function toggleRemoteVideoFitFill() {
+    const remote = document.getElementById("remote-video-stream");
+    remote.style.objectFit = remote.style.objectFit != "contain" ? "contain" : "cover";
+}
 function toggleMedia(s, media) {
     let res = false;
     const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
@@ -536,6 +602,9 @@ function toggleMedia(s, media) {
         t.enabled = !t.enabled;
         res = t.enabled;
     }
+    if (media == CallMediaType.Video && activeCall) {
+        activeCall.cameraEnabled = res;
+    }
     return res;
 }
 // Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)


@@ -12,6 +12,7 @@
       autoplay
       playsinline
       poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
+      onclick="javascript:toggleRemoteVideoFitFill()"
     ></video>
     <video
       id="local-video-stream"
@@ -29,6 +30,9 @@
       <img src="/desktop/images/ic_phone_in_talk.svg" />
     </div>
     <p id="manage-call">
+      <button id="toggle-screen" style="display: none" onclick="javascript:toggleScreenManually()">
+        <img src="/desktop/images/ic_screen_share.svg" />
+      </button>
       <button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
         <img src="/desktop/images/ic_mic.svg" />
       </button>
@@ -39,7 +43,7 @@
       <img src="/desktop/images/ic_volume_up.svg" />
     </button>
     <button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
-      <img src="/desktop/images/ic_videocam_filled.svg" />
+      <img src="/desktop/images/ic_videocam_off.svg" />
     </button>
   </p>
 </body>


@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="M335.5-388H393v-89q0-23.875 16.35-40.438Q425.7-534 450.175-534H530v68l97-96.5-97-97v68h-80.077q-47.756 0-81.09 33.396Q335.5-524.708 335.5-477v89ZM74-126q-12.25 0-20.625-8.425Q45-142.851 45-154.925 45-167 53.375-175.25T74-183.5h812.5q11.675 0 20.088 8.463Q915-166.574 915-154.825q0 12.325-8.412 20.575Q898.175-126 886.5-126H74Zm68.5-117q-22.969 0-40.234-17.266Q85-277.531 85-300.5V-777q0-22.969 17.266-40.234Q119.531-834.5 142.5-834.5h675q22.969 0 40.234 17.266Q875-799.969 875-777v476.5q0 22.969-17.266 40.234Q840.469-243 817.5-243h-675Zm0-57.5h675V-777h-675v476.5Zm0 0V-777v476.5Z"/></svg>



@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="m549-484.5-107-107h88v-68l97 97-78 78ZM799.5-233 741-291.5h70.5V-771h-550L204-828.5h607.5q22.969 0 40.234 17.266Q869-793.969 869-771v479.53q0 26.088-20.25 43.779Q828.5-230 799.5-233Zm36 200.5L737-131H45v-57.5h635L634.5-234h-485q-22.969 0-40.234-17.266Q92-268.531 92-291.5v-481.25q0-2.75.5-3.75l-56-55L78-873 877-74l-41.5 41.5ZM393-475.5v88h-57.5V-477q0-10 2.25-22.5t7.25-23.534L149.5-719v427.5H577l-184-184ZM502-532Zm-138 26.5Z"/></svg>



@@ -1,6 +1,7 @@
 "use strict";
 // Override defaults to enable worker on Chrome and Safari
 useWorker = typeof window.Worker !== "undefined";
+isDesktop = true;
 // Create WebSocket connection.
 const socket = new WebSocket(`ws://${location.host}`);
 socket.addEventListener("open", (_event) => {
@@ -42,11 +43,28 @@ function toggleSpeakerManually() {
 }
 function toggleVideoManually() {
     if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) {
-        document.getElementById("toggle-video").innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Video)
+        let res;
+        if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled) {
+            activeCall.cameraEnabled = !activeCall.cameraEnabled;
+            res = activeCall.cameraEnabled;
+        }
+        else {
+            res = toggleMedia(activeCall.localStream, CallMediaType.Video);
+        }
+        document.getElementById("toggle-video").innerHTML = res
             ? '<img src="/desktop/images/ic_videocam_filled.svg" />'
             : '<img src="/desktop/images/ic_videocam_off.svg" />';
     }
 }
+async function toggleScreenManually() {
+    const was = activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled;
+    await toggleScreenShare();
+    if (was != (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled)) {
+        document.getElementById("toggle-screen").innerHTML = (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled)
+            ? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
+            : '<img src="/desktop/images/ic_screen_share.svg" />';
+    }
+}
 function reactOnMessageFromServer(msg) {
     var _a;
     switch ((_a = msg.command) === null || _a === void 0 ? void 0 : _a.type) {
@@ -57,8 +75,9 @@ function reactOnMessageFromServer(msg) {
         case "start":
             document.getElementById("toggle-audio").style.display = "inline-block";
             document.getElementById("toggle-speaker").style.display = "inline-block";
-            if (msg.command.media == "video") {
+            if (msg.command.media == CallMediaType.Video) {
                 document.getElementById("toggle-video").style.display = "inline-block";
+                document.getElementById("toggle-screen").style.display = "inline-block";
             }
             document.getElementById("info-block").className = msg.command.media;
             break;


@@ -11,6 +11,7 @@
       autoplay
       playsinline
       poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
+      onclick="javascript:toggleRemoteVideoFitFill()"
     ></video>
     <video
       id="local-video-stream"


@@ -165,6 +165,7 @@ interface ConnectionInfo {
 // for debugging
 // var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
 var sendMessageToNative = (msg: WVApiMessage) => console.log(JSON.stringify(msg))
+var toggleScreenShare = async () => {}
 // Global object with cryptrographic/encoding functions
 const callCrypto = callCryptoFunction()
@@ -193,6 +194,8 @@ interface Call {
   localCamera: VideoCamera
   localStream: MediaStream
   remoteStream: MediaStream
+  screenShareEnabled: boolean
+  cameraEnabled: boolean
   aesKey?: string
   worker?: Worker
   key?: CryptoKey
@@ -201,6 +204,7 @@ interface Call {
 let activeCall: Call | undefined
 let answerTimeout = 30_000
 var useWorker = false
+var isDesktop = false
 var localizedState = ""
 var localizedDescription = ""
@@ -308,8 +312,24 @@ const processCommand = (function () {
       const remoteStream = new MediaStream()
       const localCamera = VideoCamera.User
       const localStream = await getLocalMediaStream(mediaType, localCamera)
+      if (isDesktop) {
+        localStream
+          .getTracks()
+          .filter((elem) => elem.kind == "video")
+          .forEach((elem) => (elem.enabled = false))
+      }
       const iceCandidates = getIceCandidates(pc, config)
-      const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey}
+      const call = {
+        connection: pc,
+        iceCandidates,
+        localMedia: mediaType,
+        localCamera,
+        localStream,
+        remoteStream,
+        aesKey,
+        screenShareEnabled: false,
+        cameraEnabled: true,
+      }
       await setupMediaStreams(call)
       let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
       pc.addEventListener("connectionstatechange", connectionStateChange)
@@ -626,11 +646,31 @@ const processCommand = (function () {
     const videos = getVideoElements()
     if (!videos) throw Error("no video elements")
     const pc = call.connection
+    const oldAudioTracks = call.localStream.getAudioTracks()
+    const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled)
+    let localStream: MediaStream
+    try {
+      localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera)
+    } catch (e: any) {
+      if (call.screenShareEnabled) {
+        call.screenShareEnabled = false
+      }
+      return
+    }
     for (const t of call.localStream.getTracks()) t.stop()
     call.localCamera = camera
-    const localStream = await getLocalMediaStream(call.localMedia, camera)
-    replaceTracks(pc, localStream.getVideoTracks())
-    replaceTracks(pc, localStream.getAudioTracks())
+    const audioTracks = localStream.getAudioTracks()
+    const videoTracks = localStream.getVideoTracks()
+    if (!audioWasEnabled && oldAudioTracks.length > 0) {
+      audioTracks.forEach((elem) => (elem.enabled = false))
+    }
+    if (!call.cameraEnabled && !call.screenShareEnabled) {
+      videoTracks.forEach((elem) => (elem.enabled = false))
+    }
+    replaceTracks(pc, audioTracks)
+    replaceTracks(pc, videoTracks)
     call.localStream = localStream
     videos.local.srcObject = localStream
   }
@@ -671,6 +711,22 @@ const processCommand = (function () {
     return navigator.mediaDevices.getUserMedia(constraints)
   }
+  function getLocalScreenCaptureStream(): Promise<MediaStream> {
+    const constraints: any /* DisplayMediaStreamConstraints */ = {
+      video: {
+        frameRate: 24,
+        //width: {
+        //min: 480,
+        //ideal: 720,
+        //max: 1280,
+        //},
+        //aspectRatio: 1.33,
+      },
+      audio: true,
+    }
+    return navigator.mediaDevices.getDisplayMedia(constraints)
+  }
   function callMediaConstraints(mediaType: CallMediaType, facingMode: VideoCamera): MediaStreamConstraints {
     switch (mediaType) {
       case CallMediaType.Audio:
@@ -735,10 +791,26 @@ const processCommand = (function () {
   function enableMedia(s: MediaStream, media: CallMediaType, enable: boolean) {
     const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
     for (const t of tracks) t.enabled = enable
+    if (media == CallMediaType.Video && activeCall) {
+      activeCall.cameraEnabled = enable
+    }
   }
+  toggleScreenShare = async function () {
+    const call = activeCall
+    if (!call) return
+    call.screenShareEnabled = !call.screenShareEnabled
+    await replaceMedia(call, call.localCamera)
+  }
   return processCommand
 })()
+function toggleRemoteVideoFitFill() {
+  const remote = document.getElementById("remote-video-stream")!
+  remote.style.objectFit = remote.style.objectFit != "contain" ? "contain" : "cover"
+}
 function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
   let res = false
   const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
@@ -746,6 +818,9 @@ function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
     t.enabled = !t.enabled
     res = t.enabled
   }
+  if (media == CallMediaType.Video && activeCall) {
+    activeCall.cameraEnabled = res
+  }
   return res
 }


@@ -12,6 +12,7 @@
       autoplay
       playsinline
       poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
+      onclick="javascript:toggleRemoteVideoFitFill()"
     ></video>
     <video
       id="local-video-stream"
@@ -29,6 +30,9 @@
       <img src="/desktop/images/ic_phone_in_talk.svg" />
     </div>
     <p id="manage-call">
+      <button id="toggle-screen" style="display: none" onclick="javascript:toggleScreenManually()">
+        <img src="/desktop/images/ic_screen_share.svg" />
+      </button>
       <button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
         <img src="/desktop/images/ic_mic.svg" />
       </button>
@@ -39,7 +43,7 @@
       <img src="/desktop/images/ic_volume_up.svg" />
     </button>
     <button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
-      <img src="/desktop/images/ic_videocam_filled.svg" />
+      <img src="/desktop/images/ic_videocam_off.svg" />
     </button>
   </p>
 </body>


@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="M335.5-388H393v-89q0-23.875 16.35-40.438Q425.7-534 450.175-534H530v68l97-96.5-97-97v68h-80.077q-47.756 0-81.09 33.396Q335.5-524.708 335.5-477v89ZM74-126q-12.25 0-20.625-8.425Q45-142.851 45-154.925 45-167 53.375-175.25T74-183.5h812.5q11.675 0 20.088 8.463Q915-166.574 915-154.825q0 12.325-8.412 20.575Q898.175-126 886.5-126H74Zm68.5-117q-22.969 0-40.234-17.266Q85-277.531 85-300.5V-777q0-22.969 17.266-40.234Q119.531-834.5 142.5-834.5h675q22.969 0 40.234 17.266Q875-799.969 875-777v476.5q0 22.969-17.266 40.234Q840.469-243 817.5-243h-675Zm0-57.5h675V-777h-675v476.5Zm0 0V-777v476.5Z"/></svg>



@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="m549-484.5-107-107h88v-68l97 97-78 78ZM799.5-233 741-291.5h70.5V-771h-550L204-828.5h607.5q22.969 0 40.234 17.266Q869-793.969 869-771v479.53q0 26.088-20.25 43.779Q828.5-230 799.5-233Zm36 200.5L737-131H45v-57.5h635L634.5-234h-485q-22.969 0-40.234-17.266Q92-268.531 92-291.5v-481.25q0-2.75.5-3.75l-56-55L78-873 877-74l-41.5 41.5ZM393-475.5v88h-57.5V-477q0-10 2.25-22.5t7.25-23.534L149.5-719v427.5H577l-184-184ZM502-532Zm-138 26.5Z"/></svg>



@@ -1,5 +1,6 @@
 // Override defaults to enable worker on Chrome and Safari
 useWorker = typeof window.Worker !== "undefined"
+isDesktop = true
 // Create WebSocket connection.
 const socket = new WebSocket(`ws://${location.host}`)
@@ -49,12 +50,29 @@ function toggleSpeakerManually() {
 }
 function toggleVideoManually() {
   if (activeCall?.localMedia) {
-    document.getElementById("toggle-video")!!.innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Video)
+    let res: boolean
+    if (activeCall?.screenShareEnabled) {
+      activeCall.cameraEnabled = !activeCall.cameraEnabled
+      res = activeCall.cameraEnabled
+    } else {
+      res = toggleMedia(activeCall.localStream, CallMediaType.Video)
+    }
+    document.getElementById("toggle-video")!!.innerHTML = res
       ? '<img src="/desktop/images/ic_videocam_filled.svg" />'
       : '<img src="/desktop/images/ic_videocam_off.svg" />'
   }
 }
+async function toggleScreenManually() {
+  const was = activeCall?.screenShareEnabled
+  await toggleScreenShare()
+  if (was != activeCall?.screenShareEnabled) {
+    document.getElementById("toggle-screen")!!.innerHTML = activeCall?.screenShareEnabled
+      ? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
+      : '<img src="/desktop/images/ic_screen_share.svg" />'
+  }
+}
 function reactOnMessageFromServer(msg: WVApiMessage) {
   switch (msg.command?.type) {
     case "capabilities":
@@ -64,8 +82,9 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
     case "start":
       document.getElementById("toggle-audio")!!.style.display = "inline-block"
       document.getElementById("toggle-speaker")!!.style.display = "inline-block"
-      if (msg.command.media == "video") {
+      if (msg.command.media == CallMediaType.Video) {
         document.getElementById("toggle-video")!!.style.display = "inline-block"
+        document.getElementById("toggle-screen")!!.style.display = "inline-block"
       }
       document.getElementById("info-block")!!.className = msg.command.media
       break