From f325d3ee0ba7443c86abd12728a019a47e13bee9 Mon Sep 17 00:00:00 2001
From: Dan Isla
Date: Fri, 22 Sep 2023 03:24:05 +0000
Subject: [PATCH] Video and audio in separate webrtcbins
- Reduces latency when streaming with audio enabled.
- Audio/video synchronization remains within an acceptable tolerance.
- Client connects separate RTCPeerConnections for audio and video.
- Connections use the same config and STUN/TURN servers.
- Backend signalling clients and pipelines are also duplicated.
- Removed option to disable audio from client and cli args.
---
addons/gst-web/src/app.js | 59 +++++++--------
addons/gst-web/src/index.html | 10 ++-
addons/gst-web/src/signalling.js | 11 +--
addons/gst-web/src/webrtc.js | 11 ++-
src/selkies_gstreamer/__main__.py | 87 ++++++++++++++--------
src/selkies_gstreamer/gstwebrtc_app.py | 63 ++++++----------
src/selkies_gstreamer/webrtc_input.py | 7 --
src/selkies_gstreamer/webrtc_signalling.py | 4 +-
8 files changed, 130 insertions(+), 122 deletions(-)
diff --git a/addons/gst-web/src/app.js b/addons/gst-web/src/app.js
index f59db04a..72897eb7 100644
--- a/addons/gst-web/src/app.js
+++ b/addons/gst-web/src/app.js
@@ -76,7 +76,6 @@ var app = new Vue({
{ text: '60 fps', value: 60 },
{ text: '100 fps', value: 100 },
],
- audioEnabled: false,
audioBitRate: 32000,
audioBitRateOptions: [
{ text: '32 kb/s', value: 32000 },
@@ -242,12 +241,6 @@ var app = new Vue({
webrtc.sendDataChannelMessage('_arg_fps,' + newValue);
this.setIntParam("videoFramerate", newValue);
},
- audioEnabled(newValue, oldValue) {
- if (newValue === null) return;
- console.log("audio enabled changed from " + oldValue + " to " + newValue);
- if (oldValue !== null && newValue !== oldValue) webrtc.sendDataChannelMessage('_arg_audio,' + newValue);
- this.setBoolParam("audioEnabled", newValue);
- },
resizeRemote(newValue, oldValue) {
if (newValue === null) return;
console.log("resize remote changed from " + oldValue + " to " + newValue);
@@ -326,11 +319,18 @@ if (videoElement === null) {
throw 'videoElement not found on page';
}
+var audioElement = document.getElementById("audio_stream");
+if (audioElement === null) {
+ throw 'audioElement not found on page';
+}
+
// WebRTC entrypoint, connect to the signalling server
/*global WebRTCDemoSignalling, WebRTCDemo*/
var protocol = (location.protocol == "http:" ? "ws://" : "wss://");
-var signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"), 1);
-var webrtc = new WebRTCDemo(signalling, videoElement);
+var signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"));
+var webrtc = new WebRTCDemo(signalling, videoElement, 1);
+var audio_signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"));
+var audio_webrtc = new WebRTCDemo(audio_signalling, audioElement, 3);
// Function to add timestamp to logs.
var applyTimestamp = (msg) => {
@@ -351,15 +351,19 @@ signalling.ondisconnect = () => {
app.status = 'connecting';
videoElement.style.cursor = "auto";
webrtc.reset();
+ audio_webrtc.reset();
}
// Send webrtc status and error messages to logs.
webrtc.onstatus = (message) => { app.logEntries.push(applyTimestamp("[webrtc] " + message)) };
webrtc.onerror = (message) => { app.logEntries.push(applyTimestamp("[webrtc] [ERROR] " + message)) };
+audio_webrtc.onstatus = (message) => { app.logEntries.push(applyTimestamp("[audio webrtc] " + message)) };
+audio_webrtc.onerror = (message) => { app.logEntries.push(applyTimestamp("[audio webrtc] [ERROR] " + message)) };
if (app.debug) {
signalling.ondebug = (message) => { app.debugEntries.push("[signalling] " + message); };
webrtc.ondebug = (message) => { app.debugEntries.push(applyTimestamp("[webrtc] " + message)) };
+ audio_webrtc.ondebug = (message) => { app.debugEntries.push(applyTimestamp("[audio webrtc] " + message)) };
}
webrtc.ongpustats = (data) => {
@@ -409,19 +413,13 @@ webrtc.onconnectionstatechange = (state) => {
videoBytesReceivedStart = stats.video.bytesReceived;
// Audio stats.
- if (app.audioEnabled) {
- app.connectionLatency += stats.audio.jitterBufferDelay * 1000;
- app.connectionPacketsReceived += stats.audio.packetsReceived;
- app.connectionPacketsLost += stats.audio.packetsLost;
- app.connectionAudioLatency = parseInt(stats.audio.jitterBufferDelay * 1000);
- app.connectionAudioCodecName = stats.audio.codecName;
- app.connectionAudioBitrate = (((stats.audio.bytesReceived - audioBytesReceivedStart) / (now - statsStart)) * 8 / 1e+3).toFixed(2);
- audioBytesReceivedStart = stats.audio.bytesReceived;
- } else {
- app.connectionAudioBitrate = 0;
- app.connectionAudioCodecName = "NA";
- app.connectionAudioLatency = "NA";
- }
+ app.connectionLatency += stats.audio.jitterBufferDelay * 1000;
+ app.connectionPacketsReceived += stats.audio.packetsReceived;
+ app.connectionPacketsLost += stats.audio.packetsLost;
+ app.connectionAudioLatency = parseInt(stats.audio.jitterBufferDelay * 1000);
+ app.connectionAudioCodecName = stats.audio.codecName;
+ app.connectionAudioBitrate = (((stats.audio.bytesReceived - audioBytesReceivedStart) / (now - statsStart)) * 8 / 1e+3).toFixed(2);
+ audioBytesReceivedStart = stats.audio.bytesReceived;
// Format latency
app.connectionLatency = parseInt(app.connectionLatency);
@@ -485,6 +483,10 @@ webrtc.onplayvideorequired = () => {
app.showStart = true;
}
+audio_webrtc.onplayvideorequired = () => {
+ app.showStart = true;
+}
+
// Actions to take whenever window changes focus
window.addEventListener('focus', () => {
// reset keyboard to avoid stuck keys.
@@ -572,16 +574,6 @@ webrtc.onsystemaction = (action) => {
// Use the server setting.
app.audioBitRate = parseInt(action.split(",")[1]);
}
- } else if (action.startsWith('audio')) {
- // Server received audio enabled setting.
- const audioEnabledSetting = app.getBoolParam("audioEnabled" , null);
- if (audioEnabledSetting !== null) {
- // Prefer the user saved value.
- app.audioEnabled = audioEnabledSetting;
- } else {
- // Use the server setting.
- app.audioEnabled = (action.split(",")[1].toLowerCase() === 'true');
- }
} else if (action.startsWith('resize')) {
// Remote resize enabled/disabled action.
const resizeSetting = app.getBoolParam("resize", null);
@@ -687,6 +679,7 @@ fetch("/turn/")
.then((config) => {
// for debugging, force use of relay server.
webrtc.forceTurn = app.turnSwitch;
+ audio_webrtc.forceTurn = app.turnSwitch;
// get initial local resolution
app.windowResolution = webrtc.input.getWindowResolution();
@@ -702,5 +695,7 @@ fetch("/turn/")
app.debugEntries.push(applyTimestamp("[app] no TURN servers found."));
}
webrtc.rtcPeerConfig = config;
+ audio_webrtc.rtcPeerConfig = config;
webrtc.connect();
+ audio_webrtc.connect();
});
diff --git a/addons/gst-web/src/index.html b/addons/gst-web/src/index.html
index ba2ef2ce..a045438c 100644
--- a/addons/gst-web/src/index.html
+++ b/addons/gst-web/src/index.html
@@ -274,7 +274,7 @@
@@ -311,8 +311,6 @@
- Enable Audio
- Disable Audio
@@ -355,6 +353,12 @@
+
+
+
+