Video and audio in separate webrtcbins
- Reduces latency when streaming with audio enabled.
- A/V sync stays within an acceptable range.
- The client now opens separate RTCPeerConnections for audio and video (see the sketch after this list).
- Both connections use the same config and STUN/TURN servers.
- Backend signalling clients and pipelines are duplicated accordingly.
- Removed the option to disable audio from the client UI and the CLI args.
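
A condensed sketch of the resulting client-side wiring, assembled from the app.js hunks below. `signallingUrl` stands in for the URL built in app.js, and the JSON-parsing step of the `/turn/` fetch is elided in the diff, so it is only assumed here:

    // Two independent sessions, one per media kind, each with its own
    // signalling channel and RTCPeerConnection (peer IDs 1 and 3).
    var signalling = new WebRTCDemoSignalling(signallingUrl);
    var webrtc = new WebRTCDemo(signalling, videoElement, 1);             // renders into <video id="stream">
    var audio_signalling = new WebRTCDemoSignalling(signallingUrl);
    var audio_webrtc = new WebRTCDemo(audio_signalling, audioElement, 3); // renders into <audio id="audio_stream">

    fetch("/turn/")
        .then((response) => response.json())   // assumed: the diff elides how the TURN response is parsed
        .then((config) => {
            // Both peer connections share the same STUN/TURN configuration.
            webrtc.rtcPeerConfig = config;
            audio_webrtc.rtcPeerConfig = config;
            webrtc.connect();
            audio_webrtc.connect();
        });
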
danisla committed Sep 22, 2023
1 parent 7b3b799 commit f325d3e
Showing 8 changed files with 130 additions and 122 deletions.
59 changes: 27 additions & 32 deletions addons/gst-web/src/app.js
@@ -76,7 +76,6 @@ var app = new Vue({
{ text: '60 fps', value: 60 },
{ text: '100 fps', value: 100 },
],
- audioEnabled: false,
audioBitRate: 32000,
audioBitRateOptions: [
{ text: '32 kb/s', value: 32000 },
@@ -242,12 +241,6 @@ var app = new Vue({
webrtc.sendDataChannelMessage('_arg_fps,' + newValue);
this.setIntParam("videoFramerate", newValue);
},
- audioEnabled(newValue, oldValue) {
- if (newValue === null) return;
- console.log("audio enabled changed from " + oldValue + " to " + newValue);
- if (oldValue !== null && newValue !== oldValue) webrtc.sendDataChannelMessage('_arg_audio,' + newValue);
- this.setBoolParam("audioEnabled", newValue);
- },
resizeRemote(newValue, oldValue) {
if (newValue === null) return;
console.log("resize remote changed from " + oldValue + " to " + newValue);
@@ -326,11 +319,18 @@ if (videoElement === null) {
throw 'videoElement not found on page';
}

+ var audioElement = document.getElementById("audio_stream");
+ if (audioElement === null) {
+ throw 'audioElement not found on page';
+ }
+
// WebRTC entrypoint, connect to the signalling server
/*global WebRTCDemoSignalling, WebRTCDemo*/
var protocol = (location.protocol == "http:" ? "ws://" : "wss://");
- var signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"), 1);
- var webrtc = new WebRTCDemo(signalling, videoElement);
+ var signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"));
+ var webrtc = new WebRTCDemo(signalling, videoElement, 1);
+ var audio_signalling = new WebRTCDemoSignalling(new URL(protocol + window.location.host + "/" + app.appName + "/signalling/"));
+ var audio_webrtc = new WebRTCDemo(audio_signalling, audioElement, 3);

// Function to add timestamp to logs.
var applyTimestamp = (msg) => {
@@ -351,15 +351,19 @@ signalling.ondisconnect = () => {
app.status = 'connecting';
videoElement.style.cursor = "auto";
webrtc.reset();
+ audio_webrtc.reset();
}

// Send webrtc status and error messages to logs.
webrtc.onstatus = (message) => { app.logEntries.push(applyTimestamp("[webrtc] " + message)) };
webrtc.onerror = (message) => { app.logEntries.push(applyTimestamp("[webrtc] [ERROR] " + message)) };
+ audio_webrtc.onstatus = (message) => { app.logEntries.push(applyTimestamp("[audio webrtc] " + message)) };
+ audio_webrtc.onerror = (message) => { app.logEntries.push(applyTimestamp("[audio webrtc] [ERROR] " + message)) };

if (app.debug) {
signalling.ondebug = (message) => { app.debugEntries.push("[signalling] " + message); };
webrtc.ondebug = (message) => { app.debugEntries.push(applyTimestamp("[webrtc] " + message)) };
+ audio_webrtc.ondebug = (message) => { app.debugEntries.push(applyTimestamp("[audio webrtc] " + message)) };
}

webrtc.ongpustats = (data) => {
@@ -409,19 +413,13 @@ webrtc.onconnectionstatechange = (state) => {
videoBytesReceivedStart = stats.video.bytesReceived;

// Audio stats.
- if (app.audioEnabled) {
- app.connectionLatency += stats.audio.jitterBufferDelay * 1000;
- app.connectionPacketsReceived += stats.audio.packetsReceived;
- app.connectionPacketsLost += stats.audio.packetsLost;
- app.connectionAudioLatency = parseInt(stats.audio.jitterBufferDelay * 1000);
- app.connectionAudioCodecName = stats.audio.codecName;
- app.connectionAudioBitrate = (((stats.audio.bytesReceived - audioBytesReceivedStart) / (now - statsStart)) * 8 / 1e+3).toFixed(2);
- audioBytesReceivedStart = stats.audio.bytesReceived;
- } else {
- app.connectionAudioBitrate = 0;
- app.connectionAudioCodecName = "NA";
- app.connectionAudioLatency = "NA";
- }
+ app.connectionLatency += stats.audio.jitterBufferDelay * 1000;
+ app.connectionPacketsReceived += stats.audio.packetsReceived;
+ app.connectionPacketsLost += stats.audio.packetsLost;
+ app.connectionAudioLatency = parseInt(stats.audio.jitterBufferDelay * 1000);
+ app.connectionAudioCodecName = stats.audio.codecName;
+ app.connectionAudioBitrate = (((stats.audio.bytesReceived - audioBytesReceivedStart) / (now - statsStart)) * 8 / 1e+3).toFixed(2);
+ audioBytesReceivedStart = stats.audio.bytesReceived;

// Format latency
app.connectionLatency = parseInt(app.connectionLatency);
@@ -485,6 +483,10 @@ webrtc.onplayvideorequired = () => {
app.showStart = true;
}

+ audio_webrtc.onplayvideorequired = () => {
+ app.showStart = true;
+ }
+
// Actions to take whenever window changes focus
window.addEventListener('focus', () => {
// reset keyboard to avoid stuck keys.
@@ -572,16 +574,6 @@ webrtc.onsystemaction = (action) => {
// Use the server setting.
app.audioBitRate = parseInt(action.split(",")[1]);
}
- } else if (action.startsWith('audio')) {
- // Server received audio enabled setting.
- const audioEnabledSetting = app.getBoolParam("audioEnabled" , null);
- if (audioEnabledSetting !== null) {
- // Prefer the user saved value.
- app.audioEnabled = audioEnabledSetting;
- } else {
- // Use the server setting.
- app.audioEnabled = (action.split(",")[1].toLowerCase() === 'true');
- }
} else if (action.startsWith('resize')) {
// Remote resize enabled/disabled action.
const resizeSetting = app.getBoolParam("resize", null);
@@ -687,6 +679,7 @@ fetch("/turn/")
.then((config) => {
// for debugging, force use of relay server.
webrtc.forceTurn = app.turnSwitch;
+ audio_webrtc.forceTurn = app.turnSwitch;

// get initial local resolution
app.windowResolution = webrtc.input.getWindowResolution();
@@ -702,5 +695,7 @@ fetch("/turn/")
app.debugEntries.push(applyTimestamp("[app] no TURN servers found."));
}
webrtc.rtcPeerConfig = config;
+ audio_webrtc.rtcPeerConfig = config;
webrtc.connect();
+ audio_webrtc.connect();
});
10 changes: 7 additions & 3 deletions addons/gst-web/src/index.html
@@ -274,7 +274,7 @@
</p>
<p>
<v-select :items="audioBitRateOptions" label="Audio bit rate" menu-props="left" v-model="audioBitRate"
- :disabled="!audioEnabled" hint="Dynamic bit rate selection for audio encoder on server"
+ hint="streaming bit rate selection for audio encoder on server"
persistent-hint>
</v-select>
</p>
@@ -311,8 +311,6 @@
<v-textarea bottom class="scrolly" label="Debug Logs" readonly :value="debugEntries.join('\n\n')">
</v-textarea>
<p>
- <v-btn color="primary" v-if="!audioEnabled" small v-on:click="audioEnabled=true">Enable Audio</v-btn>
- <v-btn color="primary" v-else small v-on:click="audioEnabled=false">Disable Audio</v-btn>
<v-switch v-model="resizeRemote" :label="`Resize remote to fit window: ${resizeRemote.toString()}`"></v-switch>
<v-switch v-model="scaleLocal" :label="`Scale to fit window: ${scaleLocal.toString()}`"></v-switch>
</p>
@@ -355,6 +353,12 @@
</v-container>
</v-navigation-drawer>

<div id="audio_container" class="audio-container">
<audio id="audio_stream" class="audio" preload="none" playsinline>
Your browser doesn't support audio
</audio>
</div>

<div id="video_container" class="video-container">
<video id="stream" class="video" preload="none" playsinline>
Your browser doesn't support video
11 changes: 4 additions & 7 deletions addons/gst-web/src/signalling.js
@@ -43,11 +43,8 @@ class WebRTCDemoSignalling {
* The URL object of the signalling server to connect to, created with `new URL()`.
* Signalling implementation is here:
* https://github.com/GStreamer/gstreamer/tree/main/subprojects/gst-examples/webrtc/signalling
- * @param {number} [peer_id]
- * The peer ID established during signalling that the sending peer (server) will connect to.
- * This can be anything, but must match what the server will attempt to connect to.
*/
- constructor(server, peer_id) {
+ constructor(server) {
/**
* @private
* @type {URL}
@@ -58,7 +55,7 @@
* @private
* @type {number}
*/
- this._peer_id = peer_id;
+ this.peer_id = 1;

/**
* @private
@@ -186,8 +183,8 @@
"scale": window.devicePixelRatio
};
this.state = 'connected';
- this._ws_conn.send(`HELLO ${this._peer_id} ${btoa(JSON.stringify(meta))}`);
- this._setStatus("Registering with server, peer ID: " + this._peer_id);
+ this._ws_conn.send(`HELLO ${this.peer_id} ${btoa(JSON.stringify(meta))}`);
+ this._setStatus("Registering with server, peer ID: " + this.peer_id);
this.retry_count = 0;
}

11 changes: 9 additions & 2 deletions addons/gst-web/src/webrtc.js
@@ -49,7 +49,7 @@ class WebRTCDemo {
* @param {Element} [element]
* video element to attach stream to.
*/
- constructor(signalling, element) {
+ constructor(signalling, element, peer_id) {
/**
* @type {WebRTCDemoSignalling}
*/
@@ -60,6 +60,11 @@
*/
this.element = element;

+ /**
+ * @type {Element}
+ */
+ this.peer_id = peer_id;
+
/**
* @type {boolean}
*/
@@ -313,7 +318,7 @@
this._setStatus("Received incoming " + event.track.kind + " stream from peer");
if (!this.streams) this.streams = [];
this.streams.push([event.track.kind, event.streams]);
if (event.track.kind === "video") {
if (event.track.kind === "video" || event.track.kind === "audio") {
this.element.srcObject = event.streams[0];
this.playVideo();
}
@@ -667,6 +672,8 @@
config.iceTransportPolicy = "relay";
this.peerConnection.setConfiguration(config);
}

+ this.signalling.peer_id = this.peer_id;
this.signalling.connect();
}
