+ element = document.getElementById("playerUI");
+ }
+ if(!element) {
+ return;
+ }
+ if (element.requestFullscreen) {
+ element.requestFullscreen();
+ } else if (element.mozRequestFullScreen) {
+ element.mozRequestFullScreen();
+ } else if (element.webkitRequestFullscreen) {
+ element.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT);
+ } else if (element.msRequestFullscreen) {
+ element.msRequestFullscreen();
+ } else if (element.webkitEnterFullscreen) {
+ element.webkitEnterFullscreen(); //for iphone this code worked
+ }
+ }
+ onFullscreenChange()
+}
+
// Syncs the global `isFullscreen` flag with the browser's (vendor-prefixed)
// fullscreen state, and swaps the minimize/maximize button visibility to match.
function onFullscreenChange() {
    // Global assignment (no declaration) — mirrors the original's implicit global.
    isFullscreen = (document.webkitIsFullScreen
        || document.mozFullScreen
        || (document.msFullscreenElement && document.msFullscreenElement !== null)
        || (document.fullscreenElement && document.fullscreenElement !== null));

    const minimizeButton = document.getElementById('minimize');
    const maximizeButton = document.getElementById('maximize');
    if (!minimizeButton || !maximizeButton) {
        return;
    }
    // When fullscreen, offer the "minimize" control; otherwise offer "maximize".
    minimizeButton.style.display = isFullscreen ? 'inline' : 'none';
    maximizeButton.style.display = isFullscreen ? 'none' : 'inline';
}
+
// Reads initial player settings from the page's URL query string and applies
// them to the global `inputOptions` plus the related UI labels:
//   ?hoveringMouse      -> hovering mouse control scheme (else locked mouse)
//   ?noWatermark        -> hides the Unreal Engine watermark element
//   ?hideBrowserCursor  -> hides the browser cursor over the player
function parseURLParams() {
    const urlParams = new URLSearchParams(window.location.search);

    inputOptions.controlScheme = urlParams.has('hoveringMouse')
        ? ControlSchemeType.HoveringMouse
        : ControlSchemeType.LockedMouse;

    const schemeToggle = document.getElementById("control-scheme-text");
    if (inputOptions.controlScheme === ControlSchemeType.HoveringMouse) {
        schemeToggle.innerHTML = "Control Scheme: Hovering Mouse";
    } else if (inputOptions.controlScheme === ControlSchemeType.LockedMouse) {
        schemeToggle.innerHTML = "Control Scheme: Locked Mouse";
    } else {
        // Unreachable with the assignment above; kept for parity with the
        // original switch's default branch.
        schemeToggle.innerHTML = "Control Scheme: Locked Mouse";
        console.log(`ERROR: Unknown control scheme ${inputOptions.controlScheme}, defaulting to Locked Mouse`);
    }

    if (urlParams.has('noWatermark')) {
        const watermark = document.getElementById("unrealengine");
        watermark.style.display = 'none';
    }

    // URLSearchParams.has already returns a boolean.
    inputOptions.hideBrowserCursor = urlParams.has('hideBrowserCursor');
}
+
+
// Wires up all static HTML controls once at startup: window/gamepad/fullscreen
// listeners, the settings & stats panels, the encoder and WebRTC parameter
// forms, misc debug buttons, URL-param-backed toggles, and the stream/track
// selection dropdowns. Elements looked up here may legitimately be absent
// (hence the null checks); the handful without null checks are assumed to
// always exist in the page.
function setupHtmlEvents() {
    //Window events
    window.addEventListener('resize', resizePlayerStyle, true);
    window.addEventListener('orientationchange', onOrientationChange);

    //Gamepad events
    if (haveEvents) {
        window.addEventListener("gamepadconnected", gamepadConnectHandler);
        window.addEventListener("gamepaddisconnected", gamepadDisconnectHandler);
    } else if (haveWebkitEvents) {
        // Fallback for browsers that only fire the webkit-prefixed events.
        window.addEventListener("webkitgamepadconnected", gamepadConnectHandler);
        window.addEventListener("webkitgamepaddisconnected", gamepadDisconnectHandler);
    }

    // Fullscreen state changes fire differently-named events per engine;
    // register all of them against the same handler.
    document.addEventListener('webkitfullscreenchange', onFullscreenChange, false);
    document.addEventListener('mozfullscreenchange', onFullscreenChange, false);
    document.addEventListener('fullscreenchange', onFullscreenChange, false);
    document.addEventListener('MSFullscreenChange', onFullscreenChange, false);

    let settingsBtn = document.getElementById('settingsBtn');
    settingsBtn.addEventListener('click', settingsClicked);

    let statsBtn = document.getElementById('statsBtn');
    statsBtn.addEventListener('click', statsClicked);

    let controlBtn = document.getElementById('control-tgl');
    controlBtn.addEventListener('change', toggleControlScheme);

    let cursorBtn = document.getElementById('cursor-tgl');
    cursorBtn.addEventListener('change', toggleBrowserCursorVisibility);

    let resizeCheckBox = document.getElementById('enlarge-display-to-fill-window-tgl');
    if (resizeCheckBox !== null) {
        resizeCheckBox.onchange = function(event) {
            resizePlayerStyle();
        };
    }

    // NOTE(review): assigned without let/const/var — presumably a deliberate
    // module-level global used elsewhere; confirm it is declared in this file.
    qualityControlOwnershipCheckBox = document.getElementById('quality-control-ownership-tgl');
    if (qualityControlOwnershipCheckBox !== null) {
        qualityControlOwnershipCheckBox.onchange = function(event) {
            requestQualityControl();
        };
    }

    // Submits min/max quantization parameters to the encoder via console commands.
    let encoderParamsSubmit = document.getElementById('encoder-params-submit');
    if (encoderParamsSubmit !== null) {
        encoderParamsSubmit.onclick = function(event) {

            let minQP = document.getElementById('encoder-min-qp-text').value;
            let maxQP = document.getElementById('encoder-max-qp-text').value;

            emitCommand({ "Encoder.MinQP": minQP });
            emitCommand({ "Encoder.MaxQP": maxQP });
        };
    }

    // Submits FPS and bitrate bounds (form values are kbps, commands take bps).
    let webrtcParamsSubmit = document.getElementById('webrtc-params-submit');
    if (webrtcParamsSubmit !== null) {
        webrtcParamsSubmit.onclick = function(event) {
            let FPS = document.getElementById('webrtc-fps-text').value;
            let minBitrate = document.getElementById('webrtc-min-bitrate-text').value * 1000;
            let maxBitrate = document.getElementById('webrtc-max-bitrate-text').value * 1000;

            emitCommand({ 'WebRTC.Fps': FPS });
            emitCommand({ 'WebRTC.MinBitrate': minBitrate });
            emitCommand({ 'WebRTC.MaxBitrate': maxBitrate });
        };
    }

    let showFPSButton = document.getElementById('show-fps-button');
    if (showFPSButton !== null) {
        showFPSButton.onclick = function (event) {
            emitCommand({ "Stat.FPS": '' });
        };
    }

    let requestKeyframeButton = document.getElementById('request-keyframe-button');
    if (requestKeyframeButton !== null) {
        requestKeyframeButton.onclick = function (event) {
            toStreamerHandlers.IFrameRequest("IFrameRequest");
        };
    }

    let restartStreamButton = document.getElementById('restart-stream-button');
    if (restartStreamButton !== null) {
        restartStreamButton.onmousedown = function (event) {
            restartStream();
        };
    }

    let matchViewportResolutionCheckBox = document.getElementById('match-viewport-res-tgl');
    if (matchViewportResolutionCheckBox !== null) {
        matchViewportResolutionCheckBox.onchange = function (event) {
            matchViewportResolution = matchViewportResolutionCheckBox.checked;
            updateVideoStreamSize();
        };
    }

    let statsCheckBox = document.getElementById('show-stats-tgl');
    if (statsCheckBox !== null) {
        statsCheckBox.onchange = function(event) {
            let stats = document.getElementById('statsContainer');
            stats.style.display = event.target.checked ? "block" : "none";
        };
    }

    let latencyButton = document.getElementById('test-latency-button');
    if (latencyButton) {
        latencyButton.onclick = () => {
            sendStartLatencyTest();
        };
    }

    // Setup toggle and pair with some URL query string param.
    setupToggleWithUrlParams("prefer-sfu-tgl", "preferSFU");
    setupToggleWithUrlParams("use-mic-tgl", "useMic");
    setupToggleWithUrlParams("force-turn-tgl", "ForceTURN");
    setupToggleWithUrlParams("force-mono-tgl", "ForceMonoAudio");
    setupToggleWithUrlParams("control-tgl", "hoveringMouse");
    setupToggleWithUrlParams("cursor-tgl", "hideBrowserCursor");
    setupToggleWithUrlParams("offer-receive-tgl", "offerToReceive");

    // Multi-stream / multi-track selection dropdowns (SFU scenarios).
    var streamSelector = document.getElementById('stream-select');
    var trackSelector = document.getElementById('track-select');
    if (streamSelector) {
        // Switching stream rebinds the video element's source and refreshes
        // the dependent track list.
        streamSelector.onchange = function(event) {
            const stream = webRtcPlayerObj.availableVideoStreams.get(streamSelector.value);
            webRtcPlayerObj.video.srcObject = stream;
            streamTrackSource = stream;
            webRtcPlayerObj.video.play();
            updateTrackList();
        }

        // NOTE(review): the track-select handler is only registered when the
        // stream selector also exists — confirm this nesting is intentional.
        if (trackSelector) {
            trackSelector.onchange = function(event) {
                if (!streamTrackSource) {
                    streamTrackSource = webRtcPlayerObj.availableVideoStreams.get(streamSelector.value);
                }
                if (streamTrackSource) {
                    // Play only the single selected track as a fresh MediaStream.
                    for (const track of streamTrackSource.getVideoTracks()) {
                        if (track.id == trackSelector.value) {
                            webRtcPlayerObj.video.srcObject = new MediaStream([track]);
                            webRtcPlayerObj.video.play();
                            streamSelector.value = "";
                            break;
                        }
                    }
                }
            }
        }
    }
}
+
// Binds a checkbox to a URL query parameter: the box starts checked when the
// parameter is present in the current URL, and toggling it adds or removes
// the parameter in the address bar via history.replaceState (no reload).
function setupToggleWithUrlParams(toggleId, urlParameterKey) {
    const toggleElem = document.getElementById(toggleId);
    if (!toggleElem) {
        return;
    }
    toggleElem.checked = new URLSearchParams(window.location.search).has(urlParameterKey);
    toggleElem.addEventListener('change', (event) => {
        const urlParams = new URLSearchParams(window.location.search);
        if (event.currentTarget.checked) {
            urlParams.set(urlParameterKey, "true");
        } else {
            urlParams.delete(urlParameterKey);
        }
        const newUrl = urlParams.toString() !== ""
            ? `${location.pathname}?${urlParams}`
            : `${location.pathname}`;
        window.history.replaceState({}, '', newUrl);
    });
}
+
// Returns true when `urlParameterKey` appears in the current page's query string.
function UrlParamsCheck(urlParameterKey) {
    const urlParams = new URLSearchParams(window.location.search);
    return urlParams.has(urlParameterKey);
}
+
// The MediaStream currently used as the source for track selection
// (set from the stream-select dropdown; null until a stream is chosen).
var streamTrackSource = null;
+
// Rebuilds the "stream-select" dropdown from webRtcPlayerObj.availableVideoStreams,
// attempting to select the first stream, then refreshes the dependent track list.
function updateStreamList() {
    const streamSelector = document.getElementById('stream-select');
    // Remove options back-to-front so removal doesn't shift indices still to visit.
    for (let i = streamSelector.options.length - 1; i >= 0; i--) {
        streamSelector.remove(i);
    }
    streamSelector.value = null;
    for (const [streamId, stream] of webRtcPlayerObj.availableVideoStreams) {
        var opt = document.createElement('option');
        opt.value = streamId;
        opt.innerHTML = streamId;
        streamSelector.appendChild(opt);
        // NOTE(review): a <select>'s .value is a string after assignment, so this
        // loose == null check may depend on browser-specific behavior — verify it
        // actually selects the first stream as intended.
        if (streamSelector.value == null) {
            streamSelector.value = streamId;
        }
    }

    updateTrackList();
}
+
// Rebuilds the "track-select" dropdown from the video tracks of the stream
// currently chosen in "stream-select", selecting the track flagged `selected`.
function updateTrackList() {
    const streamSelector = document.getElementById('stream-select');
    const trackSelector = document.getElementById('track-select');
    const stream = webRtcPlayerObj.availableVideoStreams.get(streamSelector.value);
    // Remove options back-to-front so removal doesn't shift indices still to visit.
    for (let i = trackSelector.options.length - 1; i >= 0; i--) {
        trackSelector.remove(i);
    }
    trackSelector.value = null;
    for (const track of stream.getVideoTracks()) {
        var opt = document.createElement('option');
        opt.value = track.id;
        opt.innerHTML = track.label;
        trackSelector.appendChild(opt);
        // NOTE(review): `selected` is not a standard MediaStreamTrack property —
        // presumably set elsewhere in this project; confirm.
        if (track.selected) {
            trackSelector.value = track.id;
        }
    }
}
+
// Kicks off a round-trip latency test; a no-op unless a WebRTC player exists.
// When the player reports that the test has started, the start timestamp is
// forwarded to UE via a "LatencyTest" descriptor.
function sendStartLatencyTest() {
    // We need WebRTC to be active to do a latency test.
    if (!webRtcPlayerObj) {
        return;
    }
    webRtcPlayerObj.startLatencyTest((StartTimeMs) => {
        emitDescriptor("LatencyTest", { StartTime: StartTimeMs });
    });
}
+
// Replaces the contents of the #videoPlayOverlay element (creating it under
// #player on first use) with `htmlElement`, wires an optional one-shot click
// handler, and swaps the overlay's CSS classes for `htmlClass`.
function setOverlay(htmlClass, htmlElement, onClickFunction) {
    let videoPlayOverlay = document.getElementById('videoPlayOverlay');
    if (!videoPlayOverlay) {
        const playerDiv = document.getElementById('player');
        videoPlayOverlay = document.createElement('div');
        videoPlayOverlay.id = 'videoPlayOverlay';
        playerDiv.appendChild(videoPlayOverlay);
    }

    // Clear out whatever the overlay was showing before.
    while (videoPlayOverlay.lastChild) {
        videoPlayOverlay.removeChild(videoPlayOverlay.lastChild);
    }

    if (htmlElement) {
        videoPlayOverlay.appendChild(htmlElement);
    }

    if (onClickFunction) {
        // One-shot handler: unregisters itself after the first click.
        const onOverlayClick = (event) => {
            onClickFunction(event);
            videoPlayOverlay.removeEventListener('click', onOverlayClick);
        };
        videoPlayOverlay.addEventListener('click', onOverlayClick);
    }

    // Drop every previous class before applying the new state class.
    const cl = videoPlayOverlay.classList;
    for (let i = cl.length - 1; i >= 0; i--) {
        cl.remove(cl[i]);
    }
    videoPlayOverlay.classList.add(htmlClass);
}
+
// Shows the initial "CLICK TO START" overlay; clicking it opens the signalling
// connection and starts the AFK inactivity timer.
function showConnectOverlay() {
    const startText = document.createElement('div');
    startText.id = 'playButton';
    startText.innerHTML = 'Click to start'.toUpperCase();

    const onClick = (event) => {
        connect();
        startAfkWarningTimer();
    };
    setOverlay('clickableState', startText, onClick);
}
+
// Displays `text` (or an empty message when text is falsy) in the
// non-interactive text overlay state.
function showTextOverlay(text) {
    const textOverlay = document.createElement('div');
    textOverlay.id = 'messageOverlay';
    textOverlay.innerHTML = text || '';
    setOverlay('textDisplayState', textOverlay);
}
+
// Starts media playback once the WebRTC player exists. When audio is carried
// on a separate track and audio autoplay is enabled, audio playback is
// attempted first and video only starts after it succeeds; if the browser
// rejects autoplay, the play-button overlay is shown instead.
function playStream() {
    if (!webRtcPlayerObj || !webRtcPlayerObj.video) {
        return;
    }
    if (webRtcPlayerObj.audio.srcObject && autoPlayAudio) {
        // Video and audio are separate tracks: gate video on audio playback.
        webRtcPlayerObj.audio.play().then(() => {
            // audio play has succeeded, start playing video
            playVideo();
        }).catch((onRejectedReason) => {
            console.error(onRejectedReason);
            console.log("Browser does not support autoplaying audio without interaction - to resolve this we are going to show the play button overlay.")
            showPlayOverlay();
        });
    } else {
        // Video and audio are combined in the video element
        playVideo();
    }
    showFreezeFrameOverlay();
    hideOverlay();
}
+
// Attempts to start video playback. If the browser blocks autoplay, any
// separately-playing audio is paused and the play-button overlay is shown so
// the user can start playback with a gesture.
function playVideo() {
    webRtcPlayerObj.video.play().catch((onRejectedReason) => {
        if (webRtcPlayerObj.audio.srcObject) {
            // BUGFIX: HTMLMediaElement has no stop() method, so the previous
            // `audio.stop()` threw a TypeError inside this catch handler.
            // pause() is the correct call.
            webRtcPlayerObj.audio.pause();
        }
        console.error(onRejectedReason);
        console.log("Browser does not support autoplaying video without interaction - to resolve this we are going to show the play button overlay.")
        showPlayOverlay();
    });
}
+
// Shows the clickable play-button image overlay; clicking it (re)starts the
// stream. Clears the global shouldShowPlayOverlay flag.
function showPlayOverlay() {
    const img = document.createElement('img');
    img.id = 'playButton';
    img.src = '/images/Play.png';
    img.alt = 'Start Streaming';
    const onClick = (event) => {
        playStream();
    };
    setOverlay('clickableState', img, onClick);
    shouldShowPlayOverlay = false;
}
+
// Refreshes the AFK warning overlay's message with the current countdown value.
// NOTE: the original string literal was corrupted (split across raw lines,
// with its HTML tags stripped) and would not parse; reconstructed here as a
// single valid string carrying the same three visible lines of text.
function updateAfkOverlayText() {
    afk.overlay.innerHTML = '<center>No activity detected<br>Disconnecting in ' + afk.countdown + ' seconds<br>Click to continue</center>';
}
+
// Shows the inactivity (AFK) warning overlay with a live countdown. Clicking
// the overlay cancels the countdown and resumes the warning timer; letting the
// countdown reach zero closes the websocket, disconnecting the user.
function showAfkOverlay() {
    // Pause the timer while the user is looking at the inactivity warning overlay.
    stopAfkWarningTimer();

    // Show the inactivity warning overlay.
    afk.overlay = document.createElement('div');
    afk.overlay.id = 'afkOverlay';
    setOverlay('clickableState', afk.overlay, event => {
        // The user clicked so start the timer again and carry on.
        hideOverlay();
        clearInterval(afk.countdownTimer);
        startAfkWarningTimer();
    });

    afk.countdown = afk.closeTimeout;
    updateAfkOverlayText();

    // Release the pointer lock so the user can actually see and click the overlay.
    if (inputOptions.controlScheme == ControlSchemeType.LockedMouse && document.exitPointerLock) {
        document.exitPointerLock();
    }

    // Tick once per second until the user clicks or the countdown elapses.
    afk.countdownTimer = setInterval(function() {
        afk.countdown--;
        if (afk.countdown == 0) {
            // The user failed to click so disconnect them.
            hideOverlay();
            ws.close();
        } else {
            // Update the countdown message.
            updateAfkOverlayText();
        }
    }, 1000);
}
+
// Hides whatever overlay is currently displayed by switching the overlay
// element to the 'hiddenState' class with no content and no click handler.
function hideOverlay() {
    setOverlay('hiddenState');
}
+
+// Start a timer which when elapsed will warn the user they are inactive.
+function startAfkWarningTimer() {
+ afk.active = afk.enabled;
+ resetAfkWarningTimer();
+}
+
+// Stop the timer which when elapsed will warn the user they are inactive.
+function stopAfkWarningTimer() {
+ afk.active = false;
+}
+
+// If the user interacts then reset the warning timer.
+function resetAfkWarningTimer() {
+ if (afk.active) {
+ clearTimeout(afk.warnTimer);
+ afk.warnTimer = setTimeout(function () {
+ showAfkOverlay();
+ }, afk.warnTimeout * 1000);
+ }
+}
+
// Asks the WebRTC player to create an SDP offer for the streamer, updating the
// text overlay with progress; shows an error overlay when no player exists.
function createWebRtcOffer() {
    if (!webRtcPlayerObj) {
        console.log('WebRTC player not setup, cannot create offer');
        showTextOverlay('Unable to setup video');
        return;
    }
    console.log('Creating offer');
    showTextOverlay('Starting connection to server, please wait');
    webRtcPlayerObj.createOffer();
}
+
// Sends an input payload to the streamer over the data channel. Sending input
// counts as user activity, so the AFK warning timer is reset first.
function sendInputData(data) {
    if (!webRtcPlayerObj) {
        return;
    }
    resetAfkWarningTimer();
    webRtcPlayerObj.send(data);
}
+
// Registers `listener` to be invoked for streamer "Response" messages under
// `name` (stored in the shared responseEventListeners map; replaces any
// existing listener with the same name).
function addResponseEventListener(name, listener) {
    responseEventListeners.set(name, listener);
}
+
// Unregisters the "Response" message listener previously added under `name`.
function removeResponseEventListener(name) {
    responseEventListeners.delete(name);
}
+
// Renders the most recently received freeze-frame JPEG into the freeze-frame
// overlay image. Once the image loads we record its natural size, resize the
// overlay, show either the play overlay (if still required) or the freeze
// frame, and shortly afterwards disable the live video element.
function showFreezeFrame() {
    // Convert the JPEG bytes to a binary string in chunks. The previous
    // byte-by-byte string reduce was O(n^2) in the frame size; chunking keeps
    // String.fromCharCode's argument count within engine limits while staying
    // linear.
    let binary = '';
    const bytes = freezeFrame.jpeg;
    const chunkSize = 0x8000;
    for (let i = 0; i < bytes.length; i += chunkSize) {
        binary += String.fromCharCode.apply(null, bytes.slice(i, i + chunkSize));
    }
    let base64 = btoa(binary);
    let freezeFrameImage = document.getElementById("freezeFrameOverlay").childNodes[0];
    freezeFrameImage.src = 'data:image/jpeg;base64,' + base64;
    freezeFrameImage.onload = function () {
        freezeFrame.height = freezeFrameImage.naturalHeight;
        freezeFrame.width = freezeFrameImage.naturalWidth;
        resizeFreezeFrameOverlay();
        if (shouldShowPlayOverlay) {
            showPlayOverlay();
            resizePlayerStyle();
        } else {
            showFreezeFrameOverlay();
        }
        // Give the freeze frame a moment on screen before stopping live video
        // underneath it.
        setTimeout(() => {
            webRtcPlayerObj.setVideoEnabled(false);
        }, freezeFrameDelay);
    };
}
+
// Handles a "file extension" message from the streamer. The first file-related
// message of a transfer (extension or mime type) resets the shared transfer
// state; the payload after the 1-byte type id is a UTF-16 string.
function processFileExtension(view) {
    // Reset file if we got a file message and we are not "receiving" it yet
    if (!file.receiving) {
        Object.assign(file, {
            mimetype: "",
            extension: "",
            receiving: true,
            valid: false,
            size: 0,
            data: [],
            timestampStart: Date.now(),
        });
        console.log('Received first chunk of file');
    }

    const extensionAsString = new TextDecoder("utf-16").decode(view.slice(1));
    console.log(extensionAsString);
    file.extension = extensionAsString;
}
+
// Handles a "file mime type" message from the streamer. The first file-related
// message of a transfer (extension or mime type) resets the shared transfer
// state; the payload after the 1-byte type id is a UTF-16 string.
function processFileMimeType(view) {
    // Reset file if we got a file message and we are not "receiving" it yet
    if (!file.receiving) {
        Object.assign(file, {
            mimetype: "",
            extension: "",
            receiving: true,
            valid: false,
            size: 0,
            data: [],
            timestampStart: Date.now(),
        });
        console.log('Received first chunk of file');
    }

    const mimeAsString = new TextDecoder("utf-16").decode(view.slice(1));
    console.log(mimeAsString);
    file.mimetype = mimeAsString;
}
+
+
// Handles one "file contents" chunk from the streamer: records the expected
// chunk count from the header, accumulates the chunk's bytes, and on
// completion assembles a Blob and creates a (non-auto-clicked) download link.
function processFileContents(view) {
    // If we haven't received the initial setup instructions, return
    if (!file.receiving) return;

    // Extract the total size of the file (across all chunks): the int32 after
    // the 1-byte type id is the file size, divided by the per-message payload
    // capacity and rounded up to give the number of chunks to expect.
    file.size = Math.ceil((new DataView(view.slice(1, 5).buffer)).getInt32(0, true) / 16379 /* The maximum number of payload bits per message*/);

    // Get the file part of the payload (skip 1 type byte + 4 size bytes)
    let fileBytes = view.slice(1 + 4);

    // Append to existing data that holds the file
    file.data.push(fileBytes);

    // Uncomment for debug
    console.log(`Received file chunk: ${file.data.length}/${file.size}`);

    if (file.data.length === file.size) {
        file.receiving = false;
        file.valid = true;
        console.log("Received complete file");
        const transferDuration = ((new Date()).getTime() - file.timestampStart);
        // NOTE(review): 16 * 1024 approximates the per-chunk size used above
        // (16379) — the reported bitrate is therefore an estimate.
        const transferBitrate = Math.round(file.size * 16 * 1024 / transferDuration);
        console.log(`Average transfer bitrate: ${transferBitrate}kb/s over ${transferDuration / 1000} seconds`);

        // File reconstruction
        /**
         * Example code to reconstruct the file
         *
         * This code reconstructs the received data into the original file based on the mime type and extension provided and then downloads the reconstructed file
         */
        var received = new Blob(file.data, { type: file.mimetype });
        var a = document.createElement('a');
        a.setAttribute('href', URL.createObjectURL(received));
        a.setAttribute('download', `transfer.${file.extension}`);
        document.body.append(a);
        // if you are so inclined to make it auto-download, do something like: a.click();
        a.remove();
    }
    else if (file.data.length > file.size) {
        // More chunks arrived than the header promised — abort this transfer.
        file.receiving = false;
        console.error(`Received bigger file than advertised: ${file.data.length}/${file.size}`);
    }
}
+
// Handles one freeze-frame message chunk: reads the advertised total JPEG size
// from the header, appends the chunk's bytes to the accumulating buffer, and
// shows the frame once all bytes have arrived (or resets state on overflow).
function processFreezeFrameMessage(view) {
    // Reset freeze frame if we got a freeze frame message and we are not "receiving" yet.
    if (!freezeFrame.receiving) {
        freezeFrame.receiving = true;
        freezeFrame.valid = false;
        freezeFrame.size = 0;
        freezeFrame.jpeg = undefined;
    }

    // Extract total size of freeze frame (across all chunks)
    freezeFrame.size = (new DataView(view.slice(1, 5).buffer)).getInt32(0, true);

    // Get the jpeg part of the payload (skip 1 type byte + 4 size bytes)
    let jpegBytes = view.slice(1 + 4);

    // Append to existing jpeg that holds the freeze frame
    if (freezeFrame.jpeg) {
        // Grow-and-copy: typed arrays are fixed-size, so allocate a larger one.
        let jpeg = new Uint8Array(freezeFrame.jpeg.length + jpegBytes.length);
        jpeg.set(freezeFrame.jpeg, 0);
        jpeg.set(jpegBytes, freezeFrame.jpeg.length);
        freezeFrame.jpeg = jpeg;
    }
    // No existing freeze frame jpeg, make one
    else {
        freezeFrame.jpeg = jpegBytes;
        freezeFrame.receiving = true;
        console.log(`received first chunk of freeze frame: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);
    }

    // Uncomment for debug
    //console.log(`Received freeze frame chunk: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);

    // Finished receiving freeze frame, we can show it now
    if (freezeFrame.jpeg.length === freezeFrame.size) {
        freezeFrame.receiving = false;
        freezeFrame.valid = true;
        console.log(`received complete freeze frame ${freezeFrame.size}`);
        showFreezeFrame();
    }
    // We received more data than the freeze frame payload message indicate (this is an error)
    else if (freezeFrame.jpeg.length > freezeFrame.size) {
        console.error(`received bigger freeze frame than advertised: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);
        freezeFrame.jpeg = undefined;
        freezeFrame.receiving = false;
    }
}
+
// Creates the webRtcPlayer, attaches its video/audio elements (plus the
// freeze-frame overlay) to `htmlElement`, and hooks up all of the player's
// signalling and data-channel callbacks. Returns the player's video element.
function setupWebRtcPlayer(htmlElement, config) {
    webRtcPlayerObj = new webRtcPlayer(config);
    // Audio autoplay defaults to true unless explicitly disabled in config.
    autoPlayAudio = typeof config.autoPlayAudio !== 'undefined' ? config.autoPlayAudio : true;
    htmlElement.appendChild(webRtcPlayerObj.video);
    htmlElement.appendChild(webRtcPlayerObj.audio);
    htmlElement.appendChild(freezeFrameOverlay);

    // Forward locally-generated SDP offers to the signalling server.
    webRtcPlayerObj.onWebRtcOffer = function(offer) {
        if (ws && ws.readyState === WS_OPEN_STATE) {
            let offerStr = JSON.stringify(offer);
            console.log("%c[Outbound SS message (offer)]", "background: lightgreen; color: black", offer);
            ws.send(offerStr);
        }
    };

    // Forward locally-gathered ICE candidates to the signalling server.
    webRtcPlayerObj.onWebRtcCandidate = function(candidate) {
        if (ws && ws.readyState === WS_OPEN_STATE) {
            ws.send(JSON.stringify({
                type: 'iceCandidate',
                candidate: candidate
            }));
        }
    };

    // Forward SDP answers; when connected through an SFU, additionally request
    // data channel setup.
    webRtcPlayerObj.onWebRtcAnswer = function (answer) {
        if (ws && ws.readyState === WS_OPEN_STATE) {
            let answerStr = JSON.stringify(answer);
            console.log("%c[Outbound SS message (answer)]", "background: lightgreen; color: black", answer);
            ws.send(answerStr);

            if (webRtcPlayerObj.sfu) {
                // Send data channel setup request to the SFU
                const requestMsg = { type: "dataChannelRequest" };
                console.log("%c[Outbound SS message (dataChannelRequest)]", "background: lightgreen; color: black", requestMsg);
                ws.send(JSON.stringify(requestMsg));
            }
        }
    };

    webRtcPlayerObj.onSFURecvDataChannelReady = function() {
        if (webRtcPlayerObj.sfu) {
            // Send SFU a message to let it know browser data channels are ready
            const requestMsg = { type: "peerDataChannelsReady" };
            console.log("%c[Outbound SS message (peerDataChannelsReady)]", "background: lightgreen; color: black", requestMsg);
            ws.send(JSON.stringify(requestMsg));
        }
    }

    // Once video starts flowing, either prompt for a user gesture (autoplay
    // restrictions) or start playback immediately.
    webRtcPlayerObj.onVideoInitialised = function() {
        if (ws && ws.readyState === WS_OPEN_STATE) {
            if (shouldShowPlayOverlay) {
                showPlayOverlay();
                resizePlayerStyle();
            }
            else {
                resizePlayerStyle();
                playStream();
            }
        }
    };

    // A new inbound video track may arrive after initialisation (SFU streams);
    // re-run initialisation and refresh the stream dropdown.
    webRtcPlayerObj.onNewVideoTrack = function (streams) {
        if (webRtcPlayerObj.video && webRtcPlayerObj.video.srcObject && webRtcPlayerObj.onVideoInitialised) {
            webRtcPlayerObj.onVideoInitialised();
        }
        updateStreamList();
    }

    // Dispatch inbound data-channel messages by their 1-byte type id.
    webRtcPlayerObj.onDataChannelMessage = function(data) {
        let view = new Uint8Array(data);
        try {
            let messageType = fromStreamerMessages.getFromValue(view[0]);
            fromStreamerHandlers[messageType](data);
        } catch (e) {
            console.error(`Custom data channel message with message type that is unknown to the Pixel Streaming protocol. Does your PixelStreamingProtocol need updating? The message type was: ${view[0]}`);
        }
    };

    registerInputs(webRtcPlayerObj.video);

    // On a touch device we will need special ways to show the on-screen keyboard.
    if ('ontouchstart' in document.documentElement) {
        createOnScreenKeyboardHelpers(htmlElement);
    }

    // ?offerToReceive makes the browser (not UE) initiate the WebRTC offer.
    if (UrlParamsCheck('offerToReceive')) {
        createWebRtcOffer();
    }

    return webRtcPlayerObj.video;
}
+
// Starts periodic WebRTC stats aggregation and installs the handlers that
// render the stats panel, the connection-quality ("wifi strength") indicator,
// and the latency-test results. When `print_stats` is enabled, aggregated
// stats are also forwarded to the signalling server every `printInterval`.
// NOTE(review): the template literals below lost their HTML markup in a prior
// edit (tags stripped); the text content is preserved as-is.
function setupStats() {
    webRtcPlayerObj.aggregateStats(1 * 1000 /*Check every 1 second*/ );

    let printInterval = 5 * 60 * 1000; /*Print every 5 minutes*/
    let nextPrintDuration = printInterval;

    webRtcPlayerObj.onAggregatedStats = (aggregatedStats) => {
        let numberFormat = new Intl.NumberFormat(window.navigator.language, {
            maximumFractionDigits: 0
        });
        let timeFormat = new Intl.NumberFormat(window.navigator.language, {
            maximumFractionDigits: 0,
            minimumIntegerDigits: 2
        });

        // Calculate duration of run
        let runTime = (aggregatedStats.timestamp - aggregatedStats.timestampStart) / 1000;
        let timeValues = [];
        let timeDurations = [60, 60];
        // Split total seconds into [seconds, minutes, hours].
        for (let timeIndex = 0; timeIndex < timeDurations.length; timeIndex++) {
            timeValues.push(runTime % timeDurations[timeIndex]);
            runTime = runTime / timeDurations[timeIndex];
        }
        timeValues.push(runTime);

        let runTimeSeconds = timeValues[0];
        let runTimeMinutes = Math.floor(timeValues[1]);
        // NOTE(review): the argument is a one-element array; Math.floor coerces
        // it so the result is correct, but the brackets look accidental.
        let runTimeHours = Math.floor([timeValues[2]]);

        // NOTE(review): these two are implicit globals (no let/const/var).
        receivedBytesMeasurement = 'B';
        receivedBytes = aggregatedStats.hasOwnProperty('bytesReceived') ? aggregatedStats.bytesReceived : 0;
        let dataMeasurements = ['kB', 'MB', 'GB'];
        // Scale the byte count down to a human-readable unit.
        for (let index = 0; index < dataMeasurements.length; index++) {
            if (receivedBytes < 100 * 1000)
                break;
            receivedBytes = receivedBytes / 1000;
            receivedBytesMeasurement = dataMeasurements[index];
        }

        let qualityStatus = document.getElementById("connectionStrength");
        // "blinks" quality status element for 1 sec by making it transparent, speed = number of blinks
        let blinkQualityStatus = function(speed) {
            let iter = speed;
            let opacity = 1; // [0..1]
            let tickId = setInterval(
                function() {
                    opacity -= 0.1;
                    // map `opacity` to [-0.5..0.5] range, decrement by 0.2 per step and take `abs` to make it blink: 1 -> 0 -> 1
                    qualityStatus.style.opacity = `${Math.abs((opacity - 0.5) * 2)}`;
                    if (opacity <= 0.1) {
                        if (--iter == 0) {
                            clearInterval(tickId);
                        } else { // next blink
                            opacity = 1;
                        }
                    }
                },
                100 / speed // msecs
            );
        };

        // QP thresholds for the red/orange/green quality indicator.
        const orangeQP = 26;
        const redQP = 35;

        let statsText = '';
        let qualityTip = document.getElementById("qualityText");
        let color;

        // Wifi strength elements
        let outer = document.getElementById("outer");
        let middle = document.getElementById("middle");
        let inner = document.getElementById("inner");
        let dot = document.getElementById("dot");

        if (VideoEncoderQP > redQP) {
            color = "red";
            blinkQualityStatus(2);
            statsText += `
Poor encoding quality
`;
            outer.style.fill = "#3c3b40";
            middle.style.fill = "#3c3b40";
            inner.style.fill = color;
            dot.style.fill = color;

        } else if (VideoEncoderQP > orangeQP) {
            color = "orange";
            blinkQualityStatus(1);
            statsText += `
Blocky encoding quality
`;
            outer.style.fill = "#3c3b40";
            middle.style.fill = color;
            inner.style.fill = color;
            dot.style.fill = color;
        } else {
            color = "lime";
            qualityStatus.style.opacity = '1';
            statsText += `
Clear encoding quality
`;
            outer.style.fill = color;
            middle.style.fill = color;
            inner.style.fill = color;
            dot.style.fill = color;
        }
        // The tooltip shows only the quality line; the stats panel gets the
        // full text accumulated below.
        qualityTip.innerHTML = statsText;

        statsText += `
Duration: ${timeFormat.format(runTimeHours)}:${timeFormat.format(runTimeMinutes)}:${timeFormat.format(runTimeSeconds)}
`;
        statsText += `
Controls stream input: ${inputController === null ? "Not sent yet" : (inputController ? "true" : "false")}
`;
        statsText += `
Audio codec: ${aggregatedStats.hasOwnProperty('audioCodec') ? aggregatedStats.audioCodec : "Not set" }
`;
        statsText += `
Video codec: ${aggregatedStats.hasOwnProperty('videoCodec') ? aggregatedStats.videoCodec : "Not set" }
`;
        statsText += `
Video Resolution: ${
            aggregatedStats.hasOwnProperty('frameWidth') && aggregatedStats.frameWidth && aggregatedStats.hasOwnProperty('frameHeight') && aggregatedStats.frameHeight ?
            aggregatedStats.frameWidth + 'x' + aggregatedStats.frameHeight : 'Chrome only'
        }
`;
        statsText += `
Received (${receivedBytesMeasurement}): ${numberFormat.format(receivedBytes)}
`;
        statsText += `
Frames Decoded: ${aggregatedStats.hasOwnProperty('framesDecoded') ? numberFormat.format(aggregatedStats.framesDecoded) : 'Chrome only'}
`;
        statsText += `
Packets Lost: ${aggregatedStats.hasOwnProperty('packetsLost') ? numberFormat.format(aggregatedStats.packetsLost) : 'Chrome only'}
`;
        statsText += `
Framerate: ${aggregatedStats.hasOwnProperty('framerate') ? numberFormat.format(aggregatedStats.framerate) : 'Chrome only'}
`;
        statsText += `
Frames dropped: ${aggregatedStats.hasOwnProperty('framesDropped') ? numberFormat.format(aggregatedStats.framesDropped) : 'Chrome only'}
`;
        statsText += `
Net RTT (ms): ${aggregatedStats.hasOwnProperty('currentRoundTripTime') ? numberFormat.format(aggregatedStats.currentRoundTripTime * 1000) : 'Can\'t calculate'}
`;
        statsText += `
Browser receive to composite (ms): ${aggregatedStats.hasOwnProperty('receiveToCompositeMs') ? numberFormat.format(aggregatedStats.receiveToCompositeMs) : 'Chrome only'}
`;
        statsText += `
Audio Bitrate (kbps): ${aggregatedStats.hasOwnProperty('audioBitrate') ? numberFormat.format(aggregatedStats.audioBitrate) : 'Chrome only'}
`;
        statsText += `
Video Bitrate (kbps): ${aggregatedStats.hasOwnProperty('bitrate') ? numberFormat.format(aggregatedStats.bitrate) : 'Chrome only'}
`;
        statsText += `
Video Quantization Parameter: ${VideoEncoderQP}
`;

        let statsDiv = document.getElementById("stats");
        statsDiv.innerHTML = statsText;

        // Periodically forward the raw aggregated stats to the signalling server.
        if (print_stats) {
            if (aggregatedStats.timestampStart) {
                if ((aggregatedStats.timestamp - aggregatedStats.timestampStart) > nextPrintDuration) {
                    if (ws && ws.readyState === WS_OPEN_STATE) {
                        console.log(`-> SS: stats\n${JSON.stringify(aggregatedStats)}`);
                        ws.send(JSON.stringify({
                            type: 'stats',
                            data: aggregatedStats
                        }));
                    }
                    nextPrintDuration += printInterval;
                }
            }
        }
    };

    // Renders latency-test results once all timings have been collected.
    webRtcPlayerObj.latencyTestTimings.OnAllLatencyTimingsReady = function(timings) {

        if (!timings.BrowserReceiptTimeMs) {
            return;
        }

        let latencyExcludingDecode = timings.BrowserReceiptTimeMs - timings.TestStartTimeMs;
        let encodeLatency = timings.UEEncodeMs;
        let uePixelStreamLatency = timings.UECaptureToSendMs;
        let ueTestDuration = timings.UETransmissionTimeMs - timings.UEReceiptTimeMs;
        let networkLatency = latencyExcludingDecode - ueTestDuration;

        //these ones depend on FrameDisplayDeltaTimeMs
        let endToEndLatency = null;
        let browserSideLatency = null;

        if (timings.FrameDisplayDeltaTimeMs && timings.BrowserReceiptTimeMs) {
            endToEndLatency = timings.FrameDisplayDeltaTimeMs + networkLatency + (typeof uePixelStreamLatency === "string" ? 0 : uePixelStreamLatency);
            browserSideLatency = timings.FrameDisplayDeltaTimeMs + (latencyExcludingDecode - networkLatency - ueTestDuration);
        }

        let latencyStatsInnerHTML = '';
        latencyStatsInnerHTML += `
Net latency RTT (ms): ${networkLatency.toFixed(2)}
`;
        latencyStatsInnerHTML += `
UE Encode (ms): ${(typeof encodeLatency === "string" ? encodeLatency : encodeLatency.toFixed(2))}
`;
        latencyStatsInnerHTML += `
UE Send to capture (ms): ${(typeof uePixelStreamLatency === "string" ? uePixelStreamLatency : uePixelStreamLatency.toFixed(2))}
`;
        latencyStatsInnerHTML += `
UE probe duration (ms): ${ueTestDuration.toFixed(2)}
`;
        latencyStatsInnerHTML += timings.FrameDisplayDeltaTimeMs && timings.BrowserReceiptTimeMs ? `
Browser composite latency (ms): ${timings.FrameDisplayDeltaTimeMs.toFixed(2)}
` : "";
        latencyStatsInnerHTML += browserSideLatency ? `
Total browser latency (ms): ${browserSideLatency.toFixed(2)}
` : "";
        latencyStatsInnerHTML += endToEndLatency ? `
Total latency (ms): ${endToEndLatency.toFixed(2)}
` : "";
        document.getElementById("LatencyStats").innerHTML = latencyStatsInnerHTML;
    }
}
+
// Applies an SDP offer received from the signalling server and begins
// collecting WebRTC stats.
function onWebRtcOffer(webRTCData) {
    webRtcPlayerObj.receiveOffer(webRTCData);
    setupStats();
}
+
// Applies an SDP answer received from the signalling server and begins
// collecting WebRTC stats.
function onWebRtcAnswer(webRTCData) {
    webRtcPlayerObj.receiveAnswer(webRTCData);
    setupStats();
}
+
// Forwards an SFU peer data-channel setup message to the WebRTC player.
function onWebRtcSFUPeerDatachannels(webRTCData) {
    webRtcPlayerObj.receiveSFUPeerDataChannelRequest(webRTCData);
}
+
// Feeds a remote ICE candidate from the signalling server into the player,
// ignoring candidates that arrive before the player exists.
function onWebRtcIce(iceCandidate) {
    if (!webRtcPlayerObj) {
        return;
    }
    webRtcPlayerObj.handleCandidateFromServer(iceCandidate);
}
+
+let styleWidth;
+let styleHeight;
+let styleTop;
+let styleLeft;
+let styleCursor = 'default';
+let styleAdditional;
+
// Enumeration of the supported mouse control schemes.
const ControlSchemeType = {
    // A mouse can lock inside the WebRTC player so the user can simply move the
    // mouse to control the orientation of the camera. The user presses the
    // Escape key to unlock the mouse.
    LockedMouse: 0,

    // A mouse can hover over the WebRTC player so the user needs to click and
    // drag to control the orientation of the camera.
    HoveringMouse: 1
};
+
// User-configurable input behaviour; fields are read throughout the input
// registration code below and may be overridden from URL parameters.
let inputOptions = {
    // The control scheme controls the behaviour of the mouse when it interacts
    // with the WebRTC player.
    controlScheme: ControlSchemeType.LockedMouse,

    // Browser keys are those which are typically used by the browser UI. We
    // usually want to suppress these to allow, for example, UE to show shader
    // complexity with the F5 key without the web page refreshing.
    suppressBrowserKeys: true,

    // UE has a faketouches option which fakes a single finger touch when the
    // user drags with their mouse. We may perform the reverse; a single finger
    // touch may be converted into a mouse drag UE side. This allows a
    // non-touch application to be controlled partially via a touch device.
    fakeMouseWithTouches: false,

    // Hiding the browser cursor enables the use of UE's inbuilt software cursor,
    // without having the browser cursor display on top
    hideBrowserCursor: false
};
+
// Size the player element to fill the browser window while preserving the
// video stream's aspect ratio (letterboxing / pillarboxing as required).
// Updates the module-level style* cache as a side effect.
function resizePlayerStyleToFillWindow(playerElement) {
    let videoElement = playerElement.getElementsByTagName("VIDEO");

    // Apply the computed geometry (previously this string was triplicated).
    function applyStyle() {
        playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional;
    }

    // Fill the player display in window, keeping picture's aspect ratio.
    let windowAspectRatio = window.innerHeight / window.innerWidth;
    let playerAspectRatio = playerElement.clientHeight / playerElement.clientWidth;
    // We want to keep the video ratio correct for the video stream.
    // Fix: the original read `videoHeight`/`videoWidth` on the HTMLCollection
    // itself (always undefined, so the NaN fallback branch was always taken);
    // the dimensions live on the first <video> element.
    let videoAspectRatio = (videoElement.length > 0)
        ? videoElement[0].videoHeight / videoElement[0].videoWidth
        : NaN;
    if (isNaN(videoAspectRatio)) {
        // Video is not initialised yet so set playerElement to size of window.
        styleWidth = window.innerWidth;
        styleHeight = window.innerHeight;
        styleTop = 0;
        styleLeft = 0;
    } else if (windowAspectRatio < playerAspectRatio) {
        // Window height is the constraining factor so to keep aspect ratio
        // change width appropriately.
        styleWidth = Math.floor(window.innerHeight / videoAspectRatio);
        styleHeight = window.innerHeight;
        styleTop = 0;
        styleLeft = Math.floor((window.innerWidth - styleWidth) * 0.5);
    } else {
        // Window width is the constraining factor so to keep aspect ratio
        // change height appropriately.
        styleWidth = window.innerWidth;
        styleHeight = Math.floor(window.innerWidth * videoAspectRatio);
        styleTop = Math.floor((window.innerHeight - styleHeight) * 0.5);
        styleLeft = 0;
    }
    // Video is now 100% of the playerElement, so style the playerElement.
    applyStyle();
}
+
// Display the video stream at its native resolution, centred in the window.
// No-op until a <video> element exists inside the player.
function resizePlayerStyleToActualSize(playerElement) {
    let videos = playerElement.getElementsByTagName("VIDEO");
    if (videos.length === 0) {
        return;
    }

    // Use the stream's native dimensions.
    styleWidth = videos[0].videoWidth;
    styleHeight = videos[0].videoHeight;

    // Centre the player in the window, clamping the offsets at zero so an
    // oversized video is anchored to the top-left rather than off-screen.
    let centredTop = Math.floor((window.innerHeight - styleHeight) * 0.5);
    let centredLeft = Math.floor((window.innerWidth - styleWidth) * 0.5);
    styleTop = Math.max(centredTop, 0);
    styleLeft = Math.max(centredLeft, 0);

    // Video is 100% of the playerElement, so style the playerElement directly.
    playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional;
}
+
// Apply the current (externally-chosen) styleWidth/styleHeight to the player,
// anchored at the window origin.
// Fix: removed the unused `videoElement` lookup the original performed.
function resizePlayerStyleToArbitrarySize(playerElement) {
    // Video is 100% of the playerElement, so style the playerElement directly.
    playerElement.style = "top: 0px; left: 0px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional;
}
+
// Build the (initially hidden) overlay used to display a "freeze frame"
// image on top of the video, storing it in the module-level
// `freezeFrameOverlay`. The image itself lives in a child <img>.
function setupFreezeFrameOverlay() {
    freezeFrameOverlay = document.createElement('div');
    freezeFrameOverlay.id = 'freezeFrameOverlay';
    // Hidden and click-through until a freeze frame is actually shown.
    Object.assign(freezeFrameOverlay.style, {
        display: 'none',
        pointerEvents: 'none',
        position: 'absolute',
        zIndex: '20'
    });

    let overlayImage = document.createElement('img');
    overlayImage.style.position = 'absolute';
    freezeFrameOverlay.appendChild(overlayImage);
}
+
// Reveal the freeze-frame overlay, but only if a valid frame is held.
function showFreezeFrameOverlay() {
    if (!freezeFrame.valid) {
        return;
    }
    freezeFrameOverlay.classList.add("freezeframeBackground");
    freezeFrameOverlay.style.display = 'block';
}
+
// Schedule the freeze-frame overlay to be hidden after `freezeFrameDelay`
// milliseconds, and re-enable the live video stream immediately.
function invalidateFreezeFrameOverlay() {
    const hideOverlay = () => {
        freezeFrameOverlay.style.display = 'none';
        freezeFrame.valid = false;
        freezeFrameOverlay.classList.remove("freezeframeBackground");
    };
    setTimeout(hideOverlay, freezeFrameDelay);

    if (webRtcPlayerObj) {
        webRtcPlayerObj.setVideoEnabled(true);
    }
}
+
// Recompute the freeze-frame overlay geometry so the frozen image matches how
// the live video is currently letterboxed — either fit-to-window (when the
// "enlarge display" toggle is checked) or fit-to-player. No-op until a freeze
// frame with non-zero dimensions has been received.
function resizeFreezeFrameOverlay() {
    if (freezeFrame.width !== 0 && freezeFrame.height !== 0) {
        // Geometry of the <img> inside the overlay, relative to the overlay.
        let displayWidth = 0;
        let displayHeight = 0;
        let displayTop = 0;
        let displayLeft = 0;
        let checkBox = document.getElementById('enlarge-display-to-fill-window-tgl');
        let playerElement = document.getElementById('player');
        if (checkBox !== null && checkBox.checked) {
            // We are fitting video to screen, we care about the screen (window) size
            let windowAspectRatio = window.innerWidth / window.innerHeight;
            let videoAspectRatio = freezeFrame.width / freezeFrame.height;
            if (windowAspectRatio < videoAspectRatio) {
                // Window is narrower than the frame: pillarbox vertically.
                displayWidth = window.innerWidth;
                displayHeight = Math.floor(window.innerWidth / videoAspectRatio);
                displayTop = Math.floor((window.innerHeight - displayHeight) * 0.5);
                displayLeft = 0;
            } else {
                // Window is wider than the frame: letterbox horizontally.
                displayWidth = Math.floor(window.innerHeight * videoAspectRatio);
                displayHeight = window.innerHeight;
                displayTop = 0;
                displayLeft = Math.floor((window.innerWidth - displayWidth) * 0.5);
            }
        } else {
            // Video is coming in at native resolution, we care more about the player size
            let playerAspectRatio = playerElement.offsetWidth / playerElement.offsetHeight;
            let videoAspectRatio = freezeFrame.width / freezeFrame.height;
            if (playerAspectRatio < videoAspectRatio) {
                displayWidth = playerElement.offsetWidth;
                displayHeight = Math.floor(playerElement.offsetWidth / videoAspectRatio);
                displayTop = Math.floor((playerElement.offsetHeight - displayHeight) * 0.5);
                displayLeft = 0;
            } else {
                displayWidth = Math.floor(playerElement.offsetHeight * videoAspectRatio);
                displayHeight = playerElement.offsetHeight;
                displayTop = 0;
                displayLeft = Math.floor((playerElement.offsetWidth - displayWidth) * 0.5);
            }
        }
        // The overlay <div> covers the whole player; the child <img> is then
        // positioned inside it with the aspect-correct geometry above.
        let freezeFrameImage = document.getElementById("freezeFrameOverlay").childNodes[0];
        freezeFrameOverlay.style.width = playerElement.offsetWidth + 'px';
        freezeFrameOverlay.style.height = playerElement.offsetHeight + 'px';
        freezeFrameOverlay.style.left = 0 + 'px';
        freezeFrameOverlay.style.top = 0 + 'px';

        freezeFrameImage.style.width = displayWidth + 'px';
        freezeFrameImage.style.height = displayHeight + 'px';
        freezeFrameImage.style.left = displayLeft + 'px';
        freezeFrameImage.style.top = displayTop + 'px';
    }
}
+
// Top-level resize entry point: picks the appropriate sizing strategy for the
// player element and then refreshes the mouse-coordinate mapping and the
// freeze-frame overlay.
function resizePlayerStyle(event) {
    let playerElement = document.getElementById('player');

    if (!playerElement)
        return;

    updateVideoStreamSize();

    // Fixed-size players keep their CSS-defined geometry untouched.
    if (playerElement.classList.contains('fixed-size')) {
        setupMouseAndFreezeFrame(playerElement);
        return;
    }

    let checkBox = document.getElementById('enlarge-display-to-fill-window-tgl');
    // Fix: `videoWidth`/`videoHeight` live on the <video> child, not on the
    // player <div> (where they are always undefined, making the original
    // comparison always false).
    let videoElement = playerElement.getElementsByTagName("VIDEO")[0];
    let windowSmallerThanPlayer = videoElement !== undefined &&
        (window.innerWidth < videoElement.videoWidth || window.innerHeight < videoElement.videoHeight);
    if (checkBox !== null) {
        if (checkBox.checked || windowSmallerThanPlayer) {
            resizePlayerStyleToFillWindow(playerElement);
        } else {
            resizePlayerStyleToActualSize(playerElement);
        }
    } else {
        resizePlayerStyleToArbitrarySize(playerElement);
    }

    setupMouseAndFreezeFrame(playerElement);
}
+
// Refresh state that depends on the player element's current geometry:
// the cached bounding rect, the coordinate quantization closures, and the
// freeze-frame overlay layout.
function setupMouseAndFreezeFrame(playerElement) {
    // Calculating and normalizing positions depends on the width and height of
    // the player.
    playerElementClientRect = playerElement.getBoundingClientRect();
    setupNormalizeAndQuantize();
    resizeFreezeFrameOverlay();
}
+
// Ask the streamer to match its render resolution to the player element's
// current size, rate-limited to at most once per second. If called again
// within the window, a retry is scheduled instead.
function updateVideoStreamSize() {
    if (!matchViewportResolution) {
        return;
    }

    // Idiom fix: Date.now() instead of allocating Date objects for timestamps.
    let now = Date.now();
    if (now - lastTimeResized > 1000) {
        let playerElement = document.getElementById('player');
        if (!playerElement)
            return;

        let descriptor = {
            "Resolution.Width": playerElement.clientWidth,
            "Resolution.Height": playerElement.clientHeight
        };
        emitCommand(descriptor);
        console.log(descriptor);
        lastTimeResized = Date.now();
    } else {
        // Too soon since the last resize; retry once the window has elapsed.
        console.log('Resizing too often - skipping');
        clearTimeout(resizeTimeout);
        resizeTimeout = setTimeout(updateVideoStreamSize, 1000);
    }
}
+
// Fix for bug in iOS where windowsize is not correct at instance or orientation change
// https://github.com/dimsemenov/PhotoSwipe/issues/1315
// Debounce handle for onOrientationChange below.
let _orientationChangeTimeout;
+
// Debounced orientation-change handler: iOS reports stale window dimensions
// immediately after an orientation change, so delay the resize by 500ms.
function onOrientationChange(event) {
    clearTimeout(_orientationChangeTimeout);
    _orientationChangeTimeout = setTimeout(() => resizePlayerStyle(), 500);
}
+
// Pack `indata` into the binary wire format registered for `messageType` and
// send it over the input data channel. Field layout comes from the message's
// `structure` array; all multi-byte values are little-endian.
// Fix: `messageFormat`, `data`, `byteOffset` and `type` were assigned without
// declarations, creating implicit globals.
function sendMessageToStreamer(messageType, indata = []) {
    const messageFormat = toStreamerMessages.getFromKey(messageType);
    if (messageFormat === undefined) {
        console.error(`Attempted to send a message to the streamer with message type: ${messageType}, but the frontend hasn't been configured to send such a message. Check you've added the message type in your cpp`);
        return;
    }

    // First byte is the message id, followed by the packed payload fields.
    const data = new DataView(new ArrayBuffer(messageFormat.byteLength + 1));
    data.setUint8(0, messageFormat.id);
    let byteOffset = 1;

    indata.forEach((element, idx) => {
        // `structure` describes the wire type of each field, in order.
        const type = messageFormat.structure[idx];
        switch (type) {
            case "uint8":
                data.setUint8(byteOffset, element);
                byteOffset += 1;
                break;

            case "uint16":
                data.setUint16(byteOffset, element, true);
                byteOffset += 2;
                break;

            case "int16":
                data.setInt16(byteOffset, element, true);
                byteOffset += 2;
                break;

            case "double":
                data.setFloat64(byteOffset, element, true);
                byteOffset += 8;
                break;
        }
    });
    sendInputData(data.buffer);
}
+
// A generic message has a type and a descriptor. The descriptor is serialised
// to JSON and sent as [uint8 message id][uint16 length][UTF-16 code units],
// all little-endian.
function emitDescriptor(messageType, descriptor) {
    // Convert the descriptor object into a JSON string.
    let descriptorAsString = JSON.stringify(descriptor);
    let messageFormat = toStreamerMessages.getFromKey(messageType);
    if (messageFormat === undefined) {
        console.error(`Attempted to emit descriptor with message type: ${messageType}, but the frontend hasn't been configured to send such a message. Check you've added the message type in your cpp`);
        // Fix: bail out here; the original fell through and crashed
        // dereferencing `messageFormat.id` below.
        return;
    }
    // Add the UTF-16 JSON string to the array byte buffer, going two bytes at
    // a time.
    let data = new DataView(new ArrayBuffer(1 + 2 + 2 * descriptorAsString.length));
    let byteIdx = 0;
    data.setUint8(byteIdx, messageFormat.id);
    byteIdx++;
    data.setUint16(byteIdx, descriptorAsString.length, true);
    byteIdx += 2;
    for (let i = 0; i < descriptorAsString.length; i++) {
        data.setUint16(byteIdx, descriptorAsString.charCodeAt(i), true);
        byteIdx += 2;
    }
    sendInputData(data.buffer);
}
+
+// A built-in command can be sent to UE client. The commands are defined by a
+// JSON descriptor and will be executed automatically.
+// The currently supported commands are:
+//
+// 1. A command to run any console command:
//    "{ ConsoleCommand: <command string> }"
//
// 2. A command to change the resolution to the given width and height.
//    "{ Resolution.Width: <value>, Resolution.Height: <value> }"
+//
// Send a built-in command descriptor (see the comment block above) to the UE
// client via the generic descriptor channel.
function emitCommand(descriptor) {
    emitDescriptor("Command", descriptor);
}
+
// A UI interaction will occur when the user presses a button powered by
// JavaScript as opposed to pressing a button which is part of the pixel
// streamed UI from the UE client.
function emitUIInteraction(descriptor) {
    emitDescriptor("UIInteraction", descriptor);
}
+
// Ask the streamer to send its initial settings (encoder/WebRTC parameters).
function requestInitialSettings() {
    sendMessageToStreamer("RequestInitialSettings");
}
+
// Ask the streamer to make this peer the quality controller, unless this
// peer already holds that role.
function requestQualityControl() {
    if (!qualityController) {
        sendMessageToStreamer("RequestQualityControl");
    }
}
+
// Cached player bounding rect plus the coordinate-mapping closures built by
// setupNormalizeAndQuantize(); all undefined until the player is laid out.
let playerElementClientRect = undefined;
let normalizeAndQuantizeUnsigned = undefined;
let normalizeAndQuantizeSigned = undefined;
let unquantizeAndDenormalizeUnsigned = undefined;
+
// (Re)build the closures that map between browser pixel coordinates and the
// quantized coordinate space sent to UE. Two mirror-image variants are
// installed depending on which axis of the video is matted inside the player.
function setupNormalizeAndQuantize() {
    let playerElement = document.getElementById('player');
    let videoElement = playerElement.getElementsByTagName("video");

    if (playerElement && videoElement.length > 0) {
        let playerAspectRatio = playerElement.clientHeight / playerElement.clientWidth;
        let videoAspectRatio = videoElement[0].videoHeight / videoElement[0].videoWidth;

        // Unsigned XY positions are the ratio (0.0..1.0) along a viewport axis,
        // quantized into an uint16 (0..65536).
        // Signed XY deltas are the ratio (-1.0..1.0) along a viewport axis,
        // quantized into an int16 (-32767..32767).
        // This allows the browser viewport and client viewport to have a different
        // size.
        // Hack: Currently we set an out-of-range position to an extreme (65535)
        // as we can't yet accurately detect mouse enter and leave events
        // precisely inside a video with an aspect ratio which causes mattes.
        if (playerAspectRatio > videoAspectRatio) {
            if (print_inputs) {
                console.log('Setup Normalize and Quantize for playerAspectRatio > videoAspectRatio');
            }
            // Player is taller than the video: vertical mattes; rescale Y.
            let ratio = playerAspectRatio / videoAspectRatio;
            // Unsigned.
            normalizeAndQuantizeUnsigned = (x, y) => {
                let normalizedX = x / playerElement.clientWidth;
                let normalizedY = ratio * (y / playerElement.clientHeight - 0.5) + 0.5;
                if (normalizedX < 0.0 || normalizedX > 1.0 || normalizedY < 0.0 || normalizedY > 1.0) {
                    return {
                        inRange: false,
                        x: 65535,
                        y: 65535
                    };
                } else {
                    return {
                        inRange: true,
                        x: normalizedX * 65536,
                        y: normalizedY * 65536
                    };
                }
            };
            // Inverse mapping: quantized UE coordinates back to player pixels.
            unquantizeAndDenormalizeUnsigned = (x, y) => {
                let normalizedX = x / 65536;
                let normalizedY = (y / 65536 - 0.5) / ratio + 0.5;
                return {
                    x: normalizedX * playerElement.clientWidth,
                    y: normalizedY * playerElement.clientHeight
                };
            };
            // Signed.
            normalizeAndQuantizeSigned = (x, y) => {
                let normalizedX = x / (0.5 * playerElement.clientWidth);
                let normalizedY = (ratio * y) / (0.5 * playerElement.clientHeight);
                return {
                    x: normalizedX * 32767,
                    y: normalizedY * 32767
                };
            };
        } else {
            if (print_inputs) {
                console.log('Setup Normalize and Quantize for playerAspectRatio <= videoAspectRatio');
            }
            // Player is wider than the video: horizontal mattes; rescale X.
            let ratio = videoAspectRatio / playerAspectRatio;
            // Unsigned.
            normalizeAndQuantizeUnsigned = (x, y) => {
                let normalizedX = ratio * (x / playerElement.clientWidth - 0.5) + 0.5;
                let normalizedY = y / playerElement.clientHeight;
                if (normalizedX < 0.0 || normalizedX > 1.0 || normalizedY < 0.0 || normalizedY > 1.0) {
                    return {
                        inRange: false,
                        x: 65535,
                        y: 65535
                    };
                } else {
                    return {
                        inRange: true,
                        x: normalizedX * 65536,
                        y: normalizedY * 65536
                    };
                }
            };
            // Inverse mapping: quantized UE coordinates back to player pixels.
            unquantizeAndDenormalizeUnsigned = (x, y) => {
                let normalizedX = (x / 65536 - 0.5) / ratio + 0.5;
                let normalizedY = y / 65536;
                return {
                    x: normalizedX * playerElement.clientWidth,
                    y: normalizedY * playerElement.clientHeight
                };
            };
            // Signed.
            normalizeAndQuantizeSigned = (x, y) => {
                let normalizedX = (ratio * x) / (0.5 * playerElement.clientWidth);
                let normalizedY = y / (0.5 * playerElement.clientHeight);
                return {
                    x: normalizedX * 32767,
                    y: normalizedY * 32767
                };
            };
        }
    }
}
+
// Values of MouseEvent.button (which single button changed state).
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/button
const MouseButton = {
    MainButton: 0, // Left button.
    AuxiliaryButton: 1, // Wheel button.
    SecondaryButton: 2, // Right button.
    FourthButton: 3, // Browser Back button.
    FifthButton: 4 // Browser Forward button.
};
+
// Bit masks for MouseEvent.buttons (which buttons are currently held).
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/buttons
const MouseButtonsMask = {
    PrimaryButton: 1, // Left button.
    SecondaryButton: 2, // Right button.
    AuxiliaryButton: 4, // Wheel button.
    FourthButton: 8, // Browser Back button.
    FifthButton: 16 // Browser Forward button.
};
+
// If the user has any mouse buttons pressed then release them: emit a
// MouseUp at (x, y) for every button set in the `buttons` bitmask.
function releaseMouseButtons(buttons, x, y) {
    let coord = normalizeAndQuantizeUnsigned(x, y);
    // (bitmask in `buttons`, button id to report) pairs, in mask order.
    const maskToButton = [
        [MouseButtonsMask.PrimaryButton, MouseButton.MainButton],
        [MouseButtonsMask.SecondaryButton, MouseButton.SecondaryButton],
        [MouseButtonsMask.AuxiliaryButton, MouseButton.AuxiliaryButton],
        [MouseButtonsMask.FourthButton, MouseButton.FourthButton],
        [MouseButtonsMask.FifthButton, MouseButton.FifthButton]
    ];
    for (const [mask, button] of maskToButton) {
        if (buttons & mask) {
            toStreamerHandlers.MouseUp("MouseUp", [button, coord.x, coord.y]);
        }
    }
}
+
// If the user has any mouse buttons pressed then press them again: emit a
// MouseDown at (x, y) for every button set in the `buttons` bitmask.
function pressMouseButtons(buttons, x, y) {
    let coord = normalizeAndQuantizeUnsigned(x, y);
    // (bitmask in `buttons`, button id to report) pairs, in mask order.
    const maskToButton = [
        [MouseButtonsMask.PrimaryButton, MouseButton.MainButton],
        [MouseButtonsMask.SecondaryButton, MouseButton.SecondaryButton],
        [MouseButtonsMask.AuxiliaryButton, MouseButton.AuxiliaryButton],
        [MouseButtonsMask.FourthButton, MouseButton.FourthButton],
        [MouseButtonsMask.FifthButton, MouseButton.FifthButton]
    ];
    for (const [mask, button] of maskToButton) {
        if (buttons & mask) {
            toStreamerHandlers.MouseDown("MouseDown", [button, coord.x, coord.y]);
        }
    }
}
+
// Attach the mouse enter/leave and touch handlers to the player element.
// (Mouse-scheme-specific handlers are registered elsewhere.)
function registerInputs(playerElement) {
    if (!playerElement)
        return;

    registerMouseEnterAndLeaveEvents(playerElement);
    registerTouchEvents(playerElement);
}
+
// Create the hidden <input> (which summons the on-screen keyboard when it
// gains focus) and the floating 'edit text' button, unless they already
// exist. Both are stored in module-level variables for later use.
function createOnScreenKeyboardHelpers(htmlElement) {
    if (document.getElementById('hiddenInput') === null) {
        hiddenInput = document.createElement('input');
        hiddenInput.id = 'hiddenInput';
        // Zero max length: the input exists only to receive focus.
        hiddenInput.maxLength = 0;
        htmlElement.appendChild(hiddenInput);
    }

    if (document.getElementById('editTextButton') !== null) {
        return;
    }
    editTextButton = document.createElement('button');
    editTextButton.id = 'editTextButton';
    editTextButton.innerHTML = 'edit text';
    htmlElement.appendChild(editTextButton);

    // Start hidden; showOnScreenKeyboard() reveals it on demand.
    editTextButton.classList.add('hiddenState');

    // Focusing the hidden input brings up the on-screen keyboard.
    editTextButton.addEventListener('click', () => hiddenInput.focus());
}
+
// Show or hide the 'edit text' button (and on-screen keyboard) according to
// the command sent by the UE client, positioning it near the input widget.
function showOnScreenKeyboard(command) {
    if (!command.showOnScreenKeyboard) {
        // Hide the 'edit text' button and dismiss the on-screen keyboard.
        editTextButton.classList.add('hiddenState');
        hiddenInput.blur();
        return;
    }
    // Reveal the 'edit text' button near the UE input widget.
    editTextButton.classList.remove('hiddenState');
    let pos = unquantizeAndDenormalizeUnsigned(command.x, command.y);
    editTextButton.style.top = pos.y.toString() + 'px';
    editTextButton.style.left = (pos.x - 40).toString() + 'px';
}
+
// Notify the streamer when the cursor enters or leaves the player, and sync
// the currently-held mouse buttons across the boundary so none get stuck.
function registerMouseEnterAndLeaveEvents(playerElement) {
    playerElement.onmouseenter = (e) => {
        if (print_inputs) {
            console.log('mouse enter');
        }
        toStreamerHandlers.MouseEnter("MouseEnter");
        playerElement.pressMouseButtons(e);
    };

    playerElement.onmouseleave = (e) => {
        if (print_inputs) {
            console.log('mouse leave');
        }
        toStreamerHandlers.MouseLeave("MouseLeave");
        playerElement.releaseMouseButtons(e);
    };
}
+
// A locked mouse works by the user clicking in the browser player and the
// cursor disappears and is locked. The user moves the cursor and the camera
// moves, for example. The user presses escape to free the mouse.
function registerLockedMouseEvents(playerElement) {
    styleCursor = (inputOptions.hideBrowserCursor ? 'none' : 'default');
    // Virtual cursor position, starting at the centre of the player; updated
    // from relative movement while the pointer is locked.
    let x = playerElement.width / 2;
    let y = playerElement.height / 2;
    // NOTE(review): this coord is captured once at registration time and used
    // by the click/wheel handlers below — presumably intentional (events are
    // reported at the virtual cursor's quantized start position); confirm.
    let coord = normalizeAndQuantizeUnsigned(x, y);

    playerElement.requestPointerLock = playerElement.requestPointerLock || playerElement.mozRequestPointerLock;
    document.exitPointerLock = document.exitPointerLock || document.mozExitPointerLock;

    playerElement.onclick = function() {
        playerElement.requestPointerLock();
    };

    // Respond to lock state change events
    document.addEventListener('pointerlockchange', lockStateChange, false);
    document.addEventListener('mozpointerlockchange', lockStateChange, false);

    function lockStateChange() {
        if (document.pointerLockElement === playerElement ||
            document.mozPointerLockElement === playerElement) {
            console.log('Pointer locked');
            document.addEventListener("mousemove", updatePosition, false);
        } else {
            console.log('The pointer lock status is now unlocked');
            document.removeEventListener("mousemove", updatePosition, false);

            // If mouse loses focus, send a key up for all of the currently held-down keys
            // This is necessary as when the mouse loses focus, the windows stops listening for events and as such
            // the keyup listener won't get fired
            [...new Set(activeKeys)].forEach((uniqueKeycode) => {
                toStreamerHandlers.KeyUp("KeyUp", [uniqueKeycode]);
            });
            // Reset the active keys back to nothing
            activeKeys = [];
        }
    }

    function updatePosition(e) {
        x += e.movementX;
        y += e.movementY;
        // Wrap the virtual cursor around the player's edges.
        if (x > styleWidth) {
            x -= styleWidth;
        }
        if (y > styleHeight) {
            y -= styleHeight;
        }
        if (x < 0) {
            x = styleWidth + x;
        }
        if (y < 0) {
            // Fix: was `styleHeight - y`, which for a negative y moved the
            // cursor to `styleHeight + |y|` (outside the player) instead of
            // wrapping — asymmetric with the x-axis handling above.
            y = styleHeight + y;
        }

        let coord = normalizeAndQuantizeUnsigned(x, y);
        let delta = normalizeAndQuantizeSigned(e.movementX, e.movementY);
        toStreamerHandlers.MouseMove("MouseMove", [coord.x, coord.y, delta.x, delta.y]);
    }

    playerElement.onmousedown = function (e) {
        toStreamerHandlers.MouseDown("MouseDown", [e.button, coord.x, coord.y]);
    };

    playerElement.onmouseup = function (e) {
        toStreamerHandlers.MouseUp("MouseUp", [e.button, coord.x, coord.y]);
    };

    playerElement.onwheel = function (e) {
        toStreamerHandlers.MouseWheel("MouseWheel", [e.wheelDelta, coord.x, coord.y]);
    };

    playerElement.ondblclick = function (e) {
        toStreamerHandlers.MouseDown("MouseDouble", [e.button, coord.x, coord.y]);
    };

    playerElement.pressMouseButtons = function(e) {
        pressMouseButtons(e.buttons, x, y);
    };

    playerElement.releaseMouseButtons = function(e) {
        releaseMouseButtons(e.buttons, x, y);
    };
}
+
// A hovering mouse works by the user clicking the mouse button when they want
// the cursor to have an effect over the video. Otherwise the cursor just
// passes over the browser.
function registerHoveringMouseEvents(playerElement) {
    styleCursor = inputOptions.hideBrowserCursor ? 'none' : 'default';

    // Quantize the event's player-relative position once per handler.
    const quantize = (e) => normalizeAndQuantizeUnsigned(e.offsetX, e.offsetY);

    playerElement.onmousemove = (e) => {
        let coord = quantize(e);
        let delta = normalizeAndQuantizeSigned(e.movementX, e.movementY);
        toStreamerHandlers.MouseMove("MouseMove", [coord.x, coord.y, delta.x, delta.y]);
        e.preventDefault();
    };

    playerElement.onmousedown = (e) => {
        let coord = quantize(e);
        toStreamerHandlers.MouseDown("MouseDown", [e.button, coord.x, coord.y]);
        e.preventDefault();
    };

    playerElement.onmouseup = (e) => {
        let coord = quantize(e);
        toStreamerHandlers.MouseUp("MouseUp", [e.button, coord.x, coord.y]);
        e.preventDefault();
    };

    // When the context menu is shown then it is safest to release the button
    // which was pressed when the event happened. This will guarantee we will
    // get at least one mouse up corresponding to a mouse down event. Otherwise
    // the mouse can get stuck.
    // https://github.com/facebook/react/issues/5531
    playerElement.oncontextmenu = (e) => {
        let coord = quantize(e);
        toStreamerHandlers.MouseUp("MouseUp", [e.button, coord.x, coord.y]);
        e.preventDefault();
    };

    playerElement.onwheel = (e) => {
        let coord = quantize(e);
        toStreamerHandlers.MouseWheel("MouseWheel", [e.wheelDelta, coord.x, coord.y]);
        e.preventDefault();
    };

    playerElement.ondblclick = (e) => {
        let coord = quantize(e);
        toStreamerHandlers.MouseDown("MouseDouble", [e.button, coord.x, coord.y]);
    };

    playerElement.pressMouseButtons = (e) => {
        pressMouseButtons(e.buttons, e.offsetX, e.offsetY);
    };

    playerElement.releaseMouseButtons = (e) => {
        releaseMouseButtons(e.buttons, e.offsetX, e.offsetY);
    };
}
+
// Register touch handlers on the player. In fake-mouse mode a single finger
// is translated into mouse enter/down/move/up events; otherwise each finger
// is tracked with a stable id (0..9) and forwarded as touch events.
function registerTouchEvents(playerElement) {
    // We need to assign a unique identifier to each finger.
    // We do this by mapping each Touch object to the identifier.
    let fingers = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0];
    let fingerIds = {};

    // Allocate the lowest free finger id for a new touch.
    function rememberTouch(touch) {
        let finger = fingers.pop();
        if (finger === undefined) {
            console.log('exhausted touch indentifiers');
        }
        fingerIds[touch.identifier] = finger;
    }

    // Return a finger id to the pool when its touch ends.
    function forgetTouch(touch) {
        fingers.push(fingerIds[touch.identifier]);
        // Sort array back into descending order. This means if finger '1' were to lift after finger '0', we would ensure that 0 will be the first index to pop
        fingers.sort(function(a, b){return b - a});
        delete fingerIds[touch.identifier];
    }

    // Forward a batch of touches to the streamer as one message per touch.
    // NOTE(review): all three cases call toStreamerHandlers.TouchStart (with
    // differing message names) — presumably the handlers are generic and keyed
    // by the name argument, but confirm this is not a copy-paste slip.
    function emitTouchData(type, touches) {
        for (let t = 0; t < touches.length; t++) {
            let numTouches = 1; // the number of touches to be sent this message
            let touch = touches[t];
            let x = touch.clientX - playerElement.offsetLeft;
            let y = touch.clientY - playerElement.offsetTop;
            if (print_inputs) {
                console.log(`F${fingerIds[touch.identifier]}=(${x}, ${y})`);
            }
            let coord = normalizeAndQuantizeUnsigned(x, y);

            switch(type) {
                case "TouchStart":
                    toStreamerHandlers.TouchStart("TouchStart", [numTouches, coord.x, coord.y, fingerIds[touch.identifier], MaxByteValue * touch.force, coord.inRange ? 1 : 0]);
                    break;
                case "TouchEnd":
                    toStreamerHandlers.TouchStart("TouchEnd", [numTouches, coord.x, coord.y, fingerIds[touch.identifier], MaxByteValue * touch.force, coord.inRange ? 1 : 0]);
                    break;
                case "TouchMove":
                    toStreamerHandlers.TouchStart("TouchMove", [numTouches, coord.x, coord.y, fingerIds[touch.identifier], MaxByteValue * touch.force, coord.inRange ? 1 : 0]);
                    break;
            }
        }
    }

    if (inputOptions.fakeMouseWithTouches) {

        // The single finger currently being translated into mouse events
        // (undefined when no touch is active).
        let finger = undefined;

        playerElement.ontouchstart = function(e) {
            if (finger === undefined) {
                let firstTouch = e.changedTouches[0];
                finger = {
                    id: firstTouch.identifier,
                    x: firstTouch.clientX - playerElementClientRect.left,
                    y: firstTouch.clientY - playerElementClientRect.top
                };
                // Hack: Mouse events require an enter and leave so we just
                // enter and leave manually with each touch as this event
                // is not fired with a touch device.
                playerElement.onmouseenter(e);
                let coord = normalizeAndQuantizeUnsigned(finger.x, finger.y);
                toStreamerHandlers.MouseDown("MouseDown", [MouseButton.MainButton, coord.x, coord.y]);
            }
            e.preventDefault();
        };

        playerElement.ontouchend = function(e) {
            for (let t = 0; t < e.changedTouches.length; t++) {
                let touch = e.changedTouches[t];
                // Only the tracked finger ends the fake mouse press.
                if (touch.identifier === finger.id) {
                    let x = touch.clientX - playerElementClientRect.left;
                    let y = touch.clientY - playerElementClientRect.top;
                    let coord = normalizeAndQuantizeUnsigned(x, y);
                    toStreamerHandlers.MouseUp("MouseUp", [MouseButton.MainButton, coord.x, coord.y]);
                    // Hack: Manual mouse leave event.
                    playerElement.onmouseleave(e);
                    finger = undefined;
                    break;
                }
            }
            e.preventDefault();
        };

        playerElement.ontouchmove = function(e) {
            for (let t = 0; t < e.touches.length; t++) {
                let touch = e.touches[t];
                if (touch.identifier === finger.id) {
                    let x = touch.clientX - playerElementClientRect.left;
                    let y = touch.clientY - playerElementClientRect.top;
                    let coord = normalizeAndQuantizeUnsigned(x, y);
                    // Delta is relative to the finger's last known position.
                    let delta = normalizeAndQuantizeSigned(x - finger.x, y - finger.y);
                    toStreamerHandlers.MouseMove("MouseMove", [coord.x, coord.y, delta.x, delta.y]);
                    finger.x = x;
                    finger.y = y;
                    break;
                }
            }
            e.preventDefault();
        };
    } else {
        playerElement.ontouchstart = function(e) {
            // Assign a unique identifier to each touch.
            for (let t = 0; t < e.changedTouches.length; t++) {
                rememberTouch(e.changedTouches[t]);
            }

            if (print_inputs) {
                console.log('touch start');
            }
            emitTouchData("TouchStart", e.changedTouches);
            e.preventDefault();
        };

        playerElement.ontouchend = function(e) {
            if (print_inputs) {
                console.log('touch end');
            }
            emitTouchData("TouchEnd", e.changedTouches);

            // Re-cycle unique identifiers previously assigned to each touch.
            for (let t = 0; t < e.changedTouches.length; t++) {
                forgetTouch(e.changedTouches[t]);
            }
            e.preventDefault();
        };

        playerElement.ontouchmove = function(e) {
            if (print_inputs) {
                console.log('touch move');
            }
            emitTouchData("TouchMove", e.touches);
            e.preventDefault();
        };
    }
}
+
// Browser keys do not have a charCode so we only need to test keyCode.
// True for keys the browser UI typically consumes: F1..F12 (112..123) or Tab (9).
function isKeyCodeBrowserKey(keyCode) {
    return (keyCode >= 112 && keyCode <= 123) || keyCode === 9;
}
+
// Must be kept in sync with JavaScriptKeyCodeToFKey C++ array. The index of the
// entry in the array is the special key code given below.
const SpecialKeyCodes = {
    BackSpace: 8,
    Shift: 16,
    Control: 17,
    Alt: 18,
    RightShift: 253,
    RightControl: 254,
    RightAlt: 255
};

// We want to be able to differentiate between left and right versions of some
// keys: map the right-hand modifier variants onto dedicated codes, and pass
// every other key through unchanged.
function getKeyCode(e) {
    switch (e.code) {
        case 'ShiftRight':
            if (e.keyCode === SpecialKeyCodes.Shift) return SpecialKeyCodes.RightShift;
            break;
        case 'ControlRight':
            if (e.keyCode === SpecialKeyCodes.Control) return SpecialKeyCodes.RightControl;
            break;
        case 'AltRight':
            if (e.keyCode === SpecialKeyCodes.Alt) return SpecialKeyCodes.RightAlt;
            break;
    }
    return e.keyCode;
}
+
// Install document-level key handlers that forward keyboard input to the
// streamer, tracking held keys in `activeKeys` and optionally suppressing
// keys the browser UI would otherwise consume.
function registerKeyboardEvents() {
    document.onkeydown = function(e) {
        if (print_inputs) {
            console.log(`key down ${e.keyCode}, repeat = ${e.repeat}`);
        }
        toStreamerHandlers.KeyDown("KeyDown", [getKeyCode(e), e.repeat]);
        // Track held keys so they can be released if focus is lost.
        activeKeys.push(getKeyCode(e));
        // Backspace is not considered a keypress in JavaScript but we need it
        // to be so characters may be deleted in a UE text entry field.
        if (e.keyCode === SpecialKeyCodes.BackSpace) {
            // Synthesize a keypress with a minimal event-like object.
            document.onkeypress({
                charCode: SpecialKeyCodes.BackSpace
            });
        }
        if (inputOptions.suppressBrowserKeys && isKeyCodeBrowserKey(e.keyCode)) {
            e.preventDefault();
        }
    };

    document.onkeyup = function(e) {
        if (print_inputs) {
            console.log(`key up ${e.keyCode}`);
        }
        toStreamerHandlers.KeyUp("KeyUp", [getKeyCode(e), e.repeat]);
        if (inputOptions.suppressBrowserKeys && isKeyCodeBrowserKey(e.keyCode)) {
            e.preventDefault();
        }
    };

    document.onkeypress = function(e) {
        if (print_inputs) {
            console.log(`key press ${e.charCode}`);
        }
        toStreamerHandlers.KeyPress("KeyPress", [e.charCode]);
    };
}
+
// Toggle the settings panel. If the stats panel is already open, close it
// first so at most one panel is visible.
function settingsClicked( /* e */ ) {
    let statsPanel = document.getElementById('stats-panel');
    if (statsPanel.classList.contains("panel-wrap-visible")) {
        statsPanel.classList.toggle("panel-wrap-visible");
    }

    document.getElementById('settings-panel').classList.toggle("panel-wrap-visible");
}
+
function statsClicked( /* e */ ) {
    // Toggle the stats panel. Only one side panel may be visible at a time,
    // so close the settings panel first if it is currently open.
    const settingsPanel = document.getElementById('settings-panel');
    const statsPanel = document.getElementById('stats-panel');

    if (settingsPanel.classList.contains("panel-wrap-visible")) {
        settingsPanel.classList.remove("panel-wrap-visible");
    }

    statsPanel.classList.toggle("panel-wrap-visible");
}
+
+
+
// Reset the UI to its disconnected state and either auto-connect or show the
// connect overlay, depending on `connect_on_load` and whether this is a
// reconnection attempt.
function start(isReconnection) {
    // update "quality status" to "disconnected" state
    const qualityStatus = document.getElementById("qualityStatus");
    if (qualityStatus) {
        qualityStatus.className = "grey-status";
    }

    const statsDiv = document.getElementById("stats");
    if (statsDiv) {
        statsDiv.innerHTML = 'Not connected';
    }

    // Connect immediately only on a fresh page load that asked for it;
    // otherwise wait for the user to click the connect overlay.
    if (connect_on_load && !isReconnection) {
        connect();
    } else {
        showConnectOverlay();
        invalidateFreezeFrameOverlay();
        shouldShowPlayOverlay = true;
        resizePlayerStyle();
    }
}
+
// Open the websocket connection to the signalling server and install the
// message/error/close handlers that drive the streaming session.
function connect() {
    "use strict";

    window.WebSocket = window.WebSocket || window.MozWebSocket;

    if (!window.WebSocket) {
        alert('Your browser doesn\'t support WebSocket');
        return;
    }

    // Make a new websocket connection, deriving the ws/wss url from the page
    // url (http -> ws, https -> wss).
    let connectionUrl = window.location.href.replace('http://', 'ws://').replace('https://', 'wss://');
    console.log(`Creating a websocket connection to: ${connectionUrl}`);
    ws = new WebSocket(connectionUrl);
    ws.attemptStreamReconnection = true;

    // Binary payloads are stringified and re-dispatched through `onmessage`.
    ws.onmessagebinary = function(event) {
        if (!event || !event.data) { return; }

        event.data.text().then(function(messageString) {
            // send the new stringified event back into `onmessage`
            ws.onmessage({ data: messageString });
        }).catch(function(error) {
            console.error(`Failed to parse binary blob from websocket, reason: ${error}`);
        });
    };

    // Dispatch signalling server messages by their `type` field.
    ws.onmessage = function(event) {

        // Check if websocket message is binary, if so, stringify it.
        if (event.data && event.data instanceof Blob) {
            ws.onmessagebinary(event);
            return;
        }

        let msg = JSON.parse(event.data);
        if (msg.type === 'config') {
            console.log("%c[Inbound SS (config)]", "background: lightblue; color: black", msg);
            onConfig(msg);
        } else if (msg.type === 'playerCount') {
            console.log("%c[Inbound SS (playerCount)]", "background: lightblue; color: black", msg);
        } else if (msg.type === 'offer') {
            console.log("%c[Inbound SS (offer)]", "background: lightblue; color: black", msg);
            if (!UrlParamsCheck('offerToReceive')) {
                onWebRtcOffer(msg);
            }
        } else if (msg.type === 'answer') {
            console.log("%c[Inbound SS (answer)]", "background: lightblue; color: black", msg);
            onWebRtcAnswer(msg);
        } else if (msg.type === 'iceCandidate') {
            onWebRtcIce(msg.candidate);
        } else if (msg.type === 'warning' && msg.warning) {
            console.warn(msg.warning);
        } else if (msg.type === 'peerDataChannels') {
            onWebRtcSFUPeerDatachannels(msg);
        } else {
            console.error("Invalid SS message type", msg.type);
        }
    };

    ws.onerror = function(event) {
        console.log(`WS error: ${JSON.stringify(event)}`);
    };

    ws.onclose = function(event) {

        closeStream();

        if (ws.attemptStreamReconnection === true) {
            console.log(`WS closed: ${JSON.stringify(event.code)} - ${event.reason}`);
            if (event.reason !== "") {
                showTextOverlay(`DISCONNECTED: ${event.reason.toUpperCase()}`);
            } else {
                showTextOverlay(`DISCONNECTED`);
            }

            // Automatically retry the connection after a short delay.
            setTimeout(function() {
                start(true);
            }, 4000);
        }

        ws = undefined;
    };
}
+
// Config data received from WebRTC sender via the Cirrus web server
function onConfig(config) {
    // Stand up the WebRTC player inside the #player container, then size it
    // and attach mouse handlers for the active control scheme.
    const playerDiv = document.getElementById('player');
    const playerElement = setupWebRtcPlayer(playerDiv, config);
    resizePlayerStyle();
    registerMouse(playerElement);
}
+
+
// Attach the mouse handlers matching the current control scheme to the
// player element, replacing any previously registered handlers.
function registerMouse(playerElement) {
    clearMouseEvents(playerElement);

    // Hovering mouse sends absolute positions; every other scheme value
    // (including unknown ones) falls back to pointer-locked input, which
    // matches the original switch's default branch.
    if (inputOptions.controlScheme === ControlSchemeType.HoveringMouse) {
        registerHoveringMouseEvents(playerElement);
    } else {
        registerLockedMouseEvents(playerElement);
    }

    const player = document.getElementById("player");
    player.style.cursor = styleCursor;
}
+
// Null out every mouse handler we may have registered on the element so the
// control schemes do not stack.
function clearMouseEvents(playerElement) {
    const handlerNames = [
        'onclick',
        'onmousedown',
        'onmouseup',
        'onwheel',
        'onmousemove',
        'oncontextmenu'
    ];
    for (const name of handlerNames) {
        playerElement[name] = null;
    }
}
+
// Flip between the Hovering Mouse and Locked Mouse control schemes, update
// the UI label, and re-register the mouse handlers on the video element.
function toggleControlScheme() {
    let schemeToggle = document.getElementById("control-scheme-text");

    // Capture the scheme we are switching away from so the default branch can
    // report the actual unknown value (the original logged it after it had
    // already been overwritten with LockedMouse).
    let previousScheme = inputOptions.controlScheme;

    switch (previousScheme) {
        case ControlSchemeType.HoveringMouse:
            inputOptions.controlScheme = ControlSchemeType.LockedMouse;
            schemeToggle.innerHTML = "Control Scheme: Locked Mouse";
            break;
        case ControlSchemeType.LockedMouse:
            inputOptions.controlScheme = ControlSchemeType.HoveringMouse;
            schemeToggle.innerHTML = "Control Scheme: Hovering Mouse";
            break;
        default:
            inputOptions.controlScheme = ControlSchemeType.LockedMouse;
            schemeToggle.innerHTML = "Control Scheme: Locked Mouse";
            console.log(`ERROR: Unknown control scheme ${previousScheme}, defaulting to Locked Mouse`);
            break;
    }

    // Compare against the enum explicitly rather than relying on the enum's
    // underlying value happening to be truthy/falsy.
    console.log(`Updating control scheme to: ${inputOptions.controlScheme === ControlSchemeType.HoveringMouse ? "Hovering Mouse" : "Locked Mouse"}`);

    if (webRtcPlayerObj && webRtcPlayerObj.video) {
        registerMouse(webRtcPlayerObj.video);
    }
}
+
// Show or hide the browser cursor over the player and remember the choice in
// `styleCursor` so later re-registrations reuse it.
function toggleBrowserCursorVisibility() {
    inputOptions.hideBrowserCursor = !inputOptions.hideBrowserCursor;
    if (inputOptions.hideBrowserCursor) {
        styleCursor = 'none';
    } else {
        styleCursor = 'default';
    }
    const player = document.getElementById("player");
    player.style.cursor = styleCursor;
}
+
// Tear down the current signalling connection and immediately reconnect.
function restartStream() {
    // Nothing to restart without a live websocket.
    if (!ws) {
        return;
    }

    // Suppress the normal auto-reconnect path; we drive the restart ourselves.
    ws.attemptStreamReconnection = false;

    const previousOnClose = ws.onclose;
    ws.onclose = function (event) {
        previousOnClose(event);
        // Once the old socket has fully closed, reconnect straight away.
        connect_on_load = true;
        start(false);
    };

    // Closing the websocket closes the connection to signalling server, ending the peer connection, and closing the clientside stream too.
    ws.close();
}
+
// Tear down the WebRTC player: remove its video element from the page, reset
// the connection-quality UI, and close the peer connection.
function closeStream() {
    console.log("----------------------Closing stream----------------------");
    if (!webRtcPlayerObj) {
        return;
    }

    // Remove video element from the page.
    let playerDiv = document.getElementById('player');
    if (playerDiv) {
        playerDiv.removeChild(webRtcPlayerObj.video);
    }

    // Reset the quality indicator rings to their neutral colour. Guard each
    // element so a page without the indicator markup does not throw here
    // (only `playerDiv` was guarded before).
    let indicatorParts = [
        document.getElementById("outer"),
        document.getElementById("middle"),
        document.getElementById("inner"),
        document.getElementById("dot")
    ];
    for (let part of indicatorParts) {
        if (part) {
            part.style.fill = "#3c3b40";
        }
    }

    let qualityText = document.getElementById("qualityText");
    if (qualityText) {
        qualityText.innerHTML = 'Not connected';
    }

    // Close the peer connection and associated webrtc machinery.
    webRtcPlayerObj.close();
    webRtcPlayerObj = undefined;
}
+
// Page entry point: parse url options, wire up the UI and input handlers,
// then kick off the connection flow.
function load() {
    parseURLParams();
    setupHtmlEvents();
    registerMessageHandlers();
    populateDefaultProtocol();
    setupFreezeFrameOverlay();
    registerKeyboardEvents();
    // Example response event listener that logs to console
    addResponseEventListener('logListener', function (response) {
        console.log(`Received response message from streamer: "${response}"`);
    });
    start(false);
}
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/scripts/stressTest.js b/WebServers/SignallingWebServer/scripts/stressTest.js
new file mode 100644
index 0000000..adc3225
--- /dev/null
+++ b/WebServers/SignallingWebServer/scripts/stressTest.js
@@ -0,0 +1,165 @@
+// Copyright Epic Games, Inc. All Rights Reserved.
+
+// This is the entrypoint to the stress test, all setup happens here
function stressTest() {

    // This stress test creates a number of Pixel Streaming pages on the same page
    // using iframes and then tries to auto-connect them.

    // The purpose of the stress test is to automate testing a large number of peers
    // connecting and disconnecting regularly from a single Unreal Engine streaming instance.

    let self = this;
    this.play = true;                       // When false, the create/delete timers idle.
    this.maxPeers = 2;                      // Upper bound on simultaneously connected iframes.
    this.totalStreams = 0;                  // Count of every stream created over the whole run.
    this.streamCreationIntervalMs = 200;
    this.streamDeletionIntervalMs = 2000;
    this.pixelStreamingFrames = [];
    this.creationIntervalHandle = null;
    this.deletionIntervalHandle = null;

    // Create a container to put the "Pixel Streaming" pages in.
    let streamsContainer = document.getElementById("streamsContainer");

    // Wire up the UI controls and start the periodic create/delete loops.
    function startStressTest() {
        setupNumPeersSlider();
        startStreamCreation();
        startStreamDeletion();
        setupPlayPause();

        document.getElementById("creationIntervalInput").addEventListener("change", function() {
            // Inputs are in seconds; the intervals are in milliseconds.
            self.streamCreationIntervalMs = Number(document.getElementById("creationIntervalInput").value) * 1000.0;
            startStreamCreation();
        });

        document.getElementById("deletionIntervalInput").addEventListener("change", function() {
            self.streamDeletionIntervalMs = Number(document.getElementById("deletionIntervalInput").value) * 1000.0;
            startStreamDeletion();
        });
    }

    // (Re)start the timer that spawns new Pixel Streaming iframes.
    function startStreamCreation() {

        if (self.creationIntervalHandle) {
            clearInterval(self.creationIntervalHandle);
        }

        // Create iframes of Pixel Streaming at a given interval (up to the max nPeers)
        self.creationIntervalHandle = setInterval(function() {

            if (self.play) {
                let curNPeers = self.pixelStreamingFrames.length;
                if (curNPeers >= self.maxPeers) {
                    return;
                }

                // Make a random amount of peers between 0 and up to max peers.
                let maxPeersToCreate = self.maxPeers - curNPeers;
                let nPeersToCreate = Math.ceil(Math.random() * maxPeersToCreate);

                for (let i = 0; i < nPeersToCreate; i++) {
                    let frame = createPixelStreamingFrame();
                    let n = self.pixelStreamingFrames.length;
                    frame.id = "PixelStreamingFrame_" + (n + 1);
                    streamsContainer.append(frame);
                    self.pixelStreamingFrames.push(frame);
                    self.totalStreams += 1;
                    updateTotalStreams();
                }
            }
        }, self.streamCreationIntervalMs);
    }

    // (Re)start the timer that tears down a random number of existing iframes.
    function startStreamDeletion() {

        if (self.deletionIntervalHandle) {
            clearInterval(self.deletionIntervalHandle);
        }

        self.deletionIntervalHandle = setInterval(function() {
            if (self.play) {
                let curNPeers = self.pixelStreamingFrames.length;
                if (curNPeers == 0) {
                    return;
                }

                // Delete a random amount of peers up to current number of peers
                let nPeersToDelete = Math.ceil(Math.random() * curNPeers);

                for (let i = 0; i < nPeersToDelete; i++) {
                    // Oldest frames are removed first.
                    let frame = self.pixelStreamingFrames.shift();
                    frame.parentNode.removeChild(frame);
                }
            }
        }, self.streamDeletionIntervalMs);

    }

    // Reflect the total number of streams ever created in the UI.
    function updateTotalStreams() {
        let nStreamsLabel = document.getElementById("nStreamsLabel");
        nStreamsLabel.innerHTML = self.totalStreams;
    }

    // Toggle the create/delete loops with a single play/pause button.
    function setupPlayPause() {
        let playPauseBtn = document.getElementById("playPause");
        playPauseBtn.addEventListener("click", () => {
            if (self.play) {
                self.play = false;
                playPauseBtn.innerHTML = "Play";
            } else {
                self.play = true;
                playPauseBtn.innerHTML = "Pause";
            }
        });
    }

    function setupNumPeersSlider() {
        // Tie number of peers to the slider
        let nPeersSlider = document.getElementById("nPeersSlider");
        nPeersSlider.value = self.maxPeers;

        let nPeersLabel = document.getElementById("nPeerLabel");
        nPeersLabel.innerHTML = self.maxPeers;

        // When the slide changes update the nPeers variable
        nPeersSlider.addEventListener("change", function() {
            // Slider values are strings; store a number so the comparisons and
            // arithmetic against peer counts behave as intended.
            self.maxPeers = Number(nPeersSlider.value);
            nPeersLabel.innerHTML = nPeersSlider.value;
        });
    }

    function createPixelStreamingFrame() {
        // Create an iframe that holds the Pixel Streaming page
        let streamIFrame = document.createElement("iframe");
        streamIFrame.src = "player.html";
        streamIFrame.onload = function() {

            let pixelStreamingJS = streamIFrame.contentWindow;

            // Don't show the play button
            pixelStreamingJS.connect_on_load = true;
            pixelStreamingJS.shouldShowPlayOverlay = false;

            // Create a hook for when webRTCPlayer is setup so we can mute the
            // stream and disable audio autoplay before it starts.
            let existingSetupPlayerFunc = pixelStreamingJS.setupWebRtcPlayer;
            let newSetupPlayerFunc = function(htmlElement, config) {
                config.startVideoMuted = true;
                config.autoPlayAudio = false;
                let webrtcPlayer = existingSetupPlayerFunc(htmlElement, config);
                return webrtcPlayer;
            };
            pixelStreamingJS.setupWebRtcPlayer = newSetupPlayerFunc;

            pixelStreamingJS.connect();
        };
        return streamIFrame;
    }

    // Start here
    startStressTest();

}
diff --git a/WebServers/SignallingWebServer/scripts/webRtcPlayer.js b/WebServers/SignallingWebServer/scripts/webRtcPlayer.js
new file mode 100644
index 0000000..aa9b685
--- /dev/null
+++ b/WebServers/SignallingWebServer/scripts/webRtcPlayer.js
@@ -0,0 +1,694 @@
+// Copyright Epic Games, Inc. All Rights Reserved.
+
+function webRtcPlayer(parOptions) {
+ parOptions = typeof parOptions !== 'undefined' ? parOptions : {};
+
+ var self = this;
+ const urlParams = new URLSearchParams(window.location.search);
+
+ //**********************
+ //Config setup
+ //**********************
+ this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
+ this.cfg.sdpSemantics = 'unified-plan';
+
+ // If this is true in Chrome 89+ SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
+ // However 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
+ // tdlr; uncomment this line for older versions of Pixel Streaming that need Chrome 89+.
+ this.cfg.offerExtmapAllowMixed = false;
+
+ this.forceTURN = urlParams.has('ForceTURN');
+ if(this.forceTURN)
+ {
+ console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
+ this.cfg.iceTransportPolicy = "relay";
+ }
+
+ this.cfg.bundlePolicy = "balanced";
+ this.forceMaxBundle = urlParams.has('ForceMaxBundle');
+ if(this.forceMaxBundle)
+ {
+ this.cfg.bundlePolicy = "max-bundle";
+ }
+
+ //**********************
+ //Variables
+ //**********************
+ this.pcClient = null;
+ this.dcClient = null;
+ this.tnClient = null;
+ this.sfu = false;
+
+ this.sdpConstraints = {
+ offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
+ offerToReceiveVideo: 1,
+ voiceActivityDetection: false
+ };
+
+ // See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
+ this.dataChannelOptions = {ordered: true};
+
+ // This is useful if the video/audio needs to autoplay (without user input) as browsers do not allow autoplay non-muted of sound sources without user interaction.
+ this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
+ this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
+
+ // To force mono playback of WebRTC audio
+ this.forceMonoAudio = urlParams.has('ForceMonoAudio');
+ if(this.forceMonoAudio){
+ console.log("Will attempt to force mono audio by munging the sdp in the browser.")
+ }
+
+ // To enable mic in browser use SSL/localhost and have ?useMic in the query string.
+ this.useMic = urlParams.has('useMic');
+ if(!this.useMic){
+ console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
+ }
+
+ // When ?useMic check for SSL or localhost
+ let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
+ let isHttpsConnection = location.protocol === 'https:';
+ if(this.useMic && !isLocalhostConnection && !isHttpsConnection)
+ {
+ this.useMic = false;
+ console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
+ console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
+ }
+
+ // Prefer SFU or P2P connection
+ this.preferSFU = urlParams.has('preferSFU');
+ console.log(this.preferSFU ?
+ "The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
+ "The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
+
+ // Latency tester
+ this.latencyTestTimings =
+ {
+ TestStartTimeMs: null,
+ UEReceiptTimeMs: null,
+ UEEncodeMs: null,
+ UECaptureToSendMs: null,
+ UETransmissionTimeMs: null,
+ BrowserReceiptTimeMs: null,
+ FrameDisplayDeltaTimeMs: null,
+ Reset: function()
+ {
+ this.TestStartTimeMs = null;
+ this.UEReceiptTimeMs = null;
+ this.UEEncodeMs = null,
+ this.UECaptureToSendMs = null,
+ this.UETransmissionTimeMs = null;
+ this.BrowserReceiptTimeMs = null;
+ this.FrameDisplayDeltaTimeMs = null;
+ },
+ SetUETimings: function(UETimings)
+ {
+ this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
+ this.UEEncodeMs = UETimings.EncodeMs,
+ this.UECaptureToSendMs = UETimings.CaptureToSendMs,
+ this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
+ this.BrowserReceiptTimeMs = Date.now();
+ this.OnAllLatencyTimingsReady(this);
+ },
+ SetFrameDisplayDeltaTime: function(DeltaTimeMs)
+ {
+ if(this.FrameDisplayDeltaTimeMs == null)
+ {
+ this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
+ this.OnAllLatencyTimingsReady(this);
+ }
+ },
+ OnAllLatencyTimingsReady: function(Timings){}
+ }
+
+ //**********************
+ //Functions
+ //**********************
+
+ //Create Video element and expose that as a parameter
+ this.createWebRtcVideo = function() {
+ var video = document.createElement('video');
+
+ video.id = "streamingVideo";
+ video.playsInline = true;
+ video.disablePictureInPicture = true;
+ video.muted = self.startVideoMuted;;
+
+ video.addEventListener('loadedmetadata', function(e){
+ if(self.onVideoInitialised){
+ self.onVideoInitialised();
+ }
+ }, true);
+
+ video.addEventListener('pause', function(e) {
+ video.play();
+ })
+
+ // Check if request video frame callback is supported
+ if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
+ // The API is supported!
+
+ const onVideoFrameReady = (now, metadata) => {
+
+ if(metadata.receiveTime && metadata.expectedDisplayTime)
+ {
+ const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
+ self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
+ }
+
+
+ // Re-register the callback to be notified about the next frame.
+ video.requestVideoFrameCallback(onVideoFrameReady);
+ };
+
+ // Initially register the callback to be notified about the first frame.
+ video.requestVideoFrameCallback(onVideoFrameReady);
+ }
+
+ return video;
+ }
+
+ this.createWebRtcAudio = function() {
+ var audio = document.createElement('audio');
+ audio.id = 'streamingAudio';
+
+ return audio;
+ }
+
+ this.video = this.createWebRtcVideo();
+ this.audio = this.createWebRtcAudio();
+ this.availableVideoStreams = new Map();
+
+ onsignalingstatechange = function(state) {
+ console.info('Signaling state change. |', state.srcElement.signalingState, "|")
+ };
+
+ oniceconnectionstatechange = function(state) {
+ console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|')
+ };
+
+ onicegatheringstatechange = function(state) {
+ console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|')
+ };
+
+ handleOnTrack = function(e) {
+ if (e.track)
+ {
+ console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
+ }
+
+ if(e.track.kind == "audio")
+ {
+ handleOnAudioTrack(e.streams[0]);
+ return;
+ }
+ else(e.track.kind == "video")
+ {
+ for (const s of e.streams) {
+ if (!self.availableVideoStreams.has(s.id)) {
+ self.availableVideoStreams.set(s.id, s);
+ }
+ }
+
+ self.video.srcObject = e.streams[0];
+
+ // All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
+ e.track.onunmute = () => {
+ self.video.srcObject = e.streams[0];
+ self.onNewVideoTrack(e.streams);
+ }
+ }
+ };
+
+ handleOnAudioTrack = function(audioMediaStream)
+ {
+ // do nothing the video has the same media stream as the audio track we have here (they are linked)
+ if(self.video.srcObject == audioMediaStream)
+ {
+ return;
+ }
+ // video element has some other media stream that is not associated with this audio track
+ else if(self.video.srcObject && self.video.srcObject !== audioMediaStream)
+ {
+ self.audio.srcObject = audioMediaStream;
+ }
+
+ }
+
+ onDataChannel = function(dataChannelEvent){
+ // This is the primary data channel code path when we are "receiving"
+ console.log("Data channel created for us by browser as we are a receiving peer.");
+ self.dcClient = dataChannelEvent.channel;
+ setupDataChannelCallbacks(self.dcClient);
+ }
+
+ createDataChannel = function(pc, label, options){
+ // This is the primary data channel code path when we are "offering"
+ let datachannel = pc.createDataChannel(label, options);
+ console.log(`Created datachannel (${label})`);
+ setupDataChannelCallbacks(datachannel);
+ return datachannel;
+ }
+
+ setupDataChannelCallbacks = function(datachannel) {
+ try {
+ // Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
+ datachannel.binaryType = "arraybuffer";
+
+ datachannel.addEventListener('open', e => {
+ console.log(`Data channel connected: ${datachannel.label}(${datachannel.id})`);
+ if(self.onDataChannelConnected){
+ self.onDataChannelConnected();
+ }
+ });
+
+ datachannel.addEventListener('close', e => {
+ console.log(`Data channel disconnected: ${datachannel.label}(${datachannel.id}`, e);
+ });
+
+ datachannel.addEventListener('message', e => {
+ if (self.onDataChannelMessage){
+ self.onDataChannelMessage(e.data);
+ }
+ });
+
+ datachannel.addEventListener('error', e => {
+ console.error(`Data channel error: ${datachannel.label}(${datachannel.id}`, e);
+ });
+
+ return datachannel;
+ } catch (e) {
+ console.warn('Datachannel setup caused an exception: ', e);
+ return null;
+ }
+ }
+
+ onicecandidate = function (e) {
+ let candidate = e.candidate;
+ if (candidate && candidate.candidate) {
+ console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
+ self.onWebRtcCandidate(candidate);
+ }
+ };
+
+ handleCreateOffer = function (pc) {
+ pc.createOffer(self.sdpConstraints).then(function (offer) {
+
+ // Munging is where we modifying the sdp string to set parameters that are not exposed to the browser's WebRTC API
+ mungeSDP(offer);
+
+ // Set our munged SDP on the local peer connection so it is "set" and will be send across
+ pc.setLocalDescription(offer);
+ if (self.onWebRtcOffer) {
+ self.onWebRtcOffer(offer);
+ }
+ },
+ function () { console.warn("Couldn't create offer") });
+ }
+
+ mungeSDP = function (offer) {
+
+ let audioSDP = '';
+
+ // set max bitrate to highest bitrate Opus supports
+ audioSDP += 'maxaveragebitrate=510000;';
+
+ if(self.useMic){
+ // set the max capture rate to 48khz (so we can send high quality audio from mic)
+ audioSDP += 'sprop-maxcapturerate=48000;';
+ }
+
+ // Force mono or stereo based on whether ?forceMono was passed or not
+ audioSDP += self.forceMonoAudio ? 'sprop-stereo=0;stereo=0;' : 'sprop-stereo=1;stereo=1;';
+
+ // enable in-band forward error correction for opus audio
+ audioSDP += 'useinbandfec=1';
+
+ // We use the line 'useinbandfec=1' (which Opus uses) to set our Opus specific audio parameters.
+ offer.sdp = offer.sdp.replace('useinbandfec=1', audioSDP);
+ }
+
+ setupPeerConnection = function (pc) {
+ //Setup peerConnection events
+ pc.onsignalingstatechange = onsignalingstatechange;
+ pc.oniceconnectionstatechange = oniceconnectionstatechange;
+ pc.onicegatheringstatechange = onicegatheringstatechange;
+
+ pc.ontrack = handleOnTrack;
+ pc.onicecandidate = onicecandidate;
+ pc.ondatachannel = onDataChannel;
+ };
+
+ generateAggregatedStatsFunction = function(){
+ if(!self.aggregatedStats)
+ self.aggregatedStats = {};
+
+ return function(stats){
+
+ let newStat = {};
+
+ // store each type of codec we can get stats on
+ newStat.codecs = {};
+
+ stats.forEach(stat => {
+
+ // Get the inbound-rtp for video
+ if (stat.type === 'inbound-rtp'
+ && !stat.isRemote
+ && (stat.mediaType === 'video' || stat.id.toLowerCase().includes('video'))) {
+
+ newStat.timestamp = stat.timestamp;
+ newStat.bytesReceived = stat.bytesReceived;
+ newStat.framesDecoded = stat.framesDecoded;
+ newStat.packetsLost = stat.packetsLost;
+ newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
+ newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
+ newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;
+
+ if(self.aggregatedStats && self.aggregatedStats.timestamp){
+
+ // Get the mimetype of the video codec being used
+ if(stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)){
+ newStat.videoCodec = self.aggregatedStats.codecs[stat.codecId];
+ }
+
+ if(self.aggregatedStats.bytesReceived){
+ // bitrate = bits received since last time / number of ms since last time
+ //This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
+ newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
+ newStat.bitrate = Math.floor(newStat.bitrate);
+ newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate
+ newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate
+ }
+
+ if(self.aggregatedStats.bytesReceivedStart){
+ newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
+ newStat.avgBitrate = Math.floor(newStat.avgBitrate);
+ }
+
+ if(self.aggregatedStats.framesDecoded){
+ // framerate = frames decoded since last time / number of seconds since last time
+ newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
+ newStat.framerate = Math.floor(newStat.framerate);
+ newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate
+ newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate
+ }
+
+ if(self.aggregatedStats.framesDecodedStart){
+ newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
+ newStat.avgframerate = Math.floor(newStat.avgframerate);
+ }
+ }
+ }
+
+ // Get inbound-rtp for audio
+ if (stat.type === 'inbound-rtp'
+ && !stat.isRemote
+ && (stat.mediaType === 'audio' || stat.id.toLowerCase().includes('audio'))) {
+
+ // Get audio bytes received
+ if(stat.bytesReceived){
+ newStat.audioBytesReceived = stat.bytesReceived;
+ }
+
+ // As we loop back through we may wish to compute some stats based on a delta of the previous time we recorded the stat
+ if(self.aggregatedStats && self.aggregatedStats.timestamp){
+
+ // Get the mimetype of the audio codec being used
+ if(stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)){
+ newStat.audioCodec = self.aggregatedStats.codecs[stat.codecId];
+ }
+
+ // Determine audio bitrate delta over the time period
+ if(self.aggregatedStats.audioBytesReceived){
+ newStat.audioBitrate = 8 * (newStat.audioBytesReceived - self.aggregatedStats.audioBytesReceived) / (stat.timestamp - self.aggregatedStats.timestamp);
+ newStat.audioBitrate = Math.floor(newStat.audioBitrate);
+ }
+ }
+ }
+
+ //Read video track stats
+ if(stat.type === 'track' && (stat.trackIdentifier === 'video_label' || stat.kind === 'video')) {
+ newStat.framesDropped = stat.framesDropped;
+ newStat.framesReceived = stat.framesReceived;
+ newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
+ newStat.frameHeight = stat.frameHeight;
+ newStat.frameWidth = stat.frameWidth;
+ newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
+ newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
+ }
+
+ if(stat.type ==='candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0){
+ newStat.currentRoundTripTime = stat.currentRoundTripTime;
+ }
+
+ // Store mimetype of each codec
+ if(newStat.hasOwnProperty('codecs') && stat.type === 'codec' && stat.mimeType && stat.id){
+ const codecId = stat.id;
+ const codecType = stat.mimeType.replace("video/", "").replace("audio/", "");
+ newStat.codecs[codecId] = codecType;
+ }
+
+ });
+
+ if(self.aggregatedStats.receiveToCompositeMs)
+ {
+ newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
+ self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
+ }
+
+ self.aggregatedStats = newStat;
+
+ if(self.onAggregatedStats)
+ self.onAggregatedStats(newStat)
+ }
+ };
+
+ setupTransceiversAsync = async function(pc){
+
+ let hasTransceivers = pc.getTransceivers().length > 0;
+
+ // Setup a transceiver for getting UE video
+ pc.addTransceiver("video", { direction: "recvonly" });
+
+ // Setup a transceiver for sending mic audio to UE and receiving audio from UE
+ if(!self.useMic)
+ {
+ pc.addTransceiver("audio", { direction: "recvonly" });
+ }
+ else
+ {
+ let audioSendOptions = self.useMic ?
+ {
+ autoGainControl: false,
+ channelCount: 1,
+ echoCancellation: false,
+ latency: 0,
+ noiseSuppression: false,
+ sampleRate: 48000,
+ sampleSize: 16,
+ volume: 1.0
+ } : false;
+
+ // Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
+ const stream = await navigator.mediaDevices.getUserMedia({video: false, audio: audioSendOptions});
+ if(stream)
+ {
+ if(hasTransceivers){
+ for(let transceiver of pc.getTransceivers()){
+ if(transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio")
+ {
+ for (const track of stream.getTracks()) {
+ if(track.kind && track.kind == "audio")
+ {
+ transceiver.sender.replaceTrack(track);
+ transceiver.direction = "sendrecv";
+ }
+ }
+ }
+ }
+ }
+ else
+ {
+ for (const track of stream.getTracks()) {
+ if(track.kind && track.kind == "audio")
+ {
+ pc.addTransceiver(track, { direction: "sendrecv" });
+ }
+ }
+ }
+ }
+ else
+ {
+ pc.addTransceiver("audio", { direction: "recvonly" });
+ }
+ }
+ };
+
+
+ //**********************
+ //Public functions
+ //**********************
+
+ this.setVideoEnabled = function(enabled) {
+ self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
+ }
+
+ this.startLatencyTest = function(onTestStarted) {
+ // Can't start latency test without a video element
+ if(!self.video)
+ {
+ return;
+ }
+
+ self.latencyTestTimings.Reset();
+ self.latencyTestTimings.TestStartTimeMs = Date.now();
+ onTestStarted(self.latencyTestTimings.TestStartTimeMs);
+ }
+
+ //This is called when revceiving new ice candidates individually instead of part of the offer
+ this.handleCandidateFromServer = function(iceCandidate) {
+ let candidate = new RTCIceCandidate(iceCandidate);
+
+ console.log("%c[Unreal ICE candidate]", "background: pink; color: black" ,"| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
+
+ // if forcing TURN, reject any candidates not relay
+ if(self.forceTURN)
+ {
+ // check if no relay address is found, if so, we are assuming it means no TURN server
+ if(candidate.candidate.indexOf("relay") < 0) {
+ console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|")
+ return;
+ }
+ }
+
+ self.pcClient.addIceCandidate(candidate).catch(function(e){
+ console.error("Failed to add ICE candidate", e);
+ });
+ };
+
+ //Called externaly to create an offer for the server
+ this.createOffer = function() {
+ if(self.pcClient){
+ console.log("Closing existing PeerConnection")
+ self.pcClient.close();
+ self.pcClient = null;
+ }
+ self.pcClient = new RTCPeerConnection(self.cfg);
+ setupPeerConnection(self.pcClient);
+
+ setupTransceiversAsync(self.pcClient).finally(function()
+ {
+ self.dcClient = createDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
+ handleCreateOffer(self.pcClient);
+ });
+
+ };
+
+ // Called externally when an offer is received from the server. If no browser-
+ // side PeerConnection exists yet, creates one, applies the remote offer, sets
+ // up transceivers, then creates and applies an answer and hands it to
+ // self.onWebRtcAnswer. Does nothing if a PeerConnection already exists.
+ this.receiveOffer = function(offer) {
+ // "sfu" is a signalling-layer flag attached to the offer object, not SDP.
+ // NOTE(review): this mutates the caller's offer object by deleting the flag.
+ if (offer.sfu) {
+ this.sfu = true;
+ delete offer.sfu;
+ }
+
+ if (!self.pcClient){
+ console.log("Creating a new PeerConnection in the browser.")
+ self.pcClient = new RTCPeerConnection(self.cfg);
+ setupPeerConnection(self.pcClient);
+
+ // Put things here that happen post transceiver setup
+ // Order matters: remote description first, then transceivers, then answer.
+ self.pcClient.setRemoteDescription(offer)
+ .then(() =>
+ {
+ // .finally() so an answer is still produced even if transceiver setup
+ // failed (e.g. microphone permission was denied).
+ setupTransceiversAsync(self.pcClient).finally(function(){
+ self.pcClient.createAnswer()
+ .then(answer => {
+ mungeSDP(answer);
+ return self.pcClient.setLocalDescription(answer);
+ })
+ .then(() => {
+ if (self.onWebRtcAnswer) {
+ self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
+ }
+ })
+ .then(()=> {
+ // Ask the browser to play out received media with minimal buffering.
+ // NOTE(review): playoutDelayHint is a non-standard (Chromium-only)
+ // receiver property; other browsers silently ignore it.
+ let receivers = self.pcClient.getReceivers();
+ for(let receiver of receivers)
+ {
+ receiver.playoutDelayHint = 0;
+ }
+ })
+ .catch((error) => console.error("createAnswer() failed:", error));
+ });
+ });
+ }
+ };
+
+ //Called externaly when an answer is received from the server
+ this.receiveAnswer = function(answer) {
+ self.pcClient.setRemoteDescription(answer);
+ };
+
+ this.receiveSFUPeerDataChannelRequest = function(channelData) {
+ const sendOptions = {
+ ordered: true,
+ negotiated: true,
+ id: channelData.sendStreamId
+ };
+ const unidirectional = channelData.sendStreamId != channelData.recvStreamId;
+ const sendDataChannel = self.pcClient.createDataChannel(unidirectional ? 'send-datachannel' : 'datachannel', sendOptions);
+ setupDataChannelCallbacks(sendDataChannel);
+
+ if (unidirectional) {
+ const recvOptions = {
+ ordered: true,
+ negotiated: true,
+ id: channelData.recvStreamId
+ };
+ const recvDataChannel = self.pcClient.createDataChannel('recv-datachannel', recvOptions);
+
+ // when recv data channel is "open" we want to let SFU know so it can tell streamer
+ recvDataChannel.addEventListener('open', e => {
+ if(self.onSFURecvDataChannelReady) {
+ self.onSFURecvDataChannelReady();
+ }
+ });
+
+ setupDataChannelCallbacks(recvDataChannel);
+ }
+ this.dcClient = sendDataChannel;
+ }
+
+ this.close = function(){
+ if(self.pcClient){
+ console.log("Closing existing peerClient")
+ self.pcClient.close();
+ self.pcClient = null;
+ }
+ if(self.aggregateStatsIntervalId){
+ clearInterval(self.aggregateStatsIntervalId);
+ }
+ }
+
+ //Sends data across the datachannel
+ this.send = function(data){
+ if(self.dcClient && self.dcClient.readyState == 'open'){
+ //console.log('Sending data on dataconnection', self.dcClient)
+ self.dcClient.send(data);
+ }
+ };
+
+ this.getStats = function(onStats){
+ if(self.pcClient && onStats){
+ self.pcClient.getStats(null).then((stats) => {
+ onStats(stats);
+ });
+ }
+ }
+
+ this.aggregateStats = function(checkInterval){
+ let calcAggregatedStats = generateAggregatedStatsFunction();
+ let printAggregatedStats = () => { self.getStats(calcAggregatedStats); }
+ self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval);
+ }
+}
diff --git a/WebServers/SignallingWebServer/tps/Bootstrap.tps b/WebServers/SignallingWebServer/tps/Bootstrap.tps
new file mode 100644
index 0000000..156174c
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Bootstrap.tps
@@ -0,0 +1,13 @@
+
+
+ bootstrap v3.x, v4.x
+ /Samples/PixelStreaming/WebServers/
+ This is a requirement to using Bootstrap, providing better UI elements for the client web pages created for demoing pixelstreaming
+ https://github.com/twitter/bootstrap/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/Bootstrap_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/Express.tps b/WebServers/SignallingWebServer/tps/Express.tps
new file mode 100644
index 0000000..813a5dc
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Express.tps
@@ -0,0 +1,13 @@
+
+
+ Express v4.16.2
+ /Samples/PixelStreaming/WebServers/
+ Express is a web framework for Node.js.
+ https://github.com/expressjs/express/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Plugins/Experimental/PixelStreaming/Source/Express_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/FontAwesome.tps b/WebServers/SignallingWebServer/tps/FontAwesome.tps
new file mode 100644
index 0000000..0a00d5d
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/FontAwesome.tps
@@ -0,0 +1,11 @@
+
+
+ FontAwesome Free v5.1
+ /Samples/PixelStreaming/WebServers/
+ Provides a consistent icon style to use in the sites for demoing pixelstreaming.
+ https://github.com/FortAwesome/Font-Awesome/blob/master/LICENSE.txt
+
+ P4
+
+ None
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/Helmet.tps b/WebServers/SignallingWebServer/tps/Helmet.tps
new file mode 100644
index 0000000..eb45e85
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Helmet.tps
@@ -0,0 +1,13 @@
+
+
+ Helmet v.3.21.3
+ /Samples/PixelStreaming/WebServers/SignallingWebServer
+ Helmet helps you secure your Express apps by setting various HTTP headers.
+ https://github.com/helmetjs/helmet/blob/v3.21.3/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ NONE (but keep license with code)
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/JQuery.tps b/WebServers/SignallingWebServer/tps/JQuery.tps
new file mode 100644
index 0000000..0169eb3
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/JQuery.tps
@@ -0,0 +1,13 @@
+
+
+ JQuery
+ /Samples/PixelStreaming/WebServers/
+ This is a requirement to using Bootstrap, providing access to the DOM in the browser for easier and more advanced client side interactions and UI. Used for Project Cirrus.
+ https://github.com/jquery/jquery/blob/master/LICENSE.txt; https://js.foundation/pdf/ip-policy.pdf
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/JQuery_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/Popper.tps b/WebServers/SignallingWebServer/tps/Popper.tps
new file mode 100644
index 0000000..220de47
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Popper.tps
@@ -0,0 +1,13 @@
+
+
+ Popper.js v1.14.3
+ /Samples/PixelStreaming/WebServers/
+ A requirement to using Bootstrap.
+ https://github.com/FezVrasta/popper.js/blob/master/LICENSE.md
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/Popper.js_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/Socket.io.tps b/WebServers/SignallingWebServer/tps/Socket.io.tps
new file mode 100644
index 0000000..fb8706e
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Socket.io.tps
@@ -0,0 +1,13 @@
+
+
+ Socket.io v2.0.4
+ /Samples/PixelStreaming/WebServers/
+ Enables real-time bidirectional event-based communication.
+ https://github.com/socketio/socket.io/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Plugins/Experimental/PixelStreaming/Source/Socket.io_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/WS.tps b/WebServers/SignallingWebServer/tps/WS.tps
new file mode 100644
index 0000000..2b6af9f
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/WS.tps
@@ -0,0 +1,13 @@
+
+
+ WS
+ /Samples/PixelStreaming/WebServers/SignallingWebServer/
+ It's used by SignallingWebServer (based on Node.js web-server) that is part of our PixelStreaming project. We add a dependency on the WS library to the Node.js configuration and it's downloaded automatically.
+ https://github.com/websockets/ws/blob/HEAD/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ //depot/UE4/Engine/Source/ThirdParty/Licenses/WS_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/WebRTCadapter.tps b/WebServers/SignallingWebServer/tps/WebRTCadapter.tps
new file mode 100644
index 0000000..42025d2
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/WebRTCadapter.tps
@@ -0,0 +1,13 @@
+
+
+ WebRTC adapter (adapter.js) v6.3.2
+ /Samples/PixelStreaming/WebServers/
+ Used as a cross browser interface for WebRTC.
+ https://github.com/webrtc/adapter/blob/master/LICENSE.md
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/WebRTCadapter_License.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/Yargs.tps b/WebServers/SignallingWebServer/tps/Yargs.tps
new file mode 100644
index 0000000..b7eb1a8
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/Yargs.tps
@@ -0,0 +1,13 @@
+
+
+ Yargs v15.3.0
+ /Samples/PixelStreaming/WebServers/SignallingWebServer/
+ A module for Node.js, used to parse command line arguments, which is downloaded automatically by Node Package Manager.
+ https://github.com/yargs/yargs/blob/v15.3.0/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ None
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/bcrypt-js.tps b/WebServers/SignallingWebServer/tps/bcrypt-js.tps
new file mode 100644
index 0000000..ded83c7
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/bcrypt-js.tps
@@ -0,0 +1,11 @@
+
+
+ Bcrypt.js
+ /Samples/PixelStreaming/WebServers/SignallingWebServer/
+ This is used to verify passwords match the ones stored using the bcrypt algorithm. The passwords are always stored using bcrypt and so we never know the unencrypted password. This allows us to implement an authentication system on the web server so that only people we give accounts to can access the web server. This use is only for prototype stage, production will use the Epic unreal account system
+ https://github.com/dcodeIO/bcrypt.js/blob/master/LICENSE
+
+ P4
+
+ /UE4/Main/Engine/Source/ThirdParty/Licenses
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/expression-session.tps b/WebServers/SignallingWebServer/tps/expression-session.tps
new file mode 100644
index 0000000..fec8e7e
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/expression-session.tps
@@ -0,0 +1,13 @@
+
+
+ express-session v1.15.6
+ /Samples/PixelStreaming/WebServers/
+ Used to create session id's used to remember a person who has logged into a server across page loads so that they don't have to log in every time they reload or navigate to a different page hosted on a webserver
+ https://github.com/expressjs/session/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/express-session_license.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/passport-local.tps b/WebServers/SignallingWebServer/tps/passport-local.tps
new file mode 100644
index 0000000..b47cab7
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/passport-local.tps
@@ -0,0 +1,13 @@
+
+
+ passport-local v1.0.0
+ /Samples/PixelStreaming/WebServers/
+ This is an implementation for the Passport middleware that allows you to store user credentials locally on the machine (passwords are stored with bcrypt and not reversible) to be used to authenticate users on a node.js webserver. This use is only for prototype stage, production will use the Epic unreal account system.
+ https://github.com/jaredhanson/passport-local/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/passport-local_license.txt
+
\ No newline at end of file
diff --git a/WebServers/SignallingWebServer/tps/passport.tps b/WebServers/SignallingWebServer/tps/passport.tps
new file mode 100644
index 0000000..a2c7b54
--- /dev/null
+++ b/WebServers/SignallingWebServer/tps/passport.tps
@@ -0,0 +1,13 @@
+
+
+ passport v0.4.0
+ /Samples/PixelStreaming/WebServers/
+ Is the authentication middleware that adds the ability to securely log in a user to the webserver. This is a generic framework to which you add specific implementation frameworks (separate TPS's will be provided for these) to provide authentication on a node.js webserver.
+ https://github.com/jaredhanson/passport/blob/master/LICENSE
+
+ Licensees
+ Git
+ P4
+
+ /Engine/Source/ThirdParty/Licenses/passport_license.txt
+
\ No newline at end of file
diff --git a/WebServers/get_ps_servers.bat b/WebServers/get_ps_servers.bat
new file mode 100644
index 0000000..d816f11
--- /dev/null
+++ b/WebServers/get_ps_servers.bat
@@ -0,0 +1,110 @@
+@Rem Copyright Epic Games, Inc. All Rights Reserved.
+
+@echo off
+
+@Rem Set script location as working directory for commands.
+pushd "%~dp0"
+
+@Rem Parse arguments: /h help, /v UE version, /b branch, /t tag.
+@Rem The goto re-parses the line each iteration, so %1/%2 track the SHIFTs.
+:arg_loop_start
+SET ARG=%1
+if DEFINED ARG (
+ if "%ARG%"=="/h" (
+ goto print_help
+ )
+ if "%ARG%"=="/v" (
+ SET UEVersion=%2
+ SHIFT
+ )
+ if "%ARG%"=="/b" (
+ SET PSInfraTagOrBranch=%2
+ SET IsTag=0
+ )
+ if "%ARG%"=="/t" (
+ SET PSInfraTagOrBranch=%2
+ SET IsTag=1
+ SHIFT
+ )
+ SHIFT
+ goto arg_loop_start
+)
+
+@Rem Name and version of ps-infra that we are downloading
+SET PSInfraOrg=EpicGames
+SET PSInfraRepo=PixelStreamingInfrastructure
+
+@Rem If a UE version is supplied set the right branch or tag to fetch for that version of UE
+if DEFINED UEVersion (
+ if "%UEVersion%"=="4.26" (
+ SET PSInfraTagOrBranch=UE4.26
+ SET IsTag=0
+ )
+ if "%UEVersion%"=="4.27" (
+ SET PSInfraTagOrBranch=UE4.27
+ SET IsTag=0
+ )
+ if "%UEVersion%"=="5.0" (
+ SET PSInfraTagOrBranch=UE5.0
+ SET IsTag=0
+ )
+)
+
+@Rem If no arguments select a specific version, fetch the appropriate default
+if NOT DEFINED PSInfraTagOrBranch (
+ SET PSInfraTagOrBranch=master
+ SET IsTag=0
+)
+
+@Rem Whether the named reference is a tag or a branch affects the URL we fetch it on
+if %IsTag%==1 (
+ SET RefType=tags
+) else (
+ SET RefType=heads
+)
+
+@Rem Look for a SignallingWebServer directory next to this script
+if exist SignallingWebServer\ (
+ echo SignallingWebServer directory found...skipping install.
+) else (
+ echo SignallingWebServer directory not found...beginning ps-infra download.
+
+ @Rem Download ps-infra and follow redirects.
+ curl -L https://github.com/%PSInfraOrg%/%PSInfraRepo%/archive/refs/%RefType%/%PSInfraTagOrBranch%.zip > ps-infra.zip
+
+ @Rem Unarchive the .zip
+ @Rem Defect fixed: cmd chains conditional operators left to right with equal
+ @Rem precedence, so without the grouping below the type/exit commands also
+ @Rem ran when tar SUCCEEDED, dumping the zip and aborting before the rename.
+ tar -xmf ps-infra.zip || (echo bad archive, contents: && type ps-infra.zip && exit 0)
+
+ @Rem Rename the extracted, versioned, directory
+ for /d %%i in ("PixelStreamingInfrastructure-*") do (
+ for /d %%j in ("%%i/*") do (
+ echo "%%i\%%j"
+ move "%%i\%%j" .
+ )
+ for %%j in ("%%i/*") do (
+ echo "%%i\%%j"
+ move "%%i\%%j" .
+ )
+
+ echo "%%i"
+ rmdir /s /q "%%i"
+ )
+
+ @Rem Delete the downloaded zip
+ del ps-infra.zip
+)
+
+@Rem NOTE(review): plain "exit" closes the whole console when run interactively;
+@Rem "exit /b" may be intended here - confirm.
+exit 0
+
+:print_help
+echo.
+echo Tool for fetching PixelStreaming Infrastructure. If no flags are set specifying a version to fetch,
+echo the recommended version will be chosen as a default.
+echo.
+echo Usage:
+echo %~n0%~x0 [^/h] [^/v ^] [^/b ^] [^/t ^]
+echo Where:
+echo /v Specify a version of Unreal Engine to download the recommended release for
+echo /b Specify a specific branch for the tool to download from repo
+echo /t Specify a specific tag for the tool to download from repo
+echo /h Display this help message
+exit 1
\ No newline at end of file
diff --git a/WebServers/get_ps_servers.sh b/WebServers/get_ps_servers.sh
new file mode 100644
index 0000000..a402611
--- /dev/null
+++ b/WebServers/get_ps_servers.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+# Copyright Epic Games, Inc. All Rights Reserved.
+
+# Fetches the PixelStreaming Infrastructure servers next to this script.
+
+BASH_LOCATION=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+pushd "${BASH_LOCATION}" > /dev/null
+
+print_help() {
+ echo "
+ Tool for fetching PixelStreaming Infrastructure. If no flags are set specifying a version to fetch,
+ the recommended version will be chosen as a default.
+
+ Usage:
+ ${0} [-h] [-v ] [-b ] [-t ]
+ Where:
+ -v Specify a version of Unreal Engine to download the recommended
+ release for
+ -b Specify a specific branch for the tool to download from repo
+ -t Specify a specific tag for the tool to download from repo
+ -h Display this help message
+"
+ exit 1
+}
+
+# Parse arguments: -h help, -v UE version, -b branch, -t tag.
+while(($#)) ; do
+ case "$1" in
+ -h ) print_help;;
+ -v ) UEVersion="$2"; shift 2;;
+ -b ) PSInfraTagOrBranch="$2"; IsTag=0; shift 2;;
+ -t ) PSInfraTagOrBranch="$2"; IsTag=1; shift 2;;
+ * ) echo "Unknown command: $1"; shift;;
+ esac
+ done
+
+# Name and version of ps-infra that we are downloading
+PSInfraOrg=EpicGames
+PSInfraRepo=PixelStreamingInfrastructure
+
+# If a UE version is supplied set the right branch or tag to fetch for that version of UE
+if [ ! -z "$UEVersion" ]
+then
+ if [ "$UEVersion" = "4.26" ]
+ then
+ PSInfraTagOrBranch=UE4.26
+ IsTag=0
+ fi
+ if [ "$UEVersion" = "4.27" ]
+ then
+ PSInfraTagOrBranch=UE4.27
+ IsTag=0
+ fi
+ if [ "$UEVersion" = "5.0" ]
+ then
+ PSInfraTagOrBranch=UE5.0
+ IsTag=0
+ fi
+fi
+
+# If no arguments select a specific version, fetch the appropriate default
+if [ -z "$PSInfraTagOrBranch" ]
+then
+ PSInfraTagOrBranch=master
+ IsTag=0
+fi
+
+# Whether the named reference is a tag or a branch affects the URL we fetch it on
+if [ "$IsTag" -eq 1 ]
+then
+ RefType=tags
+else
+ RefType=heads
+fi
+
+# Look for a SignallingWebServer directory next to this script
+if [ -d SignallingWebServer ]
+then
+ echo "SignallingWebServer directory found...skipping install."
+else
+ echo "SignallingWebServer directory not found...beginning ps-infra download."
+
+ # Download ps-infra and follow redirects.
+ curl -L https://github.com/$PSInfraOrg/$PSInfraRepo/archive/refs/$RefType/$PSInfraTagOrBranch.tar.gz > ps-infra.tar.gz
+
+ # Unarchive the .tar
+ # Defect fixed: the fallback used to be wrapped in $(...), which captures the
+ # echo/head output and then tries to EXECUTE it as a command, and whose
+ # "exit 0" only left the command-substitution subshell. A brace group runs
+ # the commands in the current shell, so the message prints and the exit sticks.
+ tar -xmf ps-infra.tar.gz || { echo "bad archive, contents:"; head --lines=20 ps-infra.tar.gz; exit 0; }
+
+ # Move the server folders into the current directory (WebServers) and delete the original directory
+ mv PixelStreamingInfrastructure-*/* .
+ rm -rf PixelStreamingInfrastructure-*
+
+ # Delete the downloaded tar
+ rm ps-infra.tar.gz
+fi
\ No newline at end of file