added webRtcPlayer

This commit is contained in:
DmitriyB
2023-03-13 15:18:24 +05:00
parent 240f28935d
commit 61efa5611d
10 changed files with 4521 additions and 16 deletions
+1 -2
View File
@@ -51,6 +51,7 @@ const App: React.FC = () => {
return (
<Switch>
<Route exact path="/">
<Header></Header>
<div className="main">
<h2 className="card-title">{error ? error : t("demo-title")}</h2>
<div className="card-container">
@@ -63,8 +64,6 @@ const App: React.FC = () => {
<Route path="/connect-page">
{currentCard ? (
<div className="background">
{isLoading && (<Header></Header>
)}
<div className="popup-container">
<div className="content__container">
<PopupComponent></PopupComponent>
@@ -0,0 +1,221 @@
import './player.css'
/**
 * Static overlay UI for the Pixel Streaming player: the control buttons,
 * the connection-quality indicator, and the slide-in Settings / Information
 * panels.
 *
 * All interactivity (toggles, stats, latency test) is wired up imperatively
 * by the legacy webRtcPlayer/app scripts via the element ids rendered here,
 * so every input is intentionally uncontrolled (`defaultChecked` /
 * `defaultValue`, never `checked` / `value`, which React rejects without an
 * `onChange` handler).
 *
 * NOTE(review): `#heading`, `#close` and `#content` are rendered once per
 * panel, producing duplicate DOM ids — presumably the legacy scripts query
 * within each panel; confirm before deduplicating.
 */
export const Player: React.FC = () => {
    return (
        <>
            <div id="overlay">
                {/* Floating control buttons (top-left); shown/hidden by the player scripts. */}
                <div id="controls">
                    <button className="tooltip" id="fullscreen-btn">
                        <span className="tooltiptext">Fullscreen</span>
                    </button>
                    <button className="tooltip" id="settingsBtn">
                        <span className="tooltiptext">Settings</span>
                    </button>
                    <button className="tooltip" id="statsBtn">
                        <span className="tooltiptext">Information</span>
                    </button>
                </div>
                <div id="unrealengine">
                </div>
                {/* Connection-quality badge; text is updated by the player scripts. */}
                <div id="connection" className="tooltip">
                    <span className="tooltiptext" id="qualityText">Not connected</span>
                </div>
                {/* Slide-in Settings panel. */}
                <div className="panel-wrap" id="settings-panel">
                    <div className="panel">
                        <div id="heading">Settings</div>
                        <div id="close"></div>
                        <div id="content">
                            <div id="fillWindow" className="setting">
                                <div className="settings-text">Enlarge display to fill window</div>
                                <label className="tgl-switch">
                                    {/* defaultChecked: uncontrolled input, on by default; was `checked`,
                                        which React forbids without an onChange handler. */}
                                    <input type="checkbox" id="enlarge-display-to-fill-window-tgl" className="tgl tgl-flat" defaultChecked />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="qualityControlOwnership" className="setting">
                                <div className="settings-text">Is quality controller?</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="quality-control-ownership-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="matchViewportResolution" className="setting">
                                <div className="settings-text">Match viewport resolution</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="match-viewport-res-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="offerToReceive" className="setting">
                                <div className="settings-text">Offer To Receive</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="offer-receive-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="preferSFU" className="setting">
                                <div className="settings-text">Prefer SFU</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="prefer-sfu-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="useMic" className="setting">
                                <div className="settings-text">Use microphone</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="use-mic-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="forceMonoAudio" className="setting">
                                <div className="settings-text">Force mono audio</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="force-mono-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="forceTURN" className="setting">
                                <div className="settings-text">Force TURN</div>
                                <label className="tgl-switch">
                                    <input type="checkbox" id="force-turn-tgl" className="tgl tgl-flat" />
                                    <div className="tgl-slider"></div>
                                </label>
                            </div>
                            <div id="toggleControl" className="setting">
                                <div className="settings-text" id="control-scheme-text">Control Scheme</div>
                                <label className="btn-overlay">
                                    <label className="tgl-switch">
                                        <input type="checkbox" id="control-tgl" className="tgl tgl-flat" />
                                        <div className="tgl-slider"></div>
                                    </label>
                                </label>
                            </div>
                            <div id="toggleCursor" className="setting">
                                <div className="settings-text" id="cursor-text">Hide Browser Cursor</div>
                                <label className="btn-overlay">
                                    <label className="tgl-switch">
                                        <input type="checkbox" id="cursor-tgl" className="tgl tgl-flat" />
                                        <div className="tgl-slider"></div>
                                    </label>
                                </label>
                            </div>
                            <div id="showFPS" className="setting">
                                <div className="settings-text">Show FPS</div>
                                <label className="btn-overlay">
                                    <input type="button" id="show-fps-button" className="overlay-button btn-flat" value="Toggle" />
                                </label>
                            </div>
                            <div id="keyframeRequest" className="setting">
                                <div className="settings-text">Request KeyFrame</div>
                                <label className="btn-overlay">
                                    <input type="button" id="request-keyframe-button" className="overlay-button btn-flat" value="Request" />
                                </label>
                            </div>
                            {/* Encoder QP bounds (0–51, H.264 quantization-parameter range). */}
                            <section id="encoderSettings">
                                <div id="encoderSettingsHeader" className="settings-text">
                                    <div>Encoder Settings</div>
                                </div>
                                <div id="encoderParamsContainer" className="collapse">
                                    <div className="form-group">
                                        <label htmlFor="encoder-min-qp-text">Min QP</label>
                                        {/* defaultValue: editable uncontrolled fields; `value` without
                                            onChange rendered them read-only. */}
                                        <input type="number" className="form-control" id="encoder-min-qp-text" defaultValue="0" min="0"
                                            max="51" />
                                        <label htmlFor="encoder-max-qp-text">Max QP</label>
                                        <input type="number" className="form-control" id="encoder-max-qp-text" defaultValue="51" min="0"
                                            max="51" />
                                        <br></br>
                                        <input id="encoder-params-submit" className="overlay-button btn-flat" type="button"
                                            value="Apply" />
                                    </div>
                                </div>
                            </section>
                            <section id="webRTCSettings">
                                <div id="webRTCSettingsHeader" className="settings-text">
                                    <div>WebRTC Settings</div>
                                </div>
                                <div id="webrtcParamsContainer" className="collapse">
                                    <div className="form-group">
                                        <label htmlFor="webrtc-fps-text">FPS</label>
                                        <input type="number" className="form-control" id="webrtc-fps-text" defaultValue="60" min="1"
                                            max="999" />
                                        <label htmlFor="webrtc-min-bitrate-text">Min bitrate (kbps)</label>
                                        <input type="number" className="form-control" id="webrtc-min-bitrate-text" defaultValue="0" min="0"
                                            max="100000" />
                                        <label htmlFor="webrtc-max-bitrate-text">Max bitrate (kbps)</label>
                                        <input type="number" className="form-control" id="webrtc-max-bitrate-text" defaultValue="0" min="0"
                                            max="100000" />
                                        <br></br>
                                        <input id="webrtc-params-submit" className="overlay-button btn-flat" type="button"
                                            value="Apply" />
                                    </div>
                                </div>
                            </section>
                            <section id="streamSettings">
                                <div id="streamSettingsHeader" className="settings-text">
                                    <div>Stream Settings</div>
                                </div>
                                <div id="streamSettingsContainer" className="collapse">
                                    <div className="form-group">
                                        <div className="settings-text">Player stream</div>
                                        <select className="form-control" id="stream-select"></select>
                                        <div className="settings-text">Player track</div>
                                        <select className="form-control" id="track-select"></select>
                                    </div>
                                </div>
                            </section>
                            <br></br>
                            <section id="connectionSettings">
                                <div id="connectionHeader" className="settings-text">
                                    {/* Was a copy-paste of the "Stream Settings" heading above. */}
                                    <div>Connection Settings</div>
                                </div>
                                <div id="connectionContainer" className="collapse">
                                    <div className="setting">
                                        <div className="settings-text"></div>
                                        <label className="btn-overlay">
                                            <input type="button" id="restart-stream-button" className="overlay-button btn-flat" value="Restart stream" />
                                        </label>
                                    </div>
                                </div>
                            </section>
                        </div>
                    </div>
                </div>
                {/* Slide-in Information (stats) panel. */}
                <div className="panel-wrap" id="stats-panel">
                    <div className="panel">
                        <div id="heading">Information</div>
                        <div id="close"></div>
                        <div id="content">
                            <section id="statsPanel">
                                <div className="setting settings-text">
                                    <div>Session Stats</div>
                                </div>
                                <div id="statsContainer" className="statsContainer">
                                    <div id="stats" className="stats"></div>
                                </div>
                            </section>
                            <br></br>
                            <section id="latencyTest">
                                <div className="setting">
                                    <div className="settings-text">
                                        <div>Latency Report</div>
                                    </div>
                                    <label className="btn-overlay">
                                        <input type="button" id="test-latency-button" className="overlay-button btn-flat"
                                            value="Get Report" />
                                    </label>
                                </div>
                                <div id="latencyStatsContainer" className="statsContainer">
                                    <div className="stats">No report yet</div>
                                </div>
                            </section>
                        </div>
                    </div>
                </div>
            </div>
        </>
    )
}
@@ -0,0 +1,727 @@
#loader {
width: 106px;
height: 106px;
border-radius: 50%;
display: inline-block;
position: relative;
background: conic-gradient(from 135deg at 50% 50%,
rgba(255, 255, 255, 0) -6.26deg,
#ffffff 314.83deg,
rgba(255, 255, 255, 0) 353.74deg,
#ffffff 674.83deg);
box-sizing: border-box;
animation: rotation 1s linear infinite;
}
@keyframes rotation {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
#loader::after {
content: "";
box-sizing: border-box;
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
width: 100px;
height: 100px;
border-radius: 50%;
background: #151619;
}
body {
margin: 0px;
background-color: #151619;
font-family: 'Montserrat', sans-serif;
}
h2 {
font-family: "GilroyWebRegular";
}
#playerUI {
width: 100%;
height: 100%;
}
canvas {
image-rendering: crisp-edges;
position: absolute;
}
video {
position: absolute;
width: 100%;
height: 100%;
}
#player {
width: 100%;
height: 100%;
position: absolute;
}
#videoPlayOverlay {
position: absolute;
font-size: 1.8em;
width: 100%;
height: 100%;
color: var(--colour2)
}
/* State for element to be clickable */
.clickableState {
align-items: center;
justify-content: center;
display: flex;
}
/* State for element to show text, this is for informational use*/
.textDisplayState {
align-items: center;
justify-content: center;
display: flex;
cursor: pointer;
}
/* State to hide overlay, WebRTC communication is in progress and or is playing */
.hiddenState {
display: none;
}
#playButton {
display: block;
width: 88px;
height: 88px;
z-index: 30;
backdrop-filter: blur(10px);
border-radius: 112px;
cursor: pointer;
}
#playButtonMob {
display: block;
width: 88px;
height: 88px;
z-index: 30;
backdrop-filter: blur(10px);
border-radius: 112px;
cursor: pointer;
}
#container {
width: 400px;
height: 100%;
justify-content: center;
/* Background */
background: transparent;
/* Button_1 */
border-width: 0px 2px;
border-style: solid;
border-color: #23242A;
display: flex;
flex-direction: column;
align-items: center;
gap: 40px;
padding: 40px 56px;
box-sizing: border-box;
}
@media screen and (max-width: 500px) {
#container {
width: 100%;
border: none;
}
}
#playButtonMob:hover {
background: linear-gradient(180deg, #BC75FF 0%, #798FFF 100%);
backdrop-filter: blur(10px)
}
#title {
font-style: normal;
font-weight: 400;
font-size: 38px;
line-height: 100%;
/* or 38px */
/* White */
color: #F2F2F2;
margin: 0 0 16px 0;
}
#caption {
font-family: 'Inter';
font-style: normal;
font-weight: 400;
font-size: 16px;
line-height: 140%;
/* or 22px */
text-align: center;
/* Inactive */
color: #C5C7CE;
margin: 0;
text-align: left;
}
#link {
font-family: 'Inter';
cursor: pointer;
background: #1C1D21;
border-radius: 4px;
width: 100%;
display: flex;
align-items: center;
justify-content: center;
text-decoration: none;
padding: 8px 16px;
box-sizing: border-box;
font-weight: 400;
font-size: 12px;
line-height: 130%;
/* identical to box height, or 16px */
/* Inactive */
color: #C5C7CE;
}
#link:hover {
background: #23242A;
}
#freezeFrameOverlay {
background-color: transparent;
}
.freezeframeBackground {
background-color: #000 !important;
}
#overlay {
width: 100%;
height: 100%;
z-index: 20;
position: absolute;
color: var(--colour2);
pointer-events: none;
overflow: hidden;
}
#overlay button {
background-color: var(--colour7);
border: 1px solid var(--colour7);
color: var(--colour2);
position: relative;
width: 3rem;
height: 3rem;
padding: 0.5rem;
text-align: center;
}
#fullscreen-btn {
padding: 0.6rem !important;
}
#overlay button:hover {
background-color: var(--colour3);
border: 3px solid var(--colour3);
transition: 0.25s ease;
padding-left: 0.55rem;
padding-top: 0.55rem;
}
#overlay button:active {
border: 3px solid var(--colour3);
background-color: var(--colour7);
padding-left: 0.55rem;
padding-top: 0.55rem;
}
#overlay img {
width: 100%;
height: 100%;
}
.tooltip .tooltiptext {
visibility: hidden;
width: auto;
color: var(--colour2);
text-align: center;
border-radius: 15px;
padding: 0px 10px;
font-family: 'Montserrat', sans-serif;
font-size: 0.75rem;
letter-spacing: 0.75px;
/* Position the tooltip */
position: absolute;
top: 0;
transform: translateY(25%);
left: 125%;
z-index: 20;
}
.tooltip:hover .tooltiptext {
visibility: visible;
background-color: var(--colour7);
}
#connection .tooltiptext {
top: 125%;
transform: translateX(-25%);
left: 0;
z-index: 20;
padding: 5px 10px;
}
#settings-panel .tooltiptext {
display: block;
top: 125%;
transform: translateX(-50%);
left: 0;
z-index: 20;
padding: 5px 10px;
border: 3px solid var(--colour5);
width: max-content;
}
#controls {
position: absolute;
top: 2%;
left: 1%;
font-family: 'Michroma', sans-serif;
pointer-events: all;
display: none;
}
#controls>* {
margin-bottom: 0.5rem;
border-radius: 50%;
display: block;
height: 2rem;
line-height: 1.75rem;
padding: 0.5rem;
}
#controls #additionalinfo {
text-align: center;
font-family: 'Montserrat', sans-serif;
}
#unrealengine {
position: absolute;
bottom: 5%;
right: 10%;
font-family: 'Michroma', sans-serif;
pointer-events: all;
visibility: hidden;
width: min-content;
}
#unrealengine p {
visibility: hidden;
width: 15rem;
}
#connection {
position: absolute;
bottom: 5%;
left: 10%;
font-family: 'Michroma', sans-serif;
height: 3rem;
width: 3rem;
pointer-events: none;
visibility: hidden;
}
.noselect {
-webkit-touch-callout: none;
/* iOS Safari */
-webkit-user-select: none;
/* Safari */
-khtml-user-select: none;
/* Konqueror HTML */
-moz-user-select: none;
/* Old versions of Firefox */
-ms-user-select: none;
/* Internet Explorer/Edge */
user-select: none;
/* Non-prefixed version, currently
supported by Chrome, Edge, Opera and Firefox */
}
.panel-wrap {
position: fixed;
top: 0;
bottom: 0;
right: 0;
height: 100%;
min-width: 20vw;
transform: translateX(100%);
transition: .3s ease-out;
pointer-events: all;
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
overflow-y: auto;
overflow-x: hidden;
background-color: rgba(30, 29, 34, 0.5)
}
.panel-wrap-visible {
transform: translateX(0%);
}
.panel {
color: #eee;
overflow-y: auto;
padding: 1em;
}
#heading {
display: inline-block;
font-size: 2em;
margin-block-start: 0.67em;
margin-block-end: 0.67em;
margin-inline-start: 0px;
margin-inline-end: 0px;
position: relative;
padding: 0 0 0 2rem;
}
#close {
margin: 0.5rem;
padding-top: 0.5rem;
padding-bottom: 0.5rem;
padding-right: 0.5rem;
font-size: 2em;
float: right;
}
#close:after {
padding-left: 0.5rem;
display: inline-block;
content: "\00d7";
/* This will render the 'X' */
}
#close:hover {
color: var(--colour3);
transition: ease 0.3s;
}
#content {
margin: 2rem;
}
.setting {
display: flex;
flex-direction: row;
justify-content: space-between;
padding: 0;
margin: 0.5rem 0;
}
.settings-text {
margin-right: 2rem;
display: flex;
}
/*** Toggle Switch styles ***/
.tgl-switch {
vertical-align: middle;
display: inline-block;
}
.tgl-switch .tgl {
display: none;
}
.tgl,
.tgl:after,
.tgl:before,
.tgl *,
.tgl *:after,
.tgl *:before,
.tgl+.tgl-slider {
-webkit-box-sizing: border-box;
box-sizing: border-box;
}
.tgl::-moz-selection,
.tgl:after::-moz-selection,
.tgl:before::-moz-selection,
.tgl *::-moz-selection,
.tgl *:after::-moz-selection,
.tgl *:before::-moz-selection,
.tgl+.tgl-slider::-moz-selection {
background: none;
}
.tgl::selection,
.tgl:after::selection,
.tgl:before::selection,
.tgl *::selection,
.tgl *:after::selection,
.tgl *:before::selection,
.tgl+.tgl-slider::selection {
background: none;
}
.tgl+.tgl-slider {
outline: 0;
display: block;
width: 40px;
height: 18px;
position: relative;
cursor: pointer;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.tgl+.tgl-slider:after,
.tgl+.tgl-slider:before {
position: relative;
display: block;
content: "";
width: 50%;
height: 100%;
}
.tgl+.tgl-slider:after {
left: 0;
}
.tgl+.tgl-slider:before {
display: none;
}
.tgl-flat+.tgl-slider {
padding: 2px;
-webkit-transition: all .2s ease;
transition: all .2s ease;
background: var(--colour6);
border: 3px solid var(--colour7);
border-radius: 2em;
}
.tgl-flat+.tgl-slider:after {
-webkit-transition: all .2s ease;
transition: all .2s ease;
background: var(--colour7);
content: "";
border-radius: 1em;
}
.tgl-flat:checked+.tgl-slider {
border: 3px solid var(--colour3);
}
.tgl-flat:checked+.tgl-slider:after {
left: 50%;
background: var(--colour3);
}
.subtitle-text {
margin: 0 0 0 1rem;
color: var(--colour5);
position: relative;
}
.form-group {
padding-top: 4px;
display: grid;
grid-template-columns: 50% 50%;
row-gap: 4px;
padding-right: 10px;
padding-left: 10px;
}
.form-group label {
color: var(--colour2);
vertical-align: middle;
font-weight: normal;
}
#stats {
margin-left: 1rem;
}
#LatencyStats {
margin-left: 1rem;
}
#hiddenInput {
position: absolute;
left: -10%;
/* Although invisible, push off-screen to prevent user interaction. */
width: 0px;
opacity: 0;
}
#editTextButton {
position: absolute;
height: 40px;
width: 40px;
}
.form-group label {
margin-right: 2rem;
min-width: 75%;
}
input {
text-align: right;
}
.warning {
box-sizing: border-box;
position: relative;
transform: scale(var(--ggs, 1));
width: 20px;
height: 20px;
border: 2px solid;
border-radius: 40px;
display: none;
}
.warning::after,
.warning::before {
content: "";
display: block;
box-sizing: border-box;
position: absolute;
border-radius: 3px;
width: 2px;
background: currentColor;
left: 7px
}
.warning::after {
top: 2px;
height: 8px
}
.warning::before {
height: 2px;
bottom: 2px
}
/* Flat buttons */
input[type="button"] {
background-color: transparent;
color: var(--colour2);
font-family: 'Montserrat';
border: 3px solid var(--colour3);
border-radius: 1rem;
font-size: 0.75rem;
padding-left: 0.5rem;
padding-right: 0.5rem;
}
input[type="button"]:hover {
background-color: var(--colour3);
transition: ease 0.3s;
}
input[type="button"]:active {
background-color: transparent;
}
#encoder-params-submit,
#webrtc-params-submit {
text-align: center;
}
select,
input[type="number"] {
background-color: var(--colour7);
color: var(--colour2);
border: 1px solid var(--colour6);
padding: 0.25rem;
font-family: 'Montserrat';
border-radius: 0.25rem;
}
input[type=number]::-webkit-inner-spin-button {
margin-left: 0.5rem;
}
input[type="number"]:disabled {
padding-right: 0.5rem;
-moz-appearance: textfield;
}
input[type=number]:disabled::-webkit-inner-spin-button {
display: none;
}
#settingsBtn,
#statsBtn {
cursor: pointer;
}
#streamingVideo {
pointer-events: all;
}
embed {
border: none;
width: 100%;
height: 100%;
}
g {
fill: var(--colour2);
}
object {
pointer-events: none;
}
#connectionStrength {
fill: var(--colour7);
}
#minimize {
display: none;
}
#afkOverlay {
z-index: 999;
background-color: rgba(30, 29, 34, 0.5);
display: inline-block;
height: 100vh;
width: 100vw;
line-height: 100vh;
text-align: center;
overflow: hidden;
}
#afkOverlay center {
display: inline-block;
line-height: 1.5;
height: 100vh;
}
@@ -2,13 +2,14 @@ import "./PlayerStyles.css";
import React, { useEffect, useState, useRef } from "react";
import { useHistory, useParams } from "react-router-dom";
import useWindowDimensions from "hooks/useWindowDimensions";
import { load } from "utils/app";
import useMobile from "hooks/useMobile";
import { Sidebar } from "components/pages/Stream/Sidebar/Sidebar";
import { connectSession } from "store/reducers/ActionCreator";
import { useAppDispatch, useAppSelector } from "hooks/redux";
import { sessionSlice } from "store/reducers/sessionSlice";
import { Player } from "../Player/Player";
type link = {
id: string;
@@ -31,6 +32,7 @@ export const PlayerComponent: React.FC<any> = ({ closeStream }) => {
useEffect(() => {
dispatch(connectSession(id)).then((res: any) => {
load()
if (res.error) {
alert(res.payload);
}
@@ -68,17 +70,12 @@ export const PlayerComponent: React.FC<any> = ({ closeStream }) => {
<h2>Переверните устройство</h2>
</div>
)}
<iframe
ref={frameRef}
onLoad={(e: any) => e.target.focus()}
id="player"
onBlur={(e) => e.target.focus()} /// element loosing focus and keyboard input doesn't work
src={url}
className={"player playerOn"}
security={""}
allowFullScreen={true}
></iframe>
<div id="playerUI" className="noselect">
<div id="player"></div>
<div id="overlay">
</div>
</div>
<Player></Player>
<Sidebar
heightDevice={height}
isMobile={isMobile}
Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

+2 -1
View File
@@ -12,7 +12,8 @@ import { initReactI18next } from "react-i18next";
import HttpApi from "i18next-http-backend";
import LanguageDetector from "i18next-browser-languagedetector";
const store = setupStore();
export const store = setupStore();
i18next
.use(HttpApi)
+1 -1
View File
@@ -53,7 +53,7 @@ export const sessionSlice = createSlice({
action: PayloadAction<ConnectSessionResponseInterface>
) => {
state.isLoading = false;
const url = action.payload.websocket_url.replace("wss://", "https://") + '?offerToReceive=true';
const url = action.payload.websocket_url;
state.url = url;
},
[connectSession.rejected.type]: (state, action: PayloadAction<any>) => {
Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

+2892
View File
File diff suppressed because it is too large Load Diff
+668
View File
@@ -0,0 +1,668 @@
// Copyright Epic Games, Inc. All Rights Reserved.
export function webRtcPlayer(parOptions) {
parOptions = typeof parOptions !== 'undefined' ? parOptions : {};
var self = this;
const urlParams = new URLSearchParams(window.location.search);
//**********************
//Config setup
//**********************
this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
this.cfg.sdpSemantics = 'unified-plan';
// If this is true in Chrome 89+ SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
// However 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
// tdlr; uncomment this line for older versions of Pixel Streaming that need Chrome 89+.
this.cfg.offerExtmapAllowMixed = false;
this.forceTURN = urlParams.has('ForceTURN');
if (this.forceTURN) {
console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
this.cfg.iceTransportPolicy = "relay";
}
this.cfg.bundlePolicy = "balanced";
this.forceMaxBundle = urlParams.has('ForceMaxBundle');
if (this.forceMaxBundle) {
this.cfg.bundlePolicy = "max-bundle";
}
//**********************
//Variables
//**********************
this.pcClient = null;
this.dcClient = null;
this.tnClient = null;
this.sfu = false;
this.sdpConstraints = {
offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
offerToReceiveVideo: 1,
voiceActivityDetection: false
};
// See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
this.dataChannelOptions = { ordered: true };
// This is useful if the video/audio needs to autoplay (without user input) as browsers do not allow autoplay non-muted of sound sources without user interaction.
this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
// To force mono playback of WebRTC audio
this.forceMonoAudio = urlParams.has('ForceMonoAudio');
if (this.forceMonoAudio) {
console.log("Will attempt to force mono audio by munging the sdp in the browser.")
}
// To enable mic in browser use SSL/localhost and have ?useMic in the query string.
this.useMic = urlParams.has('useMic');
if (!this.useMic) {
console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
}
// When ?useMic check for SSL or localhost
let isLocalhostConnection = window.location.hostname === "localhost" || window.location.hostname === "127.0.0.1";
let isHttpsConnection = window.location.protocol === 'https:';
if (this.useMic && !isLocalhostConnection && !isHttpsConnection) {
this.useMic = false;
console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
}
// Prefer SFU or P2P connection
this.preferSFU = urlParams.has('preferSFU');
console.log(this.preferSFU ?
"The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
"The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
// Latency tester
this.latencyTestTimings =
{
TestStartTimeMs: null,
UEReceiptTimeMs: null,
UEEncodeMs: null,
UECaptureToSendMs: null,
UETransmissionTimeMs: null,
BrowserReceiptTimeMs: null,
FrameDisplayDeltaTimeMs: null,
Reset: function () {
this.TestStartTimeMs = null;
this.UEReceiptTimeMs = null;
// eslint-disable-next-line no-unused-expressions
this.UEEncodeMs = null,
this.UECaptureToSendMs = null,
this.UETransmissionTimeMs = null;
this.BrowserReceiptTimeMs = null;
this.FrameDisplayDeltaTimeMs = null;
},
SetUETimings: function (UETimings) {
this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
// eslint-disable-next-line no-unused-expressions
this.UEEncodeMs = UETimings.EncodeMs,
this.UECaptureToSendMs = UETimings.CaptureToSendMs,
this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
this.BrowserReceiptTimeMs = Date.now();
this.OnAllLatencyTimingsReady(this);
},
SetFrameDisplayDeltaTime: function (DeltaTimeMs) {
if (this.FrameDisplayDeltaTimeMs == null) {
this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
this.OnAllLatencyTimingsReady(this);
}
},
OnAllLatencyTimingsReady: function (Timings) { }
}
//**********************
//Functions
//**********************
//Create Video element and expose that as a parameter
this.createWebRtcVideo = function () {
var video = document.createElement('video');
video.id = "streamingVideo";
video.playsInline = true;
video.disablePictureInPicture = true;
video.muted = self.startVideoMuted;;
video.addEventListener('loadedmetadata', function (e) {
if (self.onVideoInitialised) {
self.onVideoInitialised();
}
}, true);
video.addEventListener('pause', function (e) {
video.play();
})
// Check if request video frame callback is supported
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
// The API is supported!
const onVideoFrameReady = (now, metadata) => {
if (metadata.receiveTime && metadata.expectedDisplayTime) {
const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
}
// Re-register the callback to be notified about the next frame.
video.requestVideoFrameCallback(onVideoFrameReady);
};
// Initially register the callback to be notified about the first frame.
video.requestVideoFrameCallback(onVideoFrameReady);
}
return video;
}
this.createWebRtcAudio = function () {
var audio = document.createElement('audio');
audio.id = 'streamingAudio';
return audio;
}
this.video = this.createWebRtcVideo();
this.audio = this.createWebRtcAudio();
this.availableVideoStreams = new Map();
const onsignalingstatechange = function (state) {
console.info('Signaling state change. |', state.srcElement.signalingState, "|")
};
const oniceconnectionstatechange = function (state) {
console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|')
};
const onicegatheringstatechange = function (state) {
console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|')
};
const handleOnTrack = function (e) {
if (e.track) {
console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
}
if (e.track.kind == "audio") {
handleOnAudioTrack(e.streams[0]);
return;
}
else if (e.track.kind == "video") {
for (const s of e.streams) {
if (!self.availableVideoStreams.has(s.id)) {
self.availableVideoStreams.set(s.id, s);
}
}
self.video.srcObject = e.streams[0];
// All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
e.track.onunmute = () => {
self.video.srcObject = e.streams[0];
self.onNewVideoTrack(e.streams);
}
}
};
const handleOnAudioTrack = function (audioMediaStream) {
// do nothing the video has the same media stream as the audio track we have here (they are linked)
if (self.video.srcObject == audioMediaStream) {
return;
}
// video element has some other media stream that is not associated with this audio track
else if (self.video.srcObject && self.video.srcObject !== audioMediaStream) {
self.audio.srcObject = audioMediaStream;
}
}
const onDataChannel = function (dataChannelEvent) {
// This is the primary data channel code path when we are "receiving"
console.log("Data channel created for us by browser as we are a receiving peer.");
self.dcClient = dataChannelEvent.channel;
setupDataChannelCallbacks(self.dcClient);
}
const createDataChannel = function (pc, label, options) {
// This is the primary data channel code path when we are "offering"
let datachannel = pc.createDataChannel(label, options);
console.log(`Created datachannel (${label})`);
setupDataChannelCallbacks(datachannel);
return datachannel;
}
const setupDataChannelCallbacks = function (datachannel) {
try {
// Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
datachannel.binaryType = "arraybuffer";
datachannel.addEventListener('open', e => {
console.log(`Data channel connected: ${datachannel.label}(${datachannel.id})`);
if (self.onDataChannelConnected) {
self.onDataChannelConnected();
}
});
datachannel.addEventListener('close', e => {
console.log(`Data channel disconnected: ${datachannel.label}(${datachannel.id}`, e);
});
datachannel.addEventListener('message', e => {
if (self.onDataChannelMessage) {
self.onDataChannelMessage(e.data);
}
});
datachannel.addEventListener('error', e => {
console.error(`Data channel error: ${datachannel.label}(${datachannel.id}`, e);
});
return datachannel;
} catch (e) {
console.warn('Datachannel setup caused an exception: ', e);
return null;
}
}
const onicecandidate = function (e) {
let candidate = e.candidate;
if (candidate && candidate.candidate) {
console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
self.onWebRtcCandidate(candidate);
}
};
const handleCreateOffer = function (pc) {
pc.createOffer(self.sdpConstraints).then(function (offer) {
// Munging is where we modifying the sdp string to set parameters that are not exposed to the browser's WebRTC API
mungeSDP(offer);
// Set our munged SDP on the local peer connection so it is "set" and will be send across
pc.setLocalDescription(offer);
if (self.onWebRtcOffer) {
self.onWebRtcOffer(offer);
}
},
function () { console.warn("Couldn't create offer") });
}
const mungeSDP = function (offer) {
let audioSDP = '';
// set max bitrate to highest bitrate Opus supports
audioSDP += 'maxaveragebitrate=510000;';
if (self.useMic) {
// set the max capture rate to 48khz (so we can send high quality audio from mic)
audioSDP += 'sprop-maxcapturerate=48000;';
}
// Force mono or stereo based on whether ?forceMono was passed or not
audioSDP += self.forceMonoAudio ? 'sprop-stereo=0;stereo=0;' : 'sprop-stereo=1;stereo=1;';
// enable in-band forward error correction for opus audio
audioSDP += 'useinbandfec=1';
// We use the line 'useinbandfec=1' (which Opus uses) to set our Opus specific audio parameters.
offer.sdp = offer.sdp.replace('useinbandfec=1', audioSDP);
}
const setupPeerConnection = function (pc) {
//Setup peerConnection events
pc.onsignalingstatechange = onsignalingstatechange;
pc.oniceconnectionstatechange = oniceconnectionstatechange;
pc.onicegatheringstatechange = onicegatheringstatechange;
pc.ontrack = handleOnTrack;
pc.onicecandidate = onicecandidate;
pc.ondatachannel = onDataChannel;
};
// Creates (once) the aggregate-stats store on `self` and returns a reducer
// that folds a raw RTCStatsReport into a flat summary object: video/audio
// bitrate and framerate (instantaneous, low/high, and average since start),
// codec mime types, frame sizes, drop counts, and round-trip time. Deltas
// are computed against the previous summary kept in self.aggregatedStats.
const generateAggregatedStatsFunction = function () {
if (!self.aggregatedStats)
self.aggregatedStats = {};
return function (stats) {
// `newStat` replaces self.aggregatedStats at the end of this pass.
let newStat = {};
// store each type of codec we can get stats on
newStat.codecs = {};
stats.forEach(stat => {
// Get the inbound-rtp for video
if (stat.type === 'inbound-rtp'
&& !stat.isRemote
&& (stat.mediaType === 'video' || stat.id.toLowerCase().includes('video'))) {
newStat.timestamp = stat.timestamp;
newStat.bytesReceived = stat.bytesReceived;
newStat.framesDecoded = stat.framesDecoded;
newStat.packetsLost = stat.packetsLost;
// The *Start values are captured on the first report and carried forward
// unchanged so session-wide averages can be computed later.
newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;
// Delta-based stats need a previous report to compare against.
if (self.aggregatedStats && self.aggregatedStats.timestamp) {
// Get the mimetype of the video codec being used
if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)) {
newStat.videoCodec = self.aggregatedStats.codecs[stat.codecId];
}
if (self.aggregatedStats.bytesReceived) {
// bitrate = bits received since last time / number of ms since last time
//This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
newStat.bitrate = Math.floor(newStat.bitrate);
// Track the session's lowest/highest observed bitrate.
newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate
newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate
}
if (self.aggregatedStats.bytesReceivedStart) {
// Average bitrate over the whole session, in the same kbit units.
newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
newStat.avgBitrate = Math.floor(newStat.avgBitrate);
}
if (self.aggregatedStats.framesDecoded) {
// framerate = frames decoded since last time / number of seconds since last time
newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
newStat.framerate = Math.floor(newStat.framerate);
// Track the session's lowest/highest observed framerate.
newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate
newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate
}
if (self.aggregatedStats.framesDecodedStart) {
// Average framerate over the whole session.
newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
newStat.avgframerate = Math.floor(newStat.avgframerate);
}
}
}
// Get inbound-rtp for audio
if (stat.type === 'inbound-rtp'
&& !stat.isRemote
&& (stat.mediaType === 'audio' || stat.id.toLowerCase().includes('audio'))) {
// Get audio bytes received
if (stat.bytesReceived) {
newStat.audioBytesReceived = stat.bytesReceived;
}
// As we loop back through we may wish to compute some stats based on a delta of the previous time we recorded the stat
if (self.aggregatedStats && self.aggregatedStats.timestamp) {
// Get the mimetype of the audio codec being used
if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)) {
newStat.audioCodec = self.aggregatedStats.codecs[stat.codecId];
}
// Determine audio bitrate delta over the time period
if (self.aggregatedStats.audioBytesReceived) {
newStat.audioBitrate = 8 * (newStat.audioBytesReceived - self.aggregatedStats.audioBytesReceived) / (stat.timestamp - self.aggregatedStats.timestamp);
newStat.audioBitrate = Math.floor(newStat.audioBitrate);
}
}
}
//Read video track stats
if (stat.type === 'track' && (stat.trackIdentifier === 'video_label' || stat.kind === 'video')) {
newStat.framesDropped = stat.framesDropped;
newStat.framesReceived = stat.framesReceived;
// NOTE(review): framesReceived can be 0 on an early report, which makes
// this NaN — confirm downstream consumers tolerate that.
newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
newStat.frameHeight = stat.frameHeight;
newStat.frameWidth = stat.frameWidth;
// Remember the first observed frame size for later comparison.
newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
}
// Active candidate pair gives us the current round-trip time (seconds).
if (stat.type === 'candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0) {
newStat.currentRoundTripTime = stat.currentRoundTripTime;
}
// Store mimetype of each codec
if (newStat.hasOwnProperty('codecs') && stat.type === 'codec' && stat.mimeType && stat.id) {
const codecId = stat.id;
const codecType = stat.mimeType.replace("video/", "").replace("audio/", "");
newStat.codecs[codecId] = codecType;
}
});
// receiveToCompositeMs is written onto aggregatedStats outside this reducer;
// carry it over and feed it into the latency test timings.
if (self.aggregatedStats.receiveToCompositeMs) {
newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
}
// Publish the new summary and make it the baseline for the next pass.
self.aggregatedStats = newStat;
if (self.onAggregatedStats)
self.onAggregatedStats(newStat)
}
};
const setupTransceiversAsync = async function (pc) {
    // Configure the peer connection's transceivers: always receive UE video,
    // and either receive-only audio or — when self.useMic is set — two-way
    // audio backed by the local microphone.
    // Note: captured BEFORE we add the video transceiver below, so it reflects
    // transceivers pre-created by a remote offer.
    let hasTransceivers = pc.getTransceivers().length > 0;
    // Setup a transceiver for getting UE video
    pc.addTransceiver("video", { direction: "recvonly" });
    // Setup a transceiver for sending mic audio to UE and receiving audio from UE
    if (!self.useMic) {
        pc.addTransceiver("audio", { direction: "recvonly" });
        return;
    }
    // Microphone capture settings: mono 48 kHz with browser audio processing
    // disabled. (Original code wrapped this in `self.useMic ? {...} : false`,
    // but useMic is always truthy in this branch — the ternary was dead code.)
    const audioSendOptions = {
        autoGainControl: false,
        channelCount: 1,
        echoCancellation: false,
        latency: 0,
        noiseSuppression: false,
        sampleRate: 48000,
        sampleSize: 16,
        volume: 1.0
    };
    // Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
    const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
    if (!stream) {
        // Mic unavailable: fall back to receive-only audio.
        pc.addTransceiver("audio", { direction: "recvonly" });
        return;
    }
    if (hasTransceivers) {
        // Reuse the existing audio transceiver(s): attach the mic track and
        // upgrade the direction so audio flows both ways.
        for (let transceiver of pc.getTransceivers()) {
            if (transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio") {
                for (const track of stream.getTracks()) {
                    if (track.kind && track.kind == "audio") {
                        // replaceTrack returns a promise; it needs no
                        // renegotiation, so we intentionally do not await it.
                        transceiver.sender.replaceTrack(track);
                        transceiver.direction = "sendrecv";
                    }
                }
            }
        }
    }
    else {
        // No pre-existing transceivers: add a fresh sendrecv one per mic track.
        for (const track of stream.getTracks()) {
            if (track.kind && track.kind == "audio") {
                pc.addTransceiver(track, { direction: "sendrecv" });
            }
        }
    }
};
//**********************
//Public functions
//**********************
this.setVideoEnabled = function (enabled) {
    // Enable/disable every track of the streamed video. Guard against being
    // called before a stream is attached (mirrors the check in
    // startLatencyTest); previously this threw a TypeError in that case.
    if (!self.video || !self.video.srcObject) {
        return;
    }
    self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
}
this.startLatencyTest = function (onTestStarted) {
    // Latency testing needs a video element to measure against.
    if (!self.video) {
        return;
    }
    // Reset timings, stamp the start time, and report it to the caller.
    const timings = self.latencyTestTimings;
    timings.Reset();
    timings.TestStartTimeMs = Date.now();
    onTestStarted(timings.TestStartTimeMs);
}
//This is called when receiving new ice candidates individually instead of as part of the offer
this.handleCandidateFromServer = function (iceCandidate) {
    // Wrap the raw candidate from signalling and add it to our peer
    // connection, optionally filtering out anything that is not a TURN relay.
    const candidate = new RTCIceCandidate(iceCandidate);
    console.log("%c[Unreal ICE candidate]", "background: pink; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
    // When forcing TURN, a candidate string without "relay" means no TURN
    // server was involved, so drop it.
    if (self.forceTURN && candidate.candidate.indexOf("relay") < 0) {
        console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|")
        return;
    }
    self.pcClient.addIceCandidate(candidate).catch(function (e) {
        console.error("Failed to add ICE candidate", e);
    });
};
//Called externally to create an offer for the server
this.createOffer = function () {
    // Tear down any previous connection before negotiating a fresh one.
    if (self.pcClient) {
        console.log("Closing existing PeerConnection")
        self.pcClient.close();
        self.pcClient = null;
    }
    const pc = new RTCPeerConnection(self.cfg);
    self.pcClient = pc;
    setupPeerConnection(pc);
    // Once transceiver setup settles (success or failure), open the data
    // channel and produce the SDP offer.
    setupTransceiversAsync(pc).finally(function () {
        self.dcClient = createDataChannel(pc, 'cirrus', self.dataChannelOptions);
        handleCreateOffer(pc);
    });
};
//Called externally when an offer is received from the server
// Handles an offer pushed from the server (or SFU): creates the local
// PeerConnection on first use, applies the remote description, configures
// transceivers, then answers back through self.onWebRtcAnswer.
this.receiveOffer = function (offer) {
// An SFU marks its offers; remember that and strip the flag before the
// object is used as a session description.
if (offer.sfu) {
// NOTE(review): uses `this` (the call-site receiver) while the rest of the
// function uses the closure-captured `self` — confirm both always refer to
// the same player instance.
this.sfu = true;
delete offer.sfu;
}
// Only negotiate when no connection exists yet; a second offer arriving on
// an existing pcClient is silently ignored.
if (!self.pcClient) {
console.log("Creating a new PeerConnection in the browser.")
self.pcClient = new RTCPeerConnection(self.cfg);
setupPeerConnection(self.pcClient);
// Put things here that happen post transceiver setup
self.pcClient.setRemoteDescription(offer)
.then(() => {
// Transceivers are set up whether or not that succeeds (.finally), then
// we create, munge, and locally apply our answer.
setupTransceiversAsync(self.pcClient).finally(function () {
self.pcClient.createAnswer()
.then(answer => {
mungeSDP(answer);
return self.pcClient.setLocalDescription(answer);
})
.then(() => {
// Send the applied answer back through the signalling callback.
if (self.onWebRtcAnswer) {
self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
}
})
.then(() => {
// Ask the browser to play received frames with minimal buffering.
// NOTE(review): playoutDelayHint is a non-standard (Chromium) field.
let receivers = self.pcClient.getReceivers();
for (let receiver of receivers) {
receiver.playoutDelayHint = 0;
}
})
.catch((error) => console.error("createAnswer() failed:", error));
});
});
}
};
//Called externally when an answer is received from the server
this.receiveAnswer = function (answer) {
    // Apply the remote answer to complete negotiation. The promise returned
    // by setRemoteDescription was previously ignored, so failures surfaced
    // only as unhandled rejections — log them explicitly instead.
    self.pcClient.setRemoteDescription(answer).catch(function (e) {
        console.error("Failed to set remote description from answer", e);
    });
};
this.receiveSFUPeerDataChannelRequest = function (channelData) {
    // The SFU dictates which negotiated stream ids to use. Differing send and
    // receive ids mean two unidirectional channels; equal ids mean one shared
    // bidirectional channel.
    const unidirectional = channelData.sendStreamId != channelData.recvStreamId;
    const sendDataChannel = self.pcClient.createDataChannel(unidirectional ? 'send-datachannel' : 'datachannel', {
        ordered: true,
        negotiated: true,
        id: channelData.sendStreamId
    });
    setupDataChannelCallbacks(sendDataChannel);
    if (unidirectional) {
        const recvDataChannel = self.pcClient.createDataChannel('recv-datachannel', {
            ordered: true,
            negotiated: true,
            id: channelData.recvStreamId
        });
        // when recv data channel is "open" we want to let SFU know so it can tell streamer
        recvDataChannel.addEventListener('open', e => {
            if (self.onSFURecvDataChannelReady) {
                self.onSFURecvDataChannelReady();
            }
        });
        setupDataChannelCallbacks(recvDataChannel);
    }
    this.dcClient = sendDataChannel;
}
this.close = function () {
    // Tear down the peer connection (if any) and stop stats polling.
    const pc = self.pcClient;
    if (pc) {
        console.log("Closing existing peerClient")
        pc.close();
        self.pcClient = null;
    }
    const intervalId = self.aggregateStatsIntervalId;
    if (intervalId) {
        clearInterval(intervalId);
    }
}
//Sends data across the datachannel
this.send = function (data) {
    // Deliver a message over the data channel; silently drop it unless the
    // channel exists and is currently open.
    const channel = self.dcClient;
    if (!channel || channel.readyState != 'open') {
        return;
    }
    channel.send(data);
};
this.getStats = function (onStats) {
    // Fetch a fresh WebRTC stats report and forward it to the callback.
    // A missing connection or callback makes this a no-op.
    if (!self.pcClient || !onStats) {
        return;
    }
    self.pcClient.getStats(null).then((stats) => { onStats(stats); });
}
this.aggregateStats = function (checkInterval) {
    // Poll getStats every `checkInterval` ms, reducing each raw report into
    // self.aggregatedStats via the generated aggregator.
    const reduceStats = generateAggregatedStatsFunction();
    self.aggregateStatsIntervalId = setInterval(() => self.getStats(reduceStats), checkInterval);
}
}