Enhance Video and StreamPage components with improved media stream handling. Add audio and video track management, refine microphone and camera status updates, and optimize rendering based on available media streams. Improve user experience with conditional rendering and state management for audio and video devices.

This commit is contained in:
2026-02-27 18:34:51 +05:00
parent cb156bd99d
commit c2fc1624a4
3 changed files with 216 additions and 112 deletions
+57 -14
View File
@@ -13,44 +13,76 @@ interface Props {
user?: IUser;
}
const SPEAKING_HIDE_DELAY_MS = 400;
function Video({ mediaStream, muted, user }: Props) {
const remoteVideoRef = useRef<HTMLVideoElement>(null);
const remoteAudioRef = useRef<HTMLAudioElement>(null);
const isSpeaking = useIsAudioActive({ source: mediaStream });
const [showSpeakingBorder, setShowSpeakingBorder] = useState(false);
const hideTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const [_muted, setMuted] = useState(muted);
const [isLoading, setIsLoading] = useState(true);
const [minimized, setMinimized] = useState(user?.isAdmin ? false : true);
const hasVideo = (mediaStream?.getVideoTracks().length ?? 0) > 0;
useEffect(() => {
if (user && user.micEnabled === false) {
if (hideTimeoutRef.current) {
clearTimeout(hideTimeoutRef.current);
hideTimeoutRef.current = null;
}
setShowSpeakingBorder(false);
return;
}
if (isSpeaking) {
if (hideTimeoutRef.current) {
clearTimeout(hideTimeoutRef.current);
hideTimeoutRef.current = null;
}
setShowSpeakingBorder(true);
} else {
hideTimeoutRef.current = setTimeout(() => {
setShowSpeakingBorder(false);
hideTimeoutRef.current = null;
}, SPEAKING_HIDE_DELAY_MS);
}
return () => {
if (hideTimeoutRef.current) clearTimeout(hideTimeoutRef.current);
};
}, [isSpeaking, user?.micEnabled]);
function toggleSound() {
if (!remoteVideoRef.current) return;
// remoteVideoRef.current.muted = !remoteVideoRef.current.muted;
setMuted((prev) => !prev);
}
useEffect(() => {
if (!remoteVideoRef.current) return;
if (!mediaStream) return;
if (hasVideo && remoteVideoRef.current) {
remoteVideoRef.current.srcObject = mediaStream;
remoteVideoRef.current.onloadedmetadata = () => {
remoteVideoRef.current?.play();
};
remoteVideoRef.current.onplay = () => {
setIsLoading(false);
remoteVideoRef.current.onplay = () => setIsLoading(false);
} else if (!hasVideo && remoteAudioRef.current) {
remoteAudioRef.current.srcObject = mediaStream;
remoteAudioRef.current.onloadedmetadata = () => {
remoteAudioRef.current?.play();
};
console.log("mediaStream", mediaStream?.getTracks());
}, [mediaStream]);
useEffect(() => {
console.log("remoteVideoRef.current", remoteVideoRef);
}, [remoteVideoRef.current]);
remoteAudioRef.current.onplay = () => setIsLoading(false);
setIsLoading(false);
}
}, [mediaStream, hasVideo]);
return (
<div
className={`relative border-2 rounded-lg ${
minimized ? "h-8 rounded-lg overflow-hidden" : ""
} ${!_muted && user?.micEnabled && isSpeaking ? "border-green-500" : "border-transparent"}`}
} ${!_muted && user?.micEnabled !== false && showSpeakingBorder ? "border-green-500" : "border-transparent"}`}
>
{hasVideo ? (
<video
ref={remoteVideoRef}
className={`aspect-video lg:w-[216px] w-[160px] lg:h-[162px] h-[120px] rounded-lg object-cover bg-gray-500`}
@@ -58,6 +90,17 @@ function Video({ mediaStream, muted, user }: Props) {
autoPlay
muted={_muted}
></video>
) : (
<>
<audio
ref={remoteAudioRef}
autoPlay
muted={_muted}
className="hidden"
/>
<div className="aspect-video lg:w-[216px] w-[160px] lg:h-[162px] h-[120px] rounded-lg bg-gray-500" />
</>
)}
<div
className={`absolute -bottom-1.5 flex items-center justify-between w-full gap-2 p-2 ${
minimized ? "bg-black" : ""
+30 -22
View File
@@ -49,52 +49,60 @@ function SetNameModal({ onAction }: Props) {
videoStreamRef.current = null;
try {
const constraints: MediaStreamConstraints = {
const audioConstraints: MediaStreamConstraints = {
audio: audioDeviceId
? { deviceId: { exact: audioDeviceId } }
: true,
video: videoDeviceId
? { deviceId: { exact: videoDeviceId } }
: true,
};
const stream = await navigator.mediaDevices.getUserMedia(constraints);
const devices = await navigator.mediaDevices.enumerateDevices();
const microphones = devices.filter((d) => d.kind === "audioinput");
const cameras = devices.filter((d) => d.kind === "videoinput");
const audioTracks = stream.getAudioTracks();
const videoTracks = stream.getVideoTracks();
const audioStreamResult = await navigator.mediaDevices.getUserMedia(
audioConstraints
);
const audioTracks = audioStreamResult.getAudioTracks();
if (audioTracks.length) {
const audioMediaStream = new MediaStream(audioTracks);
audioStreamRef.current = audioMediaStream;
setAudioStream(audioMediaStream);
setAudioDevices(microphones);
setSelectedAudioDeviceId(
audioDeviceId ?? microphones[0]?.deviceId ?? ""
);
setMicStatus("success");
} else {
setMicStatus("error");
}
} catch {
setMicStatus("error");
}
try {
const videoConstraints: MediaStreamConstraints = {
video: videoDeviceId
? { deviceId: { exact: videoDeviceId } }
: true,
};
const videoStreamResult = await navigator.mediaDevices.getUserMedia(
videoConstraints
);
const videoTracks = videoStreamResult.getVideoTracks();
if (videoTracks.length) {
const videoMediaStream = new MediaStream(videoTracks);
videoStreamRef.current = videoMediaStream;
setVideoStream(videoMediaStream);
setVideoDevices(cameras);
setSelectedVideoDeviceId(
videoDeviceId ?? cameras[0]?.deviceId ?? ""
);
setCameraStatus("success");
} else {
setCameraStatus("error");
}
} catch {
setMicStatus("error");
setCameraStatus("error");
}
const devices = await navigator.mediaDevices.enumerateDevices();
const microphones = devices.filter((d) => d.kind === "audioinput");
const cameras = devices.filter((d) => d.kind === "videoinput");
setAudioDevices(microphones);
setVideoDevices(cameras);
setSelectedAudioDeviceId(
audioDeviceId ?? microphones[0]?.deviceId ?? ""
);
setSelectedVideoDeviceId(
videoDeviceId ?? cameras[0]?.deviceId ?? ""
);
}
async function checkMicrophone(deviceId?: string) {
+59 -6
View File
@@ -147,6 +147,7 @@ function StreamPage() {
...existingVideoStream!.getVideoTracks(),
]);
} else if (hasAudio) {
try {
const videoStream = await navigator.mediaDevices.getUserMedia({
video: videoDeviceId
? { deviceId: { exact: videoDeviceId } }
@@ -156,7 +157,13 @@ function StreamPage() {
...existingAudioStream!.getAudioTracks(),
...videoStream.getVideoTracks(),
]);
} catch {
mediaStream = new MediaStream(
existingAudioStream!.getAudioTracks()
);
}
} else if (hasVideo) {
try {
const audioStream = await navigator.mediaDevices.getUserMedia({
audio: audioDeviceId
? { deviceId: { exact: audioDeviceId } }
@@ -166,6 +173,11 @@ function StreamPage() {
...audioStream.getAudioTracks(),
...existingVideoStream!.getVideoTracks(),
]);
} catch {
mediaStream = new MediaStream(
existingVideoStream!.getVideoTracks()
);
}
} else {
mediaStream = await navigator.mediaDevices.getUserMedia({
video: videoDeviceId
@@ -177,12 +189,15 @@ function StreamPage() {
});
}
if (!localVideoRef.current) return;
if (
localVideoRef.current &&
mediaStream.getVideoTracks().length > 0
) {
localVideoRef.current.srcObject = mediaStream;
localVideoRef.current.onloadedmetadata = () => {
localVideoRef.current?.play();
};
}
setLocalStream(mediaStream);
setPermission(true);
@@ -479,6 +494,15 @@ function StreamPage() {
}, [users.length]);
useEffect(() => {
if (
localVideoRef.current &&
localStream.getVideoTracks().length > 0
) {
localVideoRef.current.srcObject = localStream;
localVideoRef.current.onloadedmetadata = () => {
localVideoRef.current?.play();
};
}
toggleCamera();
toggleMic();
}, [localStream]);
@@ -564,11 +588,16 @@ function StreamPage() {
{permission && (
<>
{localStream.getAudioTracks().length > 0 && (
<div className="relative group">
<Button
variant="secondary"
icon={
isMicEnabled ? <MicroOnIcon /> : <MicroOffIcon />
isMicEnabled ? (
<MicroOnIcon />
) : (
<MicroOffIcon />
)
}
onlyIcon
onClick={toggleMic}
@@ -581,6 +610,8 @@ function StreamPage() {
}
/>
</div>
)}
{localStream.getVideoTracks().length > 0 && (
<div className="relative group">
<Button
variant="secondary"
@@ -602,6 +633,7 @@ function StreamPage() {
}
/>
</div>
)}
</>
)}
</div>
@@ -733,7 +765,12 @@ function StreamPage() {
<div className="absolute top-2 space-y-2 lg:left-2 max-lg:right-2">
<div
className={`relative border-2 rounded-lg ${
!permission || !isCameraEnabled ? "hidden" : ""
!permission ? "hidden" : ""
} ${
localStream.getVideoTracks().length === 0 ||
!isCameraEnabled
? "h-8 overflow-hidden"
: ""
} ${
isMicEnabled && isSpeaking
? "border-green-500"
@@ -742,12 +779,28 @@ function StreamPage() {
>
<video
ref={localVideoRef}
className={`object-cover bg-gray-500 rounded-lg aspect-video lg:w-[216px] w-[160px] lg:h-[162px] h-[120px] -scale-x-100`}
className={`object-cover bg-gray-500 rounded-lg aspect-video lg:w-[216px] w-[160px] lg:h-[162px] h-[120px] -scale-x-100 ${
localStream.getVideoTracks().length === 0 ||
!isCameraEnabled
? "hidden"
: ""
}`}
playsInline
autoPlay
muted
></video>
<div className="absolute bottom-0 p-2">
{(localStream.getVideoTracks().length === 0 ||
!isCameraEnabled) && (
<div className="aspect-video lg:w-[216px] w-[160px] lg:h-[162px] h-[120px] rounded-lg bg-gray-500" />
)}
<div
className={`absolute -bottom-1.5 flex items-center lg:w-[216px] w-[160px] gap-2 p-2 ${
localStream.getVideoTracks().length === 0 ||
!isCameraEnabled
? "bg-black"
: ""
}`}
>
<p className="text-xs text-white truncate lg:text-sm">
{name}
</p>