Update environment configuration for production, refactor WebRTC components, and enhance chat functionality. Replace deprecated SessionUsersPanel with SessionPage, integrate chat history loading, and improve audio/video toggle handling. Remove unused SessionUsersPanel2 component and update related socket event handling in the server.

This commit is contained in:
2025-10-27 16:49:52 +05:00
parent 95f7b90d38
commit 2378ed1ff4
20 changed files with 936 additions and 304 deletions
+2 -2
View File
@@ -1,4 +1,4 @@
# VITE_API_URL=http://192.168.1.23:3000
# VITE_API_URL=http://192.168.1.224:3000
VITE_API_URL=http://localhost:3000
VITE_WEBRTC_URL=http://localhost:3001
VITE_API_URL=https://stream.graff.estate/api
VITE_WEBRTC_URL=https://stream.graff.estate
@@ -0,0 +1,94 @@
import UserCamera from "./ui/UserCamera";
import UserDevicesControls from "./ui/UserDevicesControls";
import DraggableContainer from "./DraggableContainer";
import { useWebRTC } from "../hooks/useWebRTC";
import { useCallback } from "react";

interface SessionUsersPanelProps {
  // Room identifier forwarded to the WebRTC hook.
  roomId: string;
  // When true, the WebRTC hook joins the room immediately on mount.
  autoJoin?: boolean;
}

/**
 * Draggable overlay panel with the local camera preview, one camera tile per
 * remote participant that has an active stream, and the shared audio/video
 * device controls.
 *
 * Local voice activity detected inside UserCamera is relayed to the server
 * through `updateSpeakingState` so other participants see the indicator.
 */
function SessionUsersPanel({
  roomId,
  autoJoin = false,
}: SessionUsersPanelProps) {
  const {
    localStream,
    participants,
    isAudioMuted: isLocalAudioMuted,
    isVideoMuted: isLocalVideoMuted,
    toggleAudio,
    toggleVideo,
    updateSpeakingState,
  } = useWebRTC(roomId, autoJoin);

  const hasLocalStream = localStream !== null;

  // Stable callback relaying local speaking-state changes to the server.
  const handleSpeakingChange = useCallback(
    (isSpeaking: boolean) => {
      updateSpeakingState?.(isSpeaking);
    },
    [updateSpeakingState]
  );

  return (
    <DraggableContainer
      enableSnapping={true}
      autoAlign={true}
      // Read the viewport width explicitly from window (no bare global).
      // NOTE(review): this is evaluated once per render, not on resize.
      initialCorner={window.innerWidth >= 640 ? "bottom-right" : "top-right"}
      padding="1.111vw"
      className="flex gap-4 z-[999]"
    >
      {/* Local camera — rendered only once media permission was granted. */}
      {localStream && (
        <UserCamera
          name="Вы"
          isMuted={isLocalAudioMuted}
          isVideoOff={isLocalVideoMuted}
          isControlDisabled={false}
          isAdmin={true}
          isLocal={true}
          mediaStream={localStream}
          onMute={toggleAudio}
          onVideoOff={toggleVideo}
          onCanControl={() => console.log("Toggle control")}
          onSpeakingChange={handleSpeakingChange}
        />
      )}

      {/* Remote cameras — only participants whose stream has live tracks. */}
      {participants
        .filter(
          (participant) =>
            participant.stream != null &&
            participant.stream.getTracks().length > 0
        )
        .map((participant) => (
          <UserCamera
            key={participant.id}
            name={participant.id}
            isMuted={participant.isMuted ?? false}
            isVideoOff={participant.isVideoOff ?? false}
            isSpeaking={participant.isSpeaking}
            isControlDisabled={true}
            isAdmin={true} // the local user administers their own session
            mediaStream={participant.stream}
            onMute={() => console.log(`Mute user ${participant.id}`)}
            onVideoOff={() => console.log(`Video off user ${participant.id}`)}
            onCanControl={() =>
              console.log(`Can control user ${participant.id}`)
            }
          />
        ))}

      <UserDevicesControls
        toggleAudio={toggleAudio}
        toggleVideo={toggleVideo}
        isAudioMuted={isLocalAudioMuted}
        isVideoMuted={isLocalVideoMuted}
        hasLocalStream={hasLocalStream}
      />
    </DraggableContainer>
  );
}

export default SessionUsersPanel;
@@ -1,77 +0,0 @@
import UserCamera from "./ui/UserCamera";
import UserDevicesControls from "./ui/UserDevicesControls";
import DraggableContainer from "./DraggableContainer";
import { useWebRTC } from "../hooks/useWebRTC";

interface SessionUsersPanel2Props {
  // Identifier of the room to connect to.
  roomId: string;
  // Join the room automatically on mount (default: false).
  autoJoin?: boolean;
}

/**
 * Variant of the session panel: renders the local camera, one camera tile
 * per remote participant, and the shared device controls inside a draggable
 * container. Speaking detection is not wired up in this variant.
 */
function SessionUsersPanel2(props: SessionUsersPanel2Props) {
  const { roomId, autoJoin = false } = props;

  const webrtc = useWebRTC(roomId, autoJoin);
  const { localStream, participants, toggleAudio, toggleVideo } = webrtc;
  const isLocalAudioMuted = webrtc.isAudioMuted;
  const isLocalVideoMuted = webrtc.isVideoMuted;
  const streamAvailable = localStream !== null;

  // Build the remote participant tiles up front for a flatter JSX tree.
  const remoteCameras = participants.map((p) => (
    <UserCamera
      key={p.id}
      name={p.id}
      isSpeaking={false}
      isMuted={false}
      isVideoOff={false}
      isControlDisabled={true}
      isAdmin={true} // the local user administers their own session
      mediaStream={p.stream}
      onMute={() => console.log(`Mute user ${p.id}`)}
      onVideoOff={() => console.log(`Video off user ${p.id}`)}
      onCanControl={() => console.log(`Can control user ${p.id}`)}
    />
  ));

  return (
    <DraggableContainer
      enableSnapping={true}
      autoAlign={true}
      initialCorner={innerWidth >= 640 ? "bottom-right" : "top-right"}
      padding="1.111vw"
      className="flex gap-4 z-[999]"
    >
      {/* The local user's own camera preview. */}
      <UserCamera
        name="Вы"
        isSpeaking={false}
        isMuted={isLocalAudioMuted}
        isVideoOff={isLocalVideoMuted}
        isControlDisabled={false}
        isAdmin={true}
        isLocal={true}
        mediaStream={localStream}
        onMute={toggleAudio}
        onVideoOff={toggleVideo}
        onCanControl={() => console.log("Toggle control")}
      />
      {/* One camera tile per remote participant. */}
      {remoteCameras}
      <UserDevicesControls
        toggleAudio={toggleAudio}
        toggleVideo={toggleVideo}
        isAudioMuted={isLocalAudioMuted}
        isVideoMuted={isLocalVideoMuted}
        hasLocalStream={streamAvailable}
      />
    </DraggableContainer>
  );
}

export default SessionUsersPanel2;
+62 -15
View File
@@ -6,10 +6,27 @@ import clsx from "clsx";
import PopupWrapper from "../PopupWrapper";
import DraggableContainer from "../DraggableContainer";
import { useWebRTC } from "../../hooks/useWebRTC";
import { useChatHistory } from "../../hooks/useChatHistory";
import { useParams } from "react-router";
export default function ChatPopup() {
const headerRef = useRef<HTMLDivElement>(null);
const { chatMessages, sendMessage, currentUserId } = useWebRTC();
const { id: sessionId } = useParams<{ id: string }>();
const {
chatMessages: realtimeMessages,
sendMessage,
currentUserId,
} = useWebRTC();
// Загружаем историю через REST API
const { data: historyMessages = [], isLoading } = useChatHistory(sessionId);
// Объединяем историю и realtime сообщения
const historyIds = new Set(historyMessages.map((m) => m.id));
const newRealtimeMessages = realtimeMessages.filter(
(m) => !historyIds.has(m.id)
);
const allMessages = [...historyMessages, ...newRealtimeMessages];
function onMessageSend(message: string) {
sendMessage(message);
@@ -23,9 +40,17 @@ export default function ChatPopup() {
initialPosition={{ right: "5vw" }}
dragHandleRef={headerRef}
>
<PopupWrapper title="Чат" className="sm:overflow-hidden" headerRef={headerRef}>
<PopupWrapper
title="Чат"
className="sm:overflow-hidden"
headerRef={headerRef}
>
<div className="flex flex-col 2xl:h-[19.444vw] max-sm:h-[87.5dvh] 2xl:-m-[1.389vw] -m-5">
<MessageFeed messages={chatMessages} currentUserId={currentUserId} />
<MessageFeed
messages={allMessages}
currentUserId={currentUserId}
isLoading={isLoading}
/>
<MessageInput onMessageSend={onMessageSend} />
</div>
</PopupWrapper>
@@ -38,25 +63,44 @@ interface MessageFeedProps {
id: string;
senderId: string;
content: string;
timestamp: Date;
timestamp: Date | string;
}>;
currentUserId: string;
isLoading?: boolean;
}
function MessageFeed({ messages, currentUserId }: MessageFeedProps) {
function MessageFeed({ messages, currentUserId, isLoading }: MessageFeedProps) {
const messagesEndRef = useRef<HTMLDivElement>(null);
const prevMessageCountRef = useRef(0);
// Скролл к концу при получении нового сообщения
// Умный скролл - только при добавлении новых сообщений
useEffect(() => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, [messages]);
const currentCount = messages.length;
// Скроллим только если добавилось новое сообщение
if (
currentCount > prevMessageCountRef.current &&
prevMessageCountRef.current > 0
) {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}
// Обновляем счетчик
prevMessageCountRef.current = currentCount;
}, [messages.length]); // Зависимость только от длины, не от всего массива
return (
<div
className="flex flex-col w-full 2xl:h-[calc(100%-4.444vw)] h-full bg-[#F0F0F0] 2xl:p-[1.111vw] p-4 pb-0 overflow-y-auto [-webkit-scrollbar]:hidden"
style={{ scrollbarWidth: "none", msOverflowStyle: "none" }}
>
{messages.length === 0 ? (
{isLoading ? (
<div className="flex justify-center items-center m-auto w-full">
<span className="text-center text-gray-500 text-s">
Загрузка истории чата...
</span>
</div>
) : messages.length === 0 ? (
<div className="w-full flex flex-col 2xl:gap-[1.667vw] gap-6 items-center justify-center 2xl:px-[2.778vw] px-10 m-auto">
<img
src="/img/popups/EmptyMessageFeed.svg"
@@ -69,13 +113,16 @@ function MessageFeed({ messages, currentUserId }: MessageFeedProps) {
) : (
<div className="flex flex-col 2xl:gap-[1.111vw] gap-4 items-end mt-auto">
{messages.map((message) => (
<MessageItem
key={message.id}
<MessageItem
key={message.id}
content={message.content}
timestamp={new Date(message.timestamp).toLocaleTimeString('ru-RU', {
hour: '2-digit',
minute: '2-digit'
})}
timestamp={new Date(message.timestamp).toLocaleTimeString(
"ru-RU",
{
hour: "2-digit",
minute: "2-digit",
}
)}
isFromMe={message.senderId === currentUserId}
/>
))}
+123 -48
View File
@@ -10,6 +10,7 @@ import Admin from "../indicators/Admin";
import clsx from "clsx";
import VolumeIcon from "../icons/VolumeIcon";
import VolumeOffIcon from "../icons/VolumeOffIcon";
import { useVoiceActivity } from "../../hooks/useVoiceActivity";
interface UserCameraControlsProps {
isMuted: boolean;
@@ -31,8 +32,9 @@ interface UserCameraProps {
isAdmin?: boolean;
name?: string;
mediaStream?: MediaStream | null;
isSpeaking?: boolean;
isLocal?: boolean;
isSpeaking?: boolean; // Для удаленных участников - получаем по Socket.IO
onSpeakingChange?: (isSpeaking: boolean) => void; // Для локального - отправляем изменения
}
export default function UserCamera({
@@ -42,15 +44,44 @@ export default function UserCamera({
onMute,
onVideoOff,
onCanControl,
isSpeaking = false,
isAdmin = false,
name = "Гость",
mediaStream = null,
isLocal = false,
isSpeaking: remoteSpeaking,
onSpeakingChange,
}: UserCameraProps) {
const ref = useRef<HTMLVideoElement>(null);
const [isAudioMuted, setIsAudioMuted] = useState(true); // Для удаленных участников - начинаем с muted
// Детекция голосовой активности (только для локального пользователя)
const { isSpeaking: isVoiceActive } = useVoiceActivity(
isLocal ? mediaStream : null
);
// Для локального - используем локальную детекцию
// Для удаленных - используем полученное состояние через Socket.IO
const localSpeaking = !isMuted && isVoiceActive;
const isSpeaking = isLocal ? localSpeaking : (remoteSpeaking || false);
// Отправляем изменения состояния для локального пользователя
useEffect(() => {
if (isLocal && onSpeakingChange) {
onSpeakingChange(localSpeaking);
}
}, [isLocal, localSpeaking, onSpeakingChange]);
// Рамка либо горит на 100%, либо выключена
// isSpeaking уже учитывает threshold и debounce (1 сек)
const ringOpacity = isSpeaking ? 1 : 0;
// Логируем для отладки
useEffect(() => {
console.log(
`[${name}${isLocal ? " (local)" : ""}] isSpeaking: ${isSpeaking}, ringOpacity: ${ringOpacity.toFixed(2)}, isMuted: ${isMuted}`
);
}, [isSpeaking, ringOpacity, name, isMuted, isLocal]);
useEffect(() => {
if (ref.current && mediaStream) {
console.log(
@@ -58,7 +89,7 @@ export default function UserCamera({
mediaStream
);
ref.current.srcObject = mediaStream;
// Убеждаемся что видео muted для autoplay
if (!isLocal) {
ref.current.muted = true;
@@ -75,10 +106,15 @@ export default function UserCamera({
// Попытка через 500ms
setTimeout(() => {
if (ref.current) {
console.log(`[UserCamera] First retry for ${name}, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}`);
console.log(
`[UserCamera] First retry for ${name}, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}`
);
if (ref.current.paused) {
ref.current.play().catch((error) => {
console.error(`[UserCamera] First retry play failed for ${name}:`, error);
console.error(
`[UserCamera] First retry play failed for ${name}:`,
error
);
});
}
}
@@ -89,7 +125,10 @@ export default function UserCamera({
if (ref.current && ref.current.paused) {
console.log(`[UserCamera] Second retry for ${name} after timeout`);
ref.current.play().catch((error) => {
console.error(`[UserCamera] Second retry play failed for ${name}:`, error);
console.error(
`[UserCamera] Second retry play failed for ${name}:`,
error
);
});
}
}, 1000);
@@ -97,10 +136,15 @@ export default function UserCamera({
// Еще одна попытка через 3 секунды
setTimeout(() => {
if (ref.current) {
console.log(`[UserCamera] Final retry for ${name}, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}`);
console.log(
`[UserCamera] Final retry for ${name}, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}`
);
if (ref.current.paused) {
ref.current.play().catch((error) => {
console.error(`[UserCamera] Final retry play failed for ${name}:`, error);
console.error(
`[UserCamera] Final retry play failed for ${name}:`,
error
);
});
}
}
@@ -146,11 +190,15 @@ export default function UserCamera({
};
const handleWaiting = () => {
console.log(`[UserCamera] ${name} video waiting event, paused: ${videoElement.paused}, readyState: ${videoElement.readyState}`);
console.log(
`[UserCamera] ${name} video waiting event, paused: ${videoElement.paused}, readyState: ${videoElement.readyState}`
);
};
const handleStalled = () => {
console.log(`[UserCamera] ${name} video stalled event, paused: ${videoElement.paused}, readyState: ${videoElement.readyState}`);
console.log(
`[UserCamera] ${name} video stalled event, paused: ${videoElement.paused}, readyState: ${videoElement.readyState}`
);
};
const handlePause = () => {
@@ -161,30 +209,30 @@ export default function UserCamera({
console.error(`[UserCamera] ${name} video error:`, e);
};
videoElement.addEventListener('loadstart', handleLoadStart);
videoElement.addEventListener('loadeddata', handleLoadedData);
videoElement.addEventListener('loadedmetadata', handleLoadedMetadata);
videoElement.addEventListener('canplay', handleCanPlay);
videoElement.addEventListener('canplaythrough', handleCanPlayThrough);
videoElement.addEventListener('play', handlePlay);
videoElement.addEventListener('playing', handlePlaying);
videoElement.addEventListener('waiting', handleWaiting);
videoElement.addEventListener('stalled', handleStalled);
videoElement.addEventListener('pause', handlePause);
videoElement.addEventListener('error', handleError);
videoElement.addEventListener("loadstart", handleLoadStart);
videoElement.addEventListener("loadeddata", handleLoadedData);
videoElement.addEventListener("loadedmetadata", handleLoadedMetadata);
videoElement.addEventListener("canplay", handleCanPlay);
videoElement.addEventListener("canplaythrough", handleCanPlayThrough);
videoElement.addEventListener("play", handlePlay);
videoElement.addEventListener("playing", handlePlaying);
videoElement.addEventListener("waiting", handleWaiting);
videoElement.addEventListener("stalled", handleStalled);
videoElement.addEventListener("pause", handlePause);
videoElement.addEventListener("error", handleError);
return () => {
videoElement.removeEventListener('loadstart', handleLoadStart);
videoElement.removeEventListener('loadeddata', handleLoadedData);
videoElement.removeEventListener('loadedmetadata', handleLoadedMetadata);
videoElement.removeEventListener('canplay', handleCanPlay);
videoElement.removeEventListener('canplaythrough', handleCanPlayThrough);
videoElement.removeEventListener('play', handlePlay);
videoElement.removeEventListener('playing', handlePlaying);
videoElement.removeEventListener('waiting', handleWaiting);
videoElement.removeEventListener('stalled', handleStalled);
videoElement.removeEventListener('pause', handlePause);
videoElement.removeEventListener('error', handleError);
videoElement.removeEventListener("loadstart", handleLoadStart);
videoElement.removeEventListener("loadeddata", handleLoadedData);
videoElement.removeEventListener("loadedmetadata", handleLoadedMetadata);
videoElement.removeEventListener("canplay", handleCanPlay);
videoElement.removeEventListener("canplaythrough", handleCanPlayThrough);
videoElement.removeEventListener("play", handlePlay);
videoElement.removeEventListener("playing", handlePlaying);
videoElement.removeEventListener("waiting", handleWaiting);
videoElement.removeEventListener("stalled", handleStalled);
videoElement.removeEventListener("pause", handlePause);
videoElement.removeEventListener("error", handleError);
};
}, [name]);
@@ -194,14 +242,18 @@ export default function UserCamera({
ref.current.muted = newMutedState;
setIsAudioMuted(newMutedState);
console.log(
`[UserCamera] ${name} audio ${newMutedState ? "muted" : "unmuted"}, video element muted: ${ref.current.muted}`
`[UserCamera] ${name} audio ${
newMutedState ? "muted" : "unmuted"
}, video element muted: ${ref.current.muted}`
);
}
};
const handleVideoClick = () => {
if (!isLocal && ref.current) {
console.log(`[UserCamera] User clicked on ${name} video, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}, muted: ${ref.current.muted}`);
console.log(
`[UserCamera] User clicked on ${name} video, paused: ${ref.current.paused}, readyState: ${ref.current.readyState}, muted: ${ref.current.muted}`
);
if (ref.current.paused) {
ref.current.play().catch((error) => {
console.error(`[UserCamera] Click play failed for ${name}:`, error);
@@ -215,13 +267,21 @@ export default function UserCamera({
return (
<div
className={clsx(
"aspect-square h-fit group 2xl:rounded-[1.667vw] rounded-2xl relative flex-shrink-0 transition-[width,box-shadow,background-color] duration-300 pointer-events-auto hover:w-[10.833vw] w-[6.944vw] shadow-[0_4px_40px_0_rgba(15,16,17,0.1),0_2px_2px_0_rgba(0,0,0,0.06)] overflow-hidden",
"aspect-square h-fit group 2xl:rounded-[1.667vw] rounded-2xl relative flex-shrink-0 pointer-events-auto hover:w-[10.833vw] w-[6.944vw] overflow-hidden",
isLocal && "order-last",
isSpeaking
? "ring-[0.139vw] ring-[#7B60F3]"
: "ring-[0.069vw] ring-[#FFFFFF4D]",
isVideoOff ? "bg-green-500" : "bg-yellow-500/10"
)}
style={{
boxShadow: isSpeaking
? `0 4px 40px 0 rgba(15,16,17,0.1), 0 2px 2px 0 rgba(0,0,0,0.06), 0 0 0 ${
window.innerWidth >= 1536 ? "0.139vw" : "2px"
} rgba(34, 197, 94, ${ringOpacity})`
: `0 4px 40px 0 rgba(15,16,17,0.1), 0 2px 2px 0 rgba(0,0,0,0.06), 0 0 0 ${
window.innerWidth >= 1536 ? "0.069vw" : "1px"
} rgba(255, 255, 255, 0.3)`,
transition:
"box-shadow 0.1s ease-out, width 0.3s, background-color 0.3s",
}}
onClick={handleVideoClick}
>
{isLocal && <Admin className="absolute top-0 right-0" />}
@@ -234,7 +294,7 @@ export default function UserCamera({
</div>
{/* Заглушка когда нет видео */}
{!mediaStream && (
{(!mediaStream || isVideoOff) && (
<div className="flex absolute inset-0 justify-center items-center bg-gradient-to-br from-gray-700 to-gray-900">
<div className="flex flex-col gap-2 items-center text-white/60">
<div className="2xl:size-[2.778vw] size-10">
@@ -246,7 +306,7 @@ export default function UserCamera({
)}
{/* Подсказка для запуска видео */}
{!isLocal && mediaStream && (
{!isLocal && mediaStream && !isVideoOff && (
<div className="flex absolute inset-0 justify-center items-center opacity-0 transition-opacity duration-300 pointer-events-none bg-black/50 group-hover:opacity-100">
<div className="px-2 py-1 text-xs text-white rounded bg-black/70">
Кликните для запуска видео
@@ -259,7 +319,7 @@ export default function UserCamera({
className={clsx(
"object-cover size-full",
isLocal && "scale-x-[-1]",
!mediaStream && "hidden"
(!mediaStream || isVideoOff) && "hidden"
)}
autoPlay
muted={isLocal ? isMuted : isAudioMuted}
@@ -270,17 +330,27 @@ export default function UserCamera({
loop={false}
onLoadedData={() => {
if (!isLocal && ref.current) {
console.log(`[UserCamera] onLoadedData for ${name}, attempting play, readyState: ${ref.current.readyState}`);
console.log(
`[UserCamera] onLoadedData for ${name}, attempting play, readyState: ${ref.current.readyState}`
);
ref.current.play().catch((error) => {
console.error(`[UserCamera] onLoadedData play failed for ${name}:`, error);
console.error(
`[UserCamera] onLoadedData play failed for ${name}:`,
error
);
});
}
}}
onLoadedMetadata={() => {
if (!isLocal && ref.current) {
console.log(`[UserCamera] onLoadedMetadata for ${name}, attempting play, readyState: ${ref.current.readyState}`);
console.log(
`[UserCamera] onLoadedMetadata for ${name}, attempting play, readyState: ${ref.current.readyState}`
);
ref.current.play().catch((error) => {
console.error(`[UserCamera] onLoadedMetadata play failed for ${name}:`, error);
console.error(
`[UserCamera] onLoadedMetadata play failed for ${name}:`,
error
);
});
}
}}
@@ -288,12 +358,17 @@ export default function UserCamera({
if (!isLocal && ref.current) {
console.log(`[UserCamera] onCanPlay for ${name}, attempting play`);
ref.current.play().catch((error) => {
console.error(`[UserCamera] onCanPlay play failed for ${name}:`, error);
console.error(
`[UserCamera] onCanPlay play failed for ${name}:`,
error
);
});
}
}}
onPlaying={() => {
console.log(`[UserCamera] onPlaying for ${name} - video is actually playing!`);
console.log(
`[UserCamera] onPlaying for ${name} - video is actually playing!`
);
}}
onClick={(e) => {
e.stopPropagation();
@@ -302,7 +377,7 @@ export default function UserCamera({
/>
{/* Кнопка управления звуком для удаленных участников */}
{!isLocal && mediaStream && (
{!isLocal && mediaStream && !isVideoOff && (
<div
className="absolute top-[0.556vw] right-[0.556vw] opacity-0 group-hover:opacity-100 transition-opacity duration-300"
onMouseDown={(e) => e.stopPropagation()}
+37
View File
@@ -0,0 +1,37 @@
import { useQuery } from "@tanstack/react-query";
import { api } from "../lib/api";
import type { ChatMessage } from "../lib/webrtc";

/** Shape of the REST response returned by `sessions/:id/messages`. */
interface ChatHistoryResponse {
  success: boolean;
  messages: ChatMessage[];
  count: number;
  error?: string;
}

/**
 * Loads the persisted chat history of a session over REST.
 *
 * The query stays disabled until a session id is available (and `enabled`
 * is true); successful results are cached and not refetched on window
 * focus or network reconnect.
 */
export const useChatHistory = (
  sessionId: string | undefined,
  enabled = true
) => {
  // Fetch and unwrap the history payload; surfaces server-side errors.
  const fetchHistory = async (): Promise<ChatMessage[]> => {
    if (!sessionId) {
      throw new Error("Session ID is required");
    }
    const response = await api
      .get(`sessions/${sessionId}/messages`)
      .json<ChatHistoryResponse>();
    if (response.success) {
      return response.messages;
    }
    throw new Error(response.error || "Failed to load chat history");
  };

  return useQuery({
    queryKey: ["chat-history", sessionId],
    queryFn: fetchHistory,
    enabled: enabled && !!sessionId,
    staleTime: 1000 * 60 * 5, // history is considered fresh for 5 minutes
    gcTime: 1000 * 60 * 30, // keep cached data for 30 minutes
    refetchOnWindowFocus: false, // don't reload when the window regains focus
    refetchOnReconnect: false, // don't reload on network reconnect
  });
};
+197
View File
@@ -0,0 +1,197 @@
import { useEffect, useState, useRef } from "react";

interface UseVoiceActivityOptions {
  threshold?: number; // Volume threshold in percent (0-100)
  smoothingTimeConstant?: number; // Analyser smoothing (0-1)
  fftSize?: number; // FFT window size for the analyser
  debounceTime?: number; // Delay before the speaking indicator turns off (ms)
}

/**
 * Hook that detects voice activity on a MediaStream via the Web Audio API.
 *
 * Samples the time-domain waveform every animation frame, computes the RMS
 * level, and compares it against a threshold. The speaking flag is debounced:
 * it switches off only after `debounceTime` ms of continuous silence, so the
 * indicator does not flicker between words.
 *
 * @param stream - MediaStream to analyse (null/undefined disables detection)
 * @param options - Detection tuning knobs; see UseVoiceActivityOptions
 * @returns `isSpeaking` (debounced flag) and `audioLevel` (0-100 percent)
 */
export function useVoiceActivity(
  stream: MediaStream | null | undefined,
  options: UseVoiceActivityOptions = {}
): { isSpeaking: boolean; audioLevel: number } {
  const {
    threshold = 6, // low threshold so quiet pauses within speech stay "active"
    smoothingTimeConstant = 0.8, // heavy smoothing for a stable reading
    fftSize = 2048, // larger window for better voice resolution
    debounceTime = 1000, // keep the indicator on for 1s after silence begins
  } = options;

  const [isSpeaking, setIsSpeaking] = useState(false);
  const [audioLevel, setAudioLevel] = useState(0);

  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const animationFrameRef = useRef<number | null>(null);
  const lastSpeakingTimeRef = useRef<number>(0);
  // ReturnType<typeof setTimeout> is portable across browser and Node typings
  // (the original NodeJS.Timeout only exists with @types/node).
  const speakingTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  useEffect(() => {
    if (!stream) {
      setIsSpeaking(false);
      setAudioLevel(0);
      return;
    }

    const audioTracks = stream.getAudioTracks();
    if (audioTracks.length === 0) {
      setIsSpeaking(false);
      setAudioLevel(0);
      return;
    }

    // NOTE(review): `enabled` is sampled once when the effect runs; toggling
    // the track later does not re-trigger this effect — confirm callers
    // remount or change `stream` when muting.
    const audioTrack = audioTracks[0];
    if (!audioTrack.enabled) {
      setIsSpeaking(false);
      setAudioLevel(0);
      return;
    }

    try {
      // Create the analysis pipeline: stream -> source -> analyser.
      const audioContext = new AudioContext();
      audioContextRef.current = audioContext;

      // Browsers may create the context in the "suspended" state under
      // autoplay policies; without a resume() no audio data ever arrives.
      if (audioContext.state === "suspended") {
        void audioContext.resume().catch(() => {
          // Best effort — a later user gesture may resume it.
        });
      }

      const source = audioContext.createMediaStreamSource(stream);

      const analyser = audioContext.createAnalyser();
      analyser.fftSize = fftSize;
      analyser.smoothingTimeConstant = smoothingTimeConstant;
      analyserRef.current = analyser;

      source.connect(analyser);

      // Reusable buffer for time-domain samples.
      const dataArray = new Uint8Array(analyser.frequencyBinCount);

      // Frame counter so we only log roughly twice a second, not every frame.
      let frameCount = 0;

      // Per-frame check: compute RMS level, update state, schedule next frame.
      const checkVoiceActivity = () => {
        if (!analyserRef.current) return;

        // Time-domain (waveform) data: values 0..255, 128 = silence.
        analyser.getByteTimeDomainData(dataArray);

        // RMS (root mean square) is the effective loudness of the window.
        let sumSquares = 0;
        for (let i = 0; i < dataArray.length; i++) {
          const normalized = (dataArray[i] - 128) / 128; // map to -1..1
          sumSquares += normalized * normalized;
        }
        const rms = Math.sqrt(sumSquares / dataArray.length);

        // Map RMS (typically 0.0-0.3 for silence..loud speech) to percent
        // with a mild nonlinear curve, calibrated against Chrome's meter.
        const level = Math.min(100, Math.pow(rms * 100, 1.2));

        // Publish the raw level on every frame.
        setAudioLevel(level);

        const isActive = level > threshold;

        if (isActive) {
          // Speech right now: refresh the timestamp, light the indicator,
          // and cancel any pending turn-off timer.
          lastSpeakingTimeRef.current = Date.now();
          setIsSpeaking(true);

          if (speakingTimeoutRef.current) {
            clearTimeout(speakingTimeoutRef.current);
            speakingTimeoutRef.current = null;
          }
        } else {
          // Silence: only turn off after debounceTime since last activity.
          const timeSinceLastSpeaking =
            Date.now() - lastSpeakingTimeRef.current;

          if (timeSinceLastSpeaking >= debounceTime) {
            setIsSpeaking(false);
          } else if (!speakingTimeoutRef.current) {
            // Arm a timer for the remaining debounce window.
            const remainingTime = debounceTime - timeSinceLastSpeaking;
            speakingTimeoutRef.current = setTimeout(() => {
              setIsSpeaking(false);
              speakingTimeoutRef.current = null;
            }, remainingTime);
          }
        }

        // Log every 30 frames (~500ms at 60fps).
        frameCount++;
        if (frameCount % 30 === 0) {
          console.log(
            `[VoiceActivity] Level: ${level.toFixed(
              1
            )}% | RMS: ${rms.toFixed(
              4
            )} | Threshold: ${threshold}% | Speaking: ${
              isActive ? "🟢 YES" : "⚪ NO"
            }`
          );
        }

        animationFrameRef.current = requestAnimationFrame(checkVoiceActivity);
      };

      checkVoiceActivity();

      console.log(
        `[useVoiceActivity] Started voice activity detection - Threshold: ${threshold}, FFT: ${fftSize}, Smoothing: ${smoothingTimeConstant}, Debounce: ${debounceTime}ms`
      );
    } catch (error) {
      console.error(
        "[useVoiceActivity] Error setting up voice detection:",
        error
      );
      setIsSpeaking(false);
      setAudioLevel(0);
    }

    // Cleanup: stop the loop, clear timers, tear down the audio graph.
    return () => {
      if (animationFrameRef.current !== null) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }

      if (speakingTimeoutRef.current !== null) {
        clearTimeout(speakingTimeoutRef.current);
        speakingTimeoutRef.current = null;
      }

      if (analyserRef.current) {
        analyserRef.current.disconnect();
        analyserRef.current = null;
      }

      if (audioContextRef.current) {
        // close() returns a promise that rejects if the context is already
        // closed; swallow it to avoid an unhandled rejection on fast unmounts.
        audioContextRef.current.close().catch(() => {});
        audioContextRef.current = null;
      }

      setIsSpeaking(false);
      setAudioLevel(0);

      console.log("[useVoiceActivity] Cleaned up voice activity detection");
    };
  }, [stream, threshold, smoothingTimeConstant, fftSize, debounceTime]);

  return { isSpeaking, audioLevel };
}
+30 -6
View File
@@ -98,15 +98,33 @@ export const useWebRTC = (roomId?: string, autoJoin = false) => {
onParticipantLeft: (participantId) => {
setParticipants((prev) => prev.filter((p) => p.id !== participantId));
},
onParticipantAudioToggle: (participantId, isEnabled) => {
console.log(`[useWebRTC] Audio toggle for ${participantId}: ${isEnabled}`);
setParticipants((prev) =>
prev.map((p) =>
p.id === participantId ? { ...p, isMuted: !isEnabled } : p
)
);
},
onParticipantVideoToggle: (participantId, isEnabled) => {
console.log(`[useWebRTC] Video toggle for ${participantId}: ${isEnabled}`);
setParticipants((prev) =>
prev.map((p) =>
p.id === participantId ? { ...p, isVideoOff: !isEnabled } : p
)
);
},
onParticipantSpeakingChange: (participantId, isSpeaking) => {
setParticipants((prev) =>
prev.map((p) =>
p.id === participantId ? { ...p, isSpeaking } : p
)
);
},
onChatMessage: (message) => {
console.log("[useWebRTC] onChatMessage called:", message);
setChatMessages((prev) => [...prev, message]);
},
onDataChannelOpen: () => {
// DataChannel opened
},
onDataChannelClose: () => {
// DataChannel closed
},
onError: (error) => {
console.error("[useWebRTC] Error:", error);
},
@@ -202,6 +220,11 @@ export const useWebRTC = (roomId?: string, autoJoin = false) => {
setParticipants([]);
};
const updateSpeakingState = (isSpeaking: boolean) => {
if (!webrtcServiceInstance) return;
webrtcServiceInstance.updateSpeakingState(isSpeaking);
};
return {
localStream,
participants,
@@ -214,6 +237,7 @@ export const useWebRTC = (roomId?: string, autoJoin = false) => {
toggleAudio,
toggleVideo,
sendMessage,
updateSpeakingState,
joinRoom,
leaveRoom,
};
+110 -136
View File
@@ -6,7 +6,7 @@ export interface ChatMessage {
senderId: string;
senderName?: string;
content: string;
timestamp: Date;
timestamp: Date | string;
type: "text" | "system";
}
@@ -15,7 +15,6 @@ export interface Participant {
name?: string;
stream?: MediaStream;
peerConnection?: RTCPeerConnection;
dataChannel?: RTCDataChannel;
isMuted?: boolean;
isVideoOff?: boolean;
isSpeaking?: boolean;
@@ -29,8 +28,6 @@ export interface WebRTCCallbacks {
onRemoteStreamReady?: (participantId: string, stream: MediaStream) => void;
onRoomParticipants?: (participantIds: string[]) => void;
onChatMessage?: (message: ChatMessage) => void;
onDataChannelOpen?: (participantId: string) => void;
onDataChannelClose?: (participantId: string) => void;
onParticipantAudioToggle?: (
participantId: string,
isEnabled: boolean
@@ -39,6 +36,10 @@ export interface WebRTCCallbacks {
participantId: string,
isEnabled: boolean
) => void;
onParticipantSpeakingChange?: (
participantId: string,
isSpeaking: boolean
) => void;
onError?: (error: Error) => void;
}
@@ -144,6 +145,7 @@ export function createWebRTCService(callbacks: WebRTCCallbacks = {}) {
toggleVideo,
leaveRoom,
sendChatMessage,
updateSpeakingState,
getChatMessages: () => state?.chatMessages || [],
getCurrentUserId: () => state?.userId || "",
getParticipants: () => Array.from(state?.participants.values() || []),
@@ -281,6 +283,61 @@ function setupSocketListeners() {
}
);
// Chat message handlers
socket.on("chat-message", (message: ChatMessage) => {
console.log("📨 Received chat message from Socket.IO:", message);
if (!state) return;
// Добавляем сообщение в локальное хранилище
state.chatMessages.push(message);
console.log(
"📨 Added message to local state, total messages:",
state.chatMessages.length
);
// Уведомляем все компоненты
callAllCallbacks("onChatMessage", message);
});
socket.on("chat-error", (error: { message: string }) => {
console.error("📨 Chat error:", error.message);
callAllCallbacks("onError", new Error(error.message));
});
// Audio/Video toggle handlers
socket.on("audio-toggle", ({ userId, isEnabled }: { userId: string; isEnabled: boolean }) => {
console.log(`[WebRTC] Received audio-toggle from ${userId}: ${isEnabled}`);
if (!state) return;
const participant = state.participants.get(userId);
if (participant) {
participant.isMuted = !isEnabled;
callAllCallbacks("onParticipantAudioToggle", userId, isEnabled);
}
});
socket.on("video-toggle", ({ userId, isEnabled }: { userId: string; isEnabled: boolean }) => {
console.log(`[WebRTC] Received video-toggle from ${userId}: ${isEnabled}`);
if (!state) return;
const participant = state.participants.get(userId);
if (participant) {
participant.isVideoOff = !isEnabled;
callAllCallbacks("onParticipantVideoToggle", userId, isEnabled);
}
});
socket.on("speaking-state", ({ userId, isSpeaking }: { userId: string; isSpeaking: boolean }) => {
if (!state) return;
const participant = state.participants.get(userId);
if (participant) {
participant.isSpeaking = isSpeaking;
// Уведомляем callback для обновления UI
callAllCallbacks("onParticipantSpeakingChange", userId, isSpeaking);
}
});
console.log("Socket listeners set up complete");
}
@@ -343,7 +400,12 @@ async function joinRoom(roomId: string): Promise<void> {
console.log("Joining room:", roomId, "with user ID:", state.userId);
state.roomId = roomId;
state.socket.emit("join-room", { roomId, userId: state.userId });
state.socket.emit("join-room", {
roomId,
userId: state.userId,
isAudioEnabled: state.isAudioEnabled,
isVideoEnabled: state.isVideoEnabled,
});
}
// Вспомогательная функция для добавления участника
@@ -393,62 +455,19 @@ async function createPeerConnection(
});
participant.peerConnection = peerConnection;
// Create DataChannel for chat (only initiator creates the channel)
if (isInitiator) {
const dataChannel = peerConnection.createDataChannel("chat", {
ordered: true,
});
participant.dataChannel = dataChannel;
setupDataChannelListeners(dataChannel, participantId);
}
// Handle incoming DataChannel
peerConnection.ondatachannel = (event) => {
const dataChannel = event.channel;
participant!.dataChannel = dataChannel;
setupDataChannelListeners(dataChannel, participantId);
};
// Add local stream tracks or create silent/black tracks
// Add local stream tracks if available
if (state.localStream) {
state.localStream.getTracks().forEach((track) => {
peerConnection.addTrack(track, state!.localStream!);
});
console.log("[WebRTC] Added local stream tracks for:", participantId);
} else {
// Если нет локального потока, создаем пустые треки для совместимости
// Если нет локального потока, не добавляем треки
// Будем использовать offerToReceiveAudio/Video для приема
console.log(
"[WebRTC] No local stream, creating silent/black tracks for:",
"[WebRTC] No local stream, will use offerToReceive options for:",
participantId
);
// Создаем пустой audio track
const audioContext = new AudioContext();
const oscillator = audioContext.createOscillator();
const dst = audioContext.createMediaStreamDestination();
oscillator.connect(dst);
oscillator.start();
const audioTrack = dst.stream.getAudioTracks()[0];
audioTrack.enabled = false; // Отключаем сразу
// Создаем черный video track
const canvas = document.createElement("canvas");
canvas.width = 640;
canvas.height = 480;
const ctx = canvas.getContext("2d");
if (ctx) {
ctx.fillStyle = "black";
ctx.fillRect(0, 0, canvas.width, canvas.height);
}
const canvasStream = canvas.captureStream(1);
const videoTrack = canvasStream.getVideoTracks()[0];
videoTrack.enabled = false; // Отключаем сразу
// Добавляем треки с MediaStream для лучшей совместимости
const dummyStream = new MediaStream([audioTrack, videoTrack]);
dummyStream.getTracks().forEach((track) => {
peerConnection.addTrack(track, dummyStream);
});
}
// Handle remote stream
@@ -776,6 +795,17 @@ function toggleAudio(): boolean {
track.enabled = !track.enabled;
});
state.isAudioEnabled = !state.isAudioEnabled;
// Отправляем обновление состояния аудио всем участникам
if (state.roomId) {
state.socket.emit("audio-toggle", {
roomId: state.roomId,
userId: state.userId,
isEnabled: state.isAudioEnabled,
});
console.log(`[WebRTC] Sent audio-toggle: ${state.isAudioEnabled}`);
}
return state.isAudioEnabled;
}
@@ -795,99 +825,43 @@ function toggleVideo(): boolean {
track.enabled = !track.enabled;
});
state.isVideoEnabled = !state.isVideoEnabled;
// Отправляем обновление состояния видео всем участникам
if (state.roomId) {
state.socket.emit("video-toggle", {
roomId: state.roomId,
userId: state.userId,
isEnabled: state.isVideoEnabled,
});
console.log(`[WebRTC] Sent video-toggle: ${state.isVideoEnabled}`);
}
return state.isVideoEnabled;
}
function setupDataChannelListeners(
dataChannel: RTCDataChannel,
participantId: string
): void {
if (!state) return;
function updateSpeakingState(isSpeaking: boolean): void {
if (!state || !state.roomId) return;
dataChannel.onopen = () => {
console.log("DataChannel opened with participant:", participantId);
callAllCallbacks("onDataChannelOpen", participantId);
};
dataChannel.onclose = () => {
console.log("DataChannel closed with participant:", participantId);
callAllCallbacks("onDataChannelClose", participantId);
};
dataChannel.onmessage = (event) => {
try {
const message: ChatMessage = JSON.parse(event.data);
console.log("📨 Received chat message from DataChannel:", message);
// Only add messages from other participants (not our own)
if (message.senderId !== state!.userId) {
// Add to local messages
state!.chatMessages.push(message);
console.log(
"📨 Added message to local state, total messages:",
state!.chatMessages.length
);
// Notify callback
callAllCallbacks("onChatMessage", message);
}
} catch (error) {
console.error("Error parsing chat message:", error);
}
};
dataChannel.onerror = (error) => {
console.error("DataChannel error with participant:", participantId, error);
callAllCallbacks("onError", new Error(`DataChannel error: ${error}`));
};
// Отправляем состояние speaking через Socket.IO
state.socket.emit("speaking-state", {
roomId: state.roomId,
userId: state.userId,
isSpeaking,
});
}
function sendChatMessage(content: string): void {
if (!state || !content.trim()) return;
function sendChatMessage(content: string, userName?: string): void {
if (!state || !content.trim() || !state.roomId) return;
const message: ChatMessage = {
id: uuidv4(),
senderId: state.userId,
console.log("📤 Sending message via Socket.IO:", content);
// Отправляем сообщение через Socket.IO
state.socket.emit("chat-message", {
roomId: state.roomId,
userId: state.userId,
content: content.trim(),
timestamp: new Date(),
type: "text",
};
// Add to local messages
state.chatMessages.push(message);
console.log(
"Added own message to local state, total messages:",
state.chatMessages.length
);
// Send to all participants via DataChannel
console.log(
"📤 Sending message to participants, total participants:",
state.participants.size
);
state.participants.forEach((participant) => {
if (
participant.dataChannel &&
participant.dataChannel.readyState === "open"
) {
try {
participant.dataChannel.send(JSON.stringify(message));
console.log(
"📤 Successfully sent message to participant:",
participant.id
);
} catch (error) {
console.error(
"📤 Error sending chat message to participant:",
participant.id,
error
);
}
}
userName: userName || "Anonymous",
});
// Notify local callback (for own messages to update UI)
callAllCallbacks("onChatMessage", message);
}
function leaveRoom(): void {
+2 -3
View File
@@ -12,9 +12,8 @@ import PublicRoute from "./components/PublicRoute";
import ModalContainer from "./components/ModalContainer";
import PopupContainer from "./components/PopupContainer";
import ToastsContainer from "./components/toasts/ToastsContainer";
// import NewSessionPage from "./pages/NewSessionPage";
import TestPage from "./pages/TestPage";
import NewSessionPage from "./pages/NewSessionPage";
import SessionPage from "./pages/SessionPage";
const router = createBrowserRouter([
{
@@ -47,7 +46,7 @@ const router = createBrowserRouter([
},
{
path: "/sessions/:id",
element: <NewSessionPage />,
element: <SessionPage />,
},
]);
+1 -3
View File
@@ -11,7 +11,6 @@ import ChatPopup from "../components/popups/ChatPopup";
import ChatFilledIcon from "../components/icons/ChatFilledIcon";
import ParticipantsPopup from "../components/popups/ParticipantsPopup";
import ControlsPopover from "../components/ui/ControlsPopover";
import SessionUsersPanel2 from "../components/SessionUsersPanel2";
function HomePage() {
const { data: user } = useMe();
@@ -60,7 +59,6 @@ function HomePage() {
</div>
</FloatingActionButton>
<ControlsPopover />
<SessionUsersPanel2 />
<div className="space-y-4">
<div className="p-4 bg-blue-50 rounded-lg border border-blue-200">
@@ -118,7 +116,7 @@ function HomePage() {
<Button
onClick={handleLogout}
disabled={logoutMutation.isPending}
className="hover:bg-red-700 disabled:opacity-50 px-4 py-2 text-white bg-red-600 rounded-md"
className="px-4 py-2 text-white bg-red-600 rounded-md hover:bg-red-700 disabled:opacity-50"
>
{logoutMutation.isPending ? "Выход..." : "Выйти"}
</Button>
@@ -22,9 +22,9 @@ import { PixelStreamingWrapper } from "../components/PixelStreamingWrapper";
import WarningIcon from "../components/icons/WarningIcon";
import Button from "../components/ui/Button";
import LoaderIcon from "../components/icons/LoaderIcon";
import SessionUsersPanel2 from "../components/SessionUsersPanel2";
import SessionUsersPanel from "../components/SessionUsersPanel";
function NewSessionPage() {
function SessionPage() {
const { setPopup } = usePopupStore();
const [isFullscreen, setIsFullscreen] = useState(false);
@@ -232,9 +232,9 @@ function NewSessionPage() {
</ActionsSidebarWrapper>
{/* WebRTC видеочат - работает всегда, пока пользователь на странице */}
<SessionUsersPanel2 roomId={session.id} autoJoin={true} />
<SessionUsersPanel roomId={session.id} autoJoin={true} />
</div>
);
}
export default NewSessionPage;
export default SessionPage;
+3 -1
View File
@@ -1,2 +1,4 @@
DATABASE_URL=postgres://postgres:v1sq3vD5faXL@194.26.138.94:5432/stream
JWT_SECRET=b5cf2bd3894fb24191f13dc9dddaeecccc92d0ee298e7ee41c2d0aab51c28fa1
JWT_SECRET=b5cf2bd3894fb24191f13dc9dddaeecccc92d0ee298e7ee41c2d0aab51c28fa1
# SECURITY(review): the database password and JWT secret above are committed to
# version control (and JWT_SECRET appears twice) — rotate both credentials and
# load them from deployment-time secret storage instead of this file.
PORT=6000
SOCKET_PORT=6001
+8
View File
@@ -0,0 +1,8 @@
// PM2 process definition for the production API server.
// Runs the bundled server entry point (./dist) under the Bun runtime.
module.exports = {
  apps: [
    {
      name: "stream.graff.estate-server",
      // NOTE(review): assumes `bun` is on PATH for the user running the
      // PM2 daemon — confirm on the deployment host.
      script: "bun ./dist",
    },
  ],
};
+38
View File
@@ -0,0 +1,38 @@
import { Elysia, t } from "elysia";
import { getChatHistory } from "../services/chat";
/**
 * REST endpoint for chat history.
 *
 * GET /sessions/:id/messages?limit=N — returns up to `limit` (default 100)
 * messages for the session; ordering is whatever getChatHistory produces.
 * Failures are reported in-band (`success: false` plus an empty message
 * list) rather than via an HTTP error status, so the client always gets a
 * well-formed payload.
 */
export const chatController = new Elysia({ prefix: "/sessions" })
  .get(
    "/:id/messages",
    async ({ params, query }) => {
      const { id } = params;

      // Sanitize the limit: `parseInt` on a non-numeric string yields NaN,
      // which previously flowed straight into the DB query's .limit().
      // Fall back to the default (100) for missing, invalid, or
      // non-positive input.
      const parsed =
        query.limit !== undefined ? Number.parseInt(query.limit, 10) : NaN;
      const limit = Number.isFinite(parsed) && parsed > 0 ? parsed : 100;

      try {
        const messages = await getChatHistory(id, limit);
        return {
          success: true,
          messages,
          count: messages.length,
        };
      } catch (error) {
        console.error("[Chat API] Error fetching chat history:", error);
        return {
          success: false,
          error: "Failed to fetch chat history",
          messages: [],
          count: 0,
        };
      }
    },
    {
      params: t.Object({
        id: t.String(),
      }),
      query: t.Object({
        limit: t.Optional(t.String()),
      }),
    }
  );
+42
View File
@@ -0,0 +1,42 @@
import { pgTable, uuid, text, timestamp, pgEnum } from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { serverSessions } from "./serverSessions";
import { users } from "./users";
// Message type: regular user text vs. server-generated system notices.
export const messageTypeEnum = pgEnum("message_type", ["text", "system"]);

// Persistent chat history, one row per message. Rows are removed together
// with their parent session (onDelete: "cascade").
export const chatMessages = pgTable("chat_messages", {
  id: uuid("id").primaryKey().defaultRandom(),
  sessionId: uuid("session_id")
    .notNull()
    .references(() => serverSessions.id, { onDelete: "cascade" }),
  userId: uuid("user_id").references(() => users.id), // nullable for system messages or anonymous users
  content: text("content").notNull(),
  type: messageTypeEnum("type").notNull().default("text"),
  createdAt: timestamp("created_at", { withTimezone: true })
    .defaultNow()
    .notNull(),
});

// Relations: a message belongs to exactly one session and, optionally, one user.
export const chatMessagesRelations = relations(chatMessages, ({ one }) => ({
  session: one(serverSessions, {
    fields: [chatMessages.sessionId],
    references: [serverSessions.id],
  }),
  user: one(users, {
    fields: [chatMessages.userId],
    references: [users.id],
  }),
}));

// Zod schemas for validation
export const insertChatMessageSchema = createInsertSchema(chatMessages);
export const selectChatMessageSchema = createSelectSchema(chatMessages);

// Type exports
export type ChatMessage = typeof chatMessages.$inferSelect;
export type NewChatMessage = typeof chatMessages.$inferInsert;
+1
View File
@@ -9,6 +9,7 @@ export * from "./userBranches";
export * from "./serverSessions";
export * from "./authSessions";
export * from "./protectedRoutes";
export * from "./chatMessages";
// Relations (defined here to avoid circular dependencies)
import { relations } from "drizzle-orm";
+89 -9
View File
@@ -5,9 +5,12 @@ import { sessionController } from "./controllers/session";
import { companyController } from "./controllers/company";
import { branchController } from "./controllers/branch";
import { serverController } from "./controllers/server";
import { chatController } from "./controllers/chat";
import { serverSessionService } from "./services/serverSession";
import { saveChatMessage } from "./services/chat";
import { Server } from "socket.io";
import { createServer } from "http";
import { AddressInfo } from "net";
const app = new Elysia();
@@ -23,8 +26,9 @@ app.use(sessionController);
app.use(companyController);
app.use(branchController);
app.use(serverController);
app.use(chatController);
app.listen(3000);
app.listen(process.env.PORT || 3000);
console.log(
`🦊 Elysia is running at ${app.server?.hostname}:${app.server?.port}`
@@ -38,8 +42,12 @@ const io = new Server(httpServer, {
},
});
httpServer.listen(3001, () => {
console.log("🎥 WebRTC Socket.IO server running on port 3001");
httpServer.listen(process.env.SOCKET_PORT || 3001, () => {
console.log(
`🎥 WebRTC Socket.IO server running on port ${
(httpServer.address() as AddressInfo).port
}`
);
});
interface Room {
@@ -75,8 +83,10 @@ io.on("connection", (socket) => {
console.log(`[WebRTC] User connected: ${socket.id}`);
// Присоединение к комнате
socket.on("join-room", ({ roomId, userId }) => {
console.log(`[WebRTC] User ${userId} (socket: ${socket.id}) joining room ${roomId}`);
socket.on("join-room", async ({ roomId, userId, isAudioEnabled, isVideoEnabled }) => {
console.log(
`[WebRTC] User ${userId} (socket: ${socket.id}) joining room ${roomId}, audio: ${isAudioEnabled}, video: ${isVideoEnabled}`
);
// Покинуть предыдущую комнату если была
const existingUser = users.get(userId);
@@ -121,20 +131,29 @@ io.on("connection", (socket) => {
// Уведомить других участников
socket.to(roomId).emit("user-joined", userId);
console.log(`[WebRTC] Notified room ${roomId} about user ${userId} joining`);
console.log(
`[WebRTC] Notified room ${roomId} about user ${userId} joining`
);
// Отправить состояние аудио/видео нового пользователя всем в комнате
socket.to(roomId).emit("audio-toggle", { userId, isEnabled: isAudioEnabled !== false });
socket.to(roomId).emit("video-toggle", { userId, isEnabled: isVideoEnabled !== false });
// Отправить список участников новому пользователю
const participants = Array.from(room.participants).filter(
(id) => id !== userId
);
console.log(`[WebRTC] Sending participant list to ${userId}:`, participants);
console.log(
`[WebRTC] Sending participant list to ${userId}:`,
participants
);
socket.emit("room-participants", participants);
});
// Покидание комнаты
socket.on("leave-room", ({ roomId, userId }) => {
console.log(`[WebRTC] User ${userId} leaving room ${roomId}`);
socket.leave(roomId);
const room = rooms.get(roomId);
if (room) {
@@ -194,6 +213,65 @@ io.on("connection", (socket) => {
}
});
// Обработка audio/video toggle
socket.on("audio-toggle", ({ roomId, userId, isEnabled }) => {
console.log(`[WebRTC] Audio toggle from ${userId} in room ${roomId}: ${isEnabled}`);
// Отправляем всем в комнате (кроме отправителя)
socket.to(roomId).emit("audio-toggle", { userId, isEnabled });
});
socket.on("video-toggle", ({ roomId, userId, isEnabled }) => {
console.log(`[WebRTC] Video toggle from ${userId} in room ${roomId}: ${isEnabled}`);
// Отправляем всем в комнате (кроме отправителя)
socket.to(roomId).emit("video-toggle", { userId, isEnabled });
});
// Обработка speaking state
socket.on("speaking-state", ({ roomId, userId, isSpeaking }) => {
// Отправляем всем в комнате (кроме отправителя)
socket.to(roomId).emit("speaking-state", { userId, isSpeaking });
});
// Обработка сообщений чата
socket.on("chat-message", async ({ roomId, userId, content, userName }) => {
console.log(`[Chat] Message from ${userId} in room ${roomId}: ${content}`);
const user = findUserBySocketId(socket.id);
if (!user || user.roomId !== roomId) {
console.warn(`[Chat] User ${socket.id} is not in room ${roomId}`);
return;
}
try {
// Сохраняем сообщение в БД
const savedMessage = await saveChatMessage({
sessionId: roomId,
userId: userId || null, // null для анонимных пользователей
content,
type: "text",
});
// Формируем сообщение для отправки клиентам
const messageToSend = {
id: savedMessage.id,
senderId: userId,
senderName: userName,
content: savedMessage.content,
timestamp: savedMessage.createdAt,
type: savedMessage.type,
};
// Отправляем всем в комнате (включая отправителя)
io.to(roomId).emit("chat-message", messageToSend);
console.log(`[Chat] Message broadcasted to room ${roomId}`);
} catch (error) {
console.error(`[Chat] Error saving message:`, error);
socket.emit("chat-error", {
message: "Failed to save message",
});
}
});
// Отключение
socket.on("disconnect", () => {
console.log(`[WebRTC] User disconnected: ${socket.id}`);
@@ -215,7 +293,9 @@ io.on("connection", (socket) => {
// Удалить пустую комнату
if (room.participants.size === 0) {
rooms.delete(disconnectedUser.roomId);
console.log(`[WebRTC] Deleted empty room ${disconnectedUser.roomId}`);
console.log(
`[WebRTC] Deleted empty room ${disconnectedUser.roomId}`
);
}
}
}
+42
View File
@@ -0,0 +1,42 @@
import db from "../../db";
import {
chatMessages,
type NewChatMessage,
} from "../../db/schema/chatMessages";
import { eq, desc } from "drizzle-orm";
/**
 * Persist a single chat message and return the stored row
 * (including the DB-generated id and createdAt).
 */
export async function saveChatMessage(message: NewChatMessage) {
  const inserted = await db.insert(chatMessages).values(message).returning();
  return inserted[0];
}
/**
* Получить историю сообщений для сессии
* @param sessionId ID сессии
* @param limit Максимальное количество сообщений (по умолчанию 100)
*/
export async function getChatHistory(sessionId: string, limit = 100) {
const messages = await db
.select()
.from(chatMessages)
.where(eq(chatMessages.sessionId, sessionId))
.orderBy(desc(chatMessages.createdAt))
.limit(limit);
// Возвращаем в правильном порядке (старые сначала)
return messages.reverse();
}
/**
 * Remove every message belonging to the given session.
 *
 * @param sessionId Session id whose chat history is purged
 */
export async function deleteChatHistory(sessionId: string) {
  const belongsToSession = eq(chatMessages.sessionId, sessionId);
  await db.delete(chatMessages).where(belongsToSession);
}
+51
View File
@@ -0,0 +1,51 @@
# stream.graff.estate — HTTPS virtual host.
# Serves the built SPA from disk and proxies the REST API (port 6000) and
# Socket.IO signalling (port 6001) backends running on localhost.
server {
    listen 443 ssl http2;
    listen [::]:443 ssl http2;
    server_name stream.graff.estate;
    root /var/www/stream.graff.estate/client/dist;

    # SSL — Let's Encrypt certificates
    ssl_certificate /etc/letsencrypt/live/stream.graff.estate/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/stream.graff.estate/privkey.pem;
    ssl_trusted_certificate /etc/letsencrypt/live/stream.graff.estate/chain.pem;

    # security
    include nginxconfig.io/security.conf;

    # logging
    access_log /var/log/nginx/access.log combined buffer=512k flush=1m;
    error_log /var/log/nginx/error.log warn;

    # index.html fallback — SPA client-side routing: unknown paths serve index.html
    location / {
        try_files $uri $uri/ /index.html;
    }

    # REST API: strip the /api prefix before proxying to the backend.
    # NOTE(review): a request to exactly "/api" (no trailing slash) does not
    # match this rewrite and is proxied with the prefix intact — confirm
    # that is intended.
    location /api {
        rewrite ^/api/(.*)$ /$1 break;
        proxy_pass http://127.0.0.1:6000;
        proxy_set_header Host $host;
        include nginxconfig.io/proxy.conf;
    }

    # WebRTC signalling (Socket.IO). Presumably the WebSocket
    # Upgrade/Connection headers come from nginxconfig.io/proxy.conf —
    # verify, or long-polling fallback will be the only transport.
    location /socket.io {
        proxy_pass http://127.0.0.1:6001;
        proxy_set_header Host $host;
        include nginxconfig.io/proxy.conf;
    }

    # additional config
    include nginxconfig.io/general.conf;
}
# HTTP redirect — keep plain HTTP open only for ACME (Let's Encrypt)
# challenges; everything else is permanently redirected to HTTPS.
server {
    listen 80;
    listen [::]:80;
    server_name stream.graff.estate;
    include nginxconfig.io/letsencrypt.conf;

    location / {
        return 301 https://stream.graff.estate$request_uri;
    }
}