Screen Sharing Implementation on Website
Screen sharing in the browser works via the getDisplayMedia() API: the user chooses what to share — the entire screen, a single window, or a browser tab. The resulting capture stream is then sent over WebRTC as a separate video track.
Native Browser API
/**
 * Prompts the user with the browser's screen picker and returns the capture stream.
 *
 * @returns a MediaStream with one video track and, if the user opted in
 *          (tab/system audio), one audio track.
 * @throws DOMException with name 'NotAllowedError' when the user cancels the picker.
 */
async function startScreenShare(): Promise<MediaStream> {
  const options = {
    video: {
      // Hint only — the user can still pick any surface in the dialog.
      displaySurface: 'monitor', // 'monitor' | 'window' | 'browser'
      width: { ideal: 1920 },
      height: { ideal: 1080 },
      frameRate: { ideal: 30, max: 60 },
    },
    // Capture raw tab/system audio: voice-processing filters would
    // degrade music or application sound, so switch them off.
    audio: {
      echoCancellation: false,
      noiseSuppression: false,
    },
    // Chrome-only hint; false = do not pre-select the current tab.
    preferCurrentTab: false,
  };
  return navigator.mediaDevices.getDisplayMedia(options);
}
Screen Share Component
import { useRef, useState, useCallback } from 'react';
/**
 * Toggle button that starts/stops screen sharing over an existing RTCPeerConnection.
 *
 * On start: replaces the outgoing camera video track with the screen track
 * (or adds a new sender if none exists) and adds the captured screen audio.
 * On stop: stops all capture tracks, swaps the camera back in, and removes
 * the screen-audio sender from the connection.
 */
function ScreenShareButton({ peerConnection }: { peerConnection: RTCPeerConnection | null }) {
  const [isSharing, setIsSharing] = useState(false);
  const screenStreamRef = useRef<MediaStream | null>(null);
  const screenSenderRef = useRef<RTCRtpSender | null>(null);
  // Sender for the captured tab/system audio. Bug fix: the original version
  // added this sender via addTrack() but never removed it, leaving a dead
  // audio sender on the connection after sharing stopped.
  const audioSenderRef = useRef<RTCRtpSender | null>(null);

  // Declared before startSharing so the 'ended' listener can depend on it
  // without a use-before-declaration / missing-dependency lint violation.
  const stopSharing = useCallback(async () => {
    screenStreamRef.current?.getTracks().forEach(t => t.stop());
    screenStreamRef.current = null;
    if (peerConnection) {
      if (screenSenderRef.current) {
        // Swap the camera back onto the video sender we reused/created.
        const cameraStream = await navigator.mediaDevices.getUserMedia({ video: true });
        const [cameraTrack] = cameraStream.getVideoTracks();
        await screenSenderRef.current.replaceTrack(cameraTrack);
        screenSenderRef.current = null;
      }
      if (audioSenderRef.current) {
        // Bug fix: remove the screen-audio sender instead of leaking it.
        peerConnection.removeTrack(audioSenderRef.current);
        audioSenderRef.current = null;
      }
    }
    setIsSharing(false);
  }, [peerConnection]);

  const startSharing = useCallback(async () => {
    try {
      const stream = await startScreenShare();
      screenStreamRef.current = stream;
      const [videoTrack] = stream.getVideoTracks();
      const [audioTrack] = stream.getAudioTracks();
      if (peerConnection) {
        // Reuse the existing camera sender when there is one (keeps
        // renegotiation minimal); otherwise add a fresh sender.
        const senders = peerConnection.getSenders();
        const videoSender = senders.find(s => s.track?.kind === 'video');
        if (videoSender) {
          await videoSender.replaceTrack(videoTrack);
          screenSenderRef.current = videoSender;
        } else {
          screenSenderRef.current = peerConnection.addTrack(videoTrack, stream);
        }
        if (audioTrack) {
          audioSenderRef.current = peerConnection.addTrack(audioTrack, stream);
        }
      }
      setIsSharing(true);
      // 'ended' fires when the user clicks the browser's native "Stop sharing" bar.
      videoTrack.addEventListener('ended', () => { void stopSharing(); });
    } catch (err) {
      // User cancelling the picker is expected; only log real failures.
      if ((err as DOMException).name !== 'NotAllowedError') {
        console.error('Screen share error:', err);
      }
    }
  }, [peerConnection, stopSharing]);

  return (
    <button
      onClick={isSharing ? stopSharing : startSharing}
      className={`p-3 rounded-full ${isSharing ? 'bg-red-600 text-white' : 'bg-gray-700 text-white'}`}
    >
      {isSharing ? 'Stop' : 'Share Screen'}
    </button>
  );
}
LiveKit Integration
import { createLocalScreenTracks, Track } from 'livekit-client';
/**
 * Captures the local screen and publishes it (plus tab/system audio, when the
 * user granted it) to a LiveKit room.
 *
 * Bug fixes vs. the original:
 * - tracks are selected by kind, not by array index (order is not guaranteed,
 *   and audio is absent unless the user opted in);
 * - the audio track is also unpublished when sharing ends (it was leaked).
 */
async function shareScreen(room: Room) {
  const screenTracks = await createLocalScreenTracks({
    audio: true,
    video: {
      width: 1920,
      height: 1080,
      frameRate: 30,
    },
  });
  const videoTrack = screenTracks.find(t => t.kind === Track.Kind.Video);
  const audioTrack = screenTracks.find(t => t.kind === Track.Kind.Audio);
  if (!videoTrack) return; // nothing to publish — user cancelled or capture failed
  await room.localParticipant.publishTrack(videoTrack, {
    name: 'screen',
    source: Track.Source.ScreenShare,
  });
  if (audioTrack) {
    await room.localParticipant.publishTrack(audioTrack, {
      name: 'screen-audio',
      source: Track.Source.ScreenShareAudio,
    });
  }
  // 'ended' fires when the user stops sharing via the browser's native UI.
  videoTrack.on('ended', async () => {
    await room.localParticipant.unpublishTrack(videoTrack);
    if (audioTrack) {
      await room.localParticipant.unpublishTrack(audioTrack);
    }
  });
}
Displaying Another's Screen
/**
 * Full-screen overlay that renders a remote participant's screen-share track.
 * Returns null when the participant is not currently sharing.
 */
function RemoteScreenShare({ participant }: { participant: RemoteParticipant }) {
  const videoRef = useRef<HTMLVideoElement>(null);
  const screenTrack = [...participant.videoTracks.values()]
    .find(pub => pub.source === Track.Source.ScreenShare)?.track;

  useEffect(() => {
    // Bug fix: capture the element now. The original cleanup read
    // videoRef.current! at cleanup time, when the ref may already be null
    // (or point at a different element), breaking detach.
    const el = videoRef.current;
    if (!screenTrack || !el) return;
    screenTrack.attach(el);
    return () => { screenTrack.detach(el); };
  }, [screenTrack]);

  if (!screenTrack) return null;
  return (
    <div className="fixed inset-0 z-50 bg-black flex items-center justify-center">
      <video ref={videoRef} autoPlay playsInline
        className="max-w-full max-h-full" />
      <span className="absolute top-4 left-4 text-white bg-black/60 px-3 py-1 rounded">
        {participant.name} is sharing screen
      </span>
    </div>
  );
}
Timeline
Screen sharing via getDisplayMedia in a P2P call — about 1 day. LiveKit/Daily integration for group calls — 1–2 days.







