<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebRTC Technology Research</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
background: linear-gradient(135deg, #1e3c72, #2a5298);
color: white;
line-height: 1.6;
}
.container {
max-width: 1200px;
margin: 0 auto;
padding: 20px;
}
.header {
text-align: center;
margin-bottom: 40px;
}
.header h1 {
font-size: 3rem;
margin-bottom: 10px;
background: linear-gradient(45deg, #ff6b6b, #feca57);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
background-clip: text;
}
.section {
background: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
border-radius: 15px;
padding: 30px;
margin-bottom: 30px;
border: 1px solid rgba(255, 255, 255, 0.2);
}
.section h2 {
color: #feca57;
margin-bottom: 20px;
font-size: 2rem;
}
.section h3 {
color: #ff6b6b;
margin: 20px 0 10px 0;
font-size: 1.3rem;
}
.demo-area {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 20px;
margin: 20px 0;
}
.video-container {
position: relative;
background: #000;
border-radius: 10px;
overflow: hidden;
height: 200px;
}
video {
width: 100%;
height: 100%;
object-fit: cover;
}
.video-label {
position: absolute;
top: 10px;
left: 10px;
background: rgba(0, 0, 0, 0.7);
padding: 5px 10px;
border-radius: 5px;
font-size: 0.9rem;
}
.controls {
display: flex;
gap: 10px;
margin: 20px 0;
flex-wrap: wrap;
justify-content: center;
}
button {
background: linear-gradient(45deg, #ff6b6b, #feca57);
color: white;
border: none;
padding: 10px 20px;
border-radius: 25px;
cursor: pointer;
font-weight: 600;
transition: all 0.3s ease;
}
button:hover {
transform: translateY(-2px);
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.3);
}
button:disabled {
background: #666;
cursor: not-allowed;
transform: none;
}
.code-block {
background: #1a1a1a;
border-radius: 10px;
padding: 20px;
margin: 15px 0;
overflow-x: auto;
border-left: 4px solid #feca57;
}
.code-block pre {
color: #f8f8f2;
font-family: 'Courier New', monospace;
font-size: 0.9rem;
}
.architecture {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
gap: 20px;
margin: 20px 0;
}
.component {
background: rgba(255, 255, 255, 0.1);
padding: 20px;
border-radius: 10px;
text-align: center;
border: 2px solid #feca57;
}
.component h4 {
color: #ff6b6b;
margin-bottom: 10px;
}
.log-area {
background: #000;
color: #00ff00;
padding: 15px;
border-radius: 10px;
height: 200px;
overflow-y: auto;
font-family: monospace;
font-size: 0.9rem;
margin: 20px 0;
}
.stats-display {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
gap: 15px;
margin: 20px 0;
}
.stat-box {
background: rgba(0, 0, 0, 0.3);
padding: 15px;
border-radius: 10px;
text-align: center;
}
.stat-value {
font-size: 1.5rem;
font-weight: bold;
color: #feca57;
}
.stat-label {
font-size: 0.9rem;
color: #ccc;
}
@media (max-width: 768px) {
.demo-area {
grid-template-columns: 1fr;
}
.header h1 {
font-size: 2rem;
}
.section {
padding: 20px;
}
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🌐 WebRTC Technology Research</h1>
<p>Real-Time Communication in Browsers - Complete Analysis</p>
</div>
<!-- Architecture Section -->
<div class="section">
<h2>🏗️ WebRTC Architecture</h2>
<div class="architecture">
<div class="component">
<h4>📷 MediaStream API</h4>
<p>Camera & Microphone access, getUserMedia()</p>
</div>
<div class="component">
<h4>🔗 RTCPeerConnection</h4>
<p>P2P connection, media encoding/decoding</p>
</div>
<div class="component">
<h4>📡 RTCDataChannel</h4>
<p>Text/binary data transfer</p>
</div>
<div class="component">
<h4>🌐 Signaling Server</h4>
<p>WebSocket/HTTP for offer/answer exchange</p>
</div>
</div>
</div>
<!-- Live Demo Section -->
<div class="section">
<h2>🚀 Live WebRTC Demo</h2>
<p>Interactive demonstration of WebRTC capabilities</p>
<div class="demo-area">
<div class="video-container">
<video id="localVideo" autoplay muted playsinline></video>
<div class="video-label">Local Stream</div>
</div>
<div class="video-container">
<video id="remoteVideo" autoplay playsinline></video>
<div class="video-label">Remote Stream (Simulated)</div>
</div>
</div>
<div class="controls">
<button onclick="startLocalStream()">📷 Start Camera</button>
<button onclick="createOffer()" id="offerBtn" disabled>📤 Create Offer</button>
<button onclick="createAnswer()" id="answerBtn" disabled>📥 Create Answer</button>
<button onclick="stopStream()">⏹️ Stop</button>
<button onclick="clearLogs()">🗑️ Clear Logs</button>
</div>
<div class="stats-display">
<div class="stat-box">
<div class="stat-value" id="connectionState">New</div>
<div class="stat-label">Connection State</div>
</div>
<div class="stat-box">
<div class="stat-value" id="iceState">New</div>
<div class="stat-label">ICE State</div>
</div>
<div class="stat-box">
<div class="stat-value" id="dataChannelState">Closed</div>
<div class="stat-label">Data Channel</div>
</div>
<div class="stat-box">
<div class="stat-value" id="mediaStreams">0</div>
<div class="stat-label">Media Streams</div>
</div>
</div>
<div class="log-area" id="logArea">
<div>WebRTC Research Demo Initialized...</div>
<div>Ready to demonstrate WebRTC concepts</div>
</div>
</div>
<!-- Technical Details -->
<div class="section">
<h2>🔬 Technical Implementation</h2>
<h3>1. Getting User Media</h3>
<div class="code-block">
<pre>
// Access camera and microphone
navigator.mediaDevices.getUserMedia({
  video: {
    width: { ideal: 1280 },
    height: { ideal: 720 },
    frameRate: { ideal: 30 }
  },
  audio: {
    echoCancellation: true,
    noiseSuppression: true,
    autoGainControl: true
  }
}).then(stream => {
  localVideo.srcObject = stream;
  localStream = stream;
}).catch(error => {
  console.error('Error accessing media devices:', error);
});
</pre>
</div>
<h3>2. Creating Peer Connection</h3>
<div class="code-block">
<pre>
// ICE servers configuration
const configuration = {
  iceServers: [
    { urls: 'stun:stun.l.google.com:19302' },
    { urls: 'stun:stun1.l.google.com:19302' },
    {
      urls: 'turn:turnserver.com:3478',
      username: 'user',
      credential: 'password'
    }
  ]
};

// Create peer connection
const peerConnection = new RTCPeerConnection(configuration);

// Add local stream to peer connection
localStream.getTracks().forEach(track => {
  peerConnection.addTrack(track, localStream);
});
</pre>
</div>
<h3>3. Signaling Process</h3>
<div class="code-block">
<pre>
// Create offer (Caller side)
const offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(offer);
// Send offer to remote peer via signaling server
// Create answer (Callee side)
await peerConnection.setRemoteDescription(receivedOffer);
const answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(answer);
// Send answer back to caller
</pre>
</div>
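<p>The code above assumes a signaling channel already exists. One possible shape for such a channel is sketched below using a WebSocket; the server URL and the message format are assumptions made for illustration, not part of the WebRTC standard.</p>
<div class="code-block">
<pre>
// Hypothetical WebSocket signaling channel; URL and message format are illustrative
const signalingChannel = new WebSocket('wss://example.com/signaling');

// Forward the local offer (or answer) once setLocalDescription() has resolved
function sendLocalDescription() {
  signalingChannel.send(JSON.stringify(peerConnection.localDescription));
}

// React to messages from the remote peer
signalingChannel.onmessage = async (event) => {
  const message = JSON.parse(event.data);
  if (message.type === 'offer') {
    await peerConnection.setRemoteDescription(message);
    await peerConnection.setLocalDescription(await peerConnection.createAnswer());
    sendLocalDescription();
  } else if (message.type === 'answer') {
    await peerConnection.setRemoteDescription(message);
  } else if (message.type === 'ice-candidate') {
    await peerConnection.addIceCandidate(message.candidate);
  }
};
</pre>
</div>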
<h3>4. ICE Candidate Exchange</h3>
<div class="code-block">
<pre>
// Handle ICE candidates
peerConnection.onicecandidate = (event) => {
  if (event.candidate) {
    // Send candidate to remote peer
    signalingChannel.send({
      type: 'ice-candidate',
      candidate: event.candidate
    });
  }
};

// Add received ICE candidates
function addIceCandidate(candidate) {
  peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
}
</pre>
</div>
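<h3>5. Data Channels (Sketch)</h3>
<p>The architecture overview lists RTCDataChannel for text and binary transfer. A minimal sketch of how the two peers might wire one up is shown below; the channel label and the handlers are illustrative.</p>
<div class="code-block">
<pre>
// Caller: create the channel before generating the offer
const dataChannel = peerConnection.createDataChannel('chat', { ordered: true });
dataChannel.onopen = () => dataChannel.send('hello');
dataChannel.onmessage = (event) => console.log('Received:', event.data);

// Callee: the channel arrives via the ondatachannel event
peerConnection.ondatachannel = (event) => {
  const channel = event.channel;
  channel.onmessage = (e) => console.log('Received:', e.data);
};
</pre>
</div>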
</div>
<!-- Security & Limitations -->
<div class="section">
<h2>🔒 Security & Browser Limitations</h2>
<h3>Browser Security Features:</h3>
<ul style="margin: 15px 0; padding-left: 30px;">
<li><strong>HTTPS Required:</strong> getUserMedia() and most WebRTC features are available only on secure origins (HTTPS or localhost)</li>
<li><strong>User Permission:</strong> Explicit consent is required for camera and microphone access (see the sketch below)</li>
<li><strong>Same-Origin Policy:</strong> Scripted signaling (fetch, XHR) remains subject to the browser's cross-origin rules</li>
<li><strong>Content Security Policy:</strong> Additional restrictions possible</li>
</ul>
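<p>Permission state can sometimes be checked before prompting the user. Support for the 'camera' and 'microphone' permission names varies by browser, so the snippet below is a best-effort sketch rather than a guaranteed API.</p>
<div class="code-block">
<pre>
// Query camera permission without triggering a prompt (where supported)
try {
  const status = await navigator.permissions.query({ name: 'camera' });
  console.log('Camera permission:', status.state); // 'granted', 'denied' or 'prompt'
} catch (err) {
  console.log('Permissions API not available for camera:', err);
}
</pre>
</div>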
<h3>NAT Traversal:</h3>
<ul style="margin: 15px 0; padding-left: 30px;">
<li><strong>STUN Servers:</strong> Discover the client's public IP address and port</li>
<li><strong>TURN Servers:</strong> Relay traffic when a direct connection fails</li>
<li><strong>ICE Framework:</strong> Tries multiple candidate paths and selects one that works (see the sketch below)</li>
</ul>
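<p>A small sketch of how these pieces surface in the API: each ICE candidate carries its type, and a relay-only policy can force all traffic through TURN. The TURN URL and credentials below are placeholders.</p>
<div class="code-block">
<pre>
// Force relayed (TURN-only) connectivity; 'all' is the default policy
const relayOnlyPc = new RTCPeerConnection({
  iceServers: [{ urls: 'turn:turnserver.com:3478', username: 'user', credential: 'password' }],
  iceTransportPolicy: 'relay'
});

// Inspect which path each candidate represents: 'host', 'srflx' (STUN) or 'relay' (TURN)
relayOnlyPc.onicecandidate = (event) => {
  if (event.candidate) {
    console.log('Candidate type:', event.candidate.type);
  }
};
</pre>
</div>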
<h3>Data Protection:</h3>
<ul style="margin: 15px 0; padding-left: 30px;">
<li><strong>DTLS/SRTP:</strong> Media is encrypted with SRTP and data channels with DTLS by default (see the getStats() sketch below)</li>
<li><strong>Identity Verification:</strong> Optional identity providers</li>
<li><strong>Fingerprint Verification:</strong> Certificate validation</li>
</ul>
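<p>Encryption is not optional in WebRTC, but it can be observed: the statistics API exposes the DTLS transport state. A rough sketch is shown below; the exact fields reported (such as dtlsState or srtpCipher) vary by browser.</p>
<div class="code-block">
<pre>
// Inspect the encrypted transport once the connection is up
const stats = await peerConnection.getStats();
stats.forEach(report => {
  if (report.type === 'transport') {
    console.log('DTLS state:', report.dtlsState);
    console.log('SRTP cipher:', report.srtpCipher);
  }
});
</pre>
</div>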
</div>
<!-- Use Cases -->
<div class="section">
<h2>🎯 Real-World Applications</h2>
<div class="architecture">
<div class="component">
<h4>💬 Video Conferencing</h4>
<p>Zoom, Google Meet, Microsoft Teams</p>
</div>
<div class="component">
<h4>🎮 Gaming</h4>
<p>Real-time multiplayer games</p>
</div>
<div class="component">
<h4>📁 File Sharing</h4>
<p>P2P file transfer applications</p>
</div>
<div class="component">
<h4>🏥 Telemedicine</h4>
<p>Remote medical consultations</p>
</div>
</div>
</div>
</div>
<script>
let localVideo = document.getElementById('localVideo');
let remoteVideo = document.getElementById('remoteVideo');
let localStream = null;
let peerConnection = null;
let dataChannel = null;
let logArea = document.getElementById('logArea');
// ICE servers configuration
const configuration = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' }
]
};
function log(message) {
const timestamp = new Date().toLocaleTimeString();
const logEntry = document.createElement('div');
logEntry.textContent = `[${timestamp}] ${message}`;
logArea.appendChild(logEntry);
logArea.scrollTop = logArea.scrollHeight;
}
function updateStats() {
if (peerConnection) {
document.getElementById('connectionState').textContent = peerConnection.connectionState;
document.getElementById('iceState').textContent = peerConnection.iceConnectionState;
}
if (dataChannel) {
document.getElementById('dataChannelState').textContent = dataChannel.readyState;
}
document.getElementById('mediaStreams').textContent = localStream ? localStream.getTracks().length : 0;
}
async function startLocalStream() {
try {
log('Requesting camera and microphone access...');
localStream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 640 },
height: { ideal: 480 }
},
audio: true
});
localVideo.srcObject = localStream;
log('Local stream started successfully');
log(`Video tracks: ${localStream.getVideoTracks().length}`);
log(`Audio tracks: ${localStream.getAudioTracks().length}`);
document.getElementById('offerBtn').disabled = false;
updateStats();
} catch (error) {
log(`Error accessing media devices: ${error.message}`);
console.error('Error:', error);
}
}
function createPeerConnection() {
if (peerConnection) {
peerConnection.close();
}
peerConnection = new RTCPeerConnection(configuration);
log('Peer connection created');
// Add local stream to peer connection
if (localStream) {
localStream.getTracks().forEach(track => {
peerConnection.addTrack(track, localStream);
log(`Added ${track.kind} track to peer connection`);
});
}
// Handle remote stream
peerConnection.ontrack = (event) => {
log('Received remote stream');
remoteVideo.srcObject = event.streams[0];
};
// Handle ICE candidates
peerConnection.onicecandidate = (event) => {
if (event.candidate) {
log(`ICE candidate: ${event.candidate.type} - ${event.candidate.protocol}`);
} else {
log('ICE candidate gathering complete');
}
};
// Connection state changes
peerConnection.onconnectionstatechange = () => {
log(`Connection state: ${peerConnection.connectionState}`);
updateStats();
};
peerConnection.oniceconnectionstatechange = () => {
log(`ICE connection state: ${peerConnection.iceConnectionState}`);
updateStats();
};
// Create data channel
dataChannel = peerConnection.createDataChannel('demo', {
ordered: true
});
dataChannel.onopen = () => {
log('Data channel opened');
updateStats();
};
dataChannel.onmessage = (event) => {
log(`Data channel message: ${event.data}`);
};
return peerConnection;
}
async function createOffer() {
if (!localStream) {
log('Error: No local stream available');
return;
}
const pc = createPeerConnection();
try {
log('Creating offer...');
const offer = await pc.createOffer({
offerToReceiveAudio: true,
offerToReceiveVideo: true
});
await pc.setLocalDescription(offer);
log('Local description set (offer)');
log(`Offer SDP type: ${offer.type}`);
// In a real application, you would send this offer to the remote peer
// For demo purposes, we'll simulate the process
setTimeout(() => {
simulateRemoteAnswer(offer);
}, 1000);
document.getElementById('answerBtn').disabled = false;
} catch (error) {
log(`Error creating offer: ${error.message}`);
}
}
async function simulateRemoteAnswer(offer) {
try {
log('Simulating remote peer response...');
// Simulate an answer by echoing the local offer SDP with the first 'a=sendrecv' flipped to 'a=recvonly'.
// This is illustrative only: a real browser may reject an echoed SDP, which the catch block below logs.
const answer = {
type: 'answer',
sdp: offer.sdp.replace('a=sendrecv', 'a=recvonly')
};
// In a real scenario, the remote peer would generate and send back its own answer
await peerConnection.setRemoteDescription(answer);
log('Remote description set (answer)');
log('WebRTC connection process demonstrated!');
// Simulate successful connection
setTimeout(() => {
log('Connection established (simulated)');
// Create a fake remote stream for demonstration
createSimulatedRemoteStream();
}, 2000);
} catch (error) {
log(`Error in simulated answer: ${error.message}`);
}
}
function createSimulatedRemoteStream() {
// Create a canvas element for simulated remote video
const canvas = document.createElement('canvas');
canvas.width = 640;
canvas.height = 480;
const ctx = canvas.getContext('2d');
// Create animated pattern
let frame = 0;
function animate() {
ctx.fillStyle = `hsl(${frame % 360}, 50%, 50%)`;
ctx.fillRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = 'white';
ctx.font = '48px Arial';
ctx.textAlign = 'center';
ctx.fillText('Simulated Remote', canvas.width/2, canvas.height/2 - 50);
ctx.fillText('Video Stream', canvas.width/2, canvas.height/2 + 50);
// Draw some animated circles
for (let i = 0; i < 5; i++) {
ctx.beginPath();
ctx.arc(
100 + i * 120,
200 + Math.sin((frame + i * 60) * 0.05) * 50,
20,
0,
2 * Math.PI
);
ctx.fillStyle = `hsl(${(frame + i * 72) % 360}, 70%, 60%)`;
ctx.fill();
}
frame++;
requestAnimationFrame(animate);
}
animate();
// Convert canvas to video stream
const stream = canvas.captureStream(30);
remoteVideo.srcObject = stream;
log('Simulated remote video stream created');
}
async function createAnswer() {
log('Answer creation is demonstrated as part of the offer flow above');
log('In a real implementation, the remote peer would call createAnswer()');
}
function stopStream() {
if (localStream) {
localStream.getTracks().forEach(track => {
track.stop();
log(`Stopped ${track.kind} track`);
});
localStream = null;
localVideo.srcObject = null;
}
if (peerConnection) {
peerConnection.close();
peerConnection = null;
log('Peer connection closed');
}
remoteVideo.srcObject = null;
document.getElementById('offerBtn').disabled = true;
document.getElementById('answerBtn').disabled = true;
updateStats();
log('All streams stopped');
}
function clearLogs() {
logArea.innerHTML = '<div>Logs cleared...</div>';
}
// Initialize stats display
updateStats();
// Update stats periodically
setInterval(updateStats, 1000);
// Log browser capabilities
log('WebRTC Browser Support Check:');
log(`getUserMedia: ${!!navigator.mediaDevices?.getUserMedia}`);
log(`RTCPeerConnection: ${!!window.RTCPeerConnection}`);
log(`RTCDataChannel: ${!!window.RTCDataChannel}`);
// Log available devices
if (navigator.mediaDevices?.enumerateDevices) {
navigator.mediaDevices.enumerateDevices().then(devices => {
const videoDevices = devices.filter(d => d.kind === 'videoinput').length;
const audioDevices = devices.filter(d => d.kind === 'audioinput').length;
log(`Available devices: ${videoDevices} cameras, ${audioDevices} microphones`);
});
}
</script>
</body>
</html>