WebRTC (Web Real-Time Communication) provides a powerful set of JavaScript APIs, enabling developers to implement real-time audio, video, and data transmission in browsers. Below, I will comprehensively introduce the core WebRTC JavaScript APIs and their usage.
Core API Overview
WebRTC primarily consists of the following three core APIs:
- getUserMedia – Access user media devices.
- RTCPeerConnection – Manage peer-to-peer connections.
- RTCDataChannel – Transmit arbitrary data between peers.
getUserMedia API Explained
Basic Usage
The getUserMedia API prompts the user for permission and, once granted, resolves with a media stream from the selected devices (camera and/or microphone):
// Basic syntax
navigator.mediaDevices.getUserMedia(constraints)
.then(stream => {
// Successfully acquired media stream
console.log('Successfully acquired media stream:', stream);
})
.catch(err => {
// Handle errors
console.error('Failed to acquire media stream:', err);
});
// Example: Acquire camera and microphone
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(stream => {
// Assign stream to video element
document.getElementById('localVideo').srcObject = stream;
});

Constraints
Constraints allow you to specify parameters for the media stream:
// Video constraints example
const videoConstraints = {
width: { ideal: 1280 }, // Ideal width
height: { ideal: 720 }, // Ideal height
frameRate: { ideal: 30 }, // Ideal frame rate
facingMode: 'user' // 'user' for front camera, 'environment' for rear camera
};
// Audio constraints example
const audioConstraints = {
echoCancellation: true, // Echo cancellation
noiseSuppression: true, // Noise suppression
sampleRate: 48000 // Sample rate
};
// Combined constraints
const constraints = {
video: videoConstraints,
audio: audioConstraints
};
navigator.mediaDevices.getUserMedia(constraints)
.then(stream => {
// Process media stream
});

Media Stream (MediaStream) Operations
After acquiring a media stream, you can perform various operations on it:
let stream;
// Acquire media stream
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(s => {
stream = s;
document.getElementById('localVideo').srcObject = stream;
});
// Release the local media: stop every track, then drop the stream reference.
function stopStream() {
  if (!stream) return;
  for (const track of stream.getTracks()) {
    track.stop();
  }
  stream = null;
}
// Switch camera
async function switchCamera() {
if (!stream) return;
const devices = await navigator.mediaDevices.enumerateDevices();
const videoDevices = devices.filter(device => device.kind === 'videoinput');
if (videoDevices.length < 2) return;
const currentTrack = stream.getVideoTracks()[0];
const currentDeviceId = currentTrack.getSettings().deviceId;
const newDeviceId = videoDevices.find(d => d.deviceId !== currentDeviceId).deviceId;
const newStream = await navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: newDeviceId } },
audio: true
});
stream.getVideoTracks()[0].stop();
stream.addTrack(newStream.getVideoTracks()[0]);
}RTCPeerConnection API Explained
Creating a Connection
RTCPeerConnection is the core class of WebRTC, used to manage peer-to-peer connections:
// Example configuration (includes STUN server).
// NOTE: this must be defined BEFORE the connection is created — the original
// snippet ran `new RTCPeerConnection(configuration)` first, which throws a
// ReferenceError (temporal dead zone), and then redeclared `const pc`.
const configuration = {
  iceServers: [
    { urls: 'stun:stun.l.google.com:19302' }
    // Optional TURN server
    // { urls: 'turn:your-turn-server.com', username: 'user', credential: 'pass' }
  ]
};
// Basic syntax for creating a connection: const pc = new RTCPeerConnection(configuration);
const pc = new RTCPeerConnection(configuration);

Adding Media Tracks
Add a local media stream to the connection:
// Assuming localStream has been acquired
localStream.getTracks().forEach(track => {
pc.addTrack(track, localStream);
});
// Or add each track individually (equivalent to the forEach above)
pc.addTrack(localStream.getVideoTracks()[0], localStream);
pc.addTrack(localStream.getAudioTracks()[0], localStream);

SDP Exchange
SDP (Session Description Protocol) describes the parameters of a media session:
// Create an SDP offer, install it as the local description, and forward it to
// the remote peer through the signaling server.
async function createOffer() {
  const options = {
    offerToReceiveAudio: true,
    offerToReceiveVideo: true
  };
  try {
    const offer = await pc.createOffer(options);
    await pc.setLocalDescription(offer);
    // Send offer to peer via signaling server
    sendToPeer({ type: 'offer', sdp: pc.localDescription });
  } catch (error) {
    console.error('Failed to create offer:', error);
  }
}
// Receiver side of the handshake: accept the remote offer, produce an answer,
// install it locally, and send it back over signaling.
async function handleOffer(offer) {
  try {
    const remoteDesc = new RTCSessionDescription(offer);
    await pc.setRemoteDescription(remoteDesc);
    const answer = await pc.createAnswer();
    await pc.setLocalDescription(answer);
    sendToPeer({ type: 'answer', sdp: pc.localDescription });
  } catch (error) {
    console.error('Failed to handle offer:', error);
  }
}
// Handle received answer
async function handleAnswer(answer) {
try {
await pc.setRemoteDescription(new RTCSessionDescription(answer));
} catch (err) {
console.error('Failed to handle answer:', err);
}
}ICE Candidate Exchange
ICE (Interactive Connectivity Establishment) is used for network traversal:
// Monitor ICE candidate collection
pc.onicecandidate = event => {
if (event.candidate) {
// Send ICE candidate to peer via signaling server
sendToPeer({
type: 'ice-candidate',
candidate: event.candidate
});
}
};
// Handle received ICE candidate
async function handleIceCandidate(candidate) {
try {
await pc.addIceCandidate(new RTCIceCandidate(candidate));
} catch (err) {
console.error('Failed to add ICE candidate:', err);
}
}
// ICE restart (when connection is lost)
function restartIce() {
pc.restartIce();
}Connection State Monitoring
// Monitor ICE connection state changes
// (states not listed below — 'new', 'checking', 'completed' — fall through silently)
pc.oniceconnectionstatechange = () => {
console.log('ICE connection state:', pc.iceConnectionState);
switch (pc.iceConnectionState) {
case 'connected':
console.log('Connection established');
break;
case 'disconnected':
console.log('Connection lost (may recover)');
break;
case 'failed':
console.log('Connection failed (requires restart)');
// attempt automatic recovery via ICE restart (restartIce defined earlier)
restartIce();
break;
case 'closed':
console.log('Connection closed');
break;
}
};
// Monitor signaling state changes
// (tracks SDP offer/answer progress; 'stable' means no exchange is pending)
pc.onsignalingstatechange = () => {
console.log('Signaling state:', pc.signalingState);
switch (pc.signalingState) {
case 'stable':
console.log('Signaling state stable');
break;
case 'have-local-offer':
console.log('Local offer created');
break;
case 'have-remote-offer':
console.log('Remote offer received');
break;
case 'closed':
console.log('Signaling channel closed');
break;
}
};
// Monitor connection state changes (higher-level state)
pc.onconnectionstatechange = () => {
console.log('Connection state:', pc.connectionState);
switch (pc.connectionState) {
case 'connected':
console.log('Connection established');
break;
case 'disconnected':
console.log('Connection lost');
break;
case 'failed':
console.log('Connection failed');
break;
case 'closed':
console.log('Connection closed');
break;
}
};

Error Handling
// Error handling
pc.onerror = error => {
console.error('RTCPeerConnection error:', error);
};
// Manually close connection
function closeConnection() {
if (pc) {
pc.close();
pc = null;
}
}RTCDataChannel API Explained
Creating a Data Channel
RTCDataChannel is used to transmit arbitrary data between peers:
// Create a reliable data channel (default)
const reliableChannel = pc.createDataChannel('chat');
// Create an unreliable data channel (similar to UDP)
const unreliableChannel = pc.createDataChannel('game', {
ordered: false, // No message order guarantee
maxRetransmits: 0 // No retransmissions (completely unreliable)
});
// Create a partially reliable data channel (based on message count)
const partialReliableChannel = pc.createDataChannel('partial', {
ordered: true,
maxRetransmits: 3 // Maximum 3 retransmissions
});

Data Channel Events
// Monitor data channel state changes
dataChannel.onopen = () => {
console.log('Data channel opened');
// Can start sending data
};
dataChannel.onclose = () => {
console.log('Data channel closed');
};
dataChannel.onerror = error => {
console.error('Data channel error:', error);
};
// Receive messages
dataChannel.onmessage = event => {
if (typeof event.data === 'string') {
// Text message
console.log('Received text message:', event.data);
} else if (event.data instanceof Blob) {
// Binary data (Blob)
console.log('Received Blob data');
// Can use FileReader to read Blob content
} else if (event.data instanceof ArrayBuffer) {
// Binary data (ArrayBuffer)
console.log('Received ArrayBuffer data');
}
};

Sending Data
// Send a plain-text message; logs an error when the channel is not yet open.
function sendTextMessage(text) {
  if (dataChannel.readyState !== 'open') {
    console.error('Data channel not open');
    return;
  }
  dataChannel.send(text);
}
// Serialize a value as JSON and send it (silently dropped when not open).
function sendJsonData(data) {
  if (dataChannel.readyState === 'open') {
    dataChannel.send(JSON.stringify(data));
  }
}
// Send binary data as a Blob (silently dropped when not open).
function sendBlobData(blob) {
  if (dataChannel.readyState === 'open') {
    dataChannel.send(blob);
  }
}
// Send ArrayBuffer
function sendArrayBuffer(buffer) {
if (dataChannel.readyState === 'open') {
dataChannel.send(buffer);
}
}

Closing Data Channel
// Close data channel
function closeDataChannel(channel) {
if (channel.readyState === 'open') {
channel.close();
}
}

Complete Example: One-to-One Video Call
Below is a complete HTML file demonstrating how to use WebRTC JavaScript APIs to implement a one-to-one video call:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebRTC One-to-One Video Call</title>
<style>
body {
font-family: Arial, sans-serif;
max-width: 800px;
margin: 0 auto;
padding: 20px;
}
.video-container {
display: flex;
flex-wrap: wrap;
gap: 20px;
margin-bottom: 20px;
}
video {
width: 320px;
height: 240px;
background-color: #000;
border: 1px solid #ccc;
}
.controls {
margin-bottom: 20px;
}
button {
padding: 8px 16px;
margin-right: 10px;
cursor: pointer;
}
#roomId {
padding: 8px;
width: 200px;
}
</style>
</head>
<body>
<h1>WebRTC One-to-One Video Call</h1>
<div class="controls">
<input type="text" id="roomId" placeholder="Enter Room ID">
<button id="joinBtn">Join Room</button>
</div>
<div class="video-container">
<div>
<h3>Local Video</h3>
<video id="localVideo" autoplay playsinline muted></video>
</div>
<div>
<h3>Remote Video</h3>
<video id="remoteVideo" autoplay playsinline></video>
</div>
</div>
<div class="controls">
<button id="startBtn" disabled>Start Call</button>
<button id="hangupBtn" disabled>Hang Up</button>
</div>
<script>
// Global variables
let localStream;
let peerConnection;
let roomId;
let isInitiator = false;
let signalingChannel;
// DOM elements
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
const roomIdInput = document.getElementById('roomId');
const joinBtn = document.getElementById('joinBtn');
const startBtn = document.getElementById('startBtn');
const hangupBtn = document.getElementById('hangupBtn');
// Join room button click event: store the room ID and set up the (simulated)
// signaling channel. NOTE(review): nothing is registered with a server here —
// in a real app this is where the WebSocket join message would be sent.
joinBtn.addEventListener('click', () => {
roomId = roomIdInput.value.trim();
if (!roomId) {
alert('Please enter a room ID');
return;
}
// Create signaling channel (simulating WebSocket)
signalingChannel = createSignalingChannel();
// Enable start button
startBtn.disabled = false;
});
// Start call button click event: capture local media, build the peer
// connection, and send the initial offer. Whoever clicks Start becomes the
// initiator for this page.
startBtn.addEventListener('click', async () => {
try {
// Acquire local media stream
localStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
// The local <video> element is muted in the markup to avoid audio feedback.
localVideo.srcObject = localStream;
// Create RTCPeerConnection
createPeerConnection();
// Add local stream to connection
localStream.getTracks().forEach(track => {
peerConnection.addTrack(track, localStream);
});
// If initiator, create offer
isInitiator = true;
const offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(offer);
// Send offer via signaling channel
signalingChannel.send({
type: 'offer',
sdp: peerConnection.localDescription,
roomId: roomId
});
startBtn.disabled = true;
hangupBtn.disabled = false;
} catch (err) {
// getUserMedia rejects when permission is denied or no device exists.
console.error('Failed to start call:', err);
alert('Failed to acquire media stream: ' + err.message);
}
});
// Hang up button click event: tear down the peer connection, stop all local
// tracks (releases camera/microphone), and reset the UI to its idle state.
hangupBtn.addEventListener('click', () => {
if (peerConnection) {
peerConnection.close();
peerConnection = null;
}
if (localStream) {
// Stopping the tracks turns off the camera light / releases the devices.
localStream.getTracks().forEach(track => track.stop());
localStream = null;
localVideo.srcObject = null;
remoteVideo.srcObject = null;
}
startBtn.disabled = false;
hangupBtn.disabled = true;
});
// Create signaling channel (simulating WebSocket).
// NOTE(review): this mock demonstrates the message FLOW only. The simulated
// answer and candidate carry empty placeholder objects, so a real
// setRemoteDescription / addIceCandidate call on them would reject — do not
// expect media to actually connect with this stub in place.
function createSignalingChannel() {
// In real applications, this should create a WebSocket connection
// Here, we use a simple object to simulate the signaling channel
return {
// Pretend to deliver `data` to the remote peer, then loop the scripted
// responses back into onSignalingMessage after a fake network delay.
send: function(data) {
console.log('Sending signaling:', data);
// Simulate network delay
setTimeout(() => {
// Simulate receiver processing signaling
if (data.type === 'offer') {
// Simulate receiver creating answer
setTimeout(() => {
const simulatedAnswer = {
type: 'answer',
sdp: { /* Simulated SDP */ },
roomId: data.roomId
};
onSignalingMessage(simulatedAnswer);
// Simulate ICE candidate exchange
setTimeout(() => {
const simulatedCandidate = {
type: 'ice-candidate',
candidate: { /* Simulated ICE candidate */ },
roomId: data.roomId
};
onSignalingMessage(simulatedCandidate);
}, 100);
}, 100);
} else if (data.type === 'ice-candidate') {
// Simulate receiving ICE candidate
setTimeout(() => {
onSignalingMessage(data);
}, 100);
}
}, 100);
},
onmessage: function(message) {
// In real applications, this would receive messages from WebSocket
// Here, we directly call the handler function
onSignalingMessage(message);
}
};
}
// Handle signaling messages: dispatch offer / answer / ice-candidate by role.
// NOTE(review): rejections from the awaited calls are not caught here, so a
// bad SDP or candidate becomes an unhandled promise rejection. Candidates
// that arrive before setRemoteDescription may also fail — production code
// usually queues them.
async function onSignalingMessage(message) {
console.log('Received signaling message:', message);
if (message.roomId !== roomId) {
// Not a message for the current room
return;
}
switch (message.type) {
case 'offer':
// If receiver
if (!isInitiator) {
// Create RTCPeerConnection
createPeerConnection();
// Set remote description
await peerConnection.setRemoteDescription(new RTCSessionDescription(message.sdp));
// Create answer
const answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(answer);
// Send answer
signalingChannel.send({
type: 'answer',
sdp: peerConnection.localDescription,
roomId: roomId
});
}
break;
case 'answer':
// If initiator
if (isInitiator) {
// Set remote description
await peerConnection.setRemoteDescription(new RTCSessionDescription(message.sdp));
}
break;
case 'ice-candidate':
// Add ICE candidate
if (peerConnection) {
await peerConnection.addIceCandidate(new RTCIceCandidate(message.candidate));
}
break;
}
}
// Create RTCPeerConnection and wire up its event handlers; assigns the new
// connection to the shared `peerConnection` variable used by the rest of the
// page.
function createPeerConnection() {
// Configure ICE servers (using Google's public STUN server)
const configuration = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' }
// In real applications, you may need to add a TURN server
// { urls: 'turn:your-turn-server.com', username: 'user', credential: 'pass' }
]
};
peerConnection = new RTCPeerConnection(configuration);
// Monitor ICE candidates: forward each locally-gathered candidate to the peer.
peerConnection.onicecandidate = event => {
if (event.candidate) {
signalingChannel.send({
type: 'ice-candidate',
candidate: event.candidate,
roomId: roomId
});
}
};
// Monitor remote stream: attach the first remote stream to the video element.
peerConnection.ontrack = event => {
remoteVideo.srcObject = event.streams[0];
};
// Monitor connection state changes
peerConnection.oniceconnectionstatechange = () => {
console.log('ICE connection state:', peerConnection.iceConnectionState);
if (peerConnection.iceConnectionState === 'connected') {
console.log('Connection established');
} else if (peerConnection.iceConnectionState === 'failed' ||
peerConnection.iceConnectionState === 'disconnected') {
console.error('Connection failed or disconnected');
// Add reconnection logic here if needed
}
};
}
</script>
</body>
</html>

Advanced API Features
Media Stream Track Control
// Mute/unmute: flip `enabled` on the first audio track. Disabling a track
// keeps the device open but stops transmitting its content.
function toggleMute() {
  const [audioTrack] = localStream.getAudioTracks();
  if (!audioTrack) return;
  audioTrack.enabled = !audioTrack.enabled;
  console.log('Audio status:', audioTrack.enabled ? 'On' : 'Muted');
}
// Disable/enable video: flip `enabled` on the first video track.
function toggleVideo() {
  const [videoTrack] = localStream.getVideoTracks();
  if (!videoTrack) return;
  videoTrack.enabled = !videoTrack.enabled;
  console.log('Video status:', videoTrack.enabled ? 'On' : 'Disabled');
}
// Replace video track (e.g., switch camera)
async function replaceVideoTrack() {
if (!localStream) return;
const newStream = await navigator.mediaDevices.getUserMedia({ video: true });
const newVideoTrack = newStream.getVideoTracks()[0];
// Replace track
localStream.getVideoTracks()[0].stop();
localStream.addTrack(newVideoTrack);
// If connection is established, renegotiation is needed
if (peerConnection) {
// Create offer
const offer = await peerConnection.createOffer({
offerToReceiveAudio: true,
offerToReceiveVideo: true
});
await peerConnection.setLocalDescription(offer);
// Send offer to peer
signalingChannel.send({
type: 'offer',
sdp: peerConnection.localDescription,
roomId: roomId
});
}
}Statistics Retrieval
// Retrieve connection statistics from the peer connection's stats report.
// NOTE(review): bytesReceived/packetsLost/jitter are inbound-rtp fields and
// bytesSent is an outbound-rtp field, so for any single report roughly half
// of the logged object's fields will be undefined — split by report.type if
// exact values matter.
async function getStats() {
if (!peerConnection) return;
try {
const stats = await peerConnection.getStats();
stats.forEach(report => {
console.log('Statistics report:', report);
// Parse various statistics
if (report.type === 'inbound-rtp' || report.type === 'outbound-rtp') {
console.log('Media statistics:', {
bytesReceived: report.bytesReceived,
bytesSent: report.bytesSent,
packetsLost: report.packetsLost,
jitter: report.jitter
});
}
});
} catch (err) {
console.error('Failed to retrieve statistics:', err);
}
}
// Periodically retrieve statistics
setInterval(getStats, 5000);

Advanced Data Channel Features
// Create reliable data channel
const reliableChannel = pc.createDataChannel('chat');
// Create partially reliable data channel (based on message count)
const partialReliableChannel = pc.createDataChannel('game', {
ordered: true,
maxRetransmits: 3
});
// Send large file (chunked transmission) over the reliable channel.
// Improvement over the original: the loop now checks bufferedAmount, so a
// fast producer cannot queue unbounded data into the SCTP send buffer (which
// can error and close the channel when it overflows).
async function sendFile(file) {
  if (!reliableChannel || reliableChannel.readyState !== 'open') {
    console.error('Data channel not open');
    return;
  }
  const chunkSize = 16384; // 16KB
  const maxBuffered = 1048576; // pause sending while more than 1 MB is queued
  let offset = 0;
  while (offset < file.size) {
    if (reliableChannel.bufferedAmount > maxBuffered) {
      // Simple backpressure: wait briefly for the buffer to drain.
      await new Promise(resolve => setTimeout(resolve, 50));
      continue;
    }
    const chunk = file.slice(offset, offset + chunkSize);
    const arrayBuffer = await chunk.arrayBuffer();
    reliableChannel.send(arrayBuffer);
    offset += chunkSize;
    // Can add progress display (clamped so the log never exceeds file.size)
    console.log(`Sent ${Math.min(offset, file.size)} / ${file.size} bytes`);
  }
  console.log('File transmission completed');
}
// Receive file: accumulate incoming ArrayBuffer chunks and their total size.
let receivedFile;
let receivedSize = 0;
const chunks = [];
// NOTE(review): this handler only accepts ArrayBuffer payloads. If the
// channel's binaryType delivers Blobs instead, chunks are silently ignored —
// set `reliableChannel.binaryType = 'arraybuffer'` on the receiver to be
// safe (TODO confirm against target browsers' default).
reliableChannel.onmessage = event => {
if (event.data instanceof ArrayBuffer) {
chunks.push(event.data);
receivedSize += event.data.byteLength;
// Can display reception progress
console.log(`Received ${receivedSize} bytes`);
// If total file size is known, check if reception is complete
// if (receivedSize >= expectedFileSize) {
// assembleFile();
// }
}
};
// Reassemble the accumulated ArrayBuffer chunks into a single Blob and store
// it in `receivedFile`.
function assembleFile() {
  if (chunks.length === 0) return;
  // Calculate total size
  let totalSize = 0;
  for (const chunk of chunks) {
    totalSize += chunk.byteLength;
  }
  // Copy every chunk into one contiguous buffer
  const merged = new Uint8Array(totalSize);
  let position = 0;
  for (const chunk of chunks) {
    merged.set(new Uint8Array(chunk), position);
    position += chunk.byteLength;
  }
  // Create Blob object from the merged bytes
  receivedFile = new Blob([merged.buffer]);
  // Can process received file here
  console.log('File reception completed, size:', receivedFile.size);
}



