Advanced WebSocket Applications
High-Performance WebSocket Server Implementation
Multi-Room Chat Server Support:
import { serve } from 'bun'
// Room registry shared by ALL connections: roomId -> Set of sockets.
// FIX: this must live at module scope. The original created it inside
// fetch(), giving every connection its own private map, so broadcasts
// could never reach any other client.
const rooms = new Map()
const server = serve({
  port: 8080,
  fetch(request) {
    if (request.headers.get('upgrade') === 'websocket') {
      // NOTE(review): current Bun upgrades via server.upgrade(request) plus a
      // top-level `websocket` handler object — confirm Bun.upgradeWebSocket
      // exists in the targeted Bun version.
      const { socket, response } = Bun.upgradeWebSocket(request)
      // Per-connection state: the room this socket has joined, if any.
      let currentRoomId = null
      socket.onopen = () => {
        console.log('WebSocket connected')
        sendSystemMessage(socket, 'Welcome to Bun Chat!')
      }
      socket.onmessage = (event) => {
        try {
          const message = JSON.parse(event.data)
          switch (message.type) {
            case 'JOIN_ROOM':
              handleJoinRoom(socket, message.roomId, rooms)
              currentRoomId = message.roomId
              break
            case 'CHAT_MESSAGE':
              if (currentRoomId) {
                broadcastToRoom(rooms, currentRoomId, {
                  type: 'CHAT_MESSAGE',
                  sender: message.sender,
                  content: message.content,
                  timestamp: Date.now()
                })
              }
              break
          }
        } catch (err) {
          // Malformed client JSON must not crash the server.
          console.error('Message parse error:', err)
        }
      }
      socket.onclose = () => {
        // Remove the socket from its room so broadcasts stop targeting it.
        if (currentRoomId) {
          leaveRoom(rooms, currentRoomId, socket)
        }
        console.log('WebSocket disconnected')
      }
      socket.onerror = (error) => {
        console.error('WebSocket error:', error)
      }
      return response
    }
    return new Response('HTTP Server', { status: 400 })
  }
})
// Room management functions
// Register `socket` as a member of `roomId`, lazily creating the room,
// then confirm the join to the client.
function handleJoinRoom(socket, roomId, rooms) {
  const members = rooms.get(roomId) ?? new Set()
  members.add(socket)
  rooms.set(roomId, members)
  sendSystemMessage(socket, `Joined room: ${roomId}`)
}
// Remove `socket` from `roomId`, dropping the room entirely once empty,
// and notify the client. No-op (and no message) if the room is unknown.
function leaveRoom(rooms, roomId, socket) {
  const members = rooms.get(roomId)
  if (!members) {
    return
  }
  members.delete(socket)
  if (members.size === 0) {
    rooms.delete(roomId)
  }
  sendSystemMessage(socket, `Left room: ${roomId}`)
}
// Serialize `message` once and fan it out to every OPEN socket in the room.
// Unknown rooms and non-open sockets are silently skipped.
function broadcastToRoom(rooms, roomId, message) {
  const members = rooms.get(roomId)
  if (!members) {
    return
  }
  const payload = JSON.stringify(message)
  for (const client of members) {
    if (client.readyState === WebSocket.OPEN) {
      client.send(payload)
    }
  }
}
// Push a server-originated notice to one client, wrapped in the
// SYSTEM_MESSAGE envelope the front end expects.
function sendSystemMessage(socket, content) {
  const envelope = { type: 'SYSTEM_MESSAGE', content }
  socket.send(JSON.stringify(envelope))
}
console.log('WebSocket Chat Server running on ws://localhost:8080')
WebSocket Performance Optimization
Binary Data Transfer Optimization:
// Dispatch incoming frames by payload type: binary frames arrive as a Blob,
// text frames as a string.
socket.onmessage = (event) => {
  const { data } = event
  if (data instanceof Blob) {
    // Binary payload: decode asynchronously via FileReader.
    const reader = new FileReader()
    reader.onload = () => {
      const view = new DataView(reader.result)
      // Parse binary protocol...
    }
    reader.readAsArrayBuffer(data)
  } else if (typeof data === 'string') {
    // Text payload: expected to be JSON.
    const message = JSON.parse(data)
    // ...
  }
}
// Binary data sending example
// Layout: bytes 0-3 = big-endian int32 id, bytes 4-11 = float64 value,
// bytes 12-15 unused padding.
const buffer = new ArrayBuffer(16)
const view = new DataView(buffer)
view.setInt32(0, 123456789)
view.setFloat64(4, 3.1415926)
socket.send(buffer)
Heartbeat Mechanism:
// Heartbeat configuration
const HEARTBEAT_INTERVAL = 30000 // 30 seconds
const HEARTBEAT_TIMEOUT = 60000 // 60 seconds
socket.onopen = () => {
// Start heartbeat: ping the peer every HEARTBEAT_INTERVAL while the socket is open
let heartbeatTimer = setInterval(() => {
if (socket.readyState === WebSocket.OPEN) {
socket.send(JSON.stringify({ type: 'HEARTBEAT' }))
}
}, HEARTBEAT_INTERVAL)
// Set timeout detection: close the connection if nothing arrives within HEARTBEAT_TIMEOUT
let timeoutTimer = setTimeout(() => {
socket.close(4001, 'Heartbeat timeout') // 4001 = application-defined close code
}, HEARTBEAT_TIMEOUT)
// Reset timeout timer on every inbound message (any traffic counts as liveness)
// NOTE(review): this assignment replaces any onmessage handler installed
// earlier (e.g. a chat handler) — merge the two if both are needed.
socket.onmessage = (event) => {
clearTimeout(timeoutTimer)
timeoutTimer = setTimeout(() => {
socket.close(4001, 'Heartbeat timeout')
}, HEARTBEAT_TIMEOUT)
// Handle messages...
}
socket.onclose = () => {
// Stop both timers so nothing fires against a closed socket
clearInterval(heartbeatTimer)
clearTimeout(timeoutTimer)
}
}
Advanced HTTP/2 and HTTP/3 Features
HTTP/2 Server Push
Resource Pre-Pushing Implementation:
import { serve } from 'bun'
// HTTP/2 server over TLS.
// NOTE(review): confirm the `https`/`http2` serve options and the
// `server.on('request')` event API against the targeted Bun release —
// Bun's serve() is normally driven by a fetch() handler.
const server = serve({
  port: 443,
  https: {
    certFile: './cert.pem',
    keyFile: './key.pem'
  },
  http2: true
})
server.on('request', (req, res) => {
  if (req.url !== '/') {
    return
  }
  // Resources to push ahead of the HTML response, keyed by path:
  // [content-type, body]. In real applications, read bodies from the file system.
  const pushResources = {
    '/styles.css': ['text/css', '/* CSS content */'],
    '/app.js': ['application/javascript', '// JS content'],
    '/logo.png': ['image/png', Buffer.from('PNG placeholder')]
  }
  for (const [path, [contentType, body]] of Object.entries(pushResources)) {
    const pushStream = res.push(path)
    if (!pushStream) {
      // Client declined the push (or pushes are disabled) — skip it.
      continue
    }
    pushStream.respond({
      ':status': 200,
      'content-type': contentType
    })
    pushStream.end(body)
  }
  // Main response
  res.respond({
    ':status': 200,
    'content-type': 'text/html'
  })
  res.end(`
<html>
<head>
<link rel="stylesheet" href="/styles.css">
<script src="/app.js"></script>
</head>
<body>
<img src="/logo.png">
</body>
</html>
`)
})
console.log('HTTP/2 Server with Push running on https://localhost:443')
Experimental HTTP/3 Support
QUIC Protocol Server:
// Note: Requires Bun's experimental feature support
import { serve } from 'bun'
// HTTP/3 rides on QUIC (UDP), so the TLS cert/key pair is configured
// under the `quic` option instead of `https`.
const server = serve({
  port: 443,
  quic: {
    certFile: './cert.pem',
    keyFile: './key.pem'
  }
})
// Minimal request handler: plain-text greeting on every request.
server.on('request', (req, res) => {
  const headers = { ':status': 200, 'content-type': 'text/plain' }
  res.respond(headers)
  res.end('Hello HTTP/3!')
})
console.log('HTTP/3 Server running (experimental)')
Advanced Proxy Server Implementation
Intelligent Load Balancing Proxy
Weighted Load Balancing:
import { serve } from 'bun'
// Backend server configuration: higher weight => proportionally more traffic.
const backends = [
  { url: 'http://backend1.example.com', weight: 3 },
  { url: 'http://backend2.example.com', weight: 2 },
  { url: 'http://backend3.example.com', weight: 1 }
]
// Rotating state for the weighted round-robin sweep.
// (The original also computed an unused `totalWeight`; removed.)
let currentIndex = 0
let currentWeight = 0
// Pick the next backend via a weighted round-robin sweep: maintain a
// descending weight threshold and select any backend whose weight meets it.
// Over one full cycle each backend is chosen `weight` times, interleaved.
function pickBackend() {
  while (true) {
    currentIndex = (currentIndex + 1) % backends.length
    if (currentIndex === 0) {
      currentWeight -= 1
      if (currentWeight <= 0) {
        // Sweep finished: reset the threshold to the maximum weight.
        currentWeight = backends.reduce((max, b) => Math.max(max, b.weight), 0)
      }
    }
    if (backends[currentIndex].weight >= currentWeight) {
      return backends[currentIndex]
    }
  }
}
const proxy = serve({
  port: 8080,
  fetch(request) {
    const backend = pickBackend()
    // Rewrite the incoming URL to target the chosen backend while
    // preserving the request's path and query string.
    const targetUrl = new URL(request.url)
    targetUrl.hostname = backend.url.replace(/^https?:\/\//, '')
    targetUrl.port = ''
    targetUrl.protocol = backend.url.startsWith('https') ? 'https:' : 'http:'
    // Forward the request as-is (method, headers, streaming body).
    return fetch(targetUrl.toString(), {
      method: request.method,
      headers: request.headers,
      body: request.body
    })
  }
})
console.log('Load Balancer Proxy running on http://localhost:8080')
Advanced Reverse Proxy Features
Request/Response Transformation Proxy:
import { serve } from 'bun'
const apiProxy = serve({
  port: 8080,
  // async/await replaces the original unhandled .then() chain: any upstream
  // failure now rejects the handler's promise instead of being swallowed.
  async fetch(request) {
    // 1. Rewrite the URL to target the upstream API, preserving the path.
    const url = new URL(request.url)
    const targetUrl = new URL('https://api.example.com' + url.pathname)
    // 2. Copy headers and inject upstream authentication.
    const headers = new Headers(request.headers)
    headers.set('Authorization', 'Bearer YOUR_API_KEY')
    // 3. Forward the request.
    const response = await fetch(targetUrl.toString(), {
      method: request.method,
      headers,
      body: request.body
    })
    // 4. Build outgoing headers: start from upstream's, then apply the
    // proxy's overrides LAST. (The original spread upstream headers over
    // the overrides, letting upstream clobber X-Proxy-By / Cache-Control.)
    const outHeaders = new Headers(response.headers)
    outHeaders.set('X-Proxy-By', 'Bun Proxy')
    outHeaders.set('Cache-Control', 'no-cache')
    // 5. JSON responses are pretty-printed; the body is read exactly once.
    // (The original wired response.body into a Response AND called
    // response.json(), reading the same stream twice.)
    if (response.headers.get('content-type')?.includes('application/json')) {
      const originalJson = await response.json()
      return new Response(JSON.stringify(originalJson, null, 2), {
        status: response.status,
        headers: outHeaders
      })
    }
    // Non-JSON: stream the upstream body through untouched.
    return new Response(response.body, {
      status: response.status,
      statusText: response.statusText,
      headers: outHeaders
    })
  }
})
console.log('API Reverse Proxy running on http://localhost:8080')
Deep Network Protocol Analysis
Custom TCP Protocol Implementation
Binary Protocol Server:
// NOTE(review): confirm this TCP class exists in the targeted Bun version —
// Bun's documented socket API is Bun.listen({ socket: {...} }).
import { TCP } from 'bun'
const server = new TCP({
port: 9000,
host: '0.0.0.0'
})
// Custom protocol format:
// [Message Type (1 byte)][Message Length (4 bytes, big-endian)][Message Body (N bytes)]
server.on('connection', (socket) => {
console.log('New TCP connection')
// Per-connection reassembly buffer: TCP is a byte stream, so one 'data'
// event may carry a partial message or several messages back-to-back.
let buffer = Buffer.alloc(0)
socket.on('data', (data) => {
buffer = Buffer.concat([buffer, data])
while (buffer.length >= 5) { // At least 1 byte type + 4 bytes length
const messageType = buffer.readUInt8(0)
const messageLength = buffer.readUInt32BE(1)
if (buffer.length < 5 + messageLength) {
// Insufficient data, wait for more
break
}
// Extract message body, then drop the consumed bytes from the buffer
const messageBody = buffer.slice(5, 5 + messageLength)
buffer = buffer.slice(5 + messageLength)
// Process message
handleMessage(socket, messageType, messageBody)
}
})
socket.on('close', () => {
console.log('TCP connection closed')
})
})
// Dispatch one framed message by its type byte:
// 0x01 = heartbeat (echo a zero-length heartbeat frame back),
// 0x02 = data payload; anything else is logged and dropped.
function handleMessage(socket, type, body) {
  if (type === 0x01) {
    console.log('Received heartbeat')
    // Reply frame: type 0x01, 4-byte big-endian length 0, no body.
    socket.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x00]))
  } else if (type === 0x02) {
    console.log('Received data:', body.toString())
    // Process data...
  } else {
    console.log('Unknown message type:', type)
  }
}
console.log('Custom TCP Protocol Server running on tcp://localhost:9000')
UDP Real-Time Data Transmission
Reliable UDP Implementation Example:
// NOTE(review): confirm this UDP class in the targeted Bun version —
// Bun's documented API is Bun.udpSocket().
import { UDP } from 'bun'
const udp = new UDP({
  // FIX: valid UDP ports are 0-65535; the original value (99301) was out
  // of range. 9001 fits the surrounding examples (TCP 9000, peer 9002).
  port: 9001
})
// Reliable UDP requires:
// 1. Sequence number
// 2. Acknowledgment mechanism
// 3. Timeout retransmission
// 4. Sliding window
// Simple example: UDP communication with sequence number
let sequenceNumber = 0
const pendingAcks = new Map() // seq -> { data, timestamp, retries }
// Inbound datagram handler: byte 0 is the sequence number, the rest is payload.
udp.on('message', (message, remote) => {
const [receivedSeq, ...data] = message
console.log(`Received packet ${receivedSeq} from ${remote.address}:${remote.port}`)
// Send ACK: 0xFF marker followed by the sequence number being acknowledged
udp.send([0xFF, receivedSeq], remote.port, remote.address)
// Process data...
// NOTE(review): `data` is an array of byte values here, so data.toString()
// joins them with commas rather than decoding text — confirm intent.
if (receivedSeq === sequenceNumber) {
console.log('In-order packet:', data.toString())
sequenceNumber++
} else {
console.log('Out-of-order packet, buffering...')
// In a real application, implement buffer management
}
})
// Send reliable message
// Prepend a 1-byte sequence number to `data`, remember the packet in
// pendingAcks until the receiver acknowledges it, then transmit.
// NOTE: the single-byte header means sequence numbers wrap at 256.
// FIX: the original line contained a stray garbled token ("метров") that
// made this function a syntax error.
function sendReliableMessage(data, remote) {
  const seq = sequenceNumber++
  const packet = Buffer.concat([
    Buffer.from([seq]),
    Buffer.from(data)
  ])
  // Track the in-flight packet so a retransmission timer can resend it.
  pendingAcks.set(seq, {
    data: packet,
    timestamp: Date.now(),
    retries: 0
  })
  udp.send(packet, remote.port, remote.address)
  // Start retransmission check (use timer in production)
}
// Usage example
sendReliableMessage('Hello UDP', { address: '127.0.0.1', port: 9002 })
Network Performance Optimization
Zero-Copy File Transfers
Efficient File Service Implementation:
import { serve } from 'bun'
const server = serve({
  port: 8080,
  // FIX: handler is now async. Bun.file's exists() returns a Promise, so the
  // original `!file.exists()` was always false (a Promise is truthy) and the
  // 404 path could never trigger.
  async fetch(request) {
    const url = new URL(request.url)
    const filePath = `./files${url.pathname}`
    try {
      const file = Bun.file(filePath)
      // 1. Check if the file exists (must be awaited)
      if (!(await file.exists())) {
        return new Response('Not Found', { status: 404 })
      }
      // 2. Get file stats for the size / mtime headers
      // NOTE(review): confirm BunFile exposes an async stat() in the targeted
      // version; `file.size` / `file.lastModified` are the documented fields.
      const stats = await file.stat()
      // 3. Set response headers
      const headers = new Headers({
        'Content-Type': getContentType(filePath),
        'Content-Length': stats.size.toString(),
        'Last-Modified': stats.mtime.toUTCString(),
        'Accept-Ranges': 'bytes'
      })
      // 4. Support range requests (resumable downloads)
      if (request.headers.get('range')) {
        return handleRangeRequest(file, headers, request)
      }
      // 5. Stream the file instead of buffering it in memory
      return new Response(file.stream(), { headers })
    } catch (err) {
      console.error('File error:', err)
      return new Response('Internal Server Error', { status: 500 })
    }
  }
})
// Map a file path's extension to its MIME type; unrecognized extensions
// fall back to application/octet-stream.
function getContentType(filePath) {
  const mimeTypes = {
    html: 'text/html',
    css: 'text/css',
    js: 'application/javascript',
    json: 'application/json',
    png: 'image/png',
    jpg: 'image/jpeg',
    gif: 'image/gif'
  }
  const extension = filePath.split('.').pop().toLowerCase()
  return mimeTypes[extension] ?? 'application/octet-stream'
}
// Serve a single-range HTTP Range request ("bytes=start-end").
// Returns 206 with the requested slice, or 416 when the range is
// malformed or unsatisfiable.
// FIXES vs original: parseInt now has an explicit radix; a NaN start
// (e.g. "bytes=-500") is rejected instead of slipping past the bounds
// checks; an over-long end is clamped to the file size per RFC 9110; and
// the slice is taken with file.slice(...).stream() — a ReadableStream has
// no .slice() method, so the original threw a TypeError at runtime.
async function handleRangeRequest(file, headers, request) {
  const rangeHeader = request.headers.get('range')
  // Strip the "bytes=" prefix, then split "start-end" (end may be empty).
  const [startStr, endStr] = rangeHeader.substring(6).split('-')
  const start = Number.parseInt(startStr, 10)
  // An omitted end means "through the last byte"; clamp to the file size.
  const end = endStr ? Math.min(Number.parseInt(endStr, 10), file.size - 1) : file.size - 1
  // NaN fails every comparison below, so it must be rejected explicitly.
  if (Number.isNaN(start) || Number.isNaN(end) || start > end || start >= file.size) {
    return new Response('Invalid Range', {
      status: 416,
      headers: {
        'Content-Range': `bytes */${file.size}`
      }
    })
  }
  const contentLength = end - start + 1
  headers.set('Content-Range', `bytes ${start}-${end}/${file.size}`)
  headers.set('Content-Length', contentLength.toString())
  headers.set('Accept-Ranges', 'bytes')
  // file.slice() returns a lazy view of the byte range; streaming it avoids
  // buffering the whole range in memory.
  return new Response(file.slice(start, end + 1).stream(), {
    status: 206,
    headers
  })
}
console.log('High Performance File Server running on http://localhost:8080')
Connection Reuse and Keep-Alive
HTTP Keep-Alive Optimization:
import { serve } from 'bun'
const server = serve({
  port: 8080,
  fetch(request) {
    // 1. Honor the client's Connection preference: anything other than an
    // explicit "close" keeps the connection alive (the HTTP/1.1 default).
    const connectionHeader = request.headers.get('connection')
    const keepAlive = !connectionHeader ||
      connectionHeader.toLowerCase() !== 'close'
    // 2. Process request
    const response = new Response('Hello Keep-Alive', {
      headers: {
        'content-type': 'text/plain',
        // 3. Explicitly set Connection header.
        // FIX: the original ternary was garbled ("keepAlive ?edio : 'close'");
        // the keep-alive branch must yield the literal 'keep-alive'.
        'connection': keepAlive ? 'keep-alive' : 'close'
      }
    })
    // 4. Set Keep-Alive timeout (Bun handles this automatically by default)
    // In production, configure via environment variables:
    // BUN_TCP_KEEPALIVE=1
    // BUN_TCP_KEEPALIVE_IDLE=30
    // BUN_TCP_KEEPALIVE_INTERVAL=5
    // BUN_TCP_KEEPALIVE_COUNT=5
    return response
  }
})
console.log('HTTP Keep-Alive Server running on http://localhost:8080')
Load Testing and Performance Analysis
Using wrk for Stress Testing:
# Install wrk
brew install wrk # macOS
# or apt-get install wrk # Linux
# Test HTTP server
wrk -t12 -c400 -d30s http://localhost:8080
# Test WebSocket server
wrk -t12 -c400 -d30s -s ws.lua http://localhost:8080
# ws.lua example:
request = function()
local ws = require("websocket").connect("ws://localhost:8080")
ws:send("ping")
ws:receive()
ws:close()
end
Performance Analysis Tools:
# CPU profiling
BUN_ENV_PROFILE=1 bun run server.js
# Generates profile.cpuprofile file, analyzable with Chrome DevTools
# Memory profiling
BUN_ENV_PROFILE_HEAP=1 bun run server.js
# Generates heapdump.heapsnapshot file, analyzable with Chrome DevTools
# Network tracing
BUN_ENV_PROFILE_NET=1 bun run server.js
# Outputs network request traces



