乐闻世界logo
搜索文章和话题

How to implement real-time video processing with OpenCV.js?

3月6日 21:36

OpenCV.js supports real-time video processing in the browser. Here's how to implement it:

1. Get Video Stream

javascript
// Acquire a 640x480 webcam stream, attach it to the <video id="videoInput">
// element, and hand off to processVideo() once playback has begun.
async function startVideo() {
  const video = document.getElementById('videoInput');
  try {
    const constraints = { video: { width: 640, height: 480 } };
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    video.srcObject = stream;
    await video.play();
    // Start processing video frames
    processVideo();
  } catch (err) {
    // Permission denied / no camera ends up here.
    console.error('Error accessing webcam:', err);
  }
}

2. Process Video Frames

javascript
// Continuously read frames from #videoInput, run Canny edge detection,
// and render the result onto #canvasOutput via cv.imshow.
function processVideo() {
  const video = document.getElementById('videoInput');
  const canvas = document.getElementById('canvasOutput');

  // Match the canvas to the incoming stream dimensions.
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;

  // OpenCV.js delivers RGBA frames, so the capture Mat must be CV_8UC4
  // and pre-sized to the video resolution.
  let src = new cv.Mat(video.videoHeight, video.videoWidth, cv.CV_8UC4);
  let dst = new cv.Mat();
  let cap = new cv.VideoCapture(video);

  function processFrame() {
    try {
      // Read video frame
      cap.read(src);
      // Image processing (example: edge detection)
      cv.cvtColor(src, dst, cv.COLOR_RGBA2GRAY);
      cv.Canny(dst, dst, 50, 100);
      // Display result
      cv.imshow('canvasOutput', dst);
      // Request next frame
      requestAnimationFrame(processFrame);
    } catch (err) {
      console.error('Error processing frame:', err);
      // FIX: once an error escapes, no further frame is scheduled, so
      // release the Emscripten-heap Mats instead of leaking them.
      // (The original also created an unused 2d context here; removed.)
      src.delete();
      dst.delete();
    }
  }
  processFrame();
}

3. Face Detection Example

javascript
// Detect faces in the #videoInput stream with a Haar cascade and draw
// red bounding boxes onto #canvasOutput.
function faceDetection() {
  const video = document.getElementById('videoInput');
  const canvas = document.getElementById('canvasOutput');

  // NOTE(review): in OpenCV.js load() reads from the Emscripten virtual
  // filesystem — the XML must be placed there first (e.g. via
  // cv.FS_createDataFile); confirm against your loading code.
  let faceCascade = new cv.CascadeClassifier();
  faceCascade.load('haarcascade_frontalface_default.xml');

  // FIX: VideoCapture.read() requires a pre-sized CV_8UC4 Mat (as the
  // other examples do); reading into an empty cv.Mat() throws.
  let src = new cv.Mat(video.videoHeight, video.videoWidth, cv.CV_8UC4);
  let gray = new cv.Mat();
  let faces = new cv.RectVector();
  let cap = new cv.VideoCapture(video);

  function detectFaces() {
    try {
      cap.read(src);
      // Haar cascades operate on single-channel images.
      cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
      // scaleFactor=1.1, minNeighbors=3, flags=0
      faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);
      // Draw face rectangles
      for (let i = 0; i < faces.size(); ++i) {
        let face = faces.get(i);
        let point1 = new cv.Point(face.x, face.y);
        let point2 = new cv.Point(face.x + face.width, face.y + face.height);
        cv.rectangle(src, point1, point2, [255, 0, 0, 255], 2);
      }
      cv.imshow('canvasOutput', src);
      requestAnimationFrame(detectFaces);
    } catch (err) {
      console.error('Error:', err);
      // FIX: release native resources when the loop terminates on error.
      src.delete();
      gray.delete();
      faces.delete();
      faceCascade.delete();
    }
  }
  detectFaces();
}

4. Performance Optimization Tips

Reduce Resolution

javascript
// Downscale, run the expensive processing on the small image, then
// scale the result back up to the source resolution.
let small = new cv.Mat();
cv.resize(src, small, new cv.Size(320, 240));
// ... heavy processing happens on `small` here ...
cv.resize(small, dst, new cv.Size(src.cols, src.rows));

Limit Frame Rate

javascript
// Throttle work to a target FPS: requestAnimationFrame fires at display
// refresh rate (commonly 60 Hz), which may be more often than needed.
let lastTime = 0;
const FPS = 30;

// FIX: when called directly (not via rAF) `timestamp` was undefined, so
// `timestamp - lastTime` was NaN and the frame was silently skipped.
// Defaulting to performance.now() keeps a direct first call working.
function processVideo(timestamp = performance.now()) {
  if (timestamp - lastTime >= 1000 / FPS) {
    // Process video frame
    lastTime = timestamp;
  }
  requestAnimationFrame(processVideo);
}

Use Web Worker

javascript
// ---- Main thread ----
const worker = new Worker('opencv-worker.js');

// Paint each processed frame the worker sends back.
worker.onmessage = (e) => {
  const { imageData } = e.data;
  ctx.putImageData(imageData, 0, 0);
};

// Ship the current canvas frame to the worker, transferring the pixel
// buffer (zero-copy) rather than cloning it.
function sendFrameToWorker() {
  const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
  worker.postMessage({ imageData }, [imageData.data.buffer]);
}

// ---- opencv-worker.js ----
self.onmessage = (e) => {
  const { imageData } = e.data;
  // Process image using OpenCV.js
  const result = processImage(imageData);
  self.postMessage({ imageData: result }, [result.data.buffer]);
};

5. Memory Management

javascript
// Correct lifetime management for Emscripten-heap Mats: allocate once,
// delete exactly once when processing ends.
function processVideo() {
  let src = new cv.Mat();
  let dst = new cv.Mat();
  let released = false;

  // FIX: the original deleted src/dst in a per-frame `finally`, which
  // frees them after the FIRST frame (later frames would touch freed
  // memory) and then double-deletes them on unload. Delete exactly
  // once, through this guarded cleanup.
  function cleanup() {
    if (released) return;
    released = true;
    src.delete();
    dst.delete();
  }

  function processFrame() {
    try {
      // Processing logic
    } catch (err) {
      console.error('Error processing frame:', err);
      cleanup(); // the loop is over — release native memory now
    }
  }

  // Cleanup on page unload
  window.addEventListener('beforeunload', cleanup);
}

6. Complete Example: Real-time Edge Detection

javascript
// Real-time edge-detection pipeline: webcam -> OpenCV.js Canny -> canvas.
class VideoProcessor {
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.ctx = this.canvas.getContext('2d');
    this.isProcessing = false;
  }

  // Acquire the camera, allocate OpenCV resources, start the frame loop.
  async start() {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: { width: 640, height: 480 }
      });
      this.video.srcObject = stream;
      await this.video.play();
      this.canvas.width = this.video.videoWidth;
      this.canvas.height = this.video.videoHeight;
      // VideoCapture.read() needs a pre-sized RGBA (CV_8UC4) Mat.
      this.src = new cv.Mat(this.video.videoHeight, this.video.videoWidth, cv.CV_8UC4);
      this.dst = new cv.Mat();
      this.cap = new cv.VideoCapture(this.video);
      this.isProcessing = true;
      this.processFrame();
    } catch (err) {
      console.error('Error starting video:', err);
    }
  }

  processFrame() {
    if (!this.isProcessing) return;
    try {
      this.cap.read(this.src);
      cv.cvtColor(this.src, this.dst, cv.COLOR_RGBA2GRAY);
      cv.Canny(this.dst, this.dst, 50, 100);
      cv.imshow(this.canvas.id, this.dst);
      requestAnimationFrame(() => this.processFrame());
    } catch (err) {
      console.error('Error processing frame:', err);
      // FIX: a frame error ends the loop, so release resources instead
      // of leaking the Mats.
      this.stop();
    }
  }

  // Stop processing, free native memory, and turn the camera off.
  stop() {
    this.isProcessing = false;
    // FIX: guard so stop() is safe before start() and on repeated calls
    // (double delete() on a Mat is an error in OpenCV.js).
    if (this.src) { this.src.delete(); this.src = null; }
    if (this.dst) { this.dst.delete(); this.dst = null; }
    // FIX: the original never stopped the MediaStream tracks, so the
    // camera (and its indicator light) stayed on after stop().
    const stream = this.video.srcObject;
    if (stream) {
      stream.getTracks().forEach((track) => track.stop());
      this.video.srcObject = null;
    }
  }
}

// Usage
const processor = new VideoProcessor('videoInput', 'canvasOutput');
processor.start();
标签:Opencv.js