OpenCV.js has many practical applications in real development. Here are several typical real-world use cases:
1. Web-based Image Editor
Feature Implementation
class ImageEditor {
  /**
   * Canvas-backed image editor built on OpenCV.js.
   * @param {string} canvasId - id of the <canvas> used for display and editing.
   */
  constructor(canvasId) {
    this.canvas = document.getElementById(canvasId);
    this.ctx = this.canvas.getContext('2d');
    this.originalImage = null; // cv.Mat snapshot of the loaded image (pristine)
    this.currentImage = null;  // cv.Mat working copy that filters mutate
  }

  /**
   * Load an image file onto the canvas and snapshot it into cv.Mats.
   * Frees Mats from any previous load and revokes the temporary blob URL
   * (the original leaked both on every repeated load).
   * @param {Blob} file - image file, e.g. from an <input type="file">.
   * @returns {Promise<void>} resolves once the Mats are captured.
   */
  loadImage(file) {
    return new Promise((resolve, reject) => {
      const img = new Image();
      const url = URL.createObjectURL(file);
      img.onload = () => {
        URL.revokeObjectURL(url); // release the blob URL as soon as decoded
        this.canvas.width = img.width;
        this.canvas.height = img.height;
        this.ctx.drawImage(img, 0, 0);
        // WASM Mats are manually managed: free previous ones before replacing.
        this.originalImage?.delete();
        this.currentImage?.delete();
        this.originalImage = cv.imread(this.canvas);
        this.currentImage = this.originalImage.clone();
        resolve();
      };
      img.onerror = (err) => {
        URL.revokeObjectURL(url);
        reject(err);
      };
      img.src = url;
    });
  }

  /**
   * Apply a named filter to the working image in place and redraw the canvas.
   * @param {'grayscale'|'blur'|'sharpen'|'edge'} filterType
   */
  applyFilter(filterType) {
    const temp = new cv.Mat();
    try {
      switch (filterType) {
        case 'grayscale':
          // Round-trip through gray so currentImage stays 4-channel RGBA.
          cv.cvtColor(this.currentImage, temp, cv.COLOR_RGBA2GRAY);
          cv.cvtColor(temp, this.currentImage, cv.COLOR_GRAY2RGBA);
          break;
        case 'blur':
          cv.GaussianBlur(this.currentImage, temp, new cv.Size(15, 15), 0);
          temp.copyTo(this.currentImage);
          break;
        case 'sharpen': {
          const kernel = cv.matFromArray(3, 3, cv.CV_32FC1, [
            0, -1, 0,
            -1, 5, -1,
            0, -1, 0,
          ]);
          try {
            cv.filter2D(this.currentImage, temp, -1, kernel);
            temp.copyTo(this.currentImage);
          } finally {
            kernel.delete(); // freed even if filter2D throws
          }
          break;
        }
        case 'edge':
          cv.cvtColor(this.currentImage, temp, cv.COLOR_RGBA2GRAY);
          cv.Canny(temp, temp, 50, 100);
          cv.cvtColor(temp, this.currentImage, cv.COLOR_GRAY2RGBA);
          break;
      }
      cv.imshow(this.canvas.id, this.currentImage);
    } finally {
      temp.delete();
    }
  }

  /**
   * Shift pixel brightness by `value` (negative darkens) and redraw.
   * @param {number} value - additive brightness offset.
   */
  adjustBrightness(value) {
    const temp = new cv.Mat();
    try {
      this.currentImage.convertTo(temp, -1, 1, value);
      temp.copyTo(this.currentImage);
      cv.imshow(this.canvas.id, this.currentImage);
    } finally {
      temp.delete();
    }
  }

  /** Discard all edits and restore the originally loaded image. */
  reset() {
    // Free the old working copy first (the original leaked it on every reset).
    this.currentImage?.delete();
    this.currentImage = this.originalImage.clone();
    cv.imshow(this.canvas.id, this.currentImage);
  }

  /** Download the current canvas contents as a PNG file. */
  download() {
    const link = document.createElement('a');
    link.download = 'edited-image.png';
    link.href = this.canvas.toDataURL();
    link.click();
  }
}
2. Real-time Face Detection and Recognition
class FaceDetector {
  /**
   * Real-time Haar-cascade face detector that overlays boxes on a video feed.
   * @param {string} videoId - id of the source <video> element.
   * @param {string} canvasId - id of the output <canvas>.
   */
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.faceCascade = new cv.CascadeClassifier();
    this.isRunning = false;
  }

  /** Load the face model, then open the camera and size the canvas to it. */
  async init() {
    await this.loadModel('haarcascade_frontalface_default.xml');
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { width: 640, height: 480 },
    });
    this.video.srcObject = stream;
    await this.video.play();
    this.canvas.width = this.video.videoWidth;
    this.canvas.height = this.video.videoHeight;
  }

  /**
   * Load a Haar cascade by path.
   * CascadeClassifier.load() returns false on failure; the original wrapped
   * it in a Promise that resolved unconditionally, hiding load errors.
   * NOTE(review): load() reads from Emscripten's virtual FS — the XML must
   * already have been written there (e.g. via cv.FS_createDataFile); confirm
   * the page does that before init() is called.
   * @param {string} url - path of the cascade XML in the virtual FS.
   * @throws {Error} when the cascade cannot be loaded.
   */
  async loadModel(url) {
    if (!this.faceCascade.load(url)) {
      throw new Error(`Failed to load cascade: ${url}`);
    }
  }

  /** Begin the detection loop. */
  start() {
    this.isRunning = true;
    this.detect();
  }

  /** Stop the detection loop after the current frame. */
  stop() {
    this.isRunning = false;
  }

  /** Grab one frame, detect faces, draw overlays, schedule the next frame. */
  detect() {
    if (!this.isRunning) return;
    // imread returns a fresh Mat; the original pre-allocated `new cv.Mat()`
    // and leaked it on reassignment every frame.
    let src = null;
    const gray = new cv.Mat();
    const faces = new cv.RectVector();
    try {
      src = cv.imread(this.video);
      cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
      this.faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);
      for (let i = 0; i < faces.size(); ++i) {
        const face = faces.get(i);
        const topLeft = new cv.Point(face.x, face.y);
        const bottomRight = new cv.Point(face.x + face.width, face.y + face.height);
        cv.rectangle(src, topLeft, bottomRight, [255, 0, 0, 255], 2);
        // Label each detection just above its box.
        cv.putText(src, `Face ${i + 1}`, new cv.Point(face.x, face.y - 10),
          cv.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0, 255], 1);
      }
      cv.imshow(this.canvas.id, src);
      requestAnimationFrame(() => this.detect());
    } finally {
      src?.delete();
      gray.delete();
      faces.delete();
    }
  }
}
3. OCR Text Recognition
class OCRProcessor {
  constructor() {
    this.tesseract = null; // Tesseract.js worker handle
  }

  /**
   * Create and initialize the Tesseract worker (Tesseract.js v2 API).
   * The original skipped `await worker.load()`, which v2 requires before
   * loadLanguage(); newer (v3+) releases fold load/loadLanguage/initialize
   * into an awaited createWorker() — TODO confirm which version is bundled.
   */
  async init() {
    this.tesseract = Tesseract.createWorker({
      logger: (m) => console.log(m),
    });
    await this.tesseract.load();
    await this.tesseract.loadLanguage('eng');
    await this.tesseract.initialize('eng');
  }

  /**
   * Grayscale + denoise + Otsu-binarize an image to improve OCR accuracy,
   * and display the result on #preprocessedCanvas.
   * @param {HTMLImageElement|HTMLCanvasElement} imageElement
   * @returns {Promise<cv.Mat>} binarized Mat — the CALLER must delete() it.
   */
  async preprocessImage(imageElement) {
    const src = cv.imread(imageElement);
    const gray = new cv.Mat();
    const denoised = new cv.Mat();
    const binary = new cv.Mat();
    try {
      cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
      // Median blur removes salt-and-pepper noise before thresholding.
      cv.medianBlur(gray, denoised, 3);
      // Otsu picks the threshold automatically (threshold arg is ignored).
      cv.threshold(denoised, binary, 0, 255, cv.THRESH_BINARY + cv.THRESH_OTSU);
      const canvas = document.getElementById('preprocessedCanvas');
      cv.imshow(canvas.id, binary);
      return binary;
    } catch (err) {
      binary.delete(); // only freed on failure; on success the caller owns it
      throw err;
    } finally {
      src.delete();
      gray.delete();
      denoised.delete();
    }
  }

  /**
   * Preprocess an image element and run OCR over it.
   * @param {HTMLImageElement|HTMLCanvasElement} imageElement
   * @returns {Promise<string>} recognized text.
   */
  async recognizeText(imageElement) {
    const processed = await this.preprocessImage(imageElement);
    try {
      // The preprocessed result was drawn to this canvas; OCR its PNG export.
      const canvas = document.getElementById('preprocessedCanvas');
      const imageData = canvas.toDataURL('image/png');
      const { data: { text } } = await this.tesseract.recognize(imageData);
      return text;
    } finally {
      processed.delete(); // freed even if recognize() rejects (original leaked)
    }
  }

  /** Shut down the Tesseract worker and release its resources. */
  async cleanup() {
    await this.tesseract.terminate();
  }
}
4. Real-time QR Code Scanner
class QRScanner {
  /**
   * Camera-based QR scanner: finds QR-sized contours with OpenCV.js and
   * decodes candidate regions with jsQR.
   * @param {string} videoId - id of the source <video> element.
   * @param {string} canvasId - id of the output <canvas>.
   */
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.isScanning = false;
  }

  /** Open the rear camera, size the canvas, and begin the scan loop. */
  async start() {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { facingMode: 'environment' },
    });
    this.video.srcObject = stream;
    await this.video.play();
    this.canvas.width = this.video.videoWidth;
    this.canvas.height = this.video.videoHeight;
    this.isScanning = true;
    this.scan();
  }

  /** Process one frame: find large contours, try to decode each as a QR code. */
  scan() {
    if (!this.isScanning) return;
    // imread returns a fresh Mat; the original pre-allocated one and leaked
    // it on reassignment. contours/hierarchy also leaked every frame.
    let src = null;
    const gray = new cv.Mat();
    const edges = new cv.Mat();
    const contours = new cv.MatVector();
    const hierarchy = new cv.Mat();
    try {
      src = cv.imread(this.video);
      cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
      cv.Canny(gray, edges, 50, 150);
      cv.findContours(edges, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);

      for (let i = 0; i < contours.size(); i++) {
        const contour = contours.get(i);
        try {
          // Skip small contours — too small to be a scannable QR code.
          if (cv.contourArea(contour) <= 1000) continue;
          cv.drawContours(src, contours, i, [0, 255, 0, 255], 2);
          const rect = cv.boundingRect(contour);
          // roi() is a strided VIEW into the full frame, so its .data is not
          // a tight RGBA buffer — clone() to get contiguous pixels for jsQR
          // (the original handed jsQR misaligned data).
          const view = src.roi(rect);
          const qrRegion = view.clone();
          view.delete();
          try {
            const imageData = new ImageData(
              new Uint8ClampedArray(qrRegion.data),
              qrRegion.cols,
              qrRegion.rows
            );
            const code = jsQR(imageData.data, imageData.width, imageData.height);
            if (code) {
              console.log('QR Code:', code.data);
              this.onQRCodeDetected(code.data);
            }
          } finally {
            qrRegion.delete();
          }
        } finally {
          contour.delete(); // MatVector.get() hands back a Mat we own
        }
      }

      cv.imshow(this.canvas.id, src);
      requestAnimationFrame(() => this.scan());
    } finally {
      src?.delete();
      gray.delete();
      edges.delete();
      contours.delete();
      hierarchy.delete();
    }
  }

  /** Stop the scan loop after the current frame. */
  stop() {
    this.isScanning = false;
  }

  /**
   * Called once per decoded QR code. Override to handle the payload.
   * @param {string} data - decoded QR text.
   */
  onQRCodeDetected(data) {
    console.log('QR Code detected:', data);
  }
}
5. Real-time Video Filters
class VideoFilter {
  /**
   * Applies a selectable real-time filter to a camera feed.
   * @param {string} videoId - id of the source <video> element.
   * @param {string} canvasId - id of the output <canvas>.
   */
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.currentFilter = 'none';
  }

  /** Open the camera, size the canvas, and begin the render loop. */
  async start() {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { width: 640, height: 480 },
    });
    this.video.srcObject = stream;
    await this.video.play();
    this.canvas.width = this.video.videoWidth;
    this.canvas.height = this.video.videoHeight;
    this.process();
  }

  /**
   * Select the active filter for subsequent frames.
   * @param {'none'|'grayscale'|'sepia'|'cartoon'|'emboss'} filterName
   */
  setFilter(filterName) {
    this.currentFilter = filterName;
  }

  /** Render one filtered frame and schedule the next. */
  process() {
    // imread returns a fresh Mat; the original pre-allocated one and leaked
    // it on reassignment every frame.
    let src = null;
    const dst = new cv.Mat();
    try {
      src = cv.imread(this.video);
      switch (this.currentFilter) {
        case 'grayscale':
          cv.cvtColor(src, dst, cv.COLOR_RGBA2GRAY);
          cv.cvtColor(dst, dst, cv.COLOR_GRAY2RGBA);
          break;
        case 'sepia':
          this.applySepia(src, dst);
          break;
        case 'cartoon':
          this.applyCartoon(src, dst);
          break;
        case 'emboss':
          this.applyEmboss(src, dst);
          break;
        default:
          src.copyTo(dst);
      }
      cv.imshow(this.canvas.id, dst);
      requestAnimationFrame(() => this.process());
    } finally {
      src?.delete();
      dst.delete();
    }
  }

  /**
   * Classic sepia tone via a 3x3 color transform.
   * cv.transform requires the kernel width to match the channel count, so
   * the RGBA frame is reduced to 3-channel RGB first (the original applied
   * a 3x3 kernel directly to 4-channel data, which throws). Kernel rows are
   * ordered for RGB output (R' = .393R+.769G+.189B); the original's row
   * order assumed BGR, but OpenCV.js imread produces RGBA.
   */
  applySepia(src, dst) {
    const rgb = new cv.Mat();
    const toned = new cv.Mat();
    const kernel = cv.matFromArray(3, 3, cv.CV_32FC1, [
      0.393, 0.769, 0.189,
      0.349, 0.686, 0.168,
      0.272, 0.534, 0.131,
    ]);
    try {
      cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB);
      cv.transform(rgb, toned, kernel);
      cv.cvtColor(toned, dst, cv.COLOR_RGB2RGBA);
    } finally {
      rgb.delete();
      toned.delete();
      kernel.delete();
    }
  }

  /**
   * Cartoon effect: bilateral-smooth the colors, then black out the edges.
   * bilateralFilter accepts only CV_8UC1/CV_8UC3, so the RGBA frame is
   * converted to RGB first (the original passed 4-channel data, which
   * throws). The edge mask is inverted before ANDing — ANDing with raw
   * Canny output, as the original did, keeps ONLY the edge pixels and
   * renders a near-black frame.
   */
  applyCartoon(src, dst) {
    const gray = new cv.Mat();
    const edges = new cv.Mat();
    const rgb = new cv.Mat();
    const color = new cv.Mat();
    const mask = new cv.Mat();
    try {
      cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
      cv.medianBlur(gray, gray, 7);
      cv.Canny(gray, edges, 50, 150);
      cv.bitwise_not(edges, edges); // white = keep, black = edge lines
      cv.cvtColor(edges, mask, cv.COLOR_GRAY2RGBA);
      cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB);
      cv.bilateralFilter(rgb, color, 9, 250, 250);
      cv.cvtColor(color, color, cv.COLOR_RGB2RGBA);
      cv.bitwise_and(color, mask, dst);
    } finally {
      gray.delete();
      edges.delete();
      rgb.delete();
      color.delete();
      mask.delete();
    }
  }

  /** Emboss effect via a directional 3x3 convolution kernel. */
  applyEmboss(src, dst) {
    const kernel = cv.matFromArray(3, 3, cv.CV_32FC1, [
      -2, -1, 0,
      -1, 1, 1,
      0, 1, 2,
    ]);
    try {
      cv.filter2D(src, dst, -1, kernel);
    } finally {
      kernel.delete(); // freed even if filter2D throws
    }
  }
}
These real-world examples demonstrate OpenCV.js applications in different scenarios. Developers can choose appropriate implementation approaches based on specific requirements.