WebGL Post-processing Overview
Post-processing is a technique that performs image processing on rendering results after scene rendering is complete. Through post-processing, various visual effects can be achieved, such as blur, bloom, tone mapping, anti-aliasing, etc.
Basic Post-processing Principle
The core workflow of post-processing:
- Render the scene to a texture (offscreen rendering)
- Perform various image processing on the texture
- Render the processed result to the screen
Scene rendering → Color texture → Post-processing shader → Screen
                        ↓
                 Depth texture (optional)
Basic Post-processing Framework
Creating Post-processing Resources
class PostProcess {
    /**
     * An offscreen render target (color texture + depth renderbuffer)
     * plus a fullscreen quad used to run post-processing shaders.
     * @param {WebGLRenderingContext} gl
     * @param {number} width  Render target width in pixels.
     * @param {number} height Render target height in pixels.
     */
    constructor(gl, width, height) {
        this.gl = gl;
        this.width = width;
        this.height = height;

        // Color texture the scene will be rendered into.
        this.colorTexture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, this.colorTexture);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0,
                      gl.RGBA, gl.UNSIGNED_BYTE, null);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        // CLAMP_TO_EDGE is required for non-power-of-two textures in WebGL1.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

        // Framebuffer with the color texture as COLOR_ATTACHMENT0.
        this.framebuffer = gl.createFramebuffer();
        gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
        gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                                gl.TEXTURE_2D, this.colorTexture, 0);

        // Depth renderbuffer so the offscreen scene pass can depth-test.
        this.depthBuffer = gl.createRenderbuffer();
        gl.bindRenderbuffer(gl.RENDERBUFFER, this.depthBuffer);
        gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
        gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT,
                                   gl.RENDERBUFFER, this.depthBuffer);

        // FIX: fail fast if this attachment combination is unsupported,
        // instead of silently rendering nothing later.
        const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
        if (status !== gl.FRAMEBUFFER_COMPLETE) {
            throw new Error('PostProcess framebuffer incomplete: 0x' + status.toString(16));
        }
        gl.bindFramebuffer(gl.FRAMEBUFFER, null);

        // Fullscreen quad geometry shared by all effect passes.
        this.quad = this.createFullscreenQuad();
    }

    /**
     * Creates the VBO for a fullscreen quad drawn as a TRIANGLE_STRIP.
     * Vertex layout: [x, y, u, v] (clip-space position + texcoord).
     * @returns {{vertices: Float32Array, vbo: WebGLBuffer}}
     */
    createFullscreenQuad() {
        const gl = this.gl;
        const vertices = new Float32Array([
            // x,  y,   u, v
            -1,  1,   0, 1,
            -1, -1,   0, 0,
             1,  1,   1, 1,
             1, -1,   1, 0,
        ]);
        const vbo = gl.createBuffer();
        // FIX: the original created the buffer but never uploaded the data.
        gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
        gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
        gl.bindBuffer(gl.ARRAY_BUFFER, null);
        return { vertices, vbo };
    }

    /** Redirects subsequent draw calls into the offscreen framebuffer. */
    beginScene() {
        const gl = this.gl;
        gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
        gl.viewport(0, 0, this.width, this.height);
        gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    }

    /** Restores rendering to the default framebuffer (the canvas). */
    endScene() {
        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, null);
    }

    /**
     * Runs a post-processing program over the captured color texture by
     * drawing a fullscreen quad into the currently bound framebuffer.
     * The program must declare `u_texture`, `a_position`, `a_texCoord`.
     * @param {WebGLProgram} effectProgram
     */
    apply(effectProgram) {
        const gl = this.gl;
        gl.useProgram(effectProgram);
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, this.colorTexture);
        gl.uniform1i(gl.getUniformLocation(effectProgram, 'u_texture'), 0);
        this.drawQuad(effectProgram);
    }

    /**
     * Binds the quad VBO, wires up the standard attributes and draws.
     * FIX: the original never bound the buffer or configured attributes.
     * @param {WebGLProgram} [program] Program whose `a_position`/`a_texCoord`
     *     attributes to bind; when omitted, the caller must have already
     *     configured the attribute state itself (original behavior).
     */
    drawQuad(program) {
        const gl = this.gl;
        gl.bindBuffer(gl.ARRAY_BUFFER, this.quad.vbo);
        if (program !== undefined) {
            const stride = 4 * 4; // 4 floats per vertex, 4 bytes per float
            const posLoc = gl.getAttribLocation(program, 'a_position');
            gl.enableVertexAttribArray(posLoc);
            gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, stride, 0);
            const uvLoc = gl.getAttribLocation(program, 'a_texCoord');
            gl.enableVertexAttribArray(uvLoc);
            gl.vertexAttribPointer(uvLoc, 2, gl.FLOAT, false, stride, 8);
        }
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    }
}
Post-processing Vertex Shader
// Post-processing vertex shader: passes the fullscreen-quad position
// straight through in clip space and forwards the texture coordinate.
attribute vec2 a_position; // clip-space position, in [-1, 1]
attribute vec2 a_texCoord; // texture coordinate, in [0, 1]

varying vec2 v_texCoord;   // interpolated texcoord for the fragment stage

void main() {
    // No transform needed: the quad is already specified in clip space.
    gl_Position = vec4(a_position, 0.0, 1.0);
    v_texCoord = a_texCoord;
}
Common Post-processing Effects
1. Grayscale Effect
// Grayscale effect: replaces RGB with its luminance; alpha is preserved.
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture; // scene color (texture unit 0)

void main() {
    vec4 color = texture2D(u_texture, v_texCoord);
    // Weighted grayscale conversion using Rec. 601 luma coefficients.
    float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));
    gl_FragColor = vec4(vec3(gray), color.a);
}
2. Blur Effect
Gaussian Blur
// Separable Gaussian blur: blurs along one axis per pass; run twice
// (horizontal then vertical) for a full 2D blur.
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture;
uniform vec2 u_texelSize;  // 1.0 / textureSize
uniform vec2 u_direction;  // blur direction: (1,0) horizontal or (0,1) vertical

void main() {
    vec4 color = vec4(0.0);

    // 9-tap 1D Gaussian kernel: 5 unique weights, mirrored about the center.
    // (The original comment said "5x5"; this is a separable 1D kernel.)
    float weights[5];
    weights[0] = 0.227027;
    weights[1] = 0.1945946;
    weights[2] = 0.1216216;
    weights[3] = 0.054054;
    weights[4] = 0.016216;

    // Center pixel
    color += texture2D(u_texture, v_texCoord) * weights[0];

    // Symmetric taps on both sides of the center.
    for (int i = 1; i < 5; i++) {
        vec2 offset = u_direction * u_texelSize * float(i);
        color += texture2D(u_texture, v_texCoord + offset) * weights[i];
        color += texture2D(u_texture, v_texCoord - offset) * weights[i];
    }

    gl_FragColor = color;
}
Two-pass Blur (Performance Optimization)
javascript// Horizontal blur first horizontalBlurProgram.setUniform('u_direction', [1, 0]); postProcess.apply(horizontalBlurProgram); // Then vertical blur verticalBlurProgram.setUniform('u_direction', [0, 1]); postProcess.apply(verticalBlurProgram);
3. Edge Detection
// Edge detection using the Sobel operator: computes horizontal and
// vertical luminance gradients over a 3x3 neighborhood and outputs
// the gradient magnitude as a grayscale edge image.
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture;
uniform vec2 u_texelSize; // 1.0 / textureSize

void main() {
    // Sobel X kernel (detects vertical edges).
    float kernelX[9];
    kernelX[0] = -1.0; kernelX[1] = 0.0; kernelX[2] = 1.0;
    kernelX[3] = -2.0; kernelX[4] = 0.0; kernelX[5] = 2.0;
    kernelX[6] = -1.0; kernelX[7] = 0.0; kernelX[8] = 1.0;

    // Sobel Y kernel (detects horizontal edges).
    float kernelY[9];
    kernelY[0] = -1.0; kernelY[1] = -2.0; kernelY[2] = -1.0;
    kernelY[3] =  0.0; kernelY[4] =  0.0; kernelY[5] =  0.0;
    kernelY[6] =  1.0; kernelY[7] =  2.0; kernelY[8] =  1.0;

    // 3x3 neighborhood offsets in texel units, row by row.
    vec2 offsets[9];
    offsets[0] = vec2(-1, -1); offsets[1] = vec2(0, -1); offsets[2] = vec2(1, -1);
    offsets[3] = vec2(-1,  0); offsets[4] = vec2(0,  0); offsets[5] = vec2(1,  0);
    offsets[6] = vec2(-1,  1); offsets[7] = vec2(0,  1); offsets[8] = vec2(1,  1);

    float edgeX = 0.0;
    float edgeY = 0.0;
    for (int i = 0; i < 9; i++) {
        vec2 coord = v_texCoord + offsets[i] * u_texelSize;
        // Convolve against luminance (Rec. 601), not raw RGB.
        float gray = dot(texture2D(u_texture, coord).rgb, vec3(0.299, 0.587, 0.114));
        edgeX += gray * kernelX[i];
        edgeY += gray * kernelY[i];
    }

    // Gradient magnitude: bright where edges are strong.
    float edge = sqrt(edgeX * edgeX + edgeY * edgeY);
    gl_FragColor = vec4(vec3(edge), 1.0);
}
4. Bloom Effect
// Bloom is implemented with TWO separate fragment shader programs
// (compile each independently):
//   1. bright-pass: keep only pixels above a luminance threshold
//      (blur that result with the Gaussian shader between passes);
//   2. composite: additively blend the blurred bright-pass over the scene.

// --- Program 1: extract bright parts ---
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture;
uniform float u_threshold; // luminance cutoff; pixels below become black

void main() {
    vec4 color = texture2D(u_texture, v_texCoord);
    // Rec. 709 luminance weights (unlike the Rec. 601 weights used elsewhere).
    float brightness = dot(color.rgb, vec3(0.2126, 0.7152, 0.0722));
    if (brightness > u_threshold) {
        gl_FragColor = color;
    } else {
        gl_FragColor = vec4(0.0);
    }
}

// --- Program 2: composite bloom over the scene ---
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_sceneTexture; // original scene color
uniform sampler2D u_bloomTexture; // blurred bright-pass result
uniform float u_bloomIntensity;   // additive blend strength

void main() {
    vec4 sceneColor = texture2D(u_sceneTexture, v_texCoord);
    vec4 bloomColor = texture2D(u_bloomTexture, v_texCoord);
    gl_FragColor = sceneColor + bloomColor * u_bloomIntensity;
}
5. Tone Mapping
// Tone mapping: compresses HDR color into displayable [0,1] range,
// then applies gamma correction.
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture;
uniform float u_exposure; // scene exposure multiplier

// Reinhard tone mapping — shown as an alternative operator.
// NOTE: not called by main() below; GLSL compilers strip unused functions.
vec3 reinhardToneMapping(vec3 color) {
    return color / (color + vec3(1.0));
}

// ACES filmic tone mapping (Narkowicz approximation) — also an
// alternative operator, not called by main() below.
vec3 acesToneMapping(vec3 color) {
    const float a = 2.51;
    const float b = 0.03;
    const float c = 2.43;
    const float d = 0.59;
    const float e = 0.14;
    return clamp((color * (a * color + b)) / (color * (c * color + d) + e), 0.0, 1.0);
}

void main() {
    vec4 color = texture2D(u_texture, v_texCoord);
    // Exposure-based mapping (the operator actually used here):
    // 1 - e^(-c * exposure) maps [0, inf) smoothly into [0, 1).
    vec3 mapped = vec3(1.0) - exp(-color.rgb * u_exposure);
    // Gamma correction (linear -> sRGB approximation, gamma 2.2).
    mapped = pow(mapped, vec3(1.0 / 2.2));
    gl_FragColor = vec4(mapped, color.a);
}
6. Color Adjustment
// Color adjustment: brightness, contrast and saturation in one pass.
// Order matters: brightness offset, then contrast about mid-gray,
// then saturation as a lerp from the grayscale version.
precision mediump float;

varying vec2 v_texCoord;
uniform sampler2D u_texture;
uniform float u_brightness; // additive offset; 0.0 = unchanged
uniform float u_contrast;   // scale about 0.5; 1.0 = unchanged
uniform float u_saturation; // 0.0 = grayscale, 1.0 = unchanged, >1.0 boosts

void main() {
    vec4 color = texture2D(u_texture, v_texCoord);

    // Brightness: uniform additive shift.
    color.rgb += u_brightness;

    // Contrast: expand/compress around mid-gray (0.5).
    color.rgb = (color.rgb - 0.5) * u_contrast + 0.5;

    // Saturation: blend between luminance (Rec. 601) and the color.
    float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));
    color.rgb = mix(vec3(gray), color.rgb, u_saturation);

    gl_FragColor = color;
}
7. Screen Space Ambient Occlusion (SSAO)
// Screen Space Ambient Occlusion (SSAO), GLSL ES 1.00 / WebGL1.
// For each pixel, samples a kernel of nearby view-space points and
// darkens the pixel in proportion to how many samples are occluded
// by closer geometry in the depth buffer.
precision mediump float;

varying vec2 v_texCoord;

uniform sampler2D u_colorTexture;
uniform sampler2D u_depthTexture;  // hardware depth in [0, 1]
uniform sampler2D u_normalTexture; // normals packed into [0, 1]
uniform mat4 u_projectionMatrix;
// FIX: GLSL ES 1.00 has no built-in inverse() (it was added in ES 3.00),
// so the original `inverse(u_projectionMatrix)` would not compile under
// WebGL1. The CPU must compute and upload the inverse once per frame.
uniform mat4 u_inverseProjectionMatrix;
uniform mat4 u_viewMatrix;   // kept for interface compatibility (unused here)
uniform vec2 u_texelSize;
uniform vec3 u_samples[64];  // hemisphere sampling kernel, view space

// Reconstructs the view-space position of the pixel at texCoord
// from the depth buffer and the inverse projection matrix.
vec3 getViewPosition(vec2 texCoord) {
    float depth = texture2D(u_depthTexture, texCoord).r;
    // Unproject: window [0,1] -> NDC [-1,1] -> view space.
    vec4 clipPos = vec4(texCoord * 2.0 - 1.0, depth * 2.0 - 1.0, 1.0);
    vec4 viewPos = u_inverseProjectionMatrix * clipPos;
    return viewPos.xyz / viewPos.w; // perspective divide
}

void main() {
    vec3 viewPos = getViewPosition(v_texCoord);
    // Unpack normal from [0,1] storage back to [-1,1].
    vec3 normal = texture2D(u_normalTexture, v_texCoord).xyz * 2.0 - 1.0;

    float occlusion = 0.0;
    float radius = 0.5; // sampling hemisphere radius in view-space units

    for (int i = 0; i < 64; i++) {
        // Candidate sample point around the shaded pixel.
        vec3 samplePos = viewPos + u_samples[i] * radius;

        // Project the sample into screen space to look up stored depth.
        vec4 offset = u_projectionMatrix * vec4(samplePos, 1.0);
        offset.xyz = offset.xyz / offset.w * 0.5 + 0.5;

        // View-space depth of whatever geometry is visible there.
        float sampleDepth = getViewPosition(offset.xy).z;

        // Range check: fade contribution when the occluder is far away,
        // preventing halos around depth discontinuities.
        float rangeCheck = smoothstep(0.0, 1.0, radius / abs(viewPos.z - sampleDepth));

        // Sample is occluded if stored geometry is in front of it.
        occlusion += (sampleDepth >= samplePos.z ? 1.0 : 0.0) * rangeCheck;
    }

    // Convert occlusion count to an ambient visibility factor.
    occlusion = 1.0 - (occlusion / 64.0);

    vec3 color = texture2D(u_colorTexture, v_texCoord).rgb;
    gl_FragColor = vec4(color * occlusion, 1.0);
}
Multi-effect Chain Processing
class PostProcessChain {
    /**
     * Runs a scene through a sequence of post-processing effects using
     * two framebuffers in ping-pong fashion.
     * @param {WebGLRenderingContext} gl
     * @param {number} width  Offscreen target width in pixels.
     * @param {number} height Offscreen target height in pixels.
     */
    constructor(gl, width, height) {
        this.gl = gl;
        this.width = width;
        this.height = height;
        // Two render targets; each pass reads one and writes the other.
        this.fbo1 = new PostProcess(gl, width, height);
        this.fbo2 = new PostProcess(gl, width, height);
        this.effects = [];
    }

    /**
     * Appends an effect to the chain. An effect must expose
     * apply(inputTexture), drawing a fullscreen quad that samples it.
     * @param {{apply: Function}} effect
     */
    addEffect(effect) {
        this.effects.push(effect);
    }

    /**
     * Renders the scene, applies every queued effect in order, then
     * presents the result on the default framebuffer.
     * @param {Function} sceneRenderFunc Draws the scene (called once).
     * @param {{apply: Function}} [finalPass] Pass used for the final
     *     on-screen blit. FIX: the original referenced an undeclared
     *     global `finalPass`; it is now an explicit optional parameter,
     *     falling back to the global for backward compatibility.
     */
    render(sceneRenderFunc, finalPass) {
        // Step 1: render the scene into the first offscreen target.
        this.fbo1.beginScene();
        sceneRenderFunc();
        this.fbo1.endScene();

        let readFBO = this.fbo1;
        let writeFBO = this.fbo2;

        // Step 2: each effect reads the previous result, writes the next.
        for (const effect of this.effects) {
            writeFBO.beginScene();
            effect.apply(readFBO.colorTexture);
            writeFBO.endScene();
            // Swap read/write targets (ping-pong).
            [readFBO, writeFBO] = [writeFBO, readFBO];
        }

        // Step 3: blit the final result to the canvas.
        const gl = this.gl;
        gl.bindFramebuffer(gl.FRAMEBUFFER, null);
        // FIX: restore the viewport to the canvas size — beginScene()
        // set it to the offscreen target's dimensions.
        gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
        const pass = finalPass !== undefined ? finalPass : globalThis.finalPass;
        pass.apply(readFBO.colorTexture);
    }
}
Performance Optimization Suggestions
-
Reduce Resolution:
- Post-processing can use half or quarter resolution
- Especially suitable for blur effects
-
Combine Effects:
- Merge multiple simple effects into one shader
- Reduce render passes
-
Use Mipmap:
- Blur effects can use mipmap for fast downsampling
-
Smart Update:
- Static scenes don't need post-processing every frame
- Can update certain effects every other frame
-
Mobile Optimization:
- Reduce sampling count
- Use simple approximation algorithms