未验证 提交 854d9ff8 编写于 作者: M Mr.doob 提交者: GitHub

Merge pull request #15243 from Mugen87/dev9

SSAO: New implementation
'use strict';
/**
* Screen-space ambient occlusion pass.
*
* Has the following parameters
* - radius
* - Ambient occlusion shadow radius (numeric value).
* - onlyAO
* - Display only ambient occlusion result (boolean value).
* - aoClamp
* - Ambient occlusion clamp (numeric value).
* - lumInfluence
* - Pixel luminosity influence in AO calculation (numeric value).
*
* To output to screen, set renderToScreen to true
*
* @author alteredq / http://alteredqualia.com/
* @author tentone
* @class SSAOPass
* @author Mugen87 / https://github.com/Mugen87
*/
THREE.SSAOPass = function ( scene, camera, width, height ) {
THREE.Pass.call( this );
this.width = ( width !== undefined ) ? width : 512;
this.height = ( height !== undefined ) ? height : 512;
this.clear = true;
this.camera = camera;
this.scene = scene;
this.kernelRadius = 8;
this.kernelSize = 64;
this.kernel = [];
this.noiseTexture = null;
this.output = 0;
this.minDistance = 0.005;
this.maxDistance = 0.1;
//
this.generateSampleKernel();
this.generateRandomKernelRotations();
// beauty render target with depth buffer
var depthTexture = new THREE.DepthTexture();
depthTexture.type = THREE.UnsignedShortType;
depthTexture.minFilter = THREE.NearestFilter;
depthTexture.maxFilter = THREE.NearestFilter;
this.beautyRenderTarget = new THREE.WebGLRenderTarget( width, height, {
minFilter: THREE.LinearFilter,
magFilter: THREE.LinearFilter,
format: THREE.RGBAFormat,
depthTexture: depthTexture,
depthBuffer: true
} );
// normal render target
this.normalRenderTarget = new THREE.WebGLRenderTarget( width, height, {
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBAFormat
} );
// ssao render target
this.ssaoRenderTarget = new THREE.WebGLRenderTarget( width, height, {
minFilter: THREE.LinearFilter,
magFilter: THREE.LinearFilter,
format: THREE.RGBAFormat
} );
this.blurRenderTarget = this.ssaoRenderTarget.clone();
// ssao material
if ( THREE.SSAOShader === undefined ) {
console.warn( 'THREE.SSAOPass depends on THREE.SSAOShader' );
return new THREE.ShaderPass();
console.error( 'THREE.SSAOPass: The pass relies on THREE.SSAOShader.' );
}
THREE.ShaderPass.call( this, THREE.SSAOShader );
this.ssaoMaterial = new THREE.ShaderMaterial( {
defines: Object.assign( {}, THREE.SSAOShader.defines ),
uniforms: THREE.UniformsUtils.clone( THREE.SSAOShader.uniforms ),
vertexShader: THREE.SSAOShader.vertexShader,
fragmentShader: THREE.SSAOShader.fragmentShader,
blending: THREE.NoBlending
} );
this.width = ( width !== undefined ) ? width : 512;
this.height = ( height !== undefined ) ? height : 256;
this.ssaoMaterial.uniforms[ 'tDiffuse' ].value = this.beautyRenderTarget.texture;
this.ssaoMaterial.uniforms[ 'tNormal' ].value = this.normalRenderTarget.texture;
this.ssaoMaterial.uniforms[ 'tDepth' ].value = this.beautyRenderTarget.depthTexture;
this.ssaoMaterial.uniforms[ 'tNoise' ].value = this.noiseTexture;
this.ssaoMaterial.uniforms[ 'kernel' ].value = this.kernel;
this.ssaoMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
this.ssaoMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
this.ssaoMaterial.uniforms[ 'resolution' ].value.set( width, height );
this.ssaoMaterial.uniforms[ 'cameraProjectionMatrix' ].value.copy( this.camera.projectionMatrix );
this.ssaoMaterial.uniforms[ 'cameraInverseProjectionMatrix' ].value.getInverse( this.camera.projectionMatrix );
// normal material
this.normalMaterial = new THREE.MeshNormalMaterial();
this.normalMaterial.blending = THREE.NoBlending;
// blur material
this.blurMaterial = new THREE.ShaderMaterial( {
defines: Object.assign( {}, THREE.SSAOBlurShader.defines ),
uniforms: THREE.UniformsUtils.clone( THREE.SSAOBlurShader.uniforms ),
vertexShader: THREE.SSAOBlurShader.vertexShader,
fragmentShader: THREE.SSAOBlurShader.fragmentShader
} );
this.blurMaterial.uniforms[ 'tDiffuse' ].value = this.ssaoRenderTarget.texture;
this.blurMaterial.uniforms[ 'resolution' ].value.set( width, height );
this.renderToScreen = false;
// material for rendering the depth
this.camera2 = camera;
this.scene2 = scene;
this.depthRenderMaterial = new THREE.ShaderMaterial( {
defines: Object.assign( {}, THREE.SSAODepthShader.defines ),
uniforms: THREE.UniformsUtils.clone( THREE.SSAODepthShader.uniforms ),
vertexShader: THREE.SSAODepthShader.vertexShader,
fragmentShader: THREE.SSAODepthShader.fragmentShader,
blending: THREE.NoBlending
} );
this.depthRenderMaterial.uniforms[ 'tDepth' ].value = this.beautyRenderTarget.depthTexture;
this.depthRenderMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
this.depthRenderMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
// material for rendering the content of a render target
this.copyMaterial = new THREE.ShaderMaterial( {
uniforms: THREE.UniformsUtils.clone( THREE.CopyShader.uniforms ),
vertexShader: THREE.CopyShader.vertexShader,
fragmentShader: THREE.CopyShader.fragmentShader,
transparent: true,
depthTest: false,
depthWrite: false,
blendSrc: THREE.DstColorFactor,
blendDst: THREE.ZeroFactor,
blendEquation: THREE.AddEquation,
blendSrcAlpha: THREE.DstAlphaFactor,
blendDstAlpha: THREE.ZeroFactor,
blendEquationAlpha: THREE.AddEquation
} );
//Depth material
this.depthMaterial = new THREE.MeshDepthMaterial();
this.depthMaterial.depthPacking = THREE.RGBADepthPacking;
this.depthMaterial.blending = THREE.NoBlending;
//
//Depth render target
this.depthRenderTarget = new THREE.WebGLRenderTarget( this.width, this.height, { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter } );
//this.depthRenderTarget.texture.name = 'SSAOShader.rt';
this.quadCamera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
this.quadScene = new THREE.Scene();
this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null );
this.quadScene.add( this.quad );
//Shader uniforms
this.uniforms[ 'tDepth' ].value = this.depthRenderTarget.texture;
this.uniforms[ 'size' ].value.set( this.width, this.height );
this.uniforms[ 'cameraNear' ].value = this.camera2.near;
this.uniforms[ 'cameraFar' ].value = this.camera2.far;
};
this.uniforms[ 'radius' ].value = 4;
this.uniforms[ 'onlyAO' ].value = false;
this.uniforms[ 'aoClamp' ].value = 0.25;
this.uniforms[ 'lumInfluence' ].value = 0.7;
THREE.SSAOPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), {
//Setters and getters for uniforms
constructor: THREE.SSAOPass,
Object.defineProperties( this, {
render: function ( renderer, writeBuffer /*, readBuffer, delta, maskActive */ ) {
radius: {
get: function () {
// render beauty and depth
return this.uniforms[ 'radius' ].value;
renderer.setClearColor( 0x000000 );
renderer.render( this.scene, this.camera, this.beautyRenderTarget, true );
},
set: function ( value ) {
// render normals
this.uniforms[ 'radius' ].value = value;
this.renderOverride( renderer, this.normalMaterial, this.normalRenderTarget, 0x7777ff, 1.0 );
}
},
// render SSAO
onlyAO: {
get: function () {
this.ssaoMaterial.uniforms[ 'kernelRadius' ].value = this.kernelRadius;
this.ssaoMaterial.uniforms[ 'minDistance' ].value = this.minDistance;
this.ssaoMaterial.uniforms[ 'maxDistance' ].value = this.maxDistance;
this.renderPass( renderer, this.ssaoMaterial, this.ssaoRenderTarget );
return this.uniforms[ 'onlyAO' ].value;
// render blur
},
set: function ( value ) {
this.renderPass( renderer, this.blurMaterial, this.blurRenderTarget );
this.uniforms[ 'onlyAO' ].value = value;
// output result to screen
}
},
switch ( this.output ) {
aoClamp: {
get: function () {
case THREE.SSAOPass.OUTPUT.SSAO:
return this.uniforms[ 'aoClamp' ].value;
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.ssaoRenderTarget.texture;
this.copyMaterial.blending = THREE.NoBlending;
this.renderPass( renderer, this.copyMaterial, null, true );
},
set: function ( value ) {
break;
this.uniforms[ 'aoClamp' ].value = value;
case THREE.SSAOPass.OUTPUT.Blur:
}
},
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.blurRenderTarget.texture;
this.copyMaterial.blending = THREE.NoBlending;
this.renderPass( renderer, this.copyMaterial, null, true );
lumInfluence: {
get: function () {
break;
return this.uniforms[ 'lumInfluence' ].value;
case THREE.SSAOPass.OUTPUT.Beauty:
},
set: function ( value ) {
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.beautyRenderTarget.texture;
this.copyMaterial.blending = THREE.NoBlending;
this.renderPass( renderer, this.copyMaterial, null, true );
this.uniforms[ 'lumInfluence' ].value = value;
break;
}
},
case THREE.SSAOPass.OUTPUT.Depth:
} );
this.renderPass( renderer, this.depthRenderMaterial, null, true );
};
break;
THREE.SSAOPass.prototype = Object.create( THREE.ShaderPass.prototype );
case THREE.SSAOPass.OUTPUT.Normal:
/**
* Render using this pass.
*
* @method render
* @param {WebGLRenderer} renderer
* @param {WebGLRenderTarget} writeBuffer Buffer to write output.
* @param {WebGLRenderTarget} readBuffer Input buffer.
* @param {Number} delta Delta time in milliseconds.
* @param {Boolean} maskActive Not used in this pass.
*/
THREE.SSAOPass.prototype.render = function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.normalRenderTarget.texture;
this.copyMaterial.blending = THREE.NoBlending;
this.renderPass( renderer, this.copyMaterial, null, true );
//Render depth into depthRenderTarget
this.scene2.overrideMaterial = this.depthMaterial;
break;
renderer.render( this.scene2, this.camera2, this.depthRenderTarget, true );
case THREE.SSAOPass.OUTPUT.Default:
this.scene2.overrideMaterial = null;
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.beautyRenderTarget.texture;
this.copyMaterial.blending = THREE.NoBlending;
this.renderPass( renderer, this.copyMaterial, null, true );
this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.blurRenderTarget.texture;
this.copyMaterial.blending = THREE.CustomBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
//SSAO shaderPass
THREE.ShaderPass.prototype.render.call( this, renderer, writeBuffer, readBuffer, delta, maskActive );
break;
};
default:
console.warn( 'THREE.SSAOPass: Unknown output type.' );
/**
* Change scene to be renderer by this render pass.
*
* @method setScene
* @param {Scene} scene
*/
THREE.SSAOPass.prototype.setScene = function ( scene ) {
}
this.scene2 = scene;
},
};
renderPass: function ( renderer, passMaterial, renderTarget, clearColor, clearAlpha ) {
/**
* Set camera used by this render pass.
*
* @method setCamera
* @param {Camera} camera
*/
THREE.SSAOPass.prototype.setCamera = function ( camera ) {
// save original state
var originalClearColor = renderer.getClearColor();
var originalClearAlpha = renderer.getClearAlpha();
var originalAutoClear = renderer.autoClear;
this.camera2 = camera;
// setup pass state
renderer.autoClear = false;
var clearNeeded = ( clearColor !== undefined ) && ( clearColor !== null );
if ( clearNeeded ) {
this.uniforms[ 'cameraNear' ].value = this.camera2.near;
this.uniforms[ 'cameraFar' ].value = this.camera2.far;
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
};
}
/**
* Set resolution of this render pass.
*
* @method setSize
* @param {Number} width
* @param {Number} height
*/
THREE.SSAOPass.prototype.setSize = function ( width, height ) {
this.quad.material = passMaterial;
renderer.render( this.quadScene, this.quadCamera, renderTarget, clearNeeded );
this.width = width;
this.height = height;
// restore original state
renderer.autoClear = originalAutoClear;
renderer.setClearColor( originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
},
renderOverride: function ( renderer, overrideMaterial, renderTarget, clearColor, clearAlpha ) {
var originalClearColor = renderer.getClearColor();
var originalClearAlpha = renderer.getClearAlpha();
var originalAutoClear = renderer.autoClear;
renderer.autoClear = false;
clearColor = overrideMaterial.clearColor || clearColor;
clearAlpha = overrideMaterial.clearAlpha || clearAlpha;
var clearNeeded = ( clearColor !== undefined ) && ( clearColor !== null );
if ( clearNeeded ) {
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
}
this.scene.overrideMaterial = overrideMaterial;
renderer.render( this.scene, this.camera, renderTarget, clearNeeded );
this.scene.overrideMaterial = null;
// restore original state
renderer.autoClear = originalAutoClear;
renderer.setClearColor( originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
},
setSize: function ( width, height ) {
this.width = width;
this.height = height;
this.beautyRenderTarget.setSize( width, height );
this.ssaoRenderTarget.setSize( width, height );
this.normalRenderTarget.setSize( width, height );
this.blurRenderTarget.setSize( width, height );
this.ssaoMaterial.uniforms[ 'resolution' ].value.set( width, height );
this.ssaoMaterial.uniforms[ 'cameraProjectionMatrix' ].value.copy( this.camera.projectionMatrix );
this.ssaoMaterial.uniforms[ 'cameraInverseProjectionMatrix' ].value.getInverse( this.camera.projectionMatrix );
this.blurMaterial.uniforms[ 'resolution' ].value.set( width, height );
},
generateSampleKernel: function () {
var kernelSize = this.kernelSize;
var kernel = this.kernel;
for ( var i = 0; i < kernelSize; i ++ ) {
var sample = new THREE.Vector3();
sample.x = ( Math.random() * 2 ) - 1;
sample.y = ( Math.random() * 2 ) - 1;
sample.z = Math.random();
sample.normalize();
var scale = i / kernelSize;
scale = THREE.Math.lerp( 0.1, 1, scale * scale );
sample.multiplyScalar( scale );
kernel.push( sample );
}
},
generateRandomKernelRotations: function () {
var width = 4, height = 4;
if ( SimplexNoise === undefined ) {
console.error( 'THREE.SSAOPass: The pass relies on THREE.SimplexNoise.' );
}
var simplex = new SimplexNoise();
var size = width * height;
var data = new Float32Array( size );
for ( var i = 0; i < size; i ++ ) {
var x = ( Math.random() * 2 ) - 1;
var y = ( Math.random() * 2 ) - 1;
var z = 0;
data[ i ] = simplex.noise3d( x, y, z );
}
this.noiseTexture = new THREE.DataTexture( data, width, height, THREE.LuminanceFormat, THREE.FloatType );
this.noiseTexture.wrapS = THREE.RepeatWrapping;
this.noiseTexture.wrapT = THREE.RepeatWrapping;
this.noiseTexture.needsUpdate = true;
}
this.uniforms[ 'size' ].value.set( this.width, this.height );
this.depthRenderTarget.setSize( this.width, this.height );
} );
// Visualization modes for SSAOPass.output (see the switch in render()).
THREE.SSAOPass.OUTPUT = {
	'Default': 0, // beauty pass combined with the blurred AO term
	'SSAO': 1, // raw ambient-occlusion render target
	'Blur': 2, // blurred ambient-occlusion render target
	'Beauty': 3, // scene color only
	'Depth': 4, // depth visualized via depthRenderMaterial
	'Normal': 5 // view-space normals render target
};
/**
* @author alteredq / http://alteredqualia.com/
* @author Mugen87 / https://github.com/Mugen87
*
* Screen-space ambient occlusion shader
* - ported from
* SSAO GLSL shader v1.2
* assembled by Martins Upitis (martinsh) (http://devlog-martinsh.blogspot.com)
* original technique is made by ArKano22 (http://www.gamedev.net/topic/550699-ssao-no-halo-artifacts/)
* - modifications
* - modified to use RGBA packed depth texture (use clear color 1,1,1,1 for depth pass)
* - refactoring and optimizations
* References:
* http://john-chapman-graphics.blogspot.com/2013/01/ssao-tutorial.html
* https://learnopengl.com/Advanced-Lighting/SSAO
* https://github.com/McNopper/OpenGL/blob/master/Example28/shader/ssao.frag.glsl
*/
THREE.SSAOShader = {
defines: {
"PERSPECTIVE_CAMERA": 1,
"KERNEL_SIZE": 64
},
uniforms: {
"tDiffuse": { value: null },
"tDepth": { value: null },
"size": { value: new THREE.Vector2( 512, 512 ) },
"cameraNear": { value: 1 },
"cameraFar": { value: 100 },
"radius": { value: 32 },
"onlyAO": { value: 0 },
"aoClamp": { value: 0.25 },
"lumInfluence": { value: 0.7 }
"tDiffuse": { value: null },
"tNormal": { value: null },
"tDepth": { value: null },
"tNoise": { value: null },
"kernel": { value: null },
"cameraNear": { value: null },
"cameraFar": { value: null },
"resolution": { value: new THREE.Vector2() },
"cameraProjectionMatrix": { value: new THREE.Matrix4() },
"cameraInverseProjectionMatrix": { value: new THREE.Matrix4() },
"kernelRadius": { value: 8 },
"minDistance": { value: 0.005 },
"maxDistance": { value: 0.05 },
},
......@@ -33,9 +38,9 @@ THREE.SSAOShader = {
"void main() {",
"vUv = uv;",
" vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
" gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
......@@ -43,188 +48,245 @@ THREE.SSAOShader = {
fragmentShader: [
"uniform sampler2D tDiffuse;",
"uniform sampler2D tNormal;",
"uniform sampler2D tDepth;",
"uniform sampler2D tNoise;",
"uniform vec3 kernel[ KERNEL_SIZE ];",
"uniform vec2 resolution;",
"uniform float cameraNear;",
"uniform float cameraFar;",
"#ifdef USE_LOGDEPTHBUF",
"uniform float logDepthBufFC;",
"#endif",
"uniform mat4 cameraProjectionMatrix;",
"uniform mat4 cameraInverseProjectionMatrix;",
"uniform float radius;", // ao radius
"uniform bool onlyAO;", // use only ambient occlusion pass?
"uniform float kernelRadius;",
"uniform float minDistance;", // avoid artifacts caused by neighbour fragments with minimal depth difference
"uniform float maxDistance;", // avoid the influence of fragments which are too far away
"uniform vec2 size;", // texture width, height
"uniform float aoClamp;", // depth clamp - reduces haloing at screen edges
"varying vec2 vUv;",
"uniform float lumInfluence;", // how much luminance affects occlusion
"#include <packing>",
"uniform sampler2D tDiffuse;",
"uniform sampler2D tDepth;",
"float getDepth( const in vec2 screenPosition ) {",
"varying vec2 vUv;",
" return texture2D( tDepth, screenPosition ).x;",
// "#define PI 3.14159265",
"#define DL 2.399963229728653", // PI * ( 3.0 - sqrt( 5.0 ) )
"#define EULER 2.718281828459045",
"}",
// user variables
"float getLinearDepth( const in vec2 screenPosition ) {",
"const int samples = 64;", // ao sample count
" #if PERSPECTIVE_CAMERA == 1",
"const bool useNoise = true;", // use noise instead of pattern for sample dithering
"const float noiseAmount = 0.0004;", // dithering amount
" float fragCoordZ = texture2D( tDepth, screenPosition ).x;",
" float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );",
" return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );",
"const float diffArea = 0.4;", // self-shadowing reduction
"const float gDisplace = 0.4;", // gauss bell center
" #else",
" return texture2D( depthSampler, coord ).x;",
// RGBA depth
" #endif",
"#include <packing>",
"}",
// generating noise / pattern texture for dithering
"float getViewZ( const in float depth ) {",
"vec2 rand( const vec2 coord ) {",
" #if PERSPECTIVE_CAMERA == 1",
"vec2 noise;",
" return perspectiveDepthToViewZ( depth, cameraNear, cameraFar );",
"if ( useNoise ) {",
" #else",
"float nx = dot ( coord, vec2( 12.9898, 78.233 ) );",
"float ny = dot ( coord, vec2( 12.9898, 78.233 ) * 2.0 );",
" return orthographicDepthToViewZ( depth, cameraNear, cameraFar );",
"noise = clamp( fract ( 43758.5453 * sin( vec2( nx, ny ) ) ), 0.0, 1.0 );",
" #endif",
"} else {",
"}",
"vec3 getViewPosition( const in vec2 screenPosition, const in float depth, const in float viewZ ) {",
"float ff = fract( 1.0 - coord.s * ( size.x / 2.0 ) );",
"float gg = fract( coord.t * ( size.y / 2.0 ) );",
" float clipW = cameraProjectionMatrix[2][3] * viewZ + cameraProjectionMatrix[3][3];",
"noise = vec2( 0.25, 0.75 ) * vec2( ff ) + vec2( 0.75, 0.25 ) * gg;",
" vec4 clipPosition = vec4( ( vec3( screenPosition, depth ) - 0.5 ) * 2.0, 1.0 );",
"}",
" clipPosition *= clipW; // unprojection.",
"return ( noise * 2.0 - 1.0 ) * noiseAmount;",
" return ( cameraInverseProjectionMatrix * clipPosition ).xyz;",
"}",
"float readDepth( const in vec2 coord ) {",
"vec3 getViewNormal( const in vec2 screenPosition ) {",
"float cameraFarPlusNear = cameraFar + cameraNear;",
"float cameraFarMinusNear = cameraFar - cameraNear;",
"float cameraCoef = 2.0 * cameraNear;",
" return unpackRGBToNormal( texture2D( tNormal, screenPosition ).xyz );",
"#ifdef USE_LOGDEPTHBUF",
"}",
"float logz = unpackRGBAToDepth( texture2D( tDepth, coord ) );",
"float w = pow(2.0, (logz / logDepthBufFC)) - 1.0;",
"float z = (logz / w) + 1.0;",
"void main() {",
"#else",
" float depth = getDepth( vUv );",
" float viewZ = getViewZ( depth );",
"float z = unpackRGBAToDepth( texture2D( tDepth, coord ) );",
" vec3 viewPosition = getViewPosition( vUv, depth, viewZ );",
" vec3 viewNormal = getViewNormal( vUv );",
"#endif",
" vec2 noiseScale = vec2( resolution.x / 4.0, resolution.y / 4.0 );",
" vec3 random = texture2D( tNoise, vUv * noiseScale ).xyz;",
"return cameraCoef / ( cameraFarPlusNear - z * cameraFarMinusNear );",
// compute matrix used to reorient a kernel vector
" vec3 tangent = normalize( random - viewNormal * dot( random, viewNormal ) );",
" vec3 bitangent = cross( viewNormal, tangent );",
" mat3 kernelMatrix = mat3( tangent, bitangent, viewNormal );",
"}",
" float occlusion = 0.0;",
"float compareDepths( const in float depth1, const in float depth2, inout int far ) {",
" for ( int i = 0; i < KERNEL_SIZE; i ++ ) {",
"float garea = 8.0;", // gauss bell width
"float diff = ( depth1 - depth2 ) * 100.0;", // depth difference (0-100)
" vec3 sampleVector = kernelMatrix * kernel[ i ];", // reorient sample vector in view space
" vec3 samplePoint = viewPosition + ( sampleVector * kernelRadius );", // calculate sample point
// reduce left bell width to avoid self-shadowing
" vec4 samplePointNDC = cameraProjectionMatrix * vec4( samplePoint, 1.0 );", // project point and calculate NDC
" samplePointNDC /= samplePointNDC.w;",
"if ( diff < gDisplace ) {",
" vec2 samplePointUv = samplePointNDC.xy * 0.5 + 0.5;", // compute uv coordinates
"garea = diffArea;",
" float realDepth = getLinearDepth( samplePointUv );", // get linear depth from depth texture
" float sampleDepth = viewZToOrthographicDepth( samplePoint.z, cameraNear, cameraFar );", // compute linear depth of the sample view Z value
" float delta = sampleDepth - realDepth;",
"} else {",
" if ( delta > minDistance && delta < maxDistance ) {", // if fragment is before sample point, increase occlusion
"far = 1;",
" occlusion += 1.0;",
"}",
" }",
"float dd = diff - gDisplace;",
"float gauss = pow( EULER, -2.0 * ( dd * dd ) / ( garea * garea ) );",
"return gauss;",
" }",
"}",
" occlusion = clamp( occlusion / float( KERNEL_SIZE ), 0.0, 1.0 );",
"float calcAO( float depth, float dw, float dh ) {",
" gl_FragColor = vec4( vec3( 1.0 - occlusion ), 1.0 );",
"vec2 vv = vec2( dw, dh );",
"}"
"vec2 coord1 = vUv + radius * vv;",
"vec2 coord2 = vUv - radius * vv;",
].join( "\n" )
};
"float temp1 = 0.0;",
"float temp2 = 0.0;",
THREE.SSAODepthShader = {
"int far = 0;",
"temp1 = compareDepths( depth, readDepth( coord1 ), far );",
defines: {
"PERSPECTIVE_CAMERA": 1
},
// DEPTH EXTRAPOLATION
uniforms: {
"if ( far > 0 ) {",
"tDepth": { value: null },
"cameraNear": { value: null },
"cameraFar": { value: null },
"temp2 = compareDepths( readDepth( coord2 ), depth, far );",
"temp1 += ( 1.0 - temp1 ) * temp2;",
},
"}",
vertexShader: [
"return temp1;",
"varying vec2 vUv;",
"void main() {",
" vUv = uv;",
" gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join( "\n" ),
fragmentShader: [
"uniform sampler2D tDepth;",
"uniform float cameraNear;",
"uniform float cameraFar;",
"varying vec2 vUv;",
"#include <packing>",
"float getLinearDepth( const in vec2 screenPosition ) {",
" #if PERSPECTIVE_CAMERA == 1",
" float fragCoordZ = texture2D( tDepth, screenPosition ).x;",
" float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );",
" return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );",
" #else",
" return texture2D( depthSampler, coord ).x;",
" #endif",
"}",
"void main() {",
"vec2 noise = rand( vUv );",
"float depth = readDepth( vUv );",
" float depth = getLinearDepth( vUv );",
" gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );",
"}"
].join( "\n" )
};
"float tt = clamp( depth, aoClamp, 1.0 );",
THREE.SSAOBlurShader = {
"float w = ( 1.0 / size.x ) / tt + ( noise.x * ( 1.0 - noise.x ) );",
"float h = ( 1.0 / size.y ) / tt + ( noise.y * ( 1.0 - noise.y ) );",
uniforms: {
"float ao = 0.0;",
"tDiffuse": { value: null },
"resolution": { value: new THREE.Vector2() }
},
"float dz = 1.0 / float( samples );",
"float l = 0.0;",
"float z = 1.0 - dz / 2.0;",
vertexShader: [
"for ( int i = 0; i <= samples; i ++ ) {",
"varying vec2 vUv;",
"float r = sqrt( 1.0 - z );",
"void main() {",
"float pw = cos( l ) * r;",
"float ph = sin( l ) * r;",
"ao += calcAO( depth, pw * w, ph * h );",
"z = z - dz;",
"l = l + DL;",
" vUv = uv;",
" gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}",
"}"
"ao /= float( samples );",
"ao = 1.0 - ao;",
].join( "\n" ),
fragmentShader: [
"uniform sampler2D tDiffuse;",
"uniform vec2 resolution;",
"varying vec2 vUv;",
"void main() {",
"vec3 color = texture2D( tDiffuse, vUv ).rgb;",
" vec2 texelSize = ( 1.0 / resolution );",
" float result = 0.0;",
"vec3 lumcoeff = vec3( 0.299, 0.587, 0.114 );",
"float lum = dot( color.rgb, lumcoeff );",
"vec3 luminance = vec3( lum );",
" for ( int i = - 2; i <= 2; i ++ ) {",
"vec3 final = vec3( color * mix( vec3( ao ), vec3( 1.0 ), luminance * lumInfluence ) );", // mix( color * ao, white, luminance )
" for ( int j = - 2; j <= 2; j ++ ) {",
"if ( onlyAO ) {",
" vec2 offset = ( vec2( float( i ), float( j ) ) ) * texelSize;",
" result += texture2D( tDiffuse, vUv + offset ).r;",
"final = vec3( mix( vec3( ao ), vec3( 1.0 ), luminance * lumInfluence ) );", // ambient occlusion only
" }",
"}",
" }",
"gl_FragColor = vec4( final, 1.0 );",
" gl_FragColor = vec4( vec3( result / ( 5.0 * 5.0 ) ), 1.0 );",
"}"
......
<!DOCTYPE html>
<!--Reference:
SSAO algo: http://devlog-martinsh.blogspot.tw/2011/12/ssao-shader-update-v12.html?showComment=1398158188712#c1563204765906693531
log depth http://outerra.blogspot.tw/2013/07/logarithmic-depth-buffer-optimizations.html
convert the exponential depth to a linear value: http://www.ozone3d.net/blogs/lab/20090206/how-to-linearize-the-depth-value/
Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info/wiki/Evenly_distributed_points_on_sphere-->
<html lang="en">
<head>
<title>three.js webgl - postprocessing - Screen Space Ambient Occlusion</title>
......@@ -40,21 +33,24 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
<script src="../build/three.js"></script>
<script src="js/shaders/SSAOShader.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/SSAOPass.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/SimplexNoise.js"></script>
<script src="js/WebGL.js"></script>
<script src="js/libs/stats.min.js"></script>
<script src='js/libs/dat.gui.min.js'></script>
<div id="info">
<a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> - webgl screen space ambient occlusion example<br/>
shader by <a href="http://alteredqualia.com">alteredq</a>
<a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> - screen space ambient occlusion<br/>
<div id="error" style="display: none;">
Your browser does not support <strong>WEBGL_depth_texture</strong>.<br/><br/>
This demo will not work.
</div>
</div>
<script>
......@@ -71,8 +67,6 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
var ssaoPass;
var group;
var postprocessing = { enabled: true, onlyAO: false, radius: 32, aoClamp: 0.25, lumInfluence: 0.7 };
init();
animate();
......@@ -85,6 +79,13 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
if ( ! renderer.extensions.get( 'WEBGL_depth_texture' ) ) {
document.querySelector( '#error' ).style.display = 'block';
return;
}
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 100, 700 );
camera.position.z = 500;
......@@ -118,33 +119,30 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
stats = new Stats();
container.appendChild( stats.dom );
// Init postprocessing
initPostprocessing();
ssaoPass = new THREE.SSAOPass( scene, camera );
ssaoPass.renderToScreen = true;
effectComposer = new THREE.EffectComposer( renderer );
effectComposer.addPass( ssaoPass );
// Init gui
var gui = new dat.GUI();
gui.add( postprocessing, 'enabled' );
gui.add( postprocessing, 'onlyAO', false ).onChange( function ( value ) {
ssaoPass.onlyAO = value;
} );
gui.add( postprocessing, 'radius' ).min( 0 ).max( 64 ).onChange( function ( value ) {
ssaoPass.radius = value;
} );
gui.add( postprocessing, 'aoClamp' ).min( 0 ).max( 1 ).onChange( function ( value ) {
gui.add( ssaoPass, 'output', {
'Default': THREE.SSAOPass.OUTPUT.Default,
'SSAO Only': THREE.SSAOPass.OUTPUT.SSAO,
'SSAO Only + Blur': THREE.SSAOPass.OUTPUT.Blur,
'Beauty': THREE.SSAOPass.OUTPUT.Beauty,
'Depth': THREE.SSAOPass.OUTPUT.Depth,
'Normal': THREE.SSAOPass.OUTPUT.Normal
} ).onChange( function ( value ) {
ssaoPass.aoClamp = value;
} );
gui.add( postprocessing, 'lumInfluence' ).min( 0 ).max( 1 ).onChange( function ( value ) {
ssaoPass.lumInfluence = value;
ssaoPass.output = parseInt( value );
} );
gui.add( ssaoPass, 'kernelRadius' ).min( 0 ).max( 32 );
gui.add( ssaoPass, 'minDistance' ).min( 0.001 ).max( 0.02 );
gui.add( ssaoPass, 'maxDistance' ).min( 0.01 ).max( 0.3 );
window.addEventListener( 'resize', onWindowResize, false );
......@@ -152,7 +150,6 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
}
function onWindowResize() {
var width = window.innerWidth;
......@@ -162,30 +159,8 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
camera.updateProjectionMatrix();
renderer.setSize( width, height );
// Resize renderTargets
ssaoPass.setSize( width, height );
var pixelRatio = renderer.getPixelRatio();
var newWidth = Math.floor( width / pixelRatio ) || 1;
var newHeight = Math.floor( height / pixelRatio ) || 1;
effectComposer.setSize( newWidth, newHeight );
}
function initPostprocessing() {
// Setup render pass
var renderPass = new THREE.RenderPass( scene, camera );
// Setup SSAO pass
ssaoPass = new THREE.SSAOPass( scene, camera );
ssaoPass.renderToScreen = true;
// Add pass to effect composer
effectComposer = new THREE.EffectComposer( renderer );
effectComposer.addPass( renderPass );
effectComposer.addPass( ssaoPass );
}
function animate() {
......@@ -204,15 +179,7 @@ Spiral sampling http://web.archive.org/web/20120421191837/http://www.cgafaq.info
group.rotation.x = timer * 0.0002;
group.rotation.y = timer * 0.0001;
if ( postprocessing.enabled ) {
effectComposer.render();
} else {
renderer.render( scene, camera );
}
effectComposer.render();
}
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册