diff --git a/docs/list.js b/docs/list.js index 0f756eacf6ebfaf8398c07787753c6cc80dc0f60..fa7cbbfa6130c1fe89fdef721db80d29a143f192 100644 --- a/docs/list.js +++ b/docs/list.js @@ -24,6 +24,7 @@ var list = { "How to update things": "manual/en/introduction/How-to-update-things", "How to dispose of objects": "manual/en/introduction/How-to-dispose-of-objects", "How to create VR content": "manual/en/introduction/How-to-create-VR-content", + "How to use post-processing": "manual/en/introduction/How-to-use-post-processing", "Matrix transformations": "manual/en/introduction/Matrix-transformations", "Animation system": "manual/en/introduction/Animation-system" }, @@ -461,6 +462,7 @@ var list = { "如何更新场景": "manual/zh/introduction/How-to-update-things", "如何废置对象": "manual/zh/introduction/How-to-dispose-of-objects", "如何创建VR内容": "manual/zh/introduction/How-to-create-VR-content", + "How to use post-processing": "manual/zh/introduction/How-to-use-post-processing", "矩阵变换": "manual/zh/introduction/Matrix-transformations", "动画系统": "manual/zh/introduction/Animation-system" }, diff --git a/docs/manual/en/introduction/How-to-use-post-processing.html b/docs/manual/en/introduction/How-to-use-post-processing.html new file mode 100644 index 0000000000000000000000000000000000000000..08a17e3b720696aa5dd9502433b16415e93effc8 --- /dev/null +++ b/docs/manual/en/introduction/How-to-use-post-processing.html @@ -0,0 +1,112 @@ + + +
+ 
+ Many three.js applications render their 3D objects directly to the screen. Sometimes, however, you want to apply one or more graphical
+ effects like Depth-Of-Field, Bloom, Film Grain or various types of Anti-aliasing. Post-processing is a widely used approach
+ to implement such effects. First, the scene is rendered to a render target which represents a buffer in the video card's memory.
+ In the next step one or more post-processing passes apply filters and effects to the image buffer before it is eventually rendered to
+ the screen.
++ three.js provides a complete post-processing solution via [page:EffectComposer] to implement such a workflow. +
+ 
+ The first step in the process is to import all necessary files from the examples directory. The guide assumes you are using the official
+ [link:https://www.npmjs.com/package/three npm package] of three.js. For our basic demo in this guide we need the following files.
+ +
+ import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
+ import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
+ import { GlitchPass } from 'three/examples/jsm/postprocessing/GlitchPass.js';
+
+
+ + After all files are successfully imported, we can create our composer by passing in an instance of [page:WebGLRenderer]. +
+ +
+ var composer = new EffectComposer( renderer );
+
+
+ + When using a composer, it's necessary to change the application's animation loop. Instead of calling the render method of + [page:WebGLRenderer], we now use the respective counterpart of [page:EffectComposer]. +
+ +
+ function animate() {
+
+ requestAnimationFrame( animate );
+
+ composer.render();
+
+ }
+
+
+ + Our composer is now ready so it's possible to configure the chain of post-processing passes. These passes are responsible for creating + the final visual output of the application. They are processed in order of their addition/insertion. In our example, the instance of *RenderPass* + is executed first and then the instance of *GlitchPass*. The last enabled pass in the chain is automatically rendered to the screen. The setup + of the passes looks like so: +
+ +
+ var renderPass = new RenderPass( scene, camera );
+ composer.addPass( renderPass );
+
+ var glitchPass = new GlitchPass();
+ composer.addPass( glitchPass );
+
+
+ 
+ *RenderPass* is normally placed at the beginning of the chain in order to provide the rendered scene as an input for the next post-processing step. In our case,
+ *GlitchPass* is going to use this image data to apply a wild glitch effect. Check out this [link:https://threejs.org/examples/webgl_postprocessing_glitch live example]
+ to see it in action.
+ ++ You can use a wide range of pre-defined post-processing passes provided by the engine. They are located in the + [link:https://github.com/mrdoob/three.js/tree/dev/examples/jsm/postprocessing postprocessing] directory. +
+ ++ Sometimes you want to write a custom post-processing shader and include it into the chain of post-processing passes. For this scenario, + you can utilize *ShaderPass*. After importing the file and your custom shader, you can use the following code to setup the pass. +
+ +
+ import { ShaderPass } from 'three/examples/jsm/postprocessing/ShaderPass.js';
+ import { LuminosityShader } from 'three/examples/jsm/shaders/LuminosityShader.js';
+
+ // later in your init routine
+
+ var luminosityPass = new ShaderPass( LuminosityShader );
+ composer.addPass( luminosityPass );
+
+
+ + The repository provides a file called [link:https://github.com/mrdoob/three.js/blob/master/examples/jsm/shaders/CopyShader.js CopyShader] which is a + good starting code for your own custom shader. *CopyShader* just copies the image contents of the [page:EffectComposer]'s read buffer + to its write buffer without applying any effects. +
+ + + diff --git a/docs/manual/zh/introduction/How-to-use-post-processing.html b/docs/manual/zh/introduction/How-to-use-post-processing.html new file mode 100644 index 0000000000000000000000000000000000000000..08a17e3b720696aa5dd9502433b16415e93effc8 --- /dev/null +++ b/docs/manual/zh/introduction/How-to-use-post-processing.html @@ -0,0 +1,112 @@ + + + + ++ Many three.js applications render their 3D objects directly to the screen. Sometimes, however, you want to apply one or more graphical + effects like Depth-Of-Field, Bloom, Film Grain or various types of Anti-aliasing. Post-processing is a widely used approach + to implement such effects. First, the scene is rendered to a render target which represents a buffer in the video card's memory. + In the next step one ore more post-processing passes apply filters and effects to the image buffer before it is eventually rendered to + the screen. +
++ three.js provides a complete post-processing solution via [page:EffectComposer] to implement such a workflow. +
+ 
+ The first step in the process is to import all necessary files from the examples directory. The guide assumes you are using the official
+ [link:https://www.npmjs.com/package/three npm package] of three.js. For our basic demo in this guide we need the following files.
+ +
+ import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
+ import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
+ import { GlitchPass } from 'three/examples/jsm/postprocessing/GlitchPass.js';
+
+
+ + After all files are successfully imported, we can create our composer by passing in an instance of [page:WebGLRenderer]. +
+ +
+ var composer = new EffectComposer( renderer );
+
+
+ + When using a composer, it's necessary to change the application's animation loop. Instead of calling the render method of + [page:WebGLRenderer], we now use the respective counterpart of [page:EffectComposer]. +
+ +
+ function animate() {
+
+ requestAnimationFrame( animate );
+
+ composer.render();
+
+ }
+
+
+ + Our composer is now ready so it's possible to configure the chain of post-processing passes. These passes are responsible for creating + the final visual output of the application. They are processed in order of their addition/insertion. In our example, the instance of *RenderPass* + is executed first and then the instance of *GlitchPass*. The last enabled pass in the chain is automatically rendered to the screen. The setup + of the passes looks like so: +
+ +
+ var renderPass = new RenderPass( scene, camera );
+ composer.addPass( renderPass );
+
+ var glitchPass = new GlitchPass();
+ composer.addPass( glitchPass );
+
+
+ 
+ *RenderPass* is normally placed at the beginning of the chain in order to provide the rendered scene as an input for the next post-processing step. In our case,
+ *GlitchPass* is going to use this image data to apply a wild glitch effect. Check out this [link:https://threejs.org/examples/webgl_postprocessing_glitch live example]
+ to see it in action.
+ ++ You can use a wide range of pre-defined post-processing passes provided by the engine. They are located in the + [link:https://github.com/mrdoob/three.js/tree/dev/examples/jsm/postprocessing postprocessing] directory. +
+ ++ Sometimes you want to write a custom post-processing shader and include it into the chain of post-processing passes. For this scenario, + you can utilize *ShaderPass*. After importing the file and your custom shader, you can use the following code to setup the pass. +
+ +
+ import { ShaderPass } from 'three/examples/jsm/postprocessing/ShaderPass.js';
+ import { LuminosityShader } from 'three/examples/jsm/shaders/LuminosityShader.js';
+
+ // later in your init routine
+
+ var luminosityPass = new ShaderPass( LuminosityShader );
+ composer.addPass( luminosityPass );
+
+
+ 
+ The repository provides a file called [link:https://github.com/mrdoob/three.js/blob/master/examples/jsm/shaders/CopyShader.js CopyShader] which is a
+ good starting point for your own custom shader. *CopyShader* just copies the image contents of the [page:EffectComposer]'s read buffer
+ to its write buffer without applying any effects.
+ + +