Temporal Reprojection Anti-Aliasing for Three.js
THREE.TRAAPass = function(scene, camera, resolution) { | |
THREE.Pass.call(this); | |
this.scene = scene; | |
this.camera = camera; | |
// TAA resolve material: blends the current frame with the reprojected history.
function getSuperSampleTRAAMaterial() { | |
return new THREE.ShaderMaterial({ | |
defines: { | |
DEPTH_PACKING: 1, | |
PERSPECTIVE_CAMERA: 1, | |
}, | |
uniforms: { | |
jitterOffset: { value: new THREE.Vector2(0, 0) }, | |
currentBeauty: { value: null }, | |
previousBeauty: { value: null }, | |
transparentBeauty: { value: null }, | |
tDepth: { value: null }, | |
tVelocity: { value: null }, | |
minSampleWeight: { value: 1.0 / 16.0 }, | |
mode: { value: 0 }, | |
cameraNearFar: { value: new THREE.Vector2() }, | |
textureSize: { value: new THREE.Vector2() }, | |
cameraProjectionMatrix: { value: new THREE.Matrix4() }, | |
cameraInverseProjectionMatrix: { value: new THREE.Matrix4() }, | |
cameraInverseViewMatrix: { value: new THREE.Matrix4() }, | |
}, | |
depthWrite: false, | |
vertexShader: | |
'varying vec2 vUv;\ | |
void main() {\ | |
vUv = uv;\ | |
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\ | |
}', | |
fragmentShader: [ | |
'#include <common>', | |
'varying vec2 vUv;', | |
'uniform sampler2D currentBeauty;', | |
'uniform sampler2D previousBeauty;', | |
'uniform sampler2D transparentBeauty;', | |
'uniform sampler2D tDepth;', | |
'uniform sampler2D tVelocity;', | |
'uniform vec2 textureSize;', | |
'uniform mat4 prevProjectionViewMatrix;', | |
'uniform mat4 currentProjectionViewMatrix;', | |
'uniform mat4 cameraProjectionMatrix;', | |
'uniform mat4 cameraInverseProjectionMatrix;', | |
'uniform mat4 cameraInverseViewMatrix;', | |
'uniform vec2 cameraNearFar;', | |
'uniform float minSampleWeight;', | |
'uniform int mode;', | |
'uniform vec2 jitterOffset;', | |
'#define MODE_MOVING 0', | |
'#define MODE_STATIC 1', | |
'#include <packing>', | |
'float getDepth( const in vec2 screenPosition ) {', | |
' #if DEPTH_PACKING == 1', | |
' return unpackRGBAToDepth( texture2D( tDepth, screenPosition ) );', | |
' #else', | |
' return texture2D( tDepth, screenPosition ).x;', | |
' #endif', | |
'}', | |
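// helpers that return the uv and depth of the closest (minimum-depth) fragment in a 3x3 or 5-tap neighborhood; not called from main() below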
'vec3 find_closest_fragment_9tap(const in vec2 uv) { ', | |
'const vec3 offset = vec3(1.0, -1.0, 0.0);', | |
'vec2 texelSize = 1.0/textureSize; ', | |
'vec3 dtl = vec3(-1, 1, getDepth( uv + offset.yx * texelSize) ); ', | |
'vec3 dtc = vec3( 0, 1, getDepth( uv + offset.zx * texelSize) );', | |
'vec3 dtr = vec3( 1, 1, getDepth( uv + offset.xx * texelSize) );', | |
'vec3 dml = vec3(-1, 0, getDepth( uv + offset.yz * texelSize) );', | |
'vec3 dmc = vec3( 0, 0, getDepth( uv ) );', | |
'vec3 dmr = vec3( 1, 0, getDepth( uv + offset.xz * texelSize) );', | |
'vec3 dbl = vec3(-1, -1, getDepth( uv + offset.yy * texelSize) );', | |
'vec3 dbc = vec3( 0, -1, getDepth( uv + offset.zy * texelSize) );', | |
'vec3 dbr = vec3( 1, -1, getDepth( uv + offset.xy * texelSize) );', | |
'vec3 dmin = dtl;', | |
'if ( dmin.z > dtc.z ) dmin = dtc;', | |
'if ( dmin.z > dtr.z ) dmin = dtr;', | |
'if ( dmin.z > dml.z ) dmin = dml;', | |
'if ( dmin.z > dmc.z ) dmin = dmc;', | |
'if ( dmin.z > dmr.z ) dmin = dmr;', | |
'if ( dmin.z > dbl.z ) dmin = dbl;', | |
'if ( dmin.z > dbc.z ) dmin = dbc;', | |
'if ( dmin.z > dbr.z ) dmin = dbr;', | |
'return vec3(uv + texelSize.xy * dmin.xy, dmin.z);', | |
'}', | |
'vec3 find_closest_fragment_5tap(const in vec2 uv) ', | |
'{ ', | |
'vec2 offset = vec2(1.0, -1.0);', | |
'vec2 texelSize = 1.0/textureSize; ', | |
'vec3 dtl = vec3(-1, 1, getDepth( uv + offset.yx * texelSize ) ); ', | |
'vec3 dtr = vec3( 1, 1, getDepth( uv + offset.xx * texelSize ) );', | |
'vec3 dmc = vec3( 0, 0, getDepth( uv) );', | |
'vec3 dbl = vec3(-1, -1, getDepth( uv + offset.yy * texelSize ) );', | |
'vec3 dbr = vec3( 1, -1, getDepth( uv + offset.xy * texelSize ) );', | |
'vec3 dmin = dtl;', | |
'if ( dmin.z > dtr.z ) dmin = dtr;', | |
'if ( dmin.z > dmc.z ) dmin = dmc;', | |
'if ( dmin.z > dbl.z ) dmin = dbl;', | |
'if ( dmin.z > dbr.z ) dmin = dbr;', | |
'return vec3(uv + texelSize * dmin.xy, dmin.z);', | |
'}', | |
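// clip a color against the neighborhood AABB by pulling its rgb toward the box center when outside (alpha is simply clamped)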
'vec4 clip_aabb(const in vec4 aabb_min, const in vec4 aabb_max, vec4 p )', | |
'{ ', | |
'const float FLT_EPS = 1e-8;', | |
'vec4 p_clip = 0.5 * (aabb_max + aabb_min); ', | |
'vec4 e_clip = 0.5 * (aabb_max - aabb_min) + FLT_EPS; ', | |
'vec4 v_clip = p - p_clip;', | |
'vec4 v_unit = abs(v_clip / e_clip);', | |
'float mv_unit = max(v_unit.x, max(v_unit.y, v_unit.z));', | |
'float a = clamp(p.a, aabb_min.a, aabb_max.a);', | |
'if (mv_unit > 1.0) ', | |
'return vec4((p_clip + v_clip / mv_unit).rgb, a);', | |
'else ', | |
'return vec4(p.rgb, a);', | |
'}', | |
/* | |
'vec4 clip_aabb(const in vec4 aabb_min, const in vec4 aabb_max, vec4 p, vec4 q )', | |
'{ ', | |
'const float FLT_EPS = 1e-8;', | |
'vec3 p_clip = 0.5 * (aabb_max + aabb_min).rgb; ', | |
'vec3 e_clip = 0.5 * (aabb_max - aabb_min).rgb + FLT_EPS; ', | |
'vec4 v_clip = q - (p_clip, p.w);', | |
'vec3 v_unit = abs(v_clip.xyz / e_clip);', | |
'float mv_unit = max(v_unit.x, max(v_unit.y, v_unit.z));', | |
'if (mv_unit > 1.0) ', | |
'return vec4(p_clip + v_clip.rgb / mv_unit, p.a);', | |
'else ', | |
'return vec4(q.rgb, p.a);', | |
'}', | |
*/ | |
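// build the neighborhood color min/max using only neighbors whose transparent coverage matches the center pixel;
// samples with alpha <= 0.5 only widen the alpha bounds; returns false if no sufficiently opaque sample contributed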
'bool calcMinMax(vec4[9] colors, vec4[9] transp, int count, vec4 truth, inout vec4 cMin, inout vec4 cMax) {', | |
'cMin = vec4(2,2,2,2);', | |
'cMax = vec4(-2,-2,-2,-2);', | |
'bool foundColor = false;', | |
'bool foundAlpha = false;', | |
'bool centerTransparent = (truth.a > EPSILON);', | |
'for(int i = 0; i < 9; i++) {', | |
'if(i >= count) break;', | |
'bool pixelTransparent = (transp[i].a > EPSILON);', | |
'if(centerTransparent == pixelTransparent) {', | |
'vec4 min_ = min(cMin, colors[i]);', | |
'vec4 max_ = max(cMax, colors[i]);', | |
'foundAlpha = true;', | |
'if(colors[i].a > .5) {', | |
'cMin = min_;', | |
'cMax = max_;', | |
'foundColor = true;', | |
'} else {', | |
'cMin.a = min_.a;', | |
'cMax.a = max_.a;', | |
'}', | |
'}', | |
'}', | |
'return foundColor;', | |
'}', | |
'void premul(inout vec4 c) {', | |
'c.rgb *= c.a;', | |
'}', | |
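// decode the RG/BA-packed velocity into a signed uv-space offset; an all-zero texel means no velocity was written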
'vec2 getScreenSpaceVelocity( vec2 uv ) {', | |
'vec4 value = texture2D(tVelocity, uv);', | |
'if( value.x == 0.0 && value.y == 0.0 && value.z == 0.0 && value.w == 0.0 ) {', | |
'return vec2( 0.0, 0.0 );', | |
'}', | |
'float vx = unpackRGToDepth(value.xy);', | |
'float vy = unpackRGToDepth(value.zw);', | |
'return vec2(2.*vx - 1., 2.*vy - 1.);', | |
'}', | |
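// TAA resolve: reproject the accumulated history along the velocity, constrain it to the current frame's neighborhood bounds, then blend with the current color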
'vec4 calculateTAA(const in vec2 uv, const in vec2 screenSpaceVelocity) {', | |
'float _FeedbackMin = 1.0 - 2.0 * minSampleWeight;', | |
'float _FeedbackMax = 1.0 - minSampleWeight;', | |
'vec2 texelSize = 1./textureSize;', | |
// http://twvideo01.ubm-us.net/o1/vault/gdc2016/Presentations/Pedersen_LasseJonFuglsang_TemporalReprojectionAntiAliasing.pdf | |
// the paper says to unjitter currentColor, but with clearing off the edge jitter is more
// noticeable if you unjitter (especially in Clara.io), hence the 0. * jitterOffset factor below
'vec4 currentColor = texture2D(currentBeauty, uv - 0.*jitterOffset * texelSize);', | |
'premul(currentColor);', | |
'vec2 lookBackUV = uv - screenSpaceVelocity;', | |
'vec4 previousColor = texture2D(previousBeauty, lookBackUV);', | |
'const vec3 offset = vec3(1., -1., 0.);', | |
'vec4 ctl = texture2D(currentBeauty, uv + offset.yx * texelSize);', | |
'vec4 ctc = texture2D(currentBeauty, uv + offset.zx * texelSize);', | |
'vec4 ctr = texture2D(currentBeauty, uv + offset.xx * texelSize);', | |
'vec4 cml = texture2D(currentBeauty, uv + offset.yz * texelSize);', | |
'vec4 cmc = texture2D(currentBeauty, uv);', | |
'vec4 cmr = texture2D(currentBeauty, uv + offset.xz * texelSize);', | |
'vec4 cbl = texture2D(currentBeauty, uv + offset.yy * texelSize);', | |
'vec4 cbc = texture2D(currentBeauty, uv + offset.zy * texelSize);', | |
'vec4 cbr = texture2D(currentBeauty, uv + offset.xy * texelSize);', | |
'vec4 ttl = texture2D(transparentBeauty, uv + offset.yx * texelSize);', | |
'vec4 ttc = texture2D(transparentBeauty, uv + offset.zx * texelSize);', | |
'vec4 ttr = texture2D(transparentBeauty, uv + offset.xx * texelSize);', | |
'vec4 tml = texture2D(transparentBeauty, uv + offset.yz * texelSize);', | |
'vec4 tmc = texture2D(transparentBeauty, uv);', | |
'vec4 tmr = texture2D(transparentBeauty, uv + offset.xz * texelSize);', | |
'vec4 tbl = texture2D(transparentBeauty, uv + offset.yy * texelSize);', | |
'vec4 tbc = texture2D(transparentBeauty, uv + offset.zy * texelSize);', | |
'vec4 tbr = texture2D(transparentBeauty, uv + offset.xy * texelSize);', | |
//'vec4 cmin = min(ctl, min(ctc, min(ctr, min(cml, min(cmc, min(cmr, min(cbl, min(cbc, cbr))))))));', | |
//'vec4 cmax = max(ctl, max(ctc, max(ctr, max(cml, max(cmc, max(cmr, max(cbl, max(cbc, cbr))))))));', | |
'vec4 cmin, cmax, cmin5, cmax5;', | |
'vec4 cneighbors[9];', | |
'cneighbors[0] = cmc;', | |
// first 4 don't include corners
'cneighbors[1] = ctc;', | |
'cneighbors[2] = cbc;', | |
'cneighbors[3] = cml;', | |
'cneighbors[4] = cmr;', | |
// first 8 include the corners
'cneighbors[5] = ctl;', | |
'cneighbors[6] = ctr;', | |
'cneighbors[7] = cbl;', | |
'cneighbors[8] = cbr;', | |
// | |
'vec4 tneighbors[9];', | |
'tneighbors[0] = tmc;', | |
// first 4 don't include corners
'tneighbors[1] = ttc;', | |
'tneighbors[2] = tbc;', | |
'tneighbors[3] = tml;', | |
'tneighbors[4] = tmr;', | |
// first 8 include the corners
'tneighbors[5] = ttl;', | |
'tneighbors[6] = ttr;', | |
'tneighbors[7] = tbl;', | |
'tneighbors[8] = tbr;', | |
//'for(int i = 0; i < 8; i++) { if((tneighbors[i].a <= EPSILON) != (tmc.a <= EPSILON)) return currentColor; }', | |
// min max | |
'if(!calcMinMax(cneighbors, tneighbors, 9, tmc, cmin, cmax))', | |
'return currentColor;', | |
'if(calcMinMax(cneighbors, tneighbors, 5, tmc, cmin5, cmax5)) {', | |
'cmin = (cmin + cmin5) * .5;', | |
'cmax = (cmax + cmax5) * .5;', | |
'}', | |
'vec4 clampedPreviousColor = clip_aabb(cmin, cmax, previousColor);',
'premul(clampedPreviousColor);', | |
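// feedback weighting: blend in less history where its luminance diverges from the current frame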
'float lum0 = linearToRelativeLuminance(currentColor.rgb);', | |
'float lum1 = linearToRelativeLuminance(clampedPreviousColor.rgb);', | |
'float unbiased_diff = abs(lum0 - lum1) / max(lum0, max(lum1, 0.2));', | |
'float unbiased_weight = 1.0 - unbiased_diff;', | |
'float unbiased_weight_sqr = unbiased_weight * unbiased_weight;', | |
'float k_feedback = mix(_FeedbackMin, _FeedbackMax, unbiased_weight_sqr);', | |
'vec2 previousVelocity = getScreenSpaceVelocity(lookBackUV);', | |
// fade out the history feedback as the per-frame velocity change approaches 10 pixels.
'k_feedback *= 1.0 - saturate( length( ( screenSpaceVelocity - previousVelocity ) / texelSize ) / 10.0 );', | |
// deals with mirror and other transparent surfaces | |
//'k_feedback *= min( currentColor.a, clampedPreviousColor.a );', | |
'if( mode == MODE_MOVING ) {', | |
'return mix(currentColor, clampedPreviousColor, k_feedback);', | |
'}', | |
'else if( mode == MODE_STATIC ) {', | |
'return mix(currentColor, previousColor, 1.0 - pow( minSampleWeight, 0.75 ) );', | |
'}', | |
'else { // mode == MODE_RESET', | |
'return currentColor;', | |
'}', | |
'}', | |
'void main() {', | |
'vec2 screenSpaceVelocity = getScreenSpaceVelocity( vUv );', | |
'gl_FragColor = calculateTAA(vUv, screenSpaceVelocity);', | |
'}', | |
].join('\n'), | |
}); | |
} | |
this.orthoScene = new THREE.Scene(); | |
this.orthoCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, -0.01, 1000); | |
this.superSampleTRAAMaterial = getSuperSampleTRAAMaterial(); | |
this.currentMaterial = this.superSampleTRAAMaterial; | |
var quad = new THREE.PlaneGeometry(2, 2); | |
var quadMesh = new THREE.Mesh(quad, this.currentMaterial); | |
this.orthoScene.add(quadMesh); | |
this.oldClearColor = new THREE.Color(); | |
this.oldClearAlpha = 1; | |
this.needsSwap = false; | |
if (THREE.CopyShader === undefined) | |
console.error('THREE.TRAAPass relies on THREE.CopyShader'); | |
var copyShader = THREE.CopyShader; | |
this.copyUniforms = THREE.UniformsUtils.clone(copyShader.uniforms); | |
this.copyMaterial = new THREE.ShaderMaterial({ | |
uniforms: this.copyUniforms, | |
vertexShader: copyShader.vertexShader, | |
fragmentShader: copyShader.fragmentShader, | |
transparent: false, | |
depthWrite: false, | |
blending: THREE.NoBlending | |
}); | |
var params = { | |
minFilter: THREE.LinearFilter, | |
magFilter: THREE.LinearFilter, | |
format: THREE.RGBAFormat, | |
stencilBuffer: true, | |
}; | |
this.accumulatedBeautyRenderTarget = new THREE.WebGLRenderTarget( | |
256, | |
256, | |
params | |
); | |
this.transparentRT = new THREE.WebGLRenderTarget( | |
0, | |
0, | |
params | |
); | |
this.previousProjectionViewMatrix = new THREE.Matrix4(); | |
this.currentProjectionViewMatrix = new THREE.Matrix4(); | |
this.projectionMatrix = new THREE.Matrix4(); | |
this.projectionMatrix.copy(this.camera.projectionMatrix); | |
this.numSamplesPerAccumulation = 16; | |
this.staticMode = false; | |
this.depthTexture = null; | |
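  // visibilityFunc decides which objects belong to the opaque/velocity/depth passes; its negation selects the transparent pass in render()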
this.visibilityFunc = (obj, mat) => { | |
return !mat.transparent; | |
}; | |
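  // override material used for the opaque depth prepass into transparentRT (color output is vec4(0); only depth matters)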
this.depthMaterial = new THREE.ShaderMaterial({ | |
defines: {},
uniforms: {},
side: THREE.DoubleSide, | |
vertexShader: ` | |
void main() { | |
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.); | |
}`, | |
fragmentShader: ` | |
void main() { | |
gl_FragColor = vec4(0); | |
}` | |
}); | |
}; | |
THREE.TRAAPass.prototype = Object.assign(Object.create(THREE.Pass.prototype), { | |
constructor: THREE.TRAAPass, | |
dispose: function() { | |
if (this.accumulatedBeautyRenderTarget) | |
this.accumulatedBeautyRenderTarget.dispose(); | |
}, | |
setSize: function(width, height) { | |
if (this.accumulatedBeautyRenderTarget) | |
this.accumulatedBeautyRenderTarget.setSize(width, height); | |
if (this.velocityRenderTarget) | |
this.velocityRenderTarget.setSize(width, height); | |
this.projectionMatrix.copy(this.camera.projectionMatrix); | |
this.transparentRT.setSize(width, height); | |
this.resetPending = true; | |
}, | |
renderOverride: function( | |
renderer, | |
overrideMaterial, | |
renderTarget, | |
clearColor, | |
clearAlpha | |
) { | |
var originalClearColor = renderer.getClearColor().getHex(); | |
var originalClearAlpha = renderer.getClearAlpha(); | |
var originalAutoClear = renderer.autoClear; | |
renderer.autoClear = false; | |
clearColor = overrideMaterial.clearColor || clearColor; | |
clearAlpha = overrideMaterial.clearAlpha || clearAlpha; | |
var clearNeeded = clearColor !== undefined && clearColor !== null; | |
if (clearNeeded) { | |
renderer.setClearColor(clearColor); | |
renderer.setClearAlpha(clearAlpha || 0.0); | |
} | |
this.scene.overrideMaterial = overrideMaterial; | |
// if ( this.camera.clearViewOffset ) this.camera.clearViewOffset(); | |
renderer.render(this.scene, this.camera, renderTarget, clearNeeded, this.visibilityFunc); | |
this.scene.overrideMaterial = null; | |
// restore original state | |
renderer.autoClear = originalAutoClear; | |
renderer.setClearColor(originalClearColor); | |
renderer.setClearAlpha(originalClearAlpha); | |
}, | |
render: function(renderer, writeBuffer, readBuffer, delta, maskActive, overrideCamera) { | |
var camera = overrideCamera || this.camera; | |
this.oldClearColor = renderer.getClearColor().getHex(); | |
this.oldClearAlpha = renderer.getClearAlpha(); | |
var oldAutoClear = renderer.autoClear; | |
var oldAutoClearDepth = renderer.autoClearDepth; | |
var width = writeBuffer.width, | |
height = writeBuffer.height; | |
if (!this.velocityRenderTarget) { | |
var params = { | |
minFilter: THREE.LinearFilter, | |
magFilter: THREE.LinearFilter, | |
format: THREE.RGBAFormat, | |
}; | |
this.velocityRenderTarget = new THREE.WebGLRenderTarget( | |
width, | |
height, | |
params | |
); | |
} | |
this.currentMaterial = this.superSampleTRAAMaterial; | |
this.currentProjectionViewMatrix.multiplyMatrices( | |
this.projectionMatrix, // need a special case for VR!! | |
camera.matrixWorldInverse | |
); | |
var oldAutoClearColor = renderer.autoClearColor;
renderer.autoClear = false; | |
renderer.setClearColor(new THREE.Color(0, 0, 0), 0); | |
// render the per-pixel screen-space velocity into velocityRenderTarget (Clara.io MaterialChanger helper)
THREE.MaterialChanger.renderPassVelocity( | |
renderer, | |
this.scene, | |
camera, | |
this.velocityRenderTarget, | |
this.visibilityFunc, | |
this.currentProjectionViewMatrix, | |
this.previousProjectionViewMatrix | |
); | |
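// remember this frame's world matrices for the next frame's velocity pass (matrixWorldPrevious is assumed to be patched onto Object3D by the host application)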
this.scene.traverse(function(obj) { | |
if (obj instanceof THREE.Object3D) { | |
obj.matrixWorldPrevious.copy(obj.matrixWorld); | |
} | |
}); | |
// | |
renderer.autoClearColor = true; | |
renderer.autoClearDepth = true; | |
// depth only | |
renderer.renderOverride( | |
this.depthMaterial, | |
this.scene, | |
camera, | |
this.transparentRT, | |
true, | |
this.visibilityFunc | |
); | |
// transparent | |
renderer.render( | |
this.scene, | |
camera, | |
this.transparentRT, | |
false, | |
(obj, mat) => { | |
return !this.visibilityFunc(obj, mat); | |
} | |
); | |
// | |
if(camera.view) { | |
this.currentMaterial.uniforms['jitterOffset'].value.set(camera.view.offsetX, camera.view.offsetY); | |
} | |
this.currentMaterial.uniforms['transparentBeauty'].value = this.transparentRT.texture; | |
this.currentMaterial.uniforms['currentBeauty'].value = readBuffer.texture; | |
this.currentMaterial.uniforms[ | |
'previousBeauty' | |
].value = this.accumulatedBeautyRenderTarget.texture; | |
this.currentMaterial.defines[ | |
'DEPTH_PACKING' | |
] = this.depthTexture.depthPacking; | |
this.currentMaterial.uniforms['tDepth'].value = this.depthTexture; | |
this.currentMaterial.uniforms[ | |
'tVelocity' | |
].value = this.velocityRenderTarget.texture; | |
if (this.resetPending) { | |
this.currentMaterial.uniforms['mode'].value = 2; | |
this.resetPending = false; | |
} else if (this.staticMode) { | |
this.currentMaterial.uniforms['mode'].value = 1; | |
} else { | |
this.currentMaterial.uniforms['mode'].value = 0; | |
} | |
this.currentMaterial.uniforms[ | |
'cameraInverseProjectionMatrix' | |
].value.getInverse(this.projectionMatrix); | |
this.currentMaterial.uniforms['cameraProjectionMatrix'].value.copy( | |
this.projectionMatrix | |
); | |
this.currentMaterial.uniforms['cameraInverseViewMatrix'].value.copy( | |
camera.matrixWorld | |
); | |
this.currentMaterial.uniforms['cameraNearFar'].value.set(camera.near, camera.far);
this.currentMaterial.uniforms['textureSize'].value.set(width, height);
this.currentMaterial.uniforms['minSampleWeight'].value = | |
1.0 / this.numSamplesPerAccumulation; | |
//renderer.autoClearColor = true; | |
//renderer.autoClearDepth = false; | |
this.orthoScene.overrideMaterial = this.currentMaterial; | |
renderer.autoClearDepth = false; | |
renderer.render(this.orthoScene, this.orthoCamera, writeBuffer, true); | |
this.orthoScene.overrideMaterial = null; | |
this.copyUniforms['tDiffuse'].value = writeBuffer.texture; | |
this.copyUniforms['opacity'].value = 1; | |
this.orthoScene.overrideMaterial = this.copyMaterial; | |
renderer.render( | |
this.orthoScene, | |
this.orthoCamera, | |
this.accumulatedBeautyRenderTarget, | |
true | |
); | |
renderer.render( | |
this.orthoScene, | |
this.orthoCamera, | |
readBuffer, | |
true | |
); | |
this.orthoScene.overrideMaterial = null; | |
renderer.setClearColor(this.oldClearColor, this.oldClearAlpha); | |
renderer.autoClear = oldAutoClear; | |
renderer.autoClearColor = oldAutoClearColor; | |
renderer.autoClearDepth = oldAutoClearDepth; | |
this.previousProjectionViewMatrix.copy(this.currentProjectionViewMatrix); | |
}, | |
}); |
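
For reference, a minimal usage sketch (not part of the original gist): it assumes the old Three.js EffectComposer/Pass API this file is written against, plus the Clara.io-specific pieces it depends on (THREE.MaterialChanger.renderPassVelocity, Object3D.matrixWorldPrevious, renderer.renderOverride and the extended renderer.render signature) already being patched in; depthTexture below is an illustrative name for a packed-depth texture the caller must supply.

var composer = new THREE.EffectComposer(renderer);
composer.addPass(new THREE.RenderPass(scene, camera));

var traaPass = new THREE.TRAAPass(scene, camera);
// hypothetical: a scene depth texture with a .depthPacking property, read via DEPTH_PACKING in the shader
traaPass.depthTexture = depthTexture;
traaPass.numSamplesPerAccumulation = 16; // minSampleWeight becomes 1 / 16
traaPass.staticMode = false;             // false = velocity-based reprojection, true = static accumulation
composer.addPass(traaPass);

function animate() {
  requestAnimationFrame(animate);
  // Sub-pixel camera jitter (e.g. camera.setViewOffset driven by a Halton sequence) gives the
  // pass new samples each frame; it reads the jitter back from camera.view.offsetX/offsetY.
  composer.render();
}
animate();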