4.9 C
New York
Thursday, December 19, 2024

Crafting a Dreamy Particle Effect with Three.js and GPGPU


Hello! I’m Dominik, a creative developer based in Wroclaw, Poland. Currently I’m at Huncwot.

In this tutorial, I’ll guide you through creating a dreamy, interactive particle effect using Three.js, shaders, and the powerful GPGPU technique. Together, we’ll explore how to use GPU computation to bring thousands of particles to life with seamless motion, glowing highlights, and dynamic interactivity.

Here’s what we’ll do:

  • Setting up GPGPU for lightning-fast particle calculations
  • Creating mouse-responsive animations
  • Adding extra shine with post-processing effects

To follow this tutorial, a solid understanding of Three.js and shaders is recommended.

Ready to get started?

So let’s dive in!

What is GPGPU?

GPGPU stands for General-Purpose Computation on Graphics Processing Units. Typically, GPUs are used to create graphics and render images, but they can also handle other types of computations. By offloading certain tasks from the CPU to the GPU, processes can be completed much faster. GPUs excel at performing many operations simultaneously, making them ideal for tasks like moving thousands of particles efficiently. This approach significantly boosts performance and enables complex effects that would be too slow for a CPU to handle on its own.

You can learn more about GPGPU here:

Setting Up GPGPU

To harness the power of the GPU, we need to create two textures to store our data. Think of these textures as arrays, where each pixel represents the position of a single particle. To simplify this process, we’ll create a GPGPUUtils class to streamline the GPGPU setup.

GPGPUUtils.js

import * as THREE from 'three';
import { MeshSurfaceSampler } from 'three/examples/jsm/math/MeshSurfaceSampler.js';

  

export default class GPGPUUtils {
	/**
	 * Samples `size * size` particle positions from the surface of a mesh and
	 * packs them into DataTextures / typed arrays for the GPGPU simulation.
	 *
	 * @param {THREE.Mesh} mesh - Mesh whose surface is sampled for particle positions.
	 * @param {number} size - Side length of the square data texture (size² particles).
	 */
	constructor(mesh, size) {
		this.size = size;
		this.number = this.size * this.size; // total particle count
		this.mesh = mesh;

		// MeshSurfaceSampler picks uniformly distributed random points on the mesh surface.
		this.sampler = new MeshSurfaceSampler(this.mesh).build();

		this.setupDataFromMesh();
		this.setupVelocitiesData();
	}

	/**
	 * Builds the initial position DataTexture (RGBA float, one texel per particle)
	 * plus flat `position` and `uv` arrays used as geometry attributes.
	 */
	setupDataFromMesh() {
		const data = new Float32Array(4 * this.number);      // RGBA texel data for the position texture
		const positions = new Float32Array(3 * this.number); // geometry `position` attribute
		const uvs = new Float32Array(2 * this.number);       // geometry `uv` attribute

		this._position = new THREE.Vector3();

		for (let i = 0; i < this.size; i++) {
			for (let j = 0; j < this.size; j++) {
				const index = i * this.size + j;

				// Pick a random point on the mesh surface
				this.sampler.sample(this._position);

				// Texel for the position DataTexture (alpha channel left at 0)
				data[4 * index] = this._position.x;
				data[4 * index + 1] = this._position.y;
				data[4 * index + 2] = this._position.z;

				// Position attribute for the particle geometry
				positions[3 * index] = this._position.x;
				positions[3 * index + 1] = this._position.y;
				positions[3 * index + 2] = this._position.z;

				// UV attribute: maps each particle to its texel in the data texture
				uvs[2 * index] = j / (this.size - 1);
				uvs[2 * index + 1] = i / (this.size - 1);
			}
		}

		const positionTexture = new THREE.DataTexture(data, this.size, this.size, THREE.RGBAFormat, THREE.FloatType);
		positionTexture.needsUpdate = true;

		this.positions = positions;
		this.positionTexture = positionTexture;
		this.uvs = uvs;
	}

	/**
	 * Builds the initial velocity DataTexture, zero-filled (particles start at rest).
	 */
	setupVelocitiesData() {
		const data = new Float32Array(4 * this.number);
		data.fill(0);

		const velocityTexture = new THREE.DataTexture(data, this.size, this.size, THREE.RGBAFormat, THREE.FloatType);
		velocityTexture.needsUpdate = true;

		this.velocityTexture = velocityTexture;
	}

	/** @returns {Float32Array} Flat xyz positions for the geometry `position` attribute. */
	getPositions() {
		return this.positions;
	}

	/** @returns {Float32Array} Flat uv coordinates addressing each particle's texel. */
	getUVs() {
		return this.uvs;
	}

	/** @returns {THREE.DataTexture} Initial particle positions. */
	getPositionTexture() {
		return this.positionTexture;
	}

	/** @returns {THREE.DataTexture} Initial (zero) particle velocities. */
	getVelocityTexture() {
		return this.velocityTexture;
	}
}

GPGPU.js

import * as THREE from 'three';
import GPGPUUtils from './utils';
  

export default class GPGPU {
	/**
	 * Orchestrates the GPGPU particle simulation.
	 *
	 * @param {Object} options
	 * @param {number} options.size - Side length of the particle data texture.
	 * @param {THREE.Camera} options.camera
	 * @param {THREE.WebGLRenderer} options.renderer
	 * @param {Object} options.mouse - Cursor position provider.
	 * @param {THREE.Scene} options.scene - Global scene.
	 * @param {THREE.Mesh} options.model - Mesh the particles are sampled from.
	 * @param {Object} options.sizes - Sizes of the scene, canvas, pixel ratio.
	 */
	constructor({ size, camera, renderer, mouse, scene, model, sizes }) {
		this.camera = camera;   // Camera
		this.renderer = renderer; // Renderer
		this.mouse = mouse;     // Mouse, our cursor position
		this.scene = scene;     // Global scene
		this.sizes = sizes;     // Sizes of the scene, canvas, pixel ratio
		this.size = size;       // Amount of GPGPU particles per texture side
		this.model = model;     // Mesh from which we will sample the particles

		this.init();
	}

	init() {
		this.utils = new GPGPUUtils(this.model, this.size); // Set up GPGPUUtils
	}
}

Integrating GPUComputationRenderer

We’ll use GPUComputationRenderer from Three.js to store particle positions and velocities inside textures.

This is what our GPGPU class should look like so far:

import * as THREE from 'three';
import GPGPUUtils from './utils';

import { GPUComputationRenderer } from 'three/examples/jsm/misc/GPUComputationRenderer.js';
  

export default class GPGPU {
	/**
	 * Orchestrates the GPGPU particle simulation.
	 *
	 * @param {Object} options - See property comments below.
	 */
	constructor({ size, camera, renderer, mouse, scene, model, sizes }) {
		this.camera = camera;     // Camera
		this.renderer = renderer; // Renderer
		this.mouse = mouse;       // Mouse, our cursor position
		this.scene = scene;       // Global scene
		this.sizes = sizes;       // Sizes of the scene, canvas, pixel ratio
		this.size = size;         // Amount of GPGPU particles per texture side, e.g. 1500
		this.model = model;       // Mesh from which we will sample the particles

		this.init();
	}

	init() {
		this.utils = new GPGPUUtils(this.model, this.size); // Set up GPGPUUtils

		this.initGPGPU();
	}

	initGPGPU() {
		// The compute texture dimensions must match the size×size data
		// textures built by GPGPUUtils (one texel per particle).
		this.gpgpuCompute = new GPUComputationRenderer(this.size, this.size, this.renderer);
	}
}

Now we need to pass two textures containing data into our GPUComputationRenderer:

  • positionTexture: Texture with positions of particles.
  • velocityTexture: Texture with velocities of particles.

Thanks to GPGPUUtils, we can easily create them:

// Initial data textures built by GPGPUUtils (one texel per particle).
const positionTexture = this.utils.getPositionTexture();
const velocityTexture = this.utils.getVelocityTexture();

Now that we have the textures, we need to create two shaders for the GPUComputationRenderer:

simFragmentVelocity

This shader calculates the velocity of our particles (it makes the particles move).

simFragmentVelocity.glsl

uniform sampler2D uOriginalPosition;

// Velocity simulation step. `uCurrentPosition`, `uCurrentVelocity` and
// `resolution` are injected automatically by GPUComputationRenderer.
void main() {
    vec2 vUv = gl_FragCoord.xy / resolution.xy;

    vec3 position = texture2D( uCurrentPosition, vUv ).xyz;
    vec3 original = texture2D( uOriginalPosition, vUv ).xyz;
    vec3 velocity = texture2D( uCurrentVelocity, vUv ).xyz;

    gl_FragColor = vec4(velocity, 1.);
}

simFragment

Inside this shader, we update the current particle position based on its velocity.

simFragment.glsl

// Position simulation step: integrate velocity into position.
// `uCurrentPosition`, `uCurrentVelocity` and `resolution` are injected
// automatically by GPUComputationRenderer.
void main() {
    vec2 vUv = gl_FragCoord.xy / resolution.xy;

    vec3 position = texture2D( uCurrentPosition, vUv ).xyz;
    vec3 velocity = texture2D( uCurrentVelocity, vUv ).xyz;

    position += velocity;

    gl_FragColor = vec4( position, 1.);
}

As you’ve probably noticed, we aren’t creating uniforms for uCurrentPosition and uCurrentVelocity. This is because these textures are automatically passed to the shader by GPUComputationRenderer.

Now let’s pass these shaders and data textures into the GPUComputationRenderer as follows:

// Register both textures as GPGPU variables; their current render targets
// are passed to the shaders automatically on each compute() call.
this.positionVariable = this.gpgpuCompute.addVariable('uCurrentPosition', simFragmentPositionShader, positionTexture);

this.velocityVariable = this.gpgpuCompute.addVariable('uCurrentVelocity', simFragmentVelocityShader, velocityTexture);


// Each simulation shader may read both the position and the velocity texture.
this.gpgpuCompute.setVariableDependencies(this.positionVariable, [this.positionVariable, this.velocityVariable]);

this.gpgpuCompute.setVariableDependencies(this.velocityVariable, [this.positionVariable, this.velocityVariable]);

Next, let’s set up the uniforms for the simFragmentVelocity and simFragmentPosition shaders.

// Shortcuts to the uniforms of both simulation materials.
this.uniforms = {
    positionUniforms: this.positionVariable.material.uniforms,
    velocityUniforms: this.velocityVariable.material.uniforms
};

// Uniforms consumed by the velocity simulation shader.
this.uniforms.velocityUniforms.uMouse = { value: this.mouse.cursorPosition };
this.uniforms.velocityUniforms.uMouseSpeed = { value: 0 };
this.uniforms.velocityUniforms.uOriginalPosition = { value: positionTexture };
this.uniforms.velocityUniforms.uTime = { value: 0 };

And finally we can initialize our GPUComputationRenderer:

this.gpgpuCompute.init();

This is what our class should look like:

import * as THREE from 'three';  
import simFragmentPositionShader from './shaders/simFragment.glsl';
import simFragmentVelocityShader from './shaders/simFragmentVelocity.glsl';
import { GPUComputationRenderer } from 'three/examples/jsm/misc/GPUComputationRenderer.js';
import GPGPUUtils from './utils';


export default class GPGPU {
    /**
     * Orchestrates the GPGPU particle simulation: builds the data textures,
     * wires them into a GPUComputationRenderer, and steps the simulation.
     *
     * @param {Object} options
     * @param {number} options.size - Side length of the particle data texture.
     * @param {THREE.Camera} options.camera
     * @param {THREE.WebGLRenderer} options.renderer
     * @param {Object} options.mouse - Cursor position provider.
     * @param {THREE.Scene} options.scene - Global scene.
     * @param {THREE.Mesh} options.model - Mesh the particles are sampled from.
     * @param {Object} options.sizes - Window width & height, pixel ratio.
     */
    constructor({ size, camera, renderer, mouse, scene, model, sizes }) {
        this.camera = camera;     // Camera
        this.renderer = renderer; // Renderer
        this.mouse = mouse;       // Our cursor position
        this.scene = scene;       // Global scene
        this.sizes = sizes;       // Window width & height

        this.size = size;         // Amount of GPGPU particles per texture side
        this.model = model;       // Mesh from which we will sample the particles

        this.init();
    }

    init() {
        this.utils = new GPGPUUtils(this.model, this.size);

        this.initGPGPU();
    }

    initGPGPU() {
        // The compute texture dimensions must match the size×size data
        // textures built by GPGPUUtils (one texel per particle).
        this.gpgpuCompute = new GPUComputationRenderer(this.size, this.size, this.renderer);

        const positionTexture = this.utils.getPositionTexture();
        const velocityTexture = this.utils.getVelocityTexture();

        this.positionVariable = this.gpgpuCompute.addVariable('uCurrentPosition', simFragmentPositionShader, positionTexture);

        this.velocityVariable = this.gpgpuCompute.addVariable('uCurrentVelocity', simFragmentVelocityShader, velocityTexture);

        // Each simulation shader may read both textures.
        this.gpgpuCompute.setVariableDependencies(this.positionVariable, [this.positionVariable, this.velocityVariable]);

        this.gpgpuCompute.setVariableDependencies(this.velocityVariable, [this.positionVariable, this.velocityVariable]);

        // Shortcuts to the uniforms of both simulation materials.
        this.uniforms = {
            positionUniforms: this.positionVariable.material.uniforms,
            velocityUniforms: this.velocityVariable.material.uniforms
        };

        this.uniforms.velocityUniforms.uMouse = { value: this.mouse.cursorPosition };
        this.uniforms.velocityUniforms.uMouseSpeed = { value: 0 };
        this.uniforms.velocityUniforms.uOriginalPosition = { value: positionTexture };
        this.uniforms.velocityUniforms.uTime = { value: 0 };

        this.gpgpuCompute.init();
    }

    /**
     * Runs one simulation step; call once per frame.
     * @param {number} time - Elapsed time, forwarded to the velocity shader.
     */
    compute(time) {
        this.gpgpuCompute.compute();

        this.uniforms.velocityUniforms.uTime.value = time;
    }
}

Perfect! Now that the GPUComputationRenderer is set up and ready to perform calculations, we can proceed to create our particles.

Creating Particles

Let’s start by creating the material for our particles. We will need two shaders to update the particles’ positions based on the data computed by the GPGPU.

vertex.glsl

varying vec2 vUv;
varying vec3 vPosition;

uniform float uParticleSize;
uniform sampler2D uPositionTexture;

void main() {
    vUv = uv;

    vec3 newpos = position;

    // Fetch the GPGPU-computed position stored in this particle's texel.
    vec4 color = texture2D( uPositionTexture, vUv );

    newpos.xyz = color.xyz;

    vPosition = newpos;

    vec4 mvPosition = modelViewMatrix * vec4( newpos, 1.0 );

    // Perspective point size: particles shrink with distance from the camera.
    gl_PointSize = ( uParticleSize / -mvPosition.z );

    gl_Position = projectionMatrix * mvPosition;
}

fragment.glsl

varying vec2 vUv;

uniform sampler2D uVelocityTexture;

void main() {
    // Distance from the point sprite's center.
    float center = length(gl_PointCoord - 0.5);

    vec3 velocity = texture2D( uVelocityTexture, vUv ).xyz * 100.0;
    // Faster particles are more opaque (length of a scalar == abs).
    float velocityAlpha = clamp(length(velocity.r), 0.04, 0.8);

    // Discard fragments outside the circular sprite.
    if (center > 0.5) { discard; }

    gl_FragColor = vec4(0.808, 0.647, 0.239, velocityAlpha);
}

Now let’s set up the ShaderMaterial for the particles.

// Setup Particles Material: reads positions/velocities from the GPGPU render targets.

this.material = new THREE.ShaderMaterial({
    uniforms: {
        uPositionTexture: { value: this.gpgpuCompute.getCurrentRenderTarget(this.positionVariable).texture },
        uVelocityTexture: { value: this.gpgpuCompute.getCurrentRenderTarget(this.velocityVariable).texture },
        uResolution: { value: new THREE.Vector2(this.sizes.width, this.sizes.height) },
        uParticleSize: { value: 2 }
    },
    vertexShader: vertexShader,
    fragmentShader: fragmentShader,
    depthWrite: false,
    depthTest: false,
    blending: THREE.AdditiveBlending,
    transparent: true
});

The positions of the particles calculated by the GPGPU are passed as a uniform via a texture stored in a buffer.

Let’s now create the geometry for our particles. The position and UV data can be easily retrieved from the GPGPUUtils class we created earlier. After that, we need to set these values as attributes on the geometry.

// Setup Particles Geometry: one vertex per particle.

const geometry = new THREE.BufferGeometry();


// Get position and uv data for the geometry attributes

const positions = this.utils.getPositions();
const uvs = this.utils.getUVs();


// Set geometry attributes (the attribute names 'position'/'uv' are required by Three.js)

geometry.setAttribute('position', new THREE.BufferAttribute(positions, 3));
geometry.setAttribute('uv', new THREE.BufferAttribute(uvs, 2));

Once we have our material and geometry, we can combine them with THREE.Points and add the result to the scene to display the particles.

createParticles() {

    // Setup Particles Materials

    this.materials = new THREE.ShaderMaterial({
        uniforms: {
            uPositionTexture: { worth: this.gpgpuCompute.getCurrentRenderTarget(this.positionVariable).texture },
            uVelocityTexture: { worth: this.gpgpuCompute.getCurrentRenderTarget(this.velocityVariable).texture },
            uResolution: { worth: new THREE.Vector2(this.sizes.width, this.sizes.peak) },
            uParticleSize: { worth: 2 }
        },
        vertexShader: vertexShader,
        fragmentShader: fragmentShader,
        depthWrite: false,
        depthTest: false,
        mixing: THREE.AdditiveBlending,
        clear: true
    })


    // Setup Particles Geometry

    const geometry = new THREE.BufferGeometry();


    // Get positions, uvs knowledge for geometry attributes

    const positions = this.utils.getPositions();
    const uvs = this.utils.getUVs();


    // Set geometry attributes

    geometry.setAttribute('place', new THREE.BufferAttribute(positions, 3));
    geometry.setAttribute('uv', new THREE.BufferAttribute(uvs, 2));


    this.mesh = new THREE.Factors(geometry, this.materials);

    this.scene.add(this.mesh);
}

Once everything is set up, we need to run the GPUComputationRenderer computations on every frame so that the positions of the particles are updated.

GPGPU.js

// Advance the GPGPU simulation by one step; call on every frame.
compute() {
    this.gpgpuCompute.compute();
}

This is our effect so far:

Now, let’s take a look at the next step, where we will put the particles into motion on mouse move.

Mouse interaction

Once our particles are visible on the screen, we can create a mouse effect to push particles away from our cursor. For this, we’ll use a GPGPUEvents class to handle the Three.js Raycaster, and three-mesh-bvh to speed up raycasting.

import * as THREE from 'three';
import { MeshBVH, acceleratedRaycast } from 'three-mesh-bvh';



export default class GPGPUEvents {

    /**
     * Mouse interaction for the GPGPU particles: raycasts the cursor against
     * the sampled mesh (BVH-accelerated) and pushes the hit point and cursor
     * speed into the velocity-simulation uniforms.
     *
     * @param {Object} mouse - Emitter exposing `cursorPosition` and a 'mousemove' event.
     * @param {THREE.Camera} camera
     * @param {THREE.Mesh} mesh - Mesh the particles were sampled from.
     * @param {Object} uniforms - The GPGPU uniforms object (velocityUniforms, ...).
     */
    constructor(mouse, camera, mesh, uniforms) {
        this.camera = camera;
        this.mouse = mouse;
        this.geometry = mesh.geometry;
        this.uniforms = uniforms;
        this.mesh = mesh;

        // Decaying cursor-speed factor sent to the shader.
        this.mouseSpeed = 0;

        this.init();
    }

    init() {
        this.setupMouse();
    }

    setupMouse() {
        // Use three-mesh-bvh's accelerated raycast for fast surface hits.
        THREE.Mesh.prototype.raycast = acceleratedRaycast;

        this.geometry.boundsTree = new MeshBVH(this.geometry);

        this.raycaster = new THREE.Raycaster();
        this.raycaster.firstHitOnly = true;

        // Proxy mesh used only as a raycast target (never added to the scene).
        this.raycasterMesh = new THREE.Mesh(
            this.geometry,
            new THREE.MeshBasicMaterial()
        );

        this.mouse.on('mousemove', (cursorPosition) => {
            this.raycaster.setFromCamera(cursorPosition, this.camera);

            const intersects = this.raycaster.intersectObjects([this.raycasterMesh]);

            if (intersects.length > 0) {
                const worldPoint = intersects[0].point.clone();

                this.mouseSpeed = 1;

                this.uniforms.velocityUniforms.uMouse.value = worldPoint;
            }
        });
    }

    update() {
        if (!this.mouse.cursorPosition) return; // don't update if cursorPosition is undefined

        // Let the speed factor decay between mouse events.
        this.mouseSpeed *= 0.85;

        // NOTE(review): currentMousePosition/previousMousePosition are never
        // assigned anywhere in this class — confirm where they are tracked;
        // guarded here so the decay above still applies instead of throwing.
        if (this.currentMousePosition && this.previousMousePosition) {
            this.mouseSpeed = Math.min(this.currentMousePosition.distanceTo(this.previousMousePosition) * 500, 1);
        }

        if (this.uniforms.velocityUniforms.uMouseSpeed) this.uniforms.velocityUniforms.uMouseSpeed.value = this.mouseSpeed;
    }
}

GPGPUEvents, as you can see, sends the current mouse position and speed to simFragmentVelocity as uniforms. This will be important later to make the particles repel when the mouse moves.

We can now initialize it inside the GPGPU class and add it to the compute() function so it updates on every tick.

init() {
    this.utils = new GPGPUUtils(this.mannequin, this.measurement);

    this.initGPGPU();

    this.createParticles();

    this.occasions = new GPGPUEvents(this.mouse, this.digital camera, this.mannequin, this.uniforms);
}


compute() {
    this.gpgpuCompute.compute();
    this.occasions.replace();
}

Once GPGPUEvents is set up, we can move to the simFragmentVelocity shader to animate the particles based on mouse movement.

simFragmentVelocity.glsl

uniform sampler2D uOriginalPosition;
uniform vec3 uMouse;
uniform float uMouseSpeed;

// Velocity simulation step. `uCurrentPosition`, `uCurrentVelocity` and
// `resolution` are injected automatically by GPUComputationRenderer.
void main() {
    vec2 vUv = gl_FragCoord.xy / resolution.xy;

    vec3 position = texture2D( uCurrentPosition, vUv ).xyz;
    vec3 original = texture2D( uOriginalPosition, vUv ).xyz;
    vec3 velocity = texture2D( uCurrentVelocity, vUv ).xyz;

    velocity *= 0.7; // velocity relaxation (damping)


    // particle attraction-to-shape force

    vec3 direction = normalize( original - position );

    float dist = length( original - position );

    if( dist > 0.001 ) velocity += direction * 0.0003;


    // mouse repel force

    float mouseDistance = distance( position, uMouse );
    float maxDistance = 0.1;


    if( mouseDistance < maxDistance ) {
        vec3 pushDirection = normalize( position - uMouse );
        // Push falls off linearly with distance and scales with cursor speed.
        velocity += pushDirection * ( 1.0 - mouseDistance / maxDistance ) * 0.0023 * uMouseSpeed;
    }


    gl_FragColor = vec4(velocity, 1.);
}

We can also make the particles shine brighter when the velocity is high, inside fragment.glsl.

fragment.glsl

varying vec2 vUv;

uniform sampler2D uVelocityTexture;

void main() {
    // Distance from the point sprite's center.
    float center = length(gl_PointCoord - 0.5);

    vec3 velocity = texture2D( uVelocityTexture, vUv ).xyz * 100.0;

    // Faster particles are more opaque (length of a scalar == abs).
    float velocityAlpha = clamp(length(velocity.r), 0.04, 0.8);

    // Discard fragments outside the circular sprite.
    if (center > 0.5) { discard; }


    gl_FragColor = vec4(0.808, 0.647, 0.239, velocityAlpha);
}

And that’s how it looks so far. Pretty, right?

Post-processing

In the final step, we’ll set up post-processing to make our particles shine. The PostProcessing class does just that.

import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { MotionBloomPass } from './MotionBloomPass.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass.js';
import { Vector2 } from 'three';



export default class PostProcessing {
    /**
     * Bloom post-processing pipeline: RenderPass -> MotionBloomPass -> OutputPass.
     *
     * @param {Object} options
     * @param {THREE.WebGLRenderer} options.renderer
     * @param {THREE.Scene} options.scene
     * @param {Object} options.camera - Camera wrapper; `camera.target` is rendered.
     * @param {Object} options.sizes - Width, height, pixelRatio.
     * @param {Object} options.debug - Debug UI handle.
     */
    constructor({ renderer, scene, camera, sizes, debug }) {
        this.renderer = renderer;
        this.scene = scene;
        this.camera = camera;
        this.sizes = sizes;
        this.debug = debug;

        // Bloom tuning parameters.
        this.params = {
            threshold: 0.2,
            strength: 0.8,
            radius: 0.4, // matches the radius argument passed to MotionBloomPass below
        };

        this.init();
    }

    /** Lazily-created singleton accessor. */
    static getInstance(args) {
        if (!PostProcessing.instance) {
            PostProcessing.instance = new PostProcessing(args);
        }

        return PostProcessing.instance;
    }


    // Init

    init() {
        this.setupEffect();
        this.setupDebug(); // NOTE(review): setupDebug is not defined in this class — confirm it exists elsewhere
    }


    setupEffect() {
        const renderScene = new RenderPass(this.scene, this.camera.target);

        this.bloomPass = new MotionBloomPass(new Vector2(this.sizes.width, this.sizes.height), 1.5, 0.4, 0.85);
        this.bloomPass.threshold = this.params.threshold;
        this.bloomPass.strength = this.params.strength;
        this.bloomPass.radius = this.params.radius;

        const outputPass = new OutputPass();

        this.composer = new EffectComposer(this.renderer);
        this.composer.addPass(renderScene);
        this.composer.addPass(this.bloomPass); // <-- our effect that makes the particles shine
        this.composer.addPass(outputPass);
    }

    /** Keep the composer in sync with the canvas size / pixel ratio. */
    resize() {
        if (this.composer) {
            this.composer.setSize(this.sizes.width, this.sizes.height);
            this.composer.setPixelRatio(this.sizes.pixelRatio);
        }
    }

    /** Render the post-processed frame; call on every tick. */
    update() {
        if (this.composer) this.composer.render();
    }
}

The effect we’re using here is a modified UnrealBloomPass from the Three.js library. You can find the code here.

For a post-processing implementation, check out:

And that’s it! Our final result is a dreamy, unreal effect:

And this is how it looks in motion:

Final Words

I hope you enjoyed this tutorial and learned something from it!

GPGPU is an advanced topic that could fill an entire article on its own. Nonetheless, I hope this project will be a cool starting point for you to explore and experiment with this technique.



Supply hyperlink

Related Articles

LEAVE A REPLY

Please enter your comment!
Please enter your name here

Latest Articles