I’ve just started learning WebGL.
I am rendering multiple spheres, but I’m not sure about the “bindBuffer” and “bufferData” calls inside the render loop.
I can render a single sphere with 2 million vertices with no problem. But once I try to render 3 spheres with 100k vertices each (300k total, 85% fewer vertices), performance starts to drop.
I want to know exactly what needs to stay inside the render loop and what doesn’t, and whether there is something else I am missing.
Here is my Sphere “class”:
function Sphere (resolution, gl, vertex, fragment) {
  const {positions, indexes} = createPositionsAndIndexes(resolution);
  const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertex);
  const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragment);
  const program = createProgram(gl, vertexShader, fragmentShader);

  this.x = 0;
  this.y = 0;
  this.z = -6;
  this.angle = {x:0, y:0, z:0};

  const positionBuffer = gl.createBuffer();
  const indexBuffer = gl.createBuffer();

  const positionLocation = gl.getAttribLocation(program, "position");
  const viewLocation = gl.getUniformLocation(program, "view");
  const projectionLocation = gl.getUniformLocation(program, "projection");

  this.render = () => {
    gl.useProgram(program);

    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint32Array(indexes), gl.STATIC_DRAW);

    gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, false, 0, 0);
    gl.enableVertexAttribArray(positionLocation);

    const viewMatrix = glMatrix.mat4.create();
    glMatrix.mat4.translate(viewMatrix, viewMatrix, [this.x, this.y, this.z]);
    glMatrix.mat4.rotateX(viewMatrix, viewMatrix, this.angle.x);
    glMatrix.mat4.rotateY(viewMatrix, viewMatrix, this.angle.y);
    glMatrix.mat4.rotateZ(viewMatrix, viewMatrix, this.angle.z);
    gl.uniformMatrix4fv(viewLocation, false, viewMatrix);

    const projectionMatrix = glMatrix.mat4.create();
    glMatrix.mat4.perspective(projectionMatrix, 45 * Math.PI / 180, gl.canvas.clientWidth / gl.canvas.clientHeight, 0.1, 100.0);
    gl.uniformMatrix4fv(projectionLocation, false, projectionMatrix);

    gl.drawElements(gl.TRIANGLES, indexes.length, gl.UNSIGNED_INT, 0);
  };
}
And here is the main “class”:
document.addEventListener("DOMContentLoaded", () => {
  const canvas = document.querySelector("canvas");
  const width = canvas.width = canvas.clientWidth;
  const height = canvas.height = canvas.clientHeight;
  const gl = canvas.getContext("webgl2");

  const sphere1 = new Sphere(300, gl, vertexShaderSource, fragmentShaderSource);
  sphere1.x = -0.5;
  const sphere2 = new Sphere(300, gl, vertexShaderSource, fragmentShaderSource);
  sphere2.x = 0.0;
  const sphere3 = new Sphere(300, gl, vertexShaderSource, fragmentShaderSource);
  sphere3.x = +0.5;

  const render = () => {
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(0, 0, 0, 0);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    gl.enable(gl.DEPTH_TEST);
    gl.clearDepth(1.0);
    gl.depthFunc(gl.LEQUAL);

    sphere1.angle.y -= 0.01;
    sphere1.render();
    sphere2.angle.y -= 0.01;
    sphere2.render();
    sphere3.angle.y -= 0.005;
    sphere3.render();

    window.requestAnimationFrame(render);
  };
  render();
});
Answer
You shouldn’t call bufferData at render time unless you’re changing the data in the buffer. Create the buffers and upload the vertex data once, at init time; the render loop then only needs to bind the buffers, set up the attribute, update the uniforms, and draw.
function Sphere (resolution, gl, vertex, fragment) {
  const {positions, indexes} = createPositionsAndIndexes(resolution);
  const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertex);
  const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragment);
  const program = createProgram(gl, vertexShader, fragmentShader);

  this.x = 0;
  this.y = 0;
  this.z = -6;
  this.angle = {x:0, y:0, z:0};

  const positionBuffer = gl.createBuffer();
  const indexBuffer = gl.createBuffer();

  const positionLocation = gl.getAttribLocation(program, "position");
  const viewLocation = gl.getUniformLocation(program, "view");
  const projectionLocation = gl.getUniformLocation(program, "projection");

  // create buffers and put data in them (once, at init time)
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint32Array(indexes), gl.STATIC_DRAW);

  this.render = () => {
    gl.useProgram(program);

    // bind the position buffer to the attribute
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, false, 0, 0);
    gl.enableVertexAttribArray(positionLocation);

    // bind the index buffer for drawElements
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);

    const viewMatrix = glMatrix.mat4.create();
    glMatrix.mat4.translate(viewMatrix, viewMatrix, [this.x, this.y, this.z]);
    glMatrix.mat4.rotateX(viewMatrix, viewMatrix, this.angle.x);
    glMatrix.mat4.rotateY(viewMatrix, viewMatrix, this.angle.y);
    glMatrix.mat4.rotateZ(viewMatrix, viewMatrix, this.angle.z);
    gl.uniformMatrix4fv(viewLocation, false, viewMatrix);

    const projectionMatrix = glMatrix.mat4.create();
    glMatrix.mat4.perspective(projectionMatrix, 45 * Math.PI / 180, gl.canvas.clientWidth / gl.canvas.clientHeight, 0.1, 100.0);
    gl.uniformMatrix4fv(projectionLocation, false, projectionMatrix);

    gl.drawElements(gl.TRIANGLES, indexes.length, gl.UNSIGNED_INT, 0);
  };
}
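Since your three spheres use the same resolution and the same shaders, you could go one step further and share a single program, one set of buffers, and (because you already request a "webgl2" context) a vertex array object, so the per-frame work is just binding the VAO and setting uniforms. The sketch below is not from the answer above; it reuses the createPositionsAndIndexes, createShader and createProgram helpers from the question, and the createSharedSphere / drawSphere names are made up for illustration.

function createSharedSphere(resolution, gl, vertex, fragment) {
  const {positions, indexes} = createPositionsAndIndexes(resolution);
  const program = createProgram(gl,
      createShader(gl, gl.VERTEX_SHADER, vertex),
      createShader(gl, gl.FRAGMENT_SHADER, fragment));
  const positionLocation = gl.getAttribLocation(program, "position");
  const viewLocation = gl.getUniformLocation(program, "view");
  const projectionLocation = gl.getUniformLocation(program, "projection");

  // The VAO records the attribute setup and the ELEMENT_ARRAY_BUFFER binding once.
  const vao = gl.createVertexArray();
  gl.bindVertexArray(vao);

  const positionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
  gl.enableVertexAttribArray(positionLocation);
  gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, false, 0, 0);

  const indexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint32Array(indexes), gl.STATIC_DRAW);

  gl.bindVertexArray(null);

  // Per draw call only the uniforms change; geometry and attribute state are reused.
  return function drawSphere(x, y, z, angle) {
    gl.useProgram(program);
    gl.bindVertexArray(vao);

    const viewMatrix = glMatrix.mat4.create();
    glMatrix.mat4.translate(viewMatrix, viewMatrix, [x, y, z]);
    glMatrix.mat4.rotateX(viewMatrix, viewMatrix, angle.x);
    glMatrix.mat4.rotateY(viewMatrix, viewMatrix, angle.y);
    glMatrix.mat4.rotateZ(viewMatrix, viewMatrix, angle.z);
    gl.uniformMatrix4fv(viewLocation, false, viewMatrix);

    const projectionMatrix = glMatrix.mat4.create();
    glMatrix.mat4.perspective(projectionMatrix, 45 * Math.PI / 180, gl.canvas.clientWidth / gl.canvas.clientHeight, 0.1, 100.0);
    gl.uniformMatrix4fv(projectionLocation, false, projectionMatrix);

    gl.drawElements(gl.TRIANGLES, indexes.length, gl.UNSIGNED_INT, 0);
  };
}

Your main loop would then call createSharedSphere once at startup and call the returned drawSphere three times per frame with different positions and angles, instead of constructing three Sphere objects that each compile their own program and upload their own copy of the geometry.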
You might also find these articles helpful, in particular this one.