
WebGL Framebuffer Multisampling

I know WebGL can antialias or multisample what you render to the screen to avoid hard edges, but when I render to a framebuffer the antialiasing no longer happens and the result has a bunch of jagged edges.

How can I make the framebuffer use multisampling?

asked Dec 22 '17 by notrota


2 Answers

This took me a day to figure out, so I thought I should post an example for others to follow. I borrowed the cube animation code below from webgl2fundamentals.org; all I have added is the code that antialiases the rendered texture. Make sure the context is created with canvas.getContext("webgl2", {antialias: false}); this method won't work with the built-in antialiasing turned on.

To antialias a rendered texture you need to create a Renderbuffer object and two Framebuffer objects: one framebuffer holds the multisampled drawing, and the other is where the antialiased result is resolved afterwards.

  // Create and bind the framebuffer
  const FRAMEBUFFER = 
  {
    RENDERBUFFER: 0,
    COLORBUFFER: 1
  };
  const fb = 
  [
    gl.createFramebuffer(), 
    gl.createFramebuffer()
  ];
  const colorRenderbuffer = gl.createRenderbuffer();

  gl.bindRenderbuffer(gl.RENDERBUFFER, 
                      colorRenderbuffer);

  gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
                                    gl.getParameter(gl.MAX_SAMPLES),
                                    gl.RGBA8, 
                                    targetTextureWidth,
                                    targetTextureHeight);

  gl.bindFramebuffer(gl.FRAMEBUFFER, 
                     fb[FRAMEBUFFER.RENDERBUFFER]);

  gl.framebufferRenderbuffer(gl.FRAMEBUFFER, 
                             gl.COLOR_ATTACHMENT0, 
                             gl.RENDERBUFFER, 
                             colorRenderbuffer);

  gl.bindFramebuffer(gl.FRAMEBUFFER, 
                     fb[FRAMEBUFFER.COLORBUFFER]);

  gl.framebufferTexture2D(gl.FRAMEBUFFER, 
                          gl.COLOR_ATTACHMENT0, 
                          gl.TEXTURE_2D, 
                          targetTexture, 0);

  gl.bindFramebuffer(gl.FRAMEBUFFER, null);

Just before drawing what will become your texture, bind the first of the two framebuffers (the one backed by the multisampled renderbuffer).

  // render to our targetTexture by binding the framebuffer
  gl.bindFramebuffer(gl.FRAMEBUFFER, 
                     fb[FRAMEBUFFER.RENDERBUFFER]);

Then do the texture drawing, and afterwards resolve the antialiasing, which requires the second framebuffer.

  // ... drawing code ...
  //
  // "blit" the cube into the color buffer, which adds antialiasing
  gl.bindFramebuffer(gl.READ_FRAMEBUFFER, 
                     fb[FRAMEBUFFER.RENDERBUFFER]);

  gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, 
                     fb[FRAMEBUFFER.COLORBUFFER]);

  gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);

  gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
                     0, 0, targetTextureWidth, targetTextureHeight,
                     gl.COLOR_BUFFER_BIT, gl.LINEAR);

  // render the top layer to the framebuffer as well
  gl.bindFramebuffer(gl.FRAMEBUFFER, 
                     fb[FRAMEBUFFER.RENDERBUFFER]);

Once you have finished drawing the top layer into the buffer, use the same antialiasing method from before, this time setting DRAW_FRAMEBUFFER to null; this tells it to draw to the actual canvas.

  // this time render to the default buffer, which is just canvas
  gl.bindFramebuffer(gl.READ_FRAMEBUFFER, 
                     fb[FRAMEBUFFER.RENDERBUFFER]);

  gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);

  gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
  gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
                     0, 0, canvas.width, canvas.height,
                     gl.COLOR_BUFFER_BIT, gl.LINEAR);

Here is the finished product:

"use strict";

var vertexShaderSource = `#version 300 es

// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec4 a_position;
in vec2 a_texcoord;

// A matrix to transform the positions by
uniform mat4 u_matrix;

// a varying to pass the texture coordinates to the fragment shader
out vec2 v_texcoord;

// all shaders have a main function
void main() {
  // Multiply the position by the matrix.
  gl_Position = u_matrix * a_position;

  // Pass the texcoord to the fragment shader.
  v_texcoord = a_texcoord;
}
`;

var fragmentShaderSource = `#version 300 es

precision mediump float;

// Passed in from the vertex shader.
in vec2 v_texcoord;

// The texture.
uniform sampler2D u_texture;

// we need to declare an output for the fragment shader
out vec4 outColor;

void main() {
  outColor = texture(u_texture, v_texcoord);
}
`;

function main() {
  // Get A WebGL context
  /** @type {HTMLCanvasElement} */
  var canvas = document.getElementById("canvas");
  var gl = canvas.getContext("webgl2", {
    antialias: false
  });
  if (!gl) {
    return;
  }

  // Use our boilerplate utils to compile the shaders and link into a program
  var program = webglUtils.createProgramFromSources(gl, [vertexShaderSource, fragmentShaderSource]);

  // look up where the vertex data needs to go.
  var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
  var texcoordAttributeLocation = gl.getAttribLocation(program, "a_texcoord");

  // look up uniform locations
  var matrixLocation = gl.getUniformLocation(program, "u_matrix");
  var textureLocation = gl.getUniformLocation(program, "u_texture");

  // Create a buffer
  var positionBuffer = gl.createBuffer();

  // Create a vertex array object (attribute state)
  var vao = gl.createVertexArray();

  // and make it the one we're currently working with
  gl.bindVertexArray(vao);

  // Turn on the attribute
  gl.enableVertexAttribArray(positionAttributeLocation);

  // Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  // Set Geometry.
  setGeometry(gl);

  // Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
  var size = 3; // 3 components per iteration
  var type = gl.FLOAT; // the data is 32bit floats
  var normalize = false; // don't normalize the data
  var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
  var offset = 0; // start at the beginning of the buffer
  gl.vertexAttribPointer(
    positionAttributeLocation, size, type, normalize, stride, offset);

  // create the texcoord buffer, make it the current ARRAY_BUFFER
  // and copy in the texcoord values
  var texcoordBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
  setTexcoords(gl);

  // Turn on the attribute
  gl.enableVertexAttribArray(texcoordAttributeLocation);

  // Tell the attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
  var size = 2; // 2 components per iteration
  var type = gl.FLOAT; // the data is 32bit floating point values
  var normalize = true; // convert from 0-255 to 0.0-1.0
  var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next color
  var offset = 0; // start at the beginning of the buffer
  gl.vertexAttribPointer(
    texcoordAttributeLocation, size, type, normalize, stride, offset);

  // Create a texture.
  var texture = gl.createTexture();

  // use texture unit 0
  gl.activeTexture(gl.TEXTURE0 + 0);

  // bind to the TEXTURE_2D bind point of texture unit 0
  gl.bindTexture(gl.TEXTURE_2D, texture);

  // fill texture with 3x2 pixels
  {
    const level = 0;
    const internalFormat = gl.R8;
    const width = 3;
    const height = 2;
    const border = 0;
    const format = gl.RED;
    const type = gl.UNSIGNED_BYTE;
    const data = new Uint8Array([
      128, 64, 128,
      0, 192, 0,
    ]);
    gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
      format, type, data);
  }

  // set the filtering so we don't need mips
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  // Create a texture to render to
  const targetTextureWidth = 512;
  const targetTextureHeight = 512;
  const targetTexture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, targetTexture);

  {
    // define size and format of level 0
    const level = 0;
    const internalFormat = gl.RGBA;
    const border = 0;
    const format = gl.RGBA;
    const type = gl.UNSIGNED_BYTE;
    const data = null;
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                  targetTextureWidth, targetTextureHeight, border,
                  format, type, data);

    // set the filtering so we don't need mips
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  }

  // Create and bind the framebuffer
  const FRAMEBUFFER = {
    RENDERBUFFER: 0,
    COLORBUFFER: 1
  };
  const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
  const colorRenderbuffer = gl.createRenderbuffer();
  gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
  gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
  gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);

  function degToRad(d) {
    return d * Math.PI / 180;
  }

  var fieldOfViewRadians = degToRad(60);
  var modelXRotationRadians = degToRad(0);
  var modelYRotationRadians = degToRad(0);

  // Get the starting time.
  var then = 0;

  requestAnimationFrame(drawScene);

  function drawCube(aspect) {
    // Tell it to use our program (pair of shaders)
    gl.useProgram(program);

    // Bind the attribute/buffer set we want.
    gl.bindVertexArray(vao);

    // Compute the projection matrix
    var projectionMatrix =
      m4.perspective(fieldOfViewRadians, aspect, 1, 2000);

    var cameraPosition = [0, 0, 2];
    var up = [0, 1, 0];
    var target = [0, 0, 0];

    // Compute the camera's matrix using look at.
    var cameraMatrix = m4.lookAt(cameraPosition, target, up);

    // Make a view matrix from the camera matrix.
    var viewMatrix = m4.inverse(cameraMatrix);

    var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);

    var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
    matrix = m4.yRotate(matrix, modelYRotationRadians);

    // Set the matrix.
    gl.uniformMatrix4fv(matrixLocation, false, matrix);

    // Tell the shader to use texture unit 0 for u_texture
    gl.uniform1i(textureLocation, 0);

    // Draw the geometry.
    var primitiveType = gl.TRIANGLES;
    var offset = 0;
    var count = 6 * 6;
    gl.drawArrays(primitiveType, offset, count);
  }

  // Draw the scene.
  function drawScene(time) {
    // convert to seconds
    time *= 0.001;
    // Subtract the previous time from the current time
    var deltaTime = time - then;
    // Remember the current time for the next frame.
    then = time;

    // Animate the rotation
    modelYRotationRadians += -0.7 * deltaTime;
    modelXRotationRadians += -0.4 * deltaTime;

    //webglUtils.resizeCanvasToDisplaySize(gl.canvas);

    gl.enable(gl.CULL_FACE);
    gl.enable(gl.DEPTH_TEST);

    {
      // render to our targetTexture by binding the framebuffer
      gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

      // render cube with our 3x2 texture
      gl.bindTexture(gl.TEXTURE_2D, texture);

      // Tell WebGL how to convert from clip space to pixels
      gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);

      // Clear the canvas AND the depth buffer.
      gl.clearColor(0, 0, 1, 1); // clear to blue
      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

      const aspect = targetTextureWidth / targetTextureHeight;
      drawCube(aspect);

      // "blit" the cube into the color buffer, which adds antialiasing
      gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
      gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
      gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
      gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
                         0, 0, targetTextureWidth, targetTextureHeight,
                         gl.COLOR_BUFFER_BIT, gl.LINEAR);
    }

    {
      // render the top layer to the frame buffer as well
      gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

      // render the cube with the texture we just rendered to
      gl.bindTexture(gl.TEXTURE_2D, targetTexture);

      // Tell WebGL how to convert from clip space to pixels
      gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);

      // Clear the canvas AND the depth buffer.
      gl.clearColor(0.105, 0.105, 0.105, 1); // clear to dark gray
      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);


      const aspect = 1;
      drawCube(aspect);
      
      // this time render to the default buffer, which is just canvas
      gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
      gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
      gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
      gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
                         0, 0, canvas.width, canvas.height,
                         gl.COLOR_BUFFER_BIT, gl.LINEAR);
    }

    requestAnimationFrame(drawScene);
  }
}

// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
  var positions = new Float32Array([
    -0.5, -0.5, -0.5,
    -0.5,  0.5, -0.5,
     0.5, -0.5, -0.5,
    -0.5,  0.5, -0.5,
     0.5,  0.5, -0.5,
     0.5, -0.5, -0.5,

    -0.5, -0.5,  0.5,
     0.5, -0.5,  0.5,
    -0.5,  0.5,  0.5,
    -0.5,  0.5,  0.5,
     0.5, -0.5,  0.5,
     0.5,  0.5,  0.5,

    -0.5,  0.5, -0.5,
    -0.5,  0.5,  0.5,
     0.5,  0.5, -0.5,
    -0.5,  0.5,  0.5,
     0.5,  0.5,  0.5,
     0.5,  0.5, -0.5,

    -0.5, -0.5, -0.5,
     0.5, -0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5, -0.5,  0.5,
     0.5, -0.5, -0.5,
     0.5, -0.5,  0.5,

    -0.5, -0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5,  0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5,  0.5,  0.5,
    -0.5,  0.5, -0.5,

     0.5, -0.5, -0.5,
     0.5,  0.5, -0.5,
     0.5, -0.5,  0.5,
     0.5, -0.5,  0.5,
     0.5,  0.5, -0.5,
     0.5,  0.5,  0.5,
  ]);
  gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}

// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
  gl.bufferData(
    gl.ARRAY_BUFFER,
    new Float32Array(
      [
        0, 0,
        0, 1,
        1, 0,
        0, 1,
        1, 1,
        1, 0,

        0, 0,
        0, 1,
        1, 0,
        1, 0,
        0, 1,
        1, 1,

        0, 0,
        0, 1,
        1, 0,
        0, 1,
        1, 1,
        1, 0,

        0, 0,
        0, 1,
        1, 0,
        1, 0,
        0, 1,
        1, 1,

        0, 0,
        0, 1,
        1, 0,
        0, 1,
        1, 1,
        1, 0,

        0, 0,
        0, 1,
        1, 0,
        1, 0,
        0, 1,
        1, 1,

      ]),
    gl.STATIC_DRAW);
}

main();
html 
{
  background-color: #1b1b1b;
}
<canvas id="canvas" width="512" height="512"></canvas>

<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webgl2fundamentals.org/webgl/resources/m4.js"></script>
answered Sep 17 '22 by omikes


WebGL1 does not support multisampled framebuffers, so in that case your options are things like rendering to a higher resolution and downsampling when rendering to the canvas, and/or running a post-processing effect to do the antialiasing.
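
As a rough sketch of the first option (supersampling) in WebGL1: render the scene into a texture larger than the canvas, then draw that texture back at canvas size with LINEAR filtering so the downscale smooths the edges. The names drawScene and drawFullScreenQuad below are hypothetical placeholders for your own render code.

  // Assumes an existing WebGL1 context `gl`.
  const scale = 2;                           // render at 2x the canvas resolution
  const w = gl.canvas.width * scale;
  const h = gl.canvas.height * scale;

  // color texture that backs the high-resolution framebuffer
  const bigTexture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, bigTexture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, 0,
                gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  const bigFb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, bigFb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                          gl.TEXTURE_2D, bigTexture, 0);
  // if the scene needs depth testing, also attach a depth renderbuffer here

  // draw at the higher resolution
  gl.viewport(0, 0, w, h);
  drawScene();                               // hypothetical scene-drawing function

  // draw back to the canvas at its real size; LINEAR filtering does the smoothing
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
  drawFullScreenQuad(bigTexture);            // hypothetical textured-quad helper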

WebGL2 does support multisampling for framebuffers. You can call renderbufferStorageMultisample to create a multisampled renderbuffer, and then call blitFramebuffer to resolve it into the canvas.
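
Here is a minimal sketch of that WebGL2 path, assuming a WebGL2 context `gl` and drawing at canvas size (the variable names and sample count are just for illustration):

  // multisampled renderbuffer + framebuffer to draw into
  const samples = Math.min(4, gl.getParameter(gl.MAX_SAMPLES));
  const rb = gl.createRenderbuffer();
  gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
  gl.renderbufferStorageMultisample(gl.RENDERBUFFER, samples, gl.RGBA8,
                                    gl.canvas.width, gl.canvas.height);

  const msFb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, msFb);
  gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                             gl.RENDERBUFFER, rb);

  // ... draw the scene into msFb ...

  // resolve the multisampled result into the canvas (the default framebuffer)
  gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFb);
  gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
  gl.blitFramebuffer(0, 0, gl.canvas.width, gl.canvas.height,
                     0, 0, gl.canvas.width, gl.canvas.height,
                     gl.COLOR_BUFFER_BIT, gl.LINEAR);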

answered Sep 17 '22 by gman