diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index bd3ca55ba0..5289728040 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -19,12 +19,26 @@ jobs: uses: actions/setup-node@v1 with: node-version: 20.x + - name: Install Vulkan + run: | + sudo apt-get update + sudo apt-get install -y libvulkan1 vulkan-tools mesa-vulkan-drivers + - name: Install Chrome (latest stable) + run: | + sudo apt-get update + sudo apt-get install -y wget gnupg + wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - + sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list' + sudo apt-get update + sudo apt-get install -y google-chrome-stable + which google-chrome + google-chrome --version - name: Get node modules run: npm ci env: CI: true - name: build and test - run: npm test + run: xvfb-run --auto-servernum --server-args='-screen 0 1280x1024x24' npm test env: CI: true - name: report test coverage diff --git a/preview/index.html b/preview/index.html index 6e4915ab34..4092992316 100644 --- a/preview/index.html +++ b/preview/index.html @@ -30,6 +30,7 @@ p.setup = async function () { await p.createCanvas(400, 400, p.WEBGPU); + fbo = p.createFramebuffer(); tex = p.createImage(100, 100); tex.loadPixels(); @@ -43,6 +44,10 @@ } } tex.updatePixels(); + fbo.draw(() => { + p.imageMode(p.CENTER); + p.image(tex, 0, 0, p.width, p.height); + }); sh = p.baseMaterialShader().modify({ uniforms: { @@ -87,7 +92,7 @@ 0, //p.width/3 * p.sin(t * 0.9 + i * Math.E + 0.2), p.width/3 * p.sin(t * 1.2 + i * Math.E + 0.3), ) - p.texture(tex) + p.texture(fbo) p.sphere(30); p.pop(); } diff --git a/src/core/main.js b/src/core/main.js index a5c9a6c93d..f9fc1c6559 100644 --- a/src/core/main.js +++ b/src/core/main.js @@ -468,11 +468,11 @@ for (const k in constants) { * If `setup()` is declared `async` (e.g. `async function setup()`), * execution pauses at each `await` until its promise resolves. * For example, `font = await loadFont(...)` waits for the font asset - * to load because `loadFont()` function returns a promise, and the await + * to load because the `loadFont()` function returns a promise, and the await * keyword means the program will wait for the promise to resolve. * This ensures that all assets are fully loaded before the sketch continues. - * + * * loading assets. * * Note: `setup()` doesn’t have to be declared, but it’s common practice to do so. diff --git a/src/image/pixels.js b/src/image/pixels.js index ebea101273..6c2ea58115 100644 --- a/src/image/pixels.js +++ b/src/image/pixels.js @@ -933,7 +933,7 @@ function pixels(p5, fn){ */ fn.loadPixels = function(...args) { // p5._validateParameters('loadPixels', args); - this._renderer.loadPixels(); + return this._renderer.loadPixels(); }; /** diff --git a/src/webgl/3d_primitives.js b/src/webgl/3d_primitives.js index 8c29e3ea2d..386a64535d 100644 --- a/src/webgl/3d_primitives.js +++ b/src/webgl/3d_primitives.js @@ -1869,7 +1869,7 @@ function primitives3D(p5, fn){ if (typeof args[4] === 'undefined') { // Use the retained mode for drawing rectangle, // if args for rounding rectangle is not provided by user. - const perPixelLighting = this._pInst._glAttributes.perPixelLighting; + const perPixelLighting = this._pInst._glAttributes?.perPixelLighting; const detailX = args[4] || (perPixelLighting ? 1 : 24); const detailY = args[5] || (perPixelLighting ? 
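// Illustrative sketch (not part of the diff): the `return` added to
// fn.loadPixels above matters because a WebGPU read-back is asynchronous.
// Sketches that need the data immediately can await the result; under
// WebGL the same call still completes synchronously. The sketch body is
// hypothetical.
p.setup = async function () {
  await p.createCanvas(400, 400, p.WEBGPU);
  await p.loadPixels();      // resolves once the GPU-to-CPU copy finishes
  console.log(p.pixels[0]);  // safe to read after the await
};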
1 : 16); const gid = `rect|${detailX}|${detailY}`; diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index 0ebb3c0daa..0fb5504d25 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -5,11 +5,9 @@ import * as constants from '../core/constants'; import { RGB, RGBA } from '../color/creating_reading'; -import { checkWebGLCapabilities } from './p5.Texture'; -import { readPixelsWebGL, readPixelWebGL } from './utils'; +import { checkWebGLCapabilities } from './utils'; import { Camera } from './p5.Camera'; import { Texture } from './p5.Texture'; -import { Image } from '../image/p5.Image'; const constrain = (n, low, high) => Math.max(Math.min(n, high), low); @@ -52,7 +50,6 @@ class FramebufferTexture { } rawTexture() { - // TODO: handle webgpu texture handle return { texture: this.framebuffer[this.property] }; } } @@ -70,7 +67,7 @@ class Framebuffer { this.format = settings.format || constants.UNSIGNED_BYTE; this.channels = settings.channels || ( - this.renderer._pInst._glAttributes.alpha + this.renderer.defaultFramebufferAlpha() ? RGBA : RGB ); @@ -78,7 +75,7 @@ class Framebuffer { this.depthFormat = settings.depthFormat || constants.FLOAT; this.textureFiltering = settings.textureFiltering || constants.LINEAR; if (settings.antialias === undefined) { - this.antialiasSamples = this.renderer._pInst._glAttributes.antialias + this.antialiasSamples = this.renderer.defaultFramebufferAntialias() ? 2 : 0; } else if (typeof settings.antialias === 'number') { @@ -87,13 +84,11 @@ class Framebuffer { this.antialiasSamples = settings.antialias ? 2 : 0; } this.antialias = this.antialiasSamples > 0; - if (this.antialias && this.renderer.webglVersion !== constants.WEBGL2) { - console.warn('Antialiasing is unsupported in a WebGL 1 context'); + if (this.antialias && !this.renderer.supportsFramebufferAntialias()) { + console.warn('Framebuffer antialiasing is unsupported in this context'); this.antialias = false; } this.density = settings.density || this.renderer._pixelDensity; - const gl = this.renderer.GL; - this.gl = gl; if (settings.width && settings.height) { const dimensions = this.renderer._adjustDimensions(settings.width, settings.height); @@ -112,7 +107,8 @@ class Framebuffer { this.height = this.renderer.height; this._autoSized = true; } - this._checkIfFormatsAvailable(); + // Let renderer validate and adjust formats for this context + this.renderer.validateFramebufferFormats(this); if (settings.stencil && !this.useDepth) { console.warn('A stencil buffer can only be used if also using depth. Since the framebuffer has no depth buffer, the stencil buffer will be ignored.'); @@ -120,16 +116,8 @@ class Framebuffer { this.useStencil = this.useDepth && (settings.stencil === undefined ? true : settings.stencil); - this.framebuffer = gl.createFramebuffer(); - if (!this.framebuffer) { - throw new Error('Unable to create a framebuffer'); - } - if (this.antialias) { - this.aaFramebuffer = gl.createFramebuffer(); - if (!this.aaFramebuffer) { - throw new Error('Unable to create a framebuffer for antialiasing'); - } - } + // Let renderer create framebuffer resources with antialiasing support + this.renderer.createFramebufferResources(this); this._recreateTextures(); @@ -466,6 +454,10 @@ class Framebuffer { } } + _deleteTextures() { + this.renderer.deleteFramebufferTextures(this); + } + /** * Creates new textures and renderbuffers given the current size of the * framebuffer. 
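// Illustrative sketch (not part of the diff): after this refactor,
// p5.Framebuffer never touches GL directly; everything GPU-specific goes
// through the renderer. A backend therefore needs to supply roughly the
// hooks invoked above. The class below only names them, with assumed no-op
// bodies; it is not an interface enforced anywhere in this PR.
class HypotheticalFramebufferBackend {
  defaultFramebufferAlpha() { return true; }
  defaultFramebufferAntialias() { return false; }
  supportsFramebufferAntialias() { return false; }
  validateFramebufferFormats(fbo) { /* adjust fbo.format etc. in place */ }
  createFramebufferResources(fbo) { /* allocate native framebuffer handles */ }
  recreateFramebufferTextures(fbo) { /* (re)allocate color/depth textures */ }
  deleteFramebufferTextures(fbo) { /* release textures/renderbuffers */ }
  deleteFramebufferResources(fbo) { /* release framebuffer objects */ }
  getFramebufferToBind(fbo) { return null; }
  updateFramebufferTexture(fbo, property) {}
  bindFramebuffer(fbo) {}
}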
@@ -473,117 +465,10 @@ class Framebuffer { * @private */ _recreateTextures() { - const gl = this.gl; - this._updateSize(); - const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); - const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); - - const colorTexture = gl.createTexture(); - if (!colorTexture) { - throw new Error('Unable to create color texture'); - } - gl.bindTexture(gl.TEXTURE_2D, colorTexture); - const colorFormat = this._glColorFormat(); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - null - ); - this.colorTexture = colorTexture; - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.TEXTURE_2D, - colorTexture, - 0 - ); - - if (this.useDepth) { - // Create the depth texture - const depthTexture = gl.createTexture(); - if (!depthTexture) { - throw new Error('Unable to create depth texture'); - } - const depthFormat = this._glDepthFormat(); - gl.bindTexture(gl.TEXTURE_2D, depthTexture); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - depthFormat.format, - depthFormat.type, - null - ); - - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.TEXTURE_2D, - depthTexture, - 0 - ); - this.depthTexture = depthTexture; - } - - // Create separate framebuffer for antialiasing - if (this.antialias) { - this.colorRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.colorRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - - if (this.useDepth) { - const depthFormat = this._glDepthFormat(); - this.depthRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.depthRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - } - - gl.bindFramebuffer(gl.FRAMEBUFFER, this.aaFramebuffer); - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.RENDERBUFFER, - this.colorRenderbuffer - ); - if (this.useDepth) { - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.RENDERBUFFER, - this.depthRenderbuffer - ); - } - } + // Let renderer handle texture creation and framebuffer setup + this.renderer.recreateFramebufferTextures(this); if (this.useDepth) { this.depth = new FramebufferTexture(this, 'depthTexture'); @@ -612,131 +497,6 @@ class Framebuffer { } ); this.renderer.textures.set(this.color, this.colorP5Texture); - - gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); - gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. 
rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * The format and channels asked for by the user hint at what these values - * need to be, and the WebGL version affects what options are avaiable. - * This method returns the values for these three properties, given the - * framebuffer's settings. - * - * @private - */ - _glColorFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.format === constants.FLOAT) { - type = gl.FLOAT; - } else if (this.format === constants.HALF_FLOAT) { - type = this.renderer.webglVersion === constants.WEBGL2 - ? gl.HALF_FLOAT - : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; - } else { - type = gl.UNSIGNED_BYTE; - } - - if (this.channels === RGBA) { - format = gl.RGBA; - } else { - format = gl.RGB; - } - - if (this.renderer.webglVersion === constants.WEBGL2) { - // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html - const table = { - [gl.FLOAT]: { - [gl.RGBA]: gl.RGBA32F - // gl.RGB32F is not available in Firefox without an alpha channel - }, - [gl.HALF_FLOAT]: { - [gl.RGBA]: gl.RGBA16F - // gl.RGB16F is not available in Firefox without an alpha channel - }, - [gl.UNSIGNED_BYTE]: { - [gl.RGBA]: gl.RGBA8, // gl.RGBA4 - [gl.RGB]: gl.RGB8 // gl.RGB565 - } - }; - internalFormat = table[type][format]; - } else if (this.format === constants.HALF_FLOAT) { - internalFormat = gl.RGBA; - } else { - internalFormat = format; - } - - return { internalFormat, format, type }; - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * This method takes into account the settings asked for by the user and - * returns values for these three properties that can be used for the - * texture storing depth information. 
- * - * @private - */ - _glDepthFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - type = gl.UNSIGNED_INT_24_8; - } else { - type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; - } - } else { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT; - } else { - type = gl.UNSIGNED_INT; - } - } - - if (this.useStencil) { - format = gl.DEPTH_STENCIL; - } else { - format = gl.DEPTH_COMPONENT; - } - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH32F_STENCIL8; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - internalFormat = gl.DEPTH24_STENCIL8; - } else { - internalFormat = gl.DEPTH_STENCIL; - } - } else if (this.renderer.webglVersion === constants.WEBGL2) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH_COMPONENT32F; - } else { - internalFormat = gl.DEPTH_COMPONENT24; - } - } else { - internalFormat = gl.DEPTH_COMPONENT; - } - - return { internalFormat, format, type }; } /** @@ -775,17 +535,7 @@ class Framebuffer { * @private */ _handleResize() { - const oldColor = this.color; - const oldDepth = this.depth; - const oldColorRenderbuffer = this.colorRenderbuffer; - const oldDepthRenderbuffer = this.depthRenderbuffer; - - this._deleteTexture(oldColor); - if (oldDepth) this._deleteTexture(oldDepth); - const gl = this.gl; - if (oldColorRenderbuffer) gl.deleteRenderbuffer(oldColorRenderbuffer); - if (oldDepthRenderbuffer) gl.deleteRenderbuffer(oldDepthRenderbuffer); - + this._deleteTextures(); this._recreateTextures(); this.defaultCamera._resize(); } @@ -913,20 +663,6 @@ class Framebuffer { return cam; } - /** - * Given a raw texture wrapper, delete its stored texture from WebGL memory, - * and remove it from p5's list of active textures. - * - * @param {p5.FramebufferTexture} texture - * @private - */ - _deleteTexture(texture) { - const gl = this.gl; - gl.deleteTexture(texture.rawTexture().texture); - - this.renderer.textures.delete(texture); - } - /** * Deletes the framebuffer from GPU memory. * @@ -996,19 +732,11 @@ class Framebuffer { * */ remove() { - const gl = this.gl; - this._deleteTexture(this.color); - if (this.depth) this._deleteTexture(this.depth); - gl.deleteFramebuffer(this.framebuffer); - if (this.aaFramebuffer) { - gl.deleteFramebuffer(this.aaFramebuffer); - } - if (this.depthRenderbuffer) { - gl.deleteRenderbuffer(this.depthRenderbuffer); - } - if (this.colorRenderbuffer) { - gl.deleteRenderbuffer(this.colorRenderbuffer); - } + this._deleteTextures(); + + // Let renderer clean up framebuffer resources + this.renderer.deleteFramebufferResources(this); + this.renderer.framebuffers.delete(this); } @@ -1095,14 +823,7 @@ class Framebuffer { * @private */ _framebufferToBind() { - if (this.antialias) { - // If antialiasing, draw to an antialiased renderbuffer rather - // than directly to the texture. In end() we will copy from the - // renderbuffer to the texture. 
- return this.aaFramebuffer; - } else { - return this.framebuffer; - } + return this.renderer.getFramebufferToBind(this); } /** @@ -1111,45 +832,9 @@ * @property {'colorTexture'|'depthTexture'} property The property to update */ _update(property) { - if (this.dirty[property] && this.antialias) { - const gl = this.gl; - gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this.aaFramebuffer); - gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this.framebuffer); - const partsToCopy = { - colorTexture: [ - gl.COLOR_BUFFER_BIT, - // TODO: move to renderer - this.colorP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST - ], - }; - if (this.useDepth) { - partsToCopy.depthTexture = [ - gl.DEPTH_BUFFER_BIT, - // TODO: move to renderer - this.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST - ]; - } - const [flag, filter] = partsToCopy[property]; - gl.blitFramebuffer( - 0, - 0, - this.width * this.density, - this.height * this.density, - 0, - 0, - this.width * this.density, - this.height * this.density, - flag, - filter - ); + if (this.dirty[property]) { + this.renderer.updateFramebufferTexture(this, property); this.dirty[property] = false; - - const activeFbo = this.renderer.activeFramebuffer(); - if (activeFbo) { - gl.bindFramebuffer(gl.FRAMEBUFFER, activeFbo._framebufferToBind()); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } } } @@ -1159,8 +844,7 @@ * @private */ _beforeBegin() { - const gl = this.gl; - gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebufferToBind()); + this.renderer.bindFramebuffer(this); this.renderer.viewport( this.width * this.density, this.height * this.density @@ -1236,7 +920,7 @@ if (this.prevFramebuffer) { this.prevFramebuffer._beforeBegin(); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + this.renderer.bindFramebuffer(null); this.renderer.viewport( this.renderer._origViewport.width, this.renderer._origViewport.height @@ -1355,25 +1039,19 @@ */ loadPixels() { this._update('colorTexture'); - const gl = this.gl; - const prevFramebuffer = this.renderer.activeFramebuffer(); - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - const colorFormat = this._glColorFormat(); - this.pixels = readPixelsWebGL( - this.pixels, - gl, - this.framebuffer, - 0, - 0, - this.width * this.density, - this.height * this.density, - colorFormat.format, - colorFormat.type - ); - if (prevFramebuffer) { - gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer._framebufferToBind()); + const result = this.renderer.readFramebufferPixels(this); + + // Check if renderer returned a Promise (WebGPU) or data directly (WebGL) + if (result && typeof result.then === 'function') { + // WebGPU async case - return Promise + return result.then(pixels => { + this.pixels = pixels; + return pixels; + }); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + // WebGL sync case - assign directly + this.pixels = result; + return result; } } @@ -1415,7 +1093,7 @@ get(x, y, w, h) { this._update('colorTexture'); // p5._validateParameters('p5.Framebuffer.get', arguments); - const colorFormat = this._glColorFormat(); + if (x === undefined && y === undefined) { x = 0; y = 0; @@ -1430,14 +1108,7 @@ y = constrain(y, 0, this.height - 1); } - return readPixelWebGL( - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - colorFormat.format, - colorFormat.type - ); + return this.renderer.readFramebufferPixel(this, x * this.density, y * this.density); } x = 
constrain(x, 0, this.width - 1); @@ -1445,60 +1116,7 @@ class Framebuffer { w = constrain(w, 1, this.width - x); h = constrain(h, 1, this.height - y); - const rawData = readPixelsWebGL( - undefined, - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - w * this.density, - h * this.density, - colorFormat.format, - colorFormat.type - ); - // Framebuffer data might be either a Uint8Array or Float32Array - // depending on its format, and it may or may not have an alpha channel. - // To turn it into an image, we have to normalize the data into a - // Uint8ClampedArray with alpha. - const fullData = new Uint8ClampedArray( - w * h * this.density * this.density * 4 - ); - - // Default channels that aren't in the framebuffer (e.g. alpha, if the - // framebuffer is in RGB mode instead of RGBA) to 255 - fullData.fill(255); - - const channels = colorFormat.type === this.gl.RGB ? 3 : 4; - for (let y = 0; y < h * this.density; y++) { - for (let x = 0; x < w * this.density; x++) { - for (let channel = 0; channel < 4; channel++) { - const idx = (y * w * this.density + x) * 4 + channel; - if (channel < channels) { - // Find the index of this pixel in `rawData`, which might have a - // different number of channels - const rawDataIdx = channels === 4 - ? idx - : (y * w * this.density + x) * channels + channel; - fullData[idx] = rawData[rawDataIdx]; - } - } - } - } - - // Create an image from the data - const region = new Image(w * this.density, h * this.density); - region.imageData = region.canvas.getContext('2d').createImageData( - region.width, - region.height - ); - region.imageData.data.set(fullData); - region.pixels = region.imageData.data; - region.updatePixels(); - if (this.density !== 1) { - // TODO: support get() at a pixel density > 1 - region.resize(w, h); - } - return region; + return this.renderer.readFramebufferRegion(this, x, y, w, h); } /** @@ -1550,85 +1168,8 @@ class Framebuffer { * */ updatePixels() { - const gl = this.gl; - this.colorP5Texture.bindTexture(); - const colorFormat = this._glColorFormat(); - - const channels = colorFormat.format === gl.RGBA ? 4 : 3; - const len = - this.width * this.height * this.density * this.density * channels; - const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE - ? Uint8Array - : Float32Array; - if ( - !(this.pixels instanceof TypedArrayClass) || this.pixels.length !== len - ) { - throw new Error( - 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' - ); - } - - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - this.pixels - ); - this.colorP5Texture.unbindTexture(); - this.dirty.colorTexture = false; - - const prevFramebuffer = this.renderer.activeFramebuffer(); - if (this.antialias) { - // We need to make sure the antialiased framebuffer also has the updated - // pixels so that if more is drawn to it, it goes on top of the updated - // pixels instead of replacing them. - // We can't blit the framebuffer to the multisampled antialias - // framebuffer to leave both in the same state, so instead we have - // to use image() to put the framebuffer texture onto the antialiased - // framebuffer. 
- this.begin(); - this.renderer.push(); - // this.renderer.imageMode(constants.CENTER); - this.renderer.states.setValue('imageMode', constants.CORNER); - this.renderer.setCamera(this.filterCamera); - this.renderer.resetMatrix(); - this.renderer.states.setValue('strokeColor', null); - this.renderer.clear(); - this.renderer._drawingFilter = true; - this.renderer.image( - this, - 0, 0, - this.width, this.height, - -this.renderer.width / 2, -this.renderer.height / 2, - this.renderer.width, this.renderer.height - ); - this.renderer._drawingFilter = false; - this.renderer.pop(); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - this.end(); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - if (prevFramebuffer) { - gl.bindFramebuffer( - gl.FRAMEBUFFER, - prevFramebuffer._framebufferToBind() - ); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - } + // Let renderer handle the pixel update process + this.renderer.updateFramebufferPixels(this); } } diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 9025c0d31a..c6fbfa45a6 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -6,14 +6,17 @@ import { readPixelsWebGL, readPixelWebGL, setWebGLTextureParams, - setWebGLUniformValue + setWebGLUniformValue, + checkWebGLCapabilities } from './utils'; import { Renderer3D, getStrokeDefs } from "../core/p5.Renderer3D"; import { Shader } from "./p5.Shader"; import { Texture, MipmapTexture } from "./p5.Texture"; import { Framebuffer } from "./p5.Framebuffer"; import { Graphics } from "../core/p5.Graphics"; +import { RGB, RGBA } from '../color/creating_reading'; import { Element } from "../dom/p5.Element"; +import { Image } from '../image/p5.Image'; import filterBaseVert from "./shaders/filters/base.vert"; import lightingShader from "./shaders/lighting.glsl"; @@ -1299,6 +1302,11 @@ class RendererGL extends Renderer3D { return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; } + createFramebufferTextureHandle(framebufferTexture) { + // For WebGL, framebuffer texture handles are designed to be null + return null; + } + uploadTextureFromSource({ texture, glFormat, glDataType }, source) { const gl = this.GL; gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); @@ -1386,6 +1394,609 @@ class RendererGL extends Renderer3D { populateHooks(shader, src, shaderType) { return populateGLSLHooks(shader, src, shaderType); } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + defaultFramebufferAlpha() { + return this._pInst._glAttributes.alpha; + } + + defaultFramebufferAntialias() { + return this.supportsFramebufferAntialias() + ? 
this._pInst._glAttributes.antialias + : false; + } + + supportsFramebufferAntialias() { + return this.webglVersion === constants.WEBGL2; + } + + createFramebufferResources(framebuffer) { + const gl = this.GL; + + framebuffer.framebuffer = gl.createFramebuffer(); + if (!framebuffer.framebuffer) { + throw new Error('Unable to create a framebuffer'); + } + + if (framebuffer.antialias) { + framebuffer.aaFramebuffer = gl.createFramebuffer(); + if (!framebuffer.aaFramebuffer) { + throw new Error('Unable to create a framebuffer for antialiasing'); + } + } + } + + validateFramebufferFormats(framebuffer) { + const gl = this.GL; + + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + !gl.getExtension('WEBGL_depth_texture') + ) { + console.warn( + 'Unable to create depth textures in this environment. Falling back ' + + 'to a framebuffer without depth.' + ); + framebuffer.useDepth = false; + } + + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'FLOAT depth format is unavailable in WebGL 1. ' + + 'Defaulting to UNSIGNED_INT.' + ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } + + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' + ); + framebuffer.depthFormat = constants.FLOAT; + } + + const support = checkWebGLCapabilities(this); + if (!support.float && framebuffer.format === constants.FLOAT) { + console.warn( + 'This environment does not support FLOAT textures. ' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if ( + framebuffer.useDepth && + !support.float && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'This environment does not support FLOAT depth textures. ' + + 'Falling back to UNSIGNED_INT.' + ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } + if (!support.halfFloat && framebuffer.format === constants.HALF_FLOAT) { + console.warn( + 'This environment does not support HALF_FLOAT textures. ' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + + if ( + framebuffer.channels === RGB && + [constants.FLOAT, constants.HALF_FLOAT].includes(framebuffer.format) + ) { + console.warn( + 'FLOAT and HALF_FLOAT formats do not work cross-platform with only ' + + 'RGB channels. Falling back to RGBA.' 
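// Illustrative sketch (not part of the diff): what the validation above
// means for a sketch author, with hypothetical settings. None of these
// combinations throw; each degrades to a supported configuration with a
// console warning instead.
const fbo = p.createFramebuffer({
  format: p.FLOAT,       // falls back to UNSIGNED_BYTE where float textures are unsupported
  depthFormat: p.FLOAT,  // falls back to UNSIGNED_INT on WebGL 1
  channels: p.RGB        // promoted to RGBA when paired with FLOAT/HALF_FLOAT
});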
+ ); + framebuffer.channels = RGBA; + } + } + + recreateFramebufferTextures(framebuffer) { + const gl = this.GL; + + const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); + const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); + + const colorTexture = gl.createTexture(); + if (!colorTexture) { + throw new Error('Unable to create color texture'); + } + gl.bindTexture(gl.TEXTURE_2D, colorTexture); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + null + ); + framebuffer.colorTexture = colorTexture; + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.TEXTURE_2D, + colorTexture, + 0 + ); + + if (framebuffer.useDepth) { + // Create the depth texture + const depthTexture = gl.createTexture(); + if (!depthTexture) { + throw new Error('Unable to create depth texture'); + } + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + gl.bindTexture(gl.TEXTURE_2D, depthTexture); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + depthFormat.format, + depthFormat.type, + null + ); + + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + framebuffer.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.TEXTURE_2D, + depthTexture, + 0 + ); + framebuffer.depthTexture = depthTexture; + } + + // Create separate framebuffer for antialiasing + if (framebuffer.antialias) { + framebuffer.colorRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.colorRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + + if (framebuffer.useDepth) { + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + framebuffer.depthRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.depthRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + } + + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.RENDERBUFFER, + framebuffer.colorRenderbuffer + ); + if (framebuffer.useDepth) { + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + framebuffer.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.RENDERBUFFER, + framebuffer.depthRenderbuffer + ); + } + } + + gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); + } + + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. 
rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * The format and channels asked for by the user hint at what these values + * need to be, and the WebGL version affects what options are available. + * This method returns the values for these three properties, given the + * framebuffer's settings. + * + * @private + */ + _getFramebufferColorFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.format === constants.FLOAT) { + type = gl.FLOAT; + } else if (framebuffer.format === constants.HALF_FLOAT) { + type = this.webglVersion === constants.WEBGL2 + ? gl.HALF_FLOAT + : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; + } else { + type = gl.UNSIGNED_BYTE; + } + + if (framebuffer.channels === RGBA) { + format = gl.RGBA; + } else { + format = gl.RGB; + } + + if (this.webglVersion === constants.WEBGL2) { + // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html + const table = { + [gl.FLOAT]: { + [gl.RGBA]: gl.RGBA32F + // gl.RGB32F is not available in Firefox without an alpha channel + }, + [gl.HALF_FLOAT]: { + [gl.RGBA]: gl.RGBA16F + // gl.RGB16F is not available in Firefox without an alpha channel + }, + [gl.UNSIGNED_BYTE]: { + [gl.RGBA]: gl.RGBA8, // gl.RGBA4 + [gl.RGB]: gl.RGB8 // gl.RGB565 + } + }; + internalFormat = table[type][format]; + } else if (framebuffer.format === constants.HALF_FLOAT) { + internalFormat = gl.RGBA; + } else { + internalFormat = format; + } + + return { internalFormat, format, type }; + } + + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * This method takes into account the settings asked for by the user and + * returns values for these three properties that can be used for the + * texture storing depth information. 
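// Illustrative sketch (not part of the diff): one concrete row of the
// (type, format, internalFormat) mapping described above, assuming a WebGL2
// context and a FLOAT/RGBA framebuffer; `gl`, `width`, and `height` are
// assumed to be in scope.
const triple = { type: gl.FLOAT, format: gl.RGBA, internalFormat: gl.RGBA32F };
gl.texImage2D(
  gl.TEXTURE_2D, 0, triple.internalFormat,
  width, height, 0,
  triple.format, triple.type,
  null // allocate storage without uploading any data
);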
+ * + * @private + */ + _getFramebufferDepthFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; + } else if (this.webglVersion === constants.WEBGL2) { + type = gl.UNSIGNED_INT_24_8; + } else { + type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; + } + } else { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT; + } else { + type = gl.UNSIGNED_INT; + } + } + + if (framebuffer.useStencil) { + format = gl.DEPTH_STENCIL; + } else { + format = gl.DEPTH_COMPONENT; + } + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH32F_STENCIL8; + } else if (this.webglVersion === constants.WEBGL2) { + internalFormat = gl.DEPTH24_STENCIL8; + } else { + internalFormat = gl.DEPTH_STENCIL; + } + } else if (this.webglVersion === constants.WEBGL2) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH_COMPONENT32F; + } else { + internalFormat = gl.DEPTH_COMPONENT24; + } + } else { + internalFormat = gl.DEPTH_COMPONENT; + } + + return { internalFormat, format, type }; + } + + _deleteFramebufferTexture(texture) { + const gl = this.GL; + gl.deleteTexture(texture.rawTexture().texture); + this.textures.delete(texture); + } + + deleteFramebufferTextures(framebuffer) { + this._deleteFramebufferTexture(framebuffer.color) + if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth); + const gl = this.GL; + if (framebuffer.colorRenderbuffer) gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + if (framebuffer.depthRenderbuffer) gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); + } + + deleteFramebufferResources(framebuffer) { + const gl = this.GL; + gl.deleteFramebuffer(framebuffer.framebuffer); + if (framebuffer.aaFramebuffer) { + gl.deleteFramebuffer(framebuffer.aaFramebuffer); + } + if (framebuffer.depthRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); + } + if (framebuffer.colorRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + } + } + + getFramebufferToBind(framebuffer) { + if (framebuffer.antialias) { + return framebuffer.aaFramebuffer; + } else { + return framebuffer.framebuffer; + } + } + + updateFramebufferTexture(framebuffer, property) { + if (framebuffer.antialias) { + const gl = this.GL; + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, framebuffer.framebuffer); + const partsToCopy = { + colorTexture: [ + gl.COLOR_BUFFER_BIT, + framebuffer.colorP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ], + }; + if (framebuffer.useDepth) { + partsToCopy.depthTexture = [ + gl.DEPTH_BUFFER_BIT, + framebuffer.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ]; + } + const [flag, filter] = partsToCopy[property]; + gl.blitFramebuffer( + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + flag, + filter + ); + + const activeFbo = this.activeFramebuffer(); + this.bindFramebuffer(activeFbo); + } + } + + bindFramebuffer(framebuffer) { + const gl = this.GL; + gl.bindFramebuffer( + gl.FRAMEBUFFER, + framebuffer + ? 
this.getFramebufferToBind(framebuffer) + : null + ); + } + + readFramebufferPixels(framebuffer) { + const gl = this.GL; + const prevFramebuffer = this.activeFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + const pixels = readPixelsWebGL( + framebuffer.pixels, + gl, + framebuffer.framebuffer, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + this.bindFramebuffer(prevFramebuffer); + return pixels; + } + + readFramebufferPixel(framebuffer, x, y) { + const colorFormat = this._getFramebufferColorFormat(framebuffer); + return readPixelWebGL( + this.GL, + framebuffer.framebuffer, + x, + y, + colorFormat.format, + colorFormat.type + ); + } + + readFramebufferRegion(framebuffer, x, y, w, h) { + const gl = this.GL; + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const rawData = readPixelsWebGL( + undefined, + gl, + framebuffer.framebuffer, + x * framebuffer.density, + y * framebuffer.density, + w * framebuffer.density, + h * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + + // Framebuffer data might be either a Uint8Array or Float32Array + // depending on its format, and it may or may not have an alpha channel. + // To turn it into an image, we have to normalize the data into a + // Uint8ClampedArray with alpha. + const fullData = new Uint8ClampedArray( + w * h * framebuffer.density * framebuffer.density * 4 + ); + // Default channels that aren't in the framebuffer (e.g. alpha, if the + // framebuffer is in RGB mode instead of RGBA) to 255 + fullData.fill(255); + + const channels = colorFormat.format === gl.RGB ? 3 : 4; + for (let yPos = 0; yPos < h * framebuffer.density; yPos++) { + for (let xPos = 0; xPos < w * framebuffer.density; xPos++) { + for (let channel = 0; channel < 4; channel++) { + const idx = (yPos * w * framebuffer.density + xPos) * 4 + channel; + if (channel < channels) { + // Find the index of this pixel in `rawData`, which might have a + // different number of channels + const rawDataIdx = channels === 4 + ? idx + : (yPos * w * framebuffer.density + xPos) * channels + channel; + fullData[idx] = rawData[rawDataIdx]; + } + } + } + } + + // Create image from data + const region = new Image(w * framebuffer.density, h * framebuffer.density); + region.imageData = region.canvas.getContext('2d').createImageData( + region.width, + region.height + ); + region.imageData.data.set(fullData); + region.pixels = region.imageData.data; + region.updatePixels(); + if (framebuffer.density !== 1) { + region.pixelDensity(framebuffer.density); + } + return region; + } + + updateFramebufferPixels(framebuffer) { + const gl = this.GL; + framebuffer.colorP5Texture.bindTexture(); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const channels = colorFormat.format === gl.RGBA ? 4 : 3; + const len = framebuffer.width * framebuffer.height * framebuffer.density * framebuffer.density * channels; + const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; + + if (!(framebuffer.pixels instanceof TypedArrayClass) || framebuffer.pixels.length !== len) { + throw new Error( + 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' 
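// Worked example of the length check here, with assumed values: a 100x100
// framebuffer at pixel density 2 with RGBA UNSIGNED_BYTE storage requires a
// Uint8Array of exactly
//   100 * 100 * 2 * 2 * 4 = 160000
// entries; any other length or array type means loadPixels() was not called
// first, and this error is thrown.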
+ ); + } + + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + framebuffer.pixels + ); + framebuffer.colorP5Texture.unbindTexture(); + framebuffer.dirty.colorTexture = false; + + const prevFramebuffer = this.activeFramebuffer(); + if (framebuffer.antialias) { + // We need to make sure the antialiased framebuffer also has the updated + // pixels so that if more is drawn to it, it goes on top of the updated + // pixels instead of replacing them. + // We can't blit the framebuffer to the multisampled antialias + // framebuffer to leave both in the same state, so instead we have + // to use image() to put the framebuffer texture onto the antialiased + // framebuffer. + framebuffer.begin(); + this.push(); + this.states.setValue('imageMode', constants.CORNER); + this.setCamera(framebuffer.filterCamera); + this.resetMatrix(); + this.states.setValue('strokeColor', null); + this.clear(); + this._drawingFilter = true; + this.image( + framebuffer, + 0, 0, + framebuffer.width, framebuffer.height, + -this.width / 2, -this.height / 2, + this.width, this.height + ); + this._drawingFilter = false; + this.pop(); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + framebuffer.end(); + } else { + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + this.bindFramebuffer(prevFramebuffer); + } + } } function rendererGL(p5, fn) { diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index c88389bb8e..d1c45b84f1 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -128,6 +128,8 @@ class Texture { width: textureData.width, height: textureData.height, }); + } else { + this.textureHandle = this._renderer.createFramebufferTextureHandle(this.src); } this._renderer.setTextureParams(this, { @@ -382,27 +384,6 @@ function texture(p5, fn){ p5.MipmapTexture = MipmapTexture; } -export function checkWebGLCapabilities({ GL, webglVersion }) { - const gl = GL; - const supportsFloat = webglVersion === constants.WEBGL2 - ? (gl.getExtension('EXT_color_buffer_float') && - gl.getExtension('EXT_float_blend')) - : gl.getExtension('OES_texture_float'); - const supportsFloatLinear = supportsFloat && - gl.getExtension('OES_texture_float_linear'); - const supportsHalfFloat = webglVersion === constants.WEBGL2 - ? gl.getExtension('EXT_color_buffer_float') - : gl.getExtension('OES_texture_half_float'); - const supportsHalfFloatLinear = supportsHalfFloat && - gl.getExtension('OES_texture_half_float_linear'); - return { - float: supportsFloat, - floatLinear: supportsFloatLinear, - halfFloat: supportsHalfFloat, - halfFloatLinear: supportsHalfFloatLinear - }; -} - export default texture; export { Texture, MipmapTexture }; diff --git a/src/webgl/utils.js b/src/webgl/utils.js index 70766ac522..0727e91e1f 100644 --- a/src/webgl/utils.js +++ b/src/webgl/utils.js @@ -448,3 +448,24 @@ export function populateGLSLHooks(shader, src, shaderType) { return preMain + '\n' + defines + hooks + main + postMain; } + +export function checkWebGLCapabilities({ GL, webglVersion }) { + const gl = GL; + const supportsFloat = webglVersion === constants.WEBGL2 + ? 
(gl.getExtension('EXT_color_buffer_float') && + gl.getExtension('EXT_float_blend')) + : gl.getExtension('OES_texture_float'); + const supportsFloatLinear = supportsFloat && + gl.getExtension('OES_texture_float_linear'); + const supportsHalfFloat = webglVersion === constants.WEBGL2 + ? gl.getExtension('EXT_color_buffer_float') + : gl.getExtension('OES_texture_half_float'); + const supportsHalfFloatLinear = supportsHalfFloat && + gl.getExtension('OES_texture_half_float_linear'); + return { + float: supportsFloat, + floatLinear: supportsFloatLinear, + halfFloat: supportsHalfFloat, + halfFloatLinear: supportsHalfFloatLinear + }; +} diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 9ded1277d2..f85fd4607b 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -1,6 +1,8 @@ import { Renderer3D, getStrokeDefs } from '../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; import { Texture } from '../webgl/p5.Texture'; +import { Image } from '../image/p5.Image'; +import { RGB, RGBA } from '../color/creating_reading'; import * as constants from '../core/constants'; @@ -17,11 +19,21 @@ class RendererWebGPU extends Renderer3D { this.renderPass = {}; this.samplers = new Map(); + + // Single reusable staging buffer for pixel reading + this.pixelReadBuffer = null; + this.pixelReadBufferSize = 0; + + // Lazy readback texture for main canvas pixel reading + this.canvasReadbackTexture = null; } async setupContext() { this.adapter = await navigator.gpu?.requestAdapter(); - this.device = await this.adapter?.requestDevice(); + this.device = await this.adapter?.requestDevice({ + // Todo: check support + requiredFeatures: ['depth32float-stencil8'] + }); if (!this.device) { throw new Error('Your browser does not support WebGPU.'); } @@ -30,7 +42,8 @@ class RendererWebGPU extends Renderer3D { this.presentationFormat = navigator.gpu.getPreferredCanvasFormat(); this.drawingContext.configure({ device: this.device, - format: this.presentationFormat + format: this.presentationFormat, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC, }); // TODO disablable stencil @@ -52,6 +65,15 @@ class RendererWebGPU extends Renderer3D { format: this.depthFormat, usage: GPUTextureUsage.RENDER_ATTACHMENT, }); + + // Clear the main canvas after resize + this.clear(); + + // Destroy existing readback texture when size changes + if (this.canvasReadbackTexture && this.canvasReadbackTexture.destroy) { + this.canvasReadbackTexture.destroy(); + this.canvasReadbackTexture = null; + } } clear(...args) { @@ -61,16 +83,28 @@ class RendererWebGPU extends Renderer3D { const _a = args[3] || 0; const commandEncoder = this.device.createCommandEncoder(); - const textureView = this.drawingContext.getCurrentTexture().createView(); + + // Use framebuffer texture if active, otherwise use canvas texture + const activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); const colorAttachment = { - view: textureView, + view: colorTexture.createView(), clearValue: { r: _r * _a, g: _g * _a, b: _b * _a, a: _a }, loadOp: 'clear', storeOp: 'store', + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? 
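// Illustrative sketch (not part of the diff): the attachment shape this
// ternary produces for an antialiased framebuffer, with hypothetical
// handles. Drawing targets the multisampled texture, and WebGPU resolves
// the samples into the single-sample texture named by resolveTarget, which
// is the one that can later be bound for sampling.
const attachment = {
  view: aaColorTexture.createView(),        // sampleCount > 1
  resolveTarget: colorTexture.createView(), // sampleCount === 1
  loadOp: 'clear',
  storeOp: 'store',
  clearValue: { r: 0, g: 0, b: 0, a: 0 },
};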
+ activeFramebuffer.colorTexture.createView() : undefined, }; - const depthTextureView = this.depthTexture?.createView(); + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); const depthAttachment = depthTextureView ? { view: depthTextureView, @@ -187,12 +221,34 @@ class RendererWebGPU extends Renderer3D { freeDefs(this.renderer.buffers.user); } + _getValidSampleCount(requestedCount) { + // WebGPU supports sample counts of 1, 4 (and sometimes 8) + if (requestedCount <= 1) return 1; + if (requestedCount <= 4) return 4; + return 4; // Cap at 4 for broader compatibility + } + _shaderOptions({ mode }) { + const activeFramebuffer = this.activeFramebuffer(); + const format = activeFramebuffer ? + this._getWebGPUColorFormat(activeFramebuffer) : + this.presentationFormat; + + const requestedSampleCount = activeFramebuffer ? + (activeFramebuffer.antialias ? activeFramebuffer.antialiasSamples : 1) : + (this.antialias || 1); + const sampleCount = this._getValidSampleCount(requestedSampleCount); + + const depthFormat = activeFramebuffer && activeFramebuffer.useDepth ? + this._getWebGPUDepthFormat(activeFramebuffer) : + this.depthFormat; + return { topology: mode === constants.TRIANGLE_STRIP ? 'triangle-strip' : 'triangle-list', blendMode: this.states.curBlendMode, - sampleCount: (this.activeFramebuffer() || this).antialias || 1, // TODO - format: this.activeFramebuffer()?.format || this.presentationFormat, // TODO + sampleCount, + format, + depthFormat, } } @@ -203,8 +259,8 @@ class RendererWebGPU extends Renderer3D { shader.fragModule = device.createShaderModule({ code: shader.fragSrc() }); shader._pipelineCache = new Map(); - shader.getPipeline = ({ topology, blendMode, sampleCount, format }) => { - const key = `${topology}_${blendMode}_${sampleCount}_${format}`; + shader.getPipeline = ({ topology, blendMode, sampleCount, format, depthFormat }) => { + const key = `${topology}_${blendMode}_${sampleCount}_${format}_${depthFormat}`; if (!shader._pipelineCache.has(key)) { const pipeline = device.createRenderPipeline({ layout: shader._pipelineLayout, @@ -224,7 +280,7 @@ class RendererWebGPU extends Renderer3D { primitive: { topology }, multisample: { count: sampleCount }, depthStencil: { - format: this.depthFormat, + format: depthFormat, depthWriteEnabled: true, depthCompare: 'less', stencilFront: { @@ -525,9 +581,21 @@ class RendererWebGPU extends Renderer3D { _useShader(shader, options) {} _updateViewport() { + this._origViewport = { + width: this.width, + height: this.height, + }; this._viewport = [0, 0, this.width, this.height]; } + _createPixelsArray() { + this.pixels = new Uint8Array( + this.width * this.pixelDensity() * this.height * this.pixelDensity() * 4 + ); + } + + viewport() {} + zClipRange() { return [0, 1]; } @@ -567,14 +635,27 @@ class RendererWebGPU extends Renderer3D { if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); - const currentTexture = this.drawingContext.getCurrentTexture(); + + // Use framebuffer texture if active, otherwise use canvas texture + const activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? 
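// Worked example (not part of the diff) of the extended pipeline cache key
// built in getPipeline above, with hypothetical option values:
//   'triangle-list_source-over_4_rgba16float_depth32float-stencil8'
// Including depthFormat in the key keeps a framebuffer with a float depth
// attachment from reusing a pipeline that was compiled against
// 'depth24plus-stencil8'.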
+ (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); + const colorAttachment = { - view: currentTexture.createView(), + view: colorTexture.createView(), loadOp: "load", storeOp: "store", + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + activeFramebuffer.colorTexture.createView() : undefined, }; - const depthTextureView = this.depthTexture?.createView(); + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); const renderPassDescriptor = { colorAttachments: [colorAttachment], depthStencilAttachment: depthTextureView @@ -1120,6 +1201,650 @@ class RendererWebGPU extends Renderer3D { return preMain + '\n' + defines + hooks + main + postMain; } + + ////////////////////////////////////////////// + // Buffer management for pixel reading + ////////////////////////////////////////////// + + _ensurePixelReadBuffer(requiredSize) { + // Create or resize staging buffer if needed + if (!this.pixelReadBuffer || this.pixelReadBufferSize < requiredSize) { + // Clean up old buffer + if (this.pixelReadBuffer) { + this.pixelReadBuffer.destroy(); + } + + // Create new buffer with padding to avoid frequent recreations + // Scale by 2 to ensure integer size and reasonable headroom + const bufferSize = Math.max(requiredSize, this.pixelReadBufferSize * 2); + this.pixelReadBuffer = this.device.createBuffer({ + size: bufferSize, + usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, + }); + this.pixelReadBufferSize = bufferSize; + } + return this.pixelReadBuffer; + } + + _alignBytesPerRow(bytesPerRow) { + // WebGPU requires bytesPerRow to be a multiple of 256 bytes for texture-to-buffer copies + return Math.ceil(bytesPerRow / 256) * 256; + } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + defaultFramebufferAlpha() { + return true + } + + defaultFramebufferAntialias() { + return true; + } + + supportsFramebufferAntialias() { + return true; + } + + createFramebufferResources(framebuffer) { + } + + validateFramebufferFormats(framebuffer) { + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' 
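// Worked example (not part of the diff) of the growth strategy in
// _ensurePixelReadBuffer above, with assumed sizes: a first read needing
// 1000 bytes allocates max(1000, 0 * 2) = 1000; a later read needing 1500
// bytes reallocates to max(1500, 1000 * 2) = 2000, so the buffer roughly
// doubles rather than being recreated for every slightly larger read.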
+ ); + framebuffer.depthFormat = constants.FLOAT; + } + } + + recreateFramebufferTextures(framebuffer) { + // Clean up existing textures + if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { + framebuffer.colorTexture.destroy(); + } + if (framebuffer.aaColorTexture && framebuffer.aaColorTexture.destroy) { + framebuffer.aaColorTexture.destroy(); + } + if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { + framebuffer.depthTexture.destroy(); + } + if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { + framebuffer.aaDepthTexture.destroy(); + } + + const baseDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUColorFormat(framebuffer), + }; + + // Create non-multisampled texture for texture binding (always needed) + const colorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC, + sampleCount: 1, + }; + framebuffer.colorTexture = this.device.createTexture(colorTextureDescriptor); + + // Create multisampled texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaColorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaColorTexture = this.device.createTexture(aaColorTextureDescriptor); + } + + if (framebuffer.useDepth) { + const depthBaseDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUDepthFormat(framebuffer), + }; + + // Create non-multisampled depth texture for texture binding (always needed) + const depthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING, + sampleCount: 1, + }; + framebuffer.depthTexture = this.device.createTexture(depthTextureDescriptor); + + // Create multisampled depth texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaDepthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaDepthTexture = this.device.createTexture(aaDepthTextureDescriptor); + } + } + + // Clear the framebuffer textures after creation + this._clearFramebufferTextures(framebuffer); + } + + _clearFramebufferTextures(framebuffer) { + const commandEncoder = this.device.createCommandEncoder(); + + // Clear the color texture (and multisampled texture if it exists) + const colorTexture = framebuffer.aaColorTexture || framebuffer.colorTexture; + const colorAttachment = { + view: colorTexture.createView(), + loadOp: "clear", + storeOp: "store", + clearValue: { r: 0, g: 0, b: 0, a: 0 }, + resolveTarget: framebuffer.aaColorTexture ? + framebuffer.colorTexture.createView() : undefined, + }; + + // Clear the depth texture if it exists + const depthTexture = framebuffer.aaDepthTexture || framebuffer.depthTexture; + const depthStencilAttachment = depthTexture ? 
+    const depthStencilAttachment = depthTexture ? {
+      view: depthTexture.createView(),
+      depthLoadOp: "clear",
+      depthStoreOp: "store",
+      depthClearValue: 1.0,
+      // Stencil ops are only valid when the depth format has a stencil aspect
+      ...(this._getWebGPUDepthFormat(framebuffer).includes('stencil') ? {
+        stencilLoadOp: "clear",
+        stencilStoreOp: "store",
+      } : {}),
+      depthReadOnly: false,
+      stencilReadOnly: false,
+    } : undefined;
+
+    const renderPassDescriptor = {
+      colorAttachments: [colorAttachment],
+      depthStencilAttachment: depthStencilAttachment,
+    };
+
+    const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
+    passEncoder.end();
+
+    this.queue.submit([commandEncoder.finish()]);
+  }
+
+  _getFramebufferColorTextureView(framebuffer) {
+    if (framebuffer.colorTexture) {
+      return framebuffer.colorTexture.createView();
+    }
+    return null;
+  }
+
+  createFramebufferTextureHandle(framebufferTexture) {
+    const src = framebufferTexture;
+    const renderer = this;
+    return {
+      get view() {
+        return renderer._getFramebufferColorTextureView(src.framebuffer);
+      },
+      get gpuTexture() {
+        return src.framebuffer.colorTexture;
+      }
+    };
+  }
+
+  _getWebGPUColorFormat(framebuffer) {
+    // WebGPU has no three-channel color formats, so RGB framebuffers fall
+    // back to the equivalent RGBA format regardless of channels
+    if (framebuffer.format === constants.FLOAT) {
+      return 'rgba32float';
+    } else if (framebuffer.format === constants.HALF_FLOAT) {
+      return 'rgba16float';
+    } else {
+      return 'rgba8unorm';
+    }
+  }
+
+  _getWebGPUDepthFormat(framebuffer) {
+    if (framebuffer.useStencil) {
+      return framebuffer.depthFormat === constants.FLOAT ? 'depth32float-stencil8' : 'depth24plus-stencil8';
+    } else {
+      return framebuffer.depthFormat === constants.FLOAT ? 'depth32float' : 'depth24plus';
+    }
+  }
+
+  _deleteFramebufferTexture(texture) {
+    const handle = texture.rawTexture();
+    if (handle.texture && handle.texture.destroy) {
+      handle.texture.destroy();
+    }
+    this.textures.delete(texture);
+  }
+
+  deleteFramebufferTextures(framebuffer) {
+    this._deleteFramebufferTexture(framebuffer.color);
+    if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth);
+  }
+
+  deleteFramebufferResources(framebuffer) {
+    if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) {
+      framebuffer.colorTexture.destroy();
+    }
+    if (framebuffer.aaColorTexture && framebuffer.aaColorTexture.destroy) {
+      framebuffer.aaColorTexture.destroy();
+    }
+    if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) {
+      framebuffer.depthTexture.destroy();
+    }
+    if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) {
+      framebuffer.aaDepthTexture.destroy();
+    }
+  }
+
+  getFramebufferToBind(framebuffer) {
+    // No-op: WebGPU render passes reference a framebuffer's textures
+    // directly, so there is no GL-style framebuffer object to bind
+  }
+
+  updateFramebufferTexture(framebuffer, property) {
+    // No-op for WebGPU: multisampled color is resolved to the
+    // non-multisampled texture within the render pass itself
+  }
+
+  bindFramebuffer(framebuffer) {
+    // No-op: see getFramebufferToBind above
+  }
+
+  async readFramebufferPixels(framebuffer) {
+    const width = framebuffer.width * framebuffer.density;
+    const height = framebuffer.height * framebuffer.density;
+    const bytesPerPixel = 4;
+    const unalignedBytesPerRow = width * bytesPerPixel;
+    const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow);
+    const bufferSize = alignedBytesPerRow * height;
+
+    // A dedicated staging buffer is created for this read rather than
+    // reusing the shared pixel read buffer; it is destroyed once the
+    // pixels have been copied out below
+    const stagingBuffer = this.device.createBuffer({
+      size: bufferSize,
+      usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
+    });
+
+    const commandEncoder = this.device.createCommandEncoder();
+    commandEncoder.copyTextureToBuffer(
+      {
+        texture: framebuffer.colorTexture,
+        origin: { x: 0, y: 0, z: 0 },
+        mipLevel: 0,
+        aspect: 'all'
+      },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow, rowsPerImage: height },
+      { width, height, depthOrArrayLayers: 1 }
+    );
+
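+    // Staging-buffer layout example for a 50px-wide read at density 1:
+    // each row occupies 256 bytes (200 bytes of RGBA pixel data followed
+    // by 56 bytes of padding); the padding is stripped out after mapping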
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    // mapAsync resolves only after the submitted copy has completed, so no
+    // explicit onSubmittedWorkDone() wait is needed here
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
+    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);
+
+    // If alignment was needed, extract the actual pixel data
+    if (alignedBytesPerRow === unalignedBytesPerRow) {
+      const result = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel));
+      stagingBuffer.unmap();
+      stagingBuffer.destroy();
+      return result;
+    } else {
+      // Need to extract pixel data from aligned buffer
+      const result = new Uint8Array(width * height * bytesPerPixel);
+      const mappedData = new Uint8Array(mappedRange);
+      for (let y = 0; y < height; y++) {
+        const srcOffset = y * alignedBytesPerRow;
+        const dstOffset = y * unalignedBytesPerRow;
+        result.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset);
+      }
+      stagingBuffer.unmap();
+      stagingBuffer.destroy();
+      return result;
+    }
+  }
+
+  async readFramebufferPixel(framebuffer, x, y) {
+    // Ensure all pending GPU work is complete before reading pixels
+    await this.queue.onSubmittedWorkDone();
+
+    const bytesPerPixel = 4;
+    const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel);
+    const bufferSize = alignedBytesPerRow;
+
+    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);
+
+    const commandEncoder = this.device.createCommandEncoder();
+    commandEncoder.copyTextureToBuffer(
+      {
+        texture: framebuffer.colorTexture,
+        origin: { x, y, z: 0 }
+      },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
+      { width: 1, height: 1, depthOrArrayLayers: 1 }
+    );
+
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
+    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);
+    const pixelData = new Uint8Array(mappedRange);
+    const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]];
+
+    stagingBuffer.unmap();
+    return result;
+  }
+
+  async readFramebufferRegion(framebuffer, x, y, w, h) {
+    // Ensure all pending GPU work is complete before reading pixels
+    await this.queue.onSubmittedWorkDone();
+
+    const width = w * framebuffer.density;
+    const height = h * framebuffer.density;
+    const bytesPerPixel = 4;
+    const unalignedBytesPerRow = width * bytesPerPixel;
+    const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow);
+    const bufferSize = alignedBytesPerRow * height;
+
+    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);
+
+    const commandEncoder = this.device.createCommandEncoder();
+    commandEncoder.copyTextureToBuffer(
+      {
+        texture: framebuffer.colorTexture,
+        mipLevel: 0,
+        origin: { x: x * framebuffer.density, y: y * framebuffer.density, z: 0 }
+      },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
+      { width, height, depthOrArrayLayers: 1 }
+    );
+
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
+    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);
+
+    let pixelData;
+    if (alignedBytesPerRow === unalignedBytesPerRow) {
+      pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel));
+    } else {
+      // Need to extract pixel data from aligned buffer
+      pixelData = new Uint8Array(width * height * bytesPerPixel);
+      const mappedData = new Uint8Array(mappedRange);
+      for (let y = 0; y < height; y++) {
+        const srcOffset = y * alignedBytesPerRow;
+        const dstOffset = y * unalignedBytesPerRow;
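+        // Copy only this row's pixel bytes, skipping the alignment
+        // padding at the end of each source row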
+        pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset);
+      }
+    }
+
+    // WebGPU doesn't need vertical flipping unlike WebGL
+    const region = new Image(width, height);
+    region.imageData = region.canvas.getContext('2d').createImageData(width, height);
+    region.imageData.data.set(pixelData);
+    region.pixels = region.imageData.data;
+    region.updatePixels();
+
+    if (framebuffer.density !== 1) {
+      region.pixelDensity(framebuffer.density);
+    }
+
+    stagingBuffer.unmap();
+    return region;
+  }
+
+  updateFramebufferPixels(framebuffer) {
+    const width = framebuffer.width * framebuffer.density;
+    const height = framebuffer.height * framebuffer.density;
+    const bytesPerPixel = 4;
+
+    const expectedLength = width * height * bytesPerPixel;
+    if (!framebuffer.pixels || framebuffer.pixels.length !== expectedLength) {
+      throw new Error(
+        'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().'
+      );
+    }
+
+    this.device.queue.writeTexture(
+      { texture: framebuffer.colorTexture },
+      framebuffer.pixels,
+      {
+        bytesPerRow: width * bytesPerPixel,
+        rowsPerImage: height
+      },
+      { width, height, depthOrArrayLayers: 1 }
+    );
+  }
+
+  //////////////////////////////////////////////
+  // Main canvas pixel methods
+  //////////////////////////////////////////////
+
+  _ensureCanvasReadbackTexture() {
+    if (!this.canvasReadbackTexture) {
+      const width = Math.ceil(this.width * this._pixelDensity);
+      const height = Math.ceil(this.height * this._pixelDensity);
+
+      this.canvasReadbackTexture = this.device.createTexture({
+        size: { width, height, depthOrArrayLayers: 1 },
+        format: this.presentationFormat,
+        usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.COPY_SRC,
+      });
+    }
+    return this.canvasReadbackTexture;
+  }
+
+  _copyCanvasToReadbackTexture() {
+    // Get the current canvas texture BEFORE any awaiting
+    const canvasTexture = this.drawingContext.getCurrentTexture();
+
+    // Ensure readback texture exists
+    const readbackTexture = this._ensureCanvasReadbackTexture();
+
+    // Copy canvas texture to readback texture immediately
+    const copyEncoder = this.device.createCommandEncoder();
+    copyEncoder.copyTextureToTexture(
+      { texture: canvasTexture },
+      { texture: readbackTexture },
+      {
+        width: Math.ceil(this.width * this._pixelDensity),
+        height: Math.ceil(this.height * this._pixelDensity),
+        depthOrArrayLayers: 1
+      }
+    );
+    this.device.queue.submit([copyEncoder.finish()]);
+
+    return readbackTexture;
+  }
+
+  _convertBGRtoRGB(pixelData) {
+    // The canvas presentation format is typically BGRA (e.g. bgra8unorm),
+    // so swap channels 0 and 2 in place to produce RGBA data
+    for (let i = 0; i < pixelData.length; i += 4) {
+      const temp = pixelData[i];       // store channel 0 (blue in BGRA)
+      pixelData[i] = pixelData[i + 2]; // channel 0 takes the red value
+      pixelData[i + 2] = temp;         // channel 2 takes the blue value
+      // Green (i + 1) and alpha (i + 3) stay the same
+    }
+    return pixelData;
+  }
+
+  async loadPixels() {
+    const width = this.width * this._pixelDensity;
+    const height = this.height * this._pixelDensity;
+
+    // Copy canvas to readback texture
+    const readbackTexture = this._copyCanvasToReadbackTexture();
+
+    // Now we can safely await
+    await this.queue.onSubmittedWorkDone();
+
+    const bytesPerPixel = 4;
+    const unalignedBytesPerRow = width * bytesPerPixel;
+    const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow);
+    const bufferSize = alignedBytesPerRow * height;
+
+    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);
+
+    const commandEncoder = this.device.createCommandEncoder();
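+    // The readback texture uses the canvas presentation format (commonly
+    // BGRA), so the mapped bytes are converted to RGBA after reading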
+    commandEncoder.copyTextureToBuffer(
+      { texture: readbackTexture },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
+      { width, height, depthOrArrayLayers: 1 }
+    );
+
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
+    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);
+
+    if (alignedBytesPerRow === unalignedBytesPerRow) {
+      this.pixels = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel));
+    } else {
+      // Need to extract pixel data from aligned buffer
+      this.pixels = new Uint8Array(width * height * bytesPerPixel);
+      const mappedData = new Uint8Array(mappedRange);
+      for (let y = 0; y < height; y++) {
+        const srcOffset = y * alignedBytesPerRow;
+        const dstOffset = y * unalignedBytesPerRow;
+        this.pixels.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset);
+      }
+    }
+
+    // Convert BGR to RGB for main canvas
+    this._convertBGRtoRGB(this.pixels);
+
+    stagingBuffer.unmap();
+    return this.pixels;
+  }
+
+  async _getPixel(x, y) {
+    // Copy canvas to readback texture
+    const readbackTexture = this._copyCanvasToReadbackTexture();
+
+    // Now we can safely await
+    await this.queue.onSubmittedWorkDone();
+
+    const bytesPerPixel = 4;
+    const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel);
+    const bufferSize = alignedBytesPerRow;
+
+    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);
+
+    const commandEncoder = this.device.createCommandEncoder();
+    commandEncoder.copyTextureToBuffer(
+      {
+        texture: readbackTexture,
+        origin: { x, y, z: 0 }
+      },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
+      { width: 1, height: 1, depthOrArrayLayers: 1 }
+    );
+
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
+    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);
+    const pixelData = new Uint8Array(mappedRange);
+
+    // Convert BGR to RGB for main canvas - swap red and blue
+    const result = [pixelData[2], pixelData[1], pixelData[0], pixelData[3]];
+
+    stagingBuffer.unmap();
+    return result;
+  }
+
+  async get(x, y, w, h) {
+    const pd = this._pixelDensity;
+
+    if (typeof x === 'undefined' && typeof y === 'undefined') {
+      // get() - return entire canvas
+      x = y = 0;
+      w = this.width;
+      h = this.height;
+    } else {
+      x *= pd;
+      y *= pd;
+
+      if (typeof w === 'undefined' && typeof h === 'undefined') {
+        // get(x,y) - single pixel
+        if (x < 0 || y < 0 || x >= this.width * pd || y >= this.height * pd) {
+          return [0, 0, 0, 0];
+        }
+
+        return this._getPixel(x, y);
+      }
+      // get(x,y,w,h) - region
+    }
+
+    // Copy canvas to readback texture
+    const readbackTexture = this._copyCanvasToReadbackTexture();
+
+    // Now we can safely await
+    await this.queue.onSubmittedWorkDone();
+
+    // Read region and create p5.Image
+    const width = w * pd;
+    const height = h * pd;
+    const bytesPerPixel = 4;
+    const unalignedBytesPerRow = width * bytesPerPixel;
+    const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow);
+    const bufferSize = alignedBytesPerRow * height;
+
+    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);
+
+    const commandEncoder = this.device.createCommandEncoder();
+    commandEncoder.copyTextureToBuffer(
+      {
+        texture: readbackTexture,
+        origin: { x, y, z: 0 }
+      },
+      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
+      { width, height, depthOrArrayLayers: 1 }
+    );
+
+    this.device.queue.submit([commandEncoder.finish()]);
+
+    // mapAsync resolves once the copy has completed, so no additional
+    // onSubmittedWorkDone() wait is needed before mapping
+    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, 
bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + + let pixelData; + if (alignedBytesPerRow === unalignedBytesPerRow) { + pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + pixelData = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } + + // Convert BGR to RGB for main canvas + this._convertBGRtoRGB(pixelData); + + const region = new Image(width, height); + region.pixelDensity(pd); + const ctx = region.canvas.getContext('2d'); + const imageData = ctx.createImageData(width, height); + imageData.data.set(pixelData); + ctx.putImageData(imageData, 0, 0); + + stagingBuffer.unmap(); + return region; + } } function rendererWebGPU(p5, fn) { diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index efd8cc7e93..28382dda25 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -1,106 +1,314 @@ -import { vi } from 'vitest'; -import p5 from '../../../../src/app'; -import { visualSuite, visualTest } from '../visualTest'; -import rendererWebGPU from '../../../../src/webgpu/p5.RendererWebGPU'; +import { vi } from "vitest"; +import p5 from "../../../../src/app"; +import { visualSuite, visualTest } from "../visualTest"; +import rendererWebGPU from "../../../../src/webgpu/p5.RendererWebGPU"; p5.registerAddon(rendererWebGPU); -visualSuite('WebGPU', function() { - visualSuite('Shaders', function() { - visualTest('The color shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - p5.background('white'); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.fill(color); - p5.translate(15, 0); - p5.noStroke(); - p5.circle(0, 0, 20); - p5.pop(); - } - screenshot(); - }); +visualSuite("WebGPU", function () { + visualSuite("Shaders", function () { + visualTest( + "The color shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); - visualTest('The stroke shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - p5.background('white'); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.translate(15, 0); - p5.stroke(color); - p5.strokeWeight(2); - p5.circle(0, 0, 20); - p5.pop(); - } - screenshot(); - }); + visualTest( + "The stroke shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.translate(15, 0); + p5.stroke(color); + p5.strokeWeight(2); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); - visualTest('The material shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, 
p5.WEBGPU); - p5.background('white'); - p5.ambientLight(50); - p5.directionalLight(100, 100, 100, 0, 1, -1); - p5.pointLight(155, 155, 155, 0, -200, 500); - p5.specularMaterial(255); - p5.shininess(300); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.fill(color); - p5.translate(15, 0); - p5.noStroke(); - p5.sphere(10); - p5.pop(); - } - screenshot(); - }); + visualTest( + "The material shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + p5.ambientLight(50); + p5.directionalLight(100, 100, 100, 0, 1, -1); + p5.pointLight(155, 155, 155, 0, -200, 500); + p5.specularMaterial(255); + p5.shininess(300); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.sphere(10); + p5.pop(); + } + await screenshot(); + }, + ); - visualTest('Shader hooks can be used', async function(p5, screenshot) { + visualTest("Shader hooks can be used", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); const myFill = p5.baseMaterialShader().modify({ - 'Vertex getWorldInputs': `(inputs: Vertex) { + "Vertex getWorldInputs": `(inputs: Vertex) { var result = inputs; result.position.y += 10.0 * sin(inputs.position.x * 0.25); return result; }`, }); const myStroke = p5.baseStrokeShader().modify({ - 'StrokeVertex getWorldInputs': `(inputs: StrokeVertex) { + "StrokeVertex getWorldInputs": `(inputs: StrokeVertex) { var result = inputs; result.position.y += 10.0 * sin(inputs.position.x * 0.25); return result; }`, }); - p5.background('black'); + p5.background("black"); p5.shader(myFill); p5.strokeShader(myStroke); - p5.fill('red'); - p5.stroke('white'); + p5.fill("red"); + p5.stroke("white"); p5.strokeWeight(5); p5.circle(0, 0, 30); - screenshot(); + await screenshot(); }); - visualTest('Textures in the material shader work', async function(p5, screenshot) { + visualTest( + "Textures in the material shader work", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const tex = p5.createImage(50, 50); + tex.loadPixels(); + for (let x = 0; x < tex.width; x++) { + for (let y = 0; y < tex.height; y++) { + const off = (x + y * tex.width) * 4; + tex.pixels[off] = p5.round((x / tex.width) * 255); + tex.pixels[off + 1] = p5.round((y / tex.height) * 255); + tex.pixels[off + 2] = 0; + tex.pixels[off + 3] = 255; + } + } + tex.updatePixels(); + p5.texture(tex); + p5.plane(p5.width, p5.height); + + await screenshot(); + }, + ); + }); + + visualSuite("Canvas Resizing", function () { + visualTest( + "Main canvas drawing after resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Resize the canvas + p5.resizeCanvas(30, 30); + // Draw to the main canvas after resize + p5.background(100, 0, 100); + p5.fill(0, 255, 255); + p5.noStroke(); + p5.circle(0, 0, 20); + await screenshot(); + }, + ); + }); + + visualSuite("Framebuffers", function () { + visualTest( + "Basic framebuffer draw to canvas", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create a framebuffer + const fbo = p5.createFramebuffer({ width: 25, height: 25 }); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(255, 0, 0); // Red background + p5.fill(0, 255, 0); // Green circle + p5.noStroke(); + p5.circle(0, 0, 20); + }); + + // Draw the framebuffer to the main canvas + 
p5.background(0, 0, 255); // Blue background + p5.texture(fbo); + p5.noStroke(); + p5.plane(25, 25); + + await screenshot(); + }, + ); + + visualTest( + "Framebuffer with different sizes", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create two different sized framebuffers + const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); + const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); + + // Draw to first framebuffer + fbo1.draw(() => { + p5.background(255, 100, 100); + p5.fill(255, 255, 0); + p5.noStroke(); + p5.rect(-5, -5, 10, 10); + }); + + // Draw to second framebuffer + fbo2.draw(() => { + p5.background(100, 255, 100); + p5.fill(255, 0, 255); + p5.noStroke(); + p5.circle(0, 0, 10); + }); + + // Draw both to main canvas + p5.background(50); + p5.push(); + p5.translate(-12.5, -12.5); + p5.texture(fbo1); + p5.noStroke(); + p5.plane(20, 20); + p5.pop(); + + p5.push(); + p5.translate(12.5, 12.5); + p5.texture(fbo2); + p5.noStroke(); + p5.plane(15, 15); + p5.pop(); + + await screenshot(); + }, + ); + + visualTest("Auto-sized framebuffer", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - const tex = p5.createImage(50, 50); - tex.loadPixels(); - for (let x = 0; x < tex.width; x++) { - for (let y = 0; y < tex.height; y++) { - const off = (x + y * tex.width) * 4; - tex.pixels[off] = p5.round((x / tex.width) * 255); - tex.pixels[off + 1] = p5.round((y / tex.height) * 255); - tex.pixels[off + 2] = 0; - tex.pixels[off + 3] = 255; + + // Create auto-sized framebuffer (should match canvas size) + const fbo = p5.createFramebuffer(); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(0); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.stroke(255); + p5.strokeWeight(2); + p5.noFill(); + // Draw a grid pattern to verify size + for (let x = 0; x < 50; x += 10) { + p5.line(x, 0, x, 50); } - } - tex.updatePixels(); - p5.texture(tex); - p5.plane(p5.width, p5.height); + for (let y = 0; y < 50; y += 10) { + p5.line(0, y, 50, y); + } + p5.fill(255, 0, 0); + p5.noStroke(); + p5.circle(25, 25, 15); + }); + + // Draw the framebuffer to fill the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(50, 50); - screenshot(); + await screenshot(); }); + + visualTest( + "Auto-sized framebuffer after canvas resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create auto-sized framebuffer + const fbo = p5.createFramebuffer(); + + // Resize the canvas (framebuffer should auto-resize) + p5.resizeCanvas(30, 30); + + // Draw to the framebuffer after resize + fbo.draw(() => { + p5.background(100, 0, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.fill(0, 255, 255); + p5.noStroke(); + // Draw a shape that fills the new size + p5.rect(5, 5, 20, 20); + p5.fill(255, 255, 0); + p5.circle(15, 15, 10); + }); + + // Draw the framebuffer to the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(30, 30); + + await screenshot(); + }, + ); + + visualTest( + "Fixed-size framebuffer after manual resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create fixed-size framebuffer + const fbo = p5.createFramebuffer({ width: 20, height: 20 }); + + // Draw initial content + fbo.draw(() => { + p5.background(255, 200, 100); + p5.fill(0, 100, 200); + p5.noStroke(); + p5.circle(0, 0, 15); + }); + + // Manually resize the framebuffer + fbo.resize(35, 25); + + // Draw new content to the resized framebuffer + fbo.draw(() => { + 
p5.background(200, 255, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.fill(200, 0, 100); + p5.noStroke(); + // Draw content that uses the new size + p5.rect(5, 5, 25, 15); + p5.fill(0, 0, 255); + p5.circle(17.5, 12.5, 8); + }); + + // Draw the resized framebuffer to the main canvas + p5.background(50); + p5.texture(fbo); + p5.noStroke(); + p5.plane(35, 25); + + await screenshot(); + }, + ); }); }); diff --git a/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png new file mode 100644 index 0000000000..96849ce04c Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png new file mode 100644 index 0000000000..01be2eb74e Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png new file mode 100644 index 0000000000..c3171b7360 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png new file mode 100644 index 0000000000..bd7facce45 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ 
b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png new file mode 100644 index 0000000000..1fb817b6b5 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png new file mode 100644 index 0000000000..155638a0c8 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/visualTest.js b/test/unit/visual/visualTest.js index 120ce79565..7d301d142b 100644 --- a/test/unit/visual/visualTest.js +++ b/test/unit/visual/visualTest.js @@ -89,43 +89,43 @@ export function visualSuite( /** * Image Diff Algorithm for p5.js Visual Tests - * + * * This algorithm addresses the challenge of cross-platform rendering differences in p5.js visual tests. * Different operating systems and browsers render graphics with subtle variations, particularly with * anti-aliasing, text rendering, and sub-pixel positioning. This can cause false negatives in tests * when the visual differences are acceptable rendering variations rather than actual bugs. - * + * * Key components of the approach: - * + * * 1. Initial pixel-by-pixel comparison: * - Uses pixelmatch to identify differences between expected and actual images * - Sets a moderate threshold (0.5) to filter out minor color/intensity variations * - Produces a diff image with red pixels marking differences - * + * * 2. Cluster identification using BFS (Breadth-First Search): * - Groups connected difference pixels into clusters * - Uses a queue-based BFS algorithm to find all connected pixels * - Defines connectivity based on 8-way adjacency (all surrounding pixels) - * + * * 3. Cluster categorization by type: * - Analyzes each pixel's neighborhood characteristics * - Specifically identifies "line shift" clusters - differences that likely represent * the same visual elements shifted by 1px due to platform rendering differences * - Line shifts are identified when >80% of pixels in a cluster have ≤2 neighboring diff pixels - * + * * 4. 
Intelligent failure criteria: * - Filters out clusters smaller than MIN_CLUSTER_SIZE pixels (noise reduction) * - Applies different thresholds for regular differences vs. line shifts * - Considers both the total number of significant pixels and number of distinct clusters - * - * This approach balances the need to catch genuine visual bugs (like changes to shape geometry, + * + * This approach balances the need to catch genuine visual bugs (like changes to shape geometry, * colors, or positioning) while tolerating acceptable cross-platform rendering variations. - * + * * Parameters: * - MIN_CLUSTER_SIZE: Minimum size for a cluster to be considered significant (default: 4) * - MAX_TOTAL_DIFF_PIXELS: Maximum allowed non-line-shift difference pixels (default: 40) * Note: These can be adjusted for further updation - * + * * Note for contributors: When running tests locally, you may not see these differences as they * mainly appear when tests run on different operating systems or browser rendering engines. * However, the same code may produce slightly different renderings on CI environments, particularly @@ -140,7 +140,7 @@ export async function checkMatch(actual, expected, p5) { if (narrow) { scale *= 2; } - + for (const img of [actual, expected]) { img.resize( Math.ceil(img.width * scale), @@ -151,28 +151,28 @@ export async function checkMatch(actual, expected, p5) { // Ensure both images have the same dimensions const width = expected.width; const height = expected.height; - + // Create canvases with background color const actualCanvas = p5.createGraphics(width, height); const expectedCanvas = p5.createGraphics(width, height); actualCanvas.pixelDensity(1); expectedCanvas.pixelDensity(1); - + actualCanvas.background(BG); expectedCanvas.background(BG); - + actualCanvas.image(actual, 0, 0); expectedCanvas.image(expected, 0, 0); - + // Load pixel data actualCanvas.loadPixels(); expectedCanvas.loadPixels(); - + // Create diff output canvas const diffCanvas = p5.createGraphics(width, height); diffCanvas.pixelDensity(1); diffCanvas.loadPixels(); - + // Run pixelmatch const diffCount = pixelmatch( actualCanvas.pixels, @@ -180,13 +180,13 @@ export async function checkMatch(actual, expected, p5) { diffCanvas.pixels, width, height, - { + { threshold: 0.5, includeAA: false, alpha: 0.1 } ); - + // If no differences, return early if (diffCount === 0) { actualCanvas.remove(); @@ -194,19 +194,19 @@ export async function checkMatch(actual, expected, p5) { diffCanvas.updatePixels(); return { ok: true, diff: diffCanvas }; } - + // Post-process to identify and filter out isolated differences const visited = new Set(); const clusterSizes = []; - + for (let y = 0; y < height; y++) { for (let x = 0; x < width; x++) { const pos = (y * width + x) * 4; - + // If this is a diff pixel (red in pixelmatch output) and not yet visited if ( - diffCanvas.pixels[pos] === 255 && - diffCanvas.pixels[pos + 1] === 0 && + diffCanvas.pixels[pos] === 255 && + diffCanvas.pixels[pos + 1] === 0 && diffCanvas.pixels[pos + 2] === 0 && !visited.has(pos) ) { @@ -216,37 +216,37 @@ export async function checkMatch(actual, expected, p5) { } } } - + // Define significance thresholds const MIN_CLUSTER_SIZE = 4; // Minimum pixels in a significant cluster const MAX_TOTAL_DIFF_PIXELS = 40; // Maximum total different pixels // Determine if the differences are significant const nonLineShiftClusters = clusterSizes.filter(c => !c.isLineShift && c.size >= MIN_CLUSTER_SIZE); - + // Calculate significant differences excluding line shifts const 
significantDiffPixels = nonLineShiftClusters.reduce((sum, c) => sum + c.size, 0); // Update the diff canvas diffCanvas.updatePixels(); - + // Clean up canvases actualCanvas.remove(); expectedCanvas.remove(); - + // Determine test result const ok = ( - diffCount === 0 || + diffCount === 0 || ( - significantDiffPixels === 0 || + significantDiffPixels === 0 || ( - (significantDiffPixels <= MAX_TOTAL_DIFF_PIXELS) && + (significantDiffPixels <= MAX_TOTAL_DIFF_PIXELS) && (nonLineShiftClusters.length <= 2) // Not too many significant clusters ) ) ); - return { + return { ok, diff: diffCanvas, details: { @@ -264,31 +264,31 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) const queue = [{x: startX, y: startY}]; let size = 0; const clusterPixels = []; - + while (queue.length > 0) { const {x, y} = queue.shift(); const pos = (y * width + x) * 4; - + // Skip if already visited if (visited.has(pos)) continue; - + // Skip if not a diff pixel if (pixels[pos] !== 255 || pixels[pos + 1] !== 0 || pixels[pos + 2] !== 0) continue; - + // Mark as visited visited.add(pos); size++; clusterPixels.push({x, y}); - + // Add neighbors to queue for (let dy = -radius; dy <= radius; dy++) { for (let dx = -radius; dx <= radius; dx++) { const nx = x + dx; const ny = y + dy; - + // Skip if out of bounds if (nx < 0 || nx >= width || ny < 0 || ny >= height) continue; - + // Skip if already visited const npos = (ny * width + nx) * 4; if (!visited.has(npos)) { @@ -302,20 +302,20 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) if (clusterPixels.length > 0) { // Count pixels with limited neighbors (line-like characteristic) let linelikePixels = 0; - + for (const {x, y} of clusterPixels) { // Count neighbors let neighbors = 0; for (let dy = -1; dy <= 1; dy++) { for (let dx = -1; dx <= 1; dx++) { if (dx === 0 && dy === 0) continue; // Skip self - + const nx = x + dx; const ny = y + dy; - + // Skip if out of bounds if (nx < 0 || nx >= width || ny < 0 || ny >= height) continue; - + const npos = (ny * width + nx) * 4; // Check if neighbor is a diff pixel if (pixels[npos] === 255 && pixels[npos + 1] === 0 && pixels[npos + 2] === 0) { @@ -323,13 +323,13 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) } } } - + // Line-like pixels typically have 1-2 neighbors if (neighbors <= 2) { linelikePixels++; } } - + // If most pixels (>80%) in the cluster have ≤2 neighbors, it's likely a line shift isLineShift = linelikePixels / clusterPixels.length > 0.8; } @@ -407,8 +407,8 @@ export function visualTest( const actual = []; // Generate screenshots - await callback(myp5, () => { - const img = myp5.get(); + await callback(myp5, async () => { + const img = await myp5.get(); img.pixelDensity(1); actual.push(img); }); diff --git a/test/unit/webgl/p5.Framebuffer.js b/test/unit/webgl/p5.Framebuffer.js index f97cb6b57d..6a6d556351 100644 --- a/test/unit/webgl/p5.Framebuffer.js +++ b/test/unit/webgl/p5.Framebuffer.js @@ -461,7 +461,7 @@ suite('p5.Framebuffer', function() { } }); - test('get() creates a p5.Image with 1x pixel density', function() { + test('get() creates a p5.Image matching the source pixel density', function() { const mainCanvas = myp5.createCanvas(20, 20, myp5.WEBGL); myp5.pixelDensity(2); const fbo = myp5.createFramebuffer(); @@ -482,22 +482,17 @@ suite('p5.Framebuffer', function() { myp5.pop(); }); const img = fbo.get(); - const p2d = myp5.createGraphics(20, 20); - p2d.pixelDensity(1); myp5.image(fbo, -10, -10); - 
p2d.image(mainCanvas, 0, 0); fbo.loadPixels(); img.loadPixels(); - p2d.loadPixels(); expect(img.width).to.equal(fbo.width); expect(img.height).to.equal(fbo.height); - expect(img.pixels.length).to.equal(fbo.pixels.length / 4); - // The pixels should be approximately the same in the 1x image as when we - // draw the framebuffer onto a 1x canvas + expect(img.pixels.length).to.equal(fbo.pixels.length); + // The pixels should be approximately the same as the framebuffer's for (let i = 0; i < img.pixels.length; i++) { - expect(img.pixels[i]).to.be.closeTo(p2d.pixels[i], 2); + expect(img.pixels[i]).to.be.closeTo(fbo.pixels[i], 2); } }); }); diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js new file mode 100644 index 0000000000..452585b6c8 --- /dev/null +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -0,0 +1,262 @@ +import p5 from '../../../src/app.js'; +import rendererWebGPU from "../../../src/webgpu/p5.RendererWebGPU"; + +p5.registerAddon(rendererWebGPU); + +suite('WebGPU p5.Framebuffer', function() { + let myp5; + let prevPixelRatio; + + beforeAll(async function() { + prevPixelRatio = window.devicePixelRatio; + window.devicePixelRatio = 1; + myp5 = new p5(function(p) { + p.setup = function() {}; + }); + }); + + afterAll(function() { + myp5.remove(); + window.devicePixelRatio = prevPixelRatio; + }); + + suite('Creation and basic properties', function() { + test('framebuffers can be created with WebGPU renderer', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.autoSized()).to.equal(true); + }); + + test('framebuffers can be created with custom dimensions', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer({ width: 20, height: 30 }); + + expect(fbo.width).to.equal(20); + expect(fbo.height).to.equal(30); + expect(fbo.autoSized()).to.equal(false); + }); + + test('framebuffers have color texture', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + expect(fbo.color).to.be.an('object'); + expect(fbo.color.rawTexture).to.be.a('function'); + }); + + test('framebuffers can specify different formats', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer({ + format: 'float', + channels: 'rgb' + }); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + }); + }); + + suite('Auto-sizing behavior', function() { + test('auto-sized framebuffers change size with canvas', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.autoSized()).to.equal(true); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + myp5.resizeCanvas(15, 20); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + + test('manually-sized framebuffers do not change size with canvas', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(3); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 1 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + + 
myp5.resizeCanvas(5, 15); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + }); + + test('manually-sized framebuffers can be made auto-sized', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 2 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(2); + + // Make it auto-sized + fbo.autoSized(true); + expect(fbo.autoSized()).to.equal(true); + + myp5.resizeCanvas(8, 12); + myp5.pixelDensity(3); + expect(fbo.width).to.equal(8); + expect(fbo.height).to.equal(12); + expect(fbo.density).to.equal(3); + }); + }); + + suite('Manual resizing', function() { + test('framebuffers can be manually resized', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + fbo.resize(20, 25); + expect(fbo.width).to.equal(20); + expect(fbo.height).to.equal(25); + expect(fbo.autoSized()).to.equal(false); + }); + + test('resizing affects pixel density', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + fbo.pixelDensity(3); + expect(fbo.density).to.equal(3); + + fbo.resize(15, 20); + fbo.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + }); + + suite('Drawing functionality', function() { + test('can draw to framebuffer with draw() method', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + myp5.background(0, 255, 0); + + fbo.draw(() => { + myp5.background(0, 0, 255); + // myp5.fill(0, 255, 0); + }); + await myp5.loadPixels(); + // Drawing should have gone to the framebuffer, leaving the main + // canvas the same + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 255, 0]); + await fbo.loadPixels(); + // The framebuffer should have content + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + + // The content can be drawn back to the main canvas + myp5.imageMode(myp5.CENTER); + myp5.image(fbo, 0, 0); + await myp5.loadPixels(); + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + }); + + test('can use framebuffer as texture', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + // Should not throw when used as texture + expect(() => { + myp5.texture(fbo); + myp5.plane(10, 10); + }).to.not.throw(); + }); + }); + + suite('Pixel access', function() { + test('loadPixels returns a promise in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + const result = fbo.loadPixels(); + expect(result).to.be.a('promise'); + + const pixels = await result; + expect(pixels).toBeInstanceOf(Uint8Array); + expect(pixels.length).to.equal(10 * 10 * 4); + expect([...pixels.slice(0, 4)]).toEqual([255, 0, 0, 255]); + }); + + test('pixels property is set after loadPixels resolves', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + 
const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const pixels = await fbo.loadPixels(); + expect(fbo.pixels).to.equal(pixels); + expect(fbo.pixels.length).to.equal(10 * 10 * 4); + }); + + test('get() returns a promise for single pixel in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(5, 5); + expect(result).to.be.a('promise'); + + const color = await result; + expect(color).to.be.an('array'); + expect(color).to.have.length(4); + expect([...color]).toEqual([100, 150, 200, 255]); + }); + + test('get() returns a promise for region in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(2, 2, 4, 4); + expect(result).to.be.a('promise'); + + const region = await result; + expect(region).to.be.an('object'); // Should be a p5.Image + expect(region.width).to.equal(4); + expect(region.height).to.equal(4); + expect([...region.pixels.slice(0, 4)]).toEqual([100, 150, 200, 255]); + }); + }); +}); diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 7dfe0e6e82..4220f9aa26 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -1,5 +1,6 @@ import { defineWorkspace } from 'vitest/config'; import vitePluginString from 'vite-plugin-string'; +console.log(`CI: ${process.env.CI}`) const plugins = [ vitePluginString({ @@ -38,7 +39,24 @@ export default defineWorkspace([ enabled: true, name: 'chrome', provider: 'webdriverio', - screenshotFailures: false + screenshotFailures: false, + providerOptions: { + capabilities: process.env.CI ? { + 'goog:chromeOptions': { + binary: '/usr/bin/google-chrome', + args: [ + '--enable-unsafe-webgpu', + '--enable-features=Vulkan', + '--use-cmd-decoder=passthrough', + '--disable-gpu-sandbox', + '--disable-software-rasterizer=false', + '--disable-dawn-features=disallow_unsafe_apis', + '--use-angle=vulkan', + '--use-vulkan=swiftshader', + ] + } + } : undefined + } } } }