diff --git a/3rdparty/muigui-0.x.module.js b/3rdparty/muigui-0.x.module.js
index 022e62d9..db59d129 100644
--- a/3rdparty/muigui-0.x.module.js
+++ b/3rdparty/muigui-0.x.module.js
@@ -1,4 +1,4 @@
-/* muigui@0.0.7, license MIT */
+/* muigui@0.0.8, license MIT */
 var css = {
   default: `
 .muigui {
@@ -867,7 +867,6 @@ function makeMinMaxPair(gui, properties, minPropName, maxPropName, options) {
         min,
         max: max - guiMinRange,
       })
-//      .listen()
       .onChange(v => {
         maxGui.setValue(Math.min(max, Math.max(v + valueMinRange, properties[maxPropName])));
       });
@@ -877,7 +876,6 @@ function makeMinMaxPair(gui, properties, minPropName, maxPropName, options) {
         min: min + guiMinRange,
         max,
       })
-//      .listen()
       .onChange(v => {
         minGui.setValue(Math.max(min, Math.min(v - valueMinRange, properties[minPropName])));
       });
@@ -2313,6 +2311,12 @@ class Container extends Controller {
     }
     return this;
   }
+  updateDisplay() {
+    for (const controller of this.#controllers) {
+      controller.updateDisplay();
+    }
+    return this;
+  }
   remove(controller) {
     const ndx = this.#controllers.indexOf(controller);
     if (ndx >= 0) {
diff --git a/webgpu/lessons/webgpu-environment-maps.md b/webgpu/lessons/webgpu-environment-maps.md
index aa15653a..54e6aed7 100644
--- a/webgpu/lessons/webgpu-environment-maps.md
+++ b/webgpu/lessons/webgpu-environment-maps.md
@@ -27,7 +27,8 @@ Here's an environment map from the lobby of the Computer History Museum in Mount
 Based on [the code in the previous article](webgpu-cube-maps.html) let's load
 those 6 images instead of the canvases we generated.
 
-From [the article on importing textures](webgpu-importing-textures.html) we had this function to load an image
+From [the article on importing textures](webgpu-importing-textures.html) we had these two functions. One to load an image and another to create a texture from
+an image.
 
 ```js
 async function loadImageBitmap(url) {
@@ -35,24 +36,46 @@ From [the article on importing textures](webgpu-importing-textures.html) we had
   const res = await fetch(url);
   const blob = await res.blob();
   return await createImageBitmap(blob, { colorSpaceConversion: 'none' });
 }
+
+  async function createTextureFromImage(device, url, options) {
+    const imgBitmap = await loadImageBitmap(url);
+    return createTextureFromSource(device, imgBitmap, options);
+  }
 ```
 
-So we just need to load the 6 images above and pass them to our existing functions.
+Let's add one to load multiple images
+
+```js
++  async function createTextureFromImages(device, urls, options) {
++    const imgBitmaps = await Promise.all(urls.map(loadImageBitmap));
++    return createTextureFromSources(device, imgBitmaps, options);
++  }
+
+  async function createTextureFromImage(device, url, options) {
+-    const imgBitmap = await loadImageBitmap(url);
+-    return createTextureFromSource(device, imgBitmap, options);
++    return createTextureFromImages(device, [url], options);
+  }
+```
+While we were at it, we also changed the existing function to use
+the new one. Now we can use the new function to load the six images.
```js - const faceImages = await Promise.all([ - 'resources/images/computer-history-museum/pos-x.jpg', - 'resources/images/computer-history-museum/neg-x.jpg', - 'resources/images/computer-history-museum/pos-y.jpg', - 'resources/images/computer-history-museum/neg-y.jpg', - 'resources/images/computer-history-museum/pos-z.jpg', - 'resources/images/computer-history-museum/neg-z.jpg', - ].map(loadImageBitmap)); - - const texture = await createTextureFromSources( +- const texture = await createTextureFromSources( - device, faceCanvases, {mips: true, flipY: false}); -+ device, faceImages, {mips: true, flipY: false}); ++ const texture = await createTextureFromImages( ++ device, ++ [ ++ 'resources/images/computer-history-museum/pos-x.jpg', ++ 'resources/images/computer-history-museum/neg-x.jpg', ++ 'resources/images/computer-history-museum/pos-y.jpg', ++ 'resources/images/computer-history-museum/neg-y.jpg', ++ 'resources/images/computer-history-museum/pos-z.jpg', ++ 'resources/images/computer-history-museum/neg-z.jpg', ++ ], ++ {mips: true, flipY: false}, ++ ); ``` In fragment shader we want to know, for each fragment to be drawn, given a vector from diff --git a/webgpu/lessons/webgpu-lighting-spot.md b/webgpu/lessons/webgpu-lighting-spot.md index ec4a6d16..4205ce3c 100644 --- a/webgpu/lessons/webgpu-lighting-spot.md +++ b/webgpu/lessons/webgpu-lighting-spot.md @@ -290,6 +290,87 @@ It will be 1 if we're inside the `innerLimit`. And, it will be between 0 and 1 between those 2 limits. We then multiply the light and specular calculations by `inLight`. +And again we need to update our uniform buffer setup + +```js +- const uniformBufferSize = (12 + 16 + 16 + 4 + 4 + 4 + 4) * 4; ++ const uniformBufferSize = (12 + 16 + 16 + 4 + 4 + 4 + 4 + 4) * 4; + const uniformBuffer = device.createBuffer({ + label: 'uniforms', + size: uniformBufferSize, + usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, + }); + + const uniformValues = new Float32Array(uniformBufferSize / 4); + + // offsets to the various uniform values in float32 indices + const kNormalMatrixOffset = 0; + const kWorldViewProjectionOffset = 12; + const kWorldOffset = 28; + const kColorOffset = 44; + const kLightWorldPositionOffset = 48; + const kViewWorldPositionOffset = 52; + const kShininessOffset = 55; + const kLightDirectionOffset = 56; +- const kLimitOffset = 59; ++ const kInnerLimitOffset = 59; ++ const kOuterLimitOffset = 60; + + const normalMatrixValue = uniformValues.subarray( + kNormalMatrixOffset, kNormalMatrixOffset + 12); + const worldViewProjectionValue = uniformValues.subarray( + kWorldViewProjectionOffset, kWorldViewProjectionOffset + 16); + const worldValue = uniformValues.subarray( + kWorldOffset, kWorldOffset + 16); + const colorValue = uniformValues.subarray(kColorOffset, kColorOffset + 4); + const lightWorldPositionValue = uniformValues.subarray( + kLightWorldPositionOffset, kLightWorldPositionOffset + 3); + const viewWorldPositionValue = uniformValues.subarray( + kViewWorldPositionOffset, kViewWorldPositionOffset + 3); + const shininessValue = uniformValues.subarray( + kShininessOffset, kShininessOffset + 1); + const lightDirectionValue = uniformValues.subarray( + kLightDirectionOffset, kLightDirectionOffset + 3); +- const limitValue = uniformValues.subarray( +- kLimitOffset, kLimitOffset + 1); ++ const innerLimitValue = uniformValues.subarray( ++ kInnerLimitOffset, kInnerLimitOffset + 1); ++ const outerLimitValue = uniformValues.subarray( ++ kOuterLimitOffset, kOuterLimitOffset + 1); +``` + +and where we set them + +```js 
+  const radToDegOptions = { min: -360, max: 360, step: 1, converters: GUI.converters.radToDeg };
++  const limitOptions = { min: 0, max: 90, minRange: 1, step: 1, converters: GUI.converters.radToDeg };
+
+  const gui = new GUI();
+  gui.onChange(render);
+  gui.add(settings, 'rotation', radToDegOptions);
+  gui.add(settings, 'shininess', { min: 1, max: 250 });
+-  gui.add(settings, 'limit', limitOptions);
++  GUI.makeMinMaxPair(gui, settings, 'innerLimit', 'outerLimit', limitOptions);
+  gui.add(settings, 'aimOffsetX', -50, 50);
+  gui.add(settings, 'aimOffsetY', -50, 50);
+
+  ...
+
+  function render() {
+
+    ...
+
+    colorValue.set([0.2, 1, 0.2, 1]);  // green
+    lightWorldPositionValue.set([-10, 30, 100]);
+    viewWorldPositionValue.set(eye);
+    shininessValue[0] = settings.shininess;
+-    limitValue[0] = Math.cos(settings.limit);
++    innerLimitValue[0] = Math.cos(settings.innerLimit);
++    outerLimitValue[0] = Math.cos(settings.outerLimit);
+
+    ...
+```
+
 And that works
 
 {{{example url="../webgpu-lighting-spot-w-linear-falloff.html" }}}
 
@@ -300,7 +381,7 @@ One thing to be aware of is if `innerLimit` is equal to `outerLimit` then
 `limitRange` will be 0.0. We divide by `limitRange` and dividing by
 zero is bad/undefined. There's nothing to do in the shader here. We just
 need to make sure in our JavaScript that `innerLimit` is never equal to
-`outerLimit`. Something we're not doing now.
+`outerLimit` which, in this case, our GUI does for us.
 
 WGSL also has a function we could use to slightly simplify this. It's called
 `smoothstep` it returns a value from 0 to 1 but
diff --git a/webgpu/lessons/webgpu-transparency.md b/webgpu/lessons/webgpu-transparency.md
index b00c4df7..37926c98 100644
--- a/webgpu/lessons/webgpu-transparency.md
+++ b/webgpu/lessons/webgpu-transparency.md
@@ -1,4 +1,109 @@
-Title: WebGPU Transparency
+Title: WebGPU Transparency and Blending
 Description: Blending Pixels in WebGPU
-TOC: Transparency
+TOC: Transparency and Blending
+
+The basic solution to transparency in WebGPU is called "blending".
+When you create a render pipeline, for each colorAttachment, you can specify
+blending settings.
+
+The full list of default settings is
+
+```
+blend: {
+  color: {
+    operation: "add",
+    srcFactor: "one",
+    dstFactor: "zero",
+  },
+  alpha: {
+    operation: "add",
+    srcFactor: "one",
+    dstFactor: "zero",
+  },
+}
+```
+
+Where `color` is what happens to the `rgb` portion of a color and `alpha` is
+what happens to the `a` (alpha) portion.
+
+`operation` can be one of
+
+  * "add"
+  * "subtract"
+  * "reverse-subtract"
+  * "min"
+  * "max"
+
+`srcFactor` and `dstFactor` can each be one of
+
+  * "zero"
+  * "one"
+  * "src"
+  * "one-minus-src"
+  * "src-alpha"
+  * "one-minus-src-alpha"
+  * "dst"
+  * "one-minus-dst"
+  * "dst-alpha"
+  * "one-minus-dst-alpha"
+  * "src-alpha-saturated"
+  * "constant"
+  * "one-minus-constant"
+
+Most of them are relatively straightforward to understand. Think of it as
+
+```
+   result = (src * srcFactor) operation (dst * dstFactor)
+```
+
+So consider the default where `operation` is `'add'`, `srcFactor` is `'one'` and
+`dstFactor` is `'zero'`. This gives us
+
+```
+   result = (src * 1) add (dst * 0)
+   result = src * 1 + dst * 0
+   result = src
+```
+
+As you can see, with the default settings the result ends up being just `src`.
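+
+As a concrete sketch (not code from the article's samples), here is where these
+settings go when creating a render pipeline: the `blend` field sits on each entry
+of `fragment.targets`. The `module` and `presentationFormat` names below are
+assumed to come from your usual setup code.
+
+```js
+// Hypothetical pipeline creation spelling out the default blend settings explicitly.
+// With these values the blended result is just `src`, exactly as described above.
+const pipeline = device.createRenderPipeline({
+  label: 'pipeline with explicit (default) blend settings',
+  layout: 'auto',
+  vertex: { module },
+  fragment: {
+    module,
+    targets: [
+      {
+        format: presentationFormat,
+        blend: {
+          color: { operation: 'add', srcFactor: 'one', dstFactor: 'zero' },
+          alpha: { operation: 'add', srcFactor: 'one', dstFactor: 'zero' },
+        },
+      },
+    ],
+  },
+});
+```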
+
+Probably the most common setting for blending is
+
+```
+{
+  operation: 'add',
+  srcFactor: 'one',
+  dstFactor: 'one-minus-src-alpha'
+}
+```
+
+This mode is used most often with "premultiplied alpha" meaning it expects
+that the "src" has already had its RGB colors "premultiplied" by the alpha value.
+
+Let's say our color is 1, 0.5, 0.25 which is orange and we want it to be 33%
+transparent so our alpha is 0.33. Then our "premultiplied color" would be
+
+```
+   premultiplied
+   ---------------------------------
+   r = 1    * 0.33 = 0.33
+   g = 0.5  * 0.33 = 0.165
+   b = 0.25 * 0.33 = 0.0825
+   a = 0.33        = 0.33
+```
+
+How you get a pre-multiplied color is up to you. If you have un-premultiplied
+colors then in the shader you could just do
+
+```wgsl
+  return vec4f(color.rgb * color.a, color.a);
+```
+
+Otherwise, the functions we covered in [the article on importing textures](webgpu-importing-textures.html) take a `premultipliedAlpha: true` option.
+
+Let's make an example that shows these options. With 5 operations, 13 options
+for srcFactor, and 13 for dstFactor, that's 5 × 13 × 13 = 845 combinations for color,
+and since alpha has its own 845, that's 845 × 845 = 714,025 combinations in total,
+so maybe it's better to limit the list to commonly used
+ones.
+
diff --git a/webgpu/resources/js/timing-helper.js b/webgpu/resources/js/timing-helper.js
index 25cebc48..e7029de1 100644
--- a/webgpu/resources/js/timing-helper.js
+++ b/webgpu/resources/js/timing-helper.js
@@ -7,49 +7,35 @@ function assert(cond, msg = '') {
 export default class TimingHelper {
   #device;
   #canTimestamp;
-  #querySets;
-  #currentSet;
+  #resultBuffer;
+  #resultBuffers = [];
   #state = 'free';
+  #querySet;
+  #resolveBuffer;
 
   constructor(device) {
     this.#device = device;
     this.#canTimestamp = device.features.has('timestamp-query');
-    this.#querySets = [];
-    this.#currentSet = undefined;
+    this.#querySet = device.createQuerySet({
+      type: 'timestamp',
+      count: 2,
+    });
+    this.#resolveBuffer = device.createBuffer({
+      size: 2 * 8,
+      usage: GPUBufferUsage.QUERY_RESOLVE | GPUBufferUsage.COPY_SRC,
+    });
   }
 
-  #getQuerySet() {
-    const device = this.#device;
-    if (this.#querySets.length === 0) {
-      const querySet = device.createQuerySet({
-        type: 'timestamp',
-        count: 2,
-      });
-      const resolveBuffer = device.createBuffer({
-        size: 2 * 8,
-        usage: GPUBufferUsage.QUERY_RESOLVE | GPUBufferUsage.COPY_SRC,
-      });
-      const resultBuffer = device.createBuffer({
-        size: 2 * 8,
-        usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
-      });
-      this.#querySets.push({querySet, resolveBuffer, resultBuffer});
-    }
-    return this.#querySets.pop();
-  }
 
   #beginTimestampPass(encoder, fnName, descriptor) {
     if (this.#canTimestamp) {
       assert(this.#state === 'free', 'state not free');
       this.#state = 'need resolve';
 
-      assert(!this.#currentSet);
-      this.#currentSet = this.#getQuerySet();
-
       const pass = encoder[fnName]({
         ...descriptor,
         ...{
           timestampWrites: {
-            querySet: this.#currentSet.querySet,
+            querySet: this.#querySet,
             beginningOfPassWriteIndex: 0,
             endOfPassWriteIndex: 1,
           },
@@ -82,33 +68,31 @@ export default class TimingHelper {
     if (!this.#canTimestamp) {
       return;
     }
-    assert(!!this.#currentSet);
     assert(this.#state === 'need resolve', 'must call addTimestampToPass');
     this.#state = 'wait for result';
 
-    const { querySet, resolveBuffer, resultBuffer } = this.#currentSet;
+    this.#resultBuffer = this.#resultBuffers.pop() || this.#device.createBuffer({
+      size: 2 * 8,
+      usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
+    });
 
-    encoder.resolveQuerySet(querySet, 0, 2, resolveBuffer, 0);
-    encoder.copyBufferToBuffer(resolveBuffer, 0, resultBuffer, 0,
resultBuffer.size); + encoder.resolveQuerySet(this.#querySet, 0, 2, this.#resolveBuffer, 0); + encoder.copyBufferToBuffer(this.#resolveBuffer, 0, this.#resultBuffer, 0, this.#resultBuffer.size); } async getResult() { if (!this.#canTimestamp) { return 0; } - assert(!!this.#currentSet); assert(this.#state === 'wait for result', 'must call resolveTiming'); this.#state = 'free'; - const q = this.#currentSet; - this.#currentSet = undefined; - - const { resultBuffer } = q; + const resultBuffer = this.#resultBuffer; await resultBuffer.mapAsync(GPUMapMode.READ); const times = new BigInt64Array(resultBuffer.getMappedRange()); const duration = Number(times[1] - times[0]); resultBuffer.unmap(); - this.#querySets.push(q); + this.#resultBuffers.push(resultBuffer); return duration; } } \ No newline at end of file diff --git a/webgpu/webgpu-environment-map.html b/webgpu/webgpu-environment-map.html index 6475e636..02a93100 100644 --- a/webgpu/webgpu-environment-map.html +++ b/webgpu/webgpu-environment-map.html @@ -332,17 +332,23 @@ return await createImageBitmap(blob, { colorSpaceConversion: 'none' }); } - const faceImages = await Promise.all([ - 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ - ].map(loadImageBitmap)); - - const texture = await createTextureFromSources( - device, faceImages, {mips: true, flipY: false}); + async function createTextureFromImages(device, urls, options) { + const images = await Promise.all(urls.map(loadImageBitmap)); + return createTextureFromSources(device, images, options); + } + + const texture = await createTextureFromImages( + device, + [ + 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-y.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ + ], + {mips: true, flipY: false}, + ); const sampler = device.createSampler({ magFilter: 'linear', diff --git a/webgpu/webgpu-lighting-spot-w-linear-falloff.html b/webgpu/webgpu-lighting-spot-w-linear-falloff.html index 3cb99250..fd75d5f3 100644 --- a/webgpu/webgpu-lighting-spot-w-linear-falloff.html +++ b/webgpu/webgpu-lighting-spot-w-linear-falloff.html @@ -745,13 +745,13 @@ }; const radToDegOptions = { min: -360, max: 360, step: 1, converters: GUI.converters.radToDeg }; + const limitOptions = { min: 0, max: 90, minRange: 1, step: 1, converters: GUI.converters.radToDeg }; const gui = new GUI(); gui.onChange(render); gui.add(settings, 'rotation', radToDegOptions); gui.add(settings, 'shininess', { min: 1, max: 250 }); - gui.add(settings, 'innerLimit', { min: 0, max: 180, step: 1, converters: GUI.converters.radToDeg }); - gui.add(settings, 'outerLimit', { min: 0, max: 180, step: 1, converters: GUI.converters.radToDeg }); + GUI.makeMinMaxPair(gui, settings, 'innerLimit', 
'outerLimit', limitOptions); gui.add(settings, 'aimOffsetX', -50, 50); gui.add(settings, 'aimOffsetY', -50, 50); diff --git a/webgpu/webgpu-skybox-plus-environment-map.html b/webgpu/webgpu-skybox-plus-environment-map.html index ce616d57..40bcd2e9 100644 --- a/webgpu/webgpu-skybox-plus-environment-map.html +++ b/webgpu/webgpu-skybox-plus-environment-map.html @@ -386,17 +386,23 @@ return await createImageBitmap(blob, { colorSpaceConversion: 'none' }); } - const faceImages = await Promise.all([ - 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ - ].map(loadImageBitmap)); - - const texture = await createTextureFromSources( - device, faceImages, {mips: true, flipY: false}); + async function createTextureFromImages(device, urls, options) { + const images = await Promise.all(urls.map(loadImageBitmap)); + return createTextureFromSources(device, images, options); + } + + const texture = await createTextureFromImages( + device, + [ + 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-y.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ + ], + {mips: true, flipY: false}, + ); const sampler = device.createSampler({ magFilter: 'linear', diff --git a/webgpu/webgpu-skybox.html b/webgpu/webgpu-skybox.html index 18eaa303..7ab27907 100644 --- a/webgpu/webgpu-skybox.html +++ b/webgpu/webgpu-skybox.html @@ -260,17 +260,23 @@ return await createImageBitmap(blob, { colorSpaceConversion: 'none' }); } - const faceImages = await Promise.all([ - 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-y.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ - 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ - ].map(loadImageBitmap)); - - const texture = await createTextureFromSources( - device, faceImages, {mips: true, flipY: false}); + async function createTextureFromImages(device, urls, options) { + const images = await Promise.all(urls.map(loadImageBitmap)); + return createTextureFromSources(device, images, options); + } + + const texture = await createTextureFromImages( + device, + [ + 'resources/images/computer-history-museum/pos-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-x.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-y.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-y.jpg', /* 
webgpufundamentals: url */ + 'resources/images/computer-history-museum/pos-z.jpg', /* webgpufundamentals: url */ + 'resources/images/computer-history-museum/neg-z.jpg', /* webgpufundamentals: url */ + ], + {mips: true, flipY: false}, + ); const sampler = device.createSampler({ magFilter: 'linear',