代码分析说明:
部分说明请参考 Hello Triangle MSAA,以下只解释该功能实现部分的代码。
import { assert, makeSample, SampleInit } from '../../components/SampleLayout';
import triangleVertWGSL from '../../shaders/triangle.vert.wgsl';
import redFragWGSL from '../../shaders/red.frag.wgsl';
// 通过样式来设置Canvas 的宽度缩放动画
import styles from './animatedCanvasSize.module.css';
const init: SampleInit = async ({ canvas, pageState }) => {
  const adapter = await navigator.gpu.requestAdapter();
  assert(adapter, 'requestAdapter returned null');
  const device = await adapter.requestDevice();

  // Sample is no longer the active page; bail out before touching the canvas.
  if (!pageState.active) return;

  const context = canvas.getContext('webgpu') as GPUCanvasContext;
  const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

  // Match the canvas backing store to its CSS size in physical pixels.
  const devicePixelRatio = window.devicePixelRatio;
  canvas.width = canvas.clientWidth * devicePixelRatio;
  canvas.height = canvas.clientHeight * devicePixelRatio;

  context.configure({
    device,
    format: presentationFormat,
    alphaMode: 'premultiplied',
  });

  // MSAA sample count — must be identical for the pipeline and the
  // render-target texture, so it is defined once and referenced twice.
  const sampleCount = 4;

  const pipeline = device.createRenderPipeline({
    layout: 'auto',
    vertex: {
      module: device.createShaderModule({
        code: triangleVertWGSL,
      }),
      entryPoint: 'main',
    },
    fragment: {
      module: device.createShaderModule({
        code: redFragWGSL,
      }),
      entryPoint: 'main',
      targets: [
        {
          format: presentationFormat,
        },
      ],
    },
    primitive: {
      topology: 'triangle-list',
    },
    // FIX: use the shared constant instead of a duplicated literal `4`,
    // so the pipeline's sample count can never drift from the texture's.
    multisample: {
      count: sampleCount,
    },
  });

  // Multisampled render target; resolved into the canvas each frame.
  let renderTarget: GPUTexture | undefined = undefined;
  let renderTargetView: GPUTextureView;

  // The CSS class animates the canvas's CSS size (see the module stylesheet).
  canvas.classList.add(styles.animatedCanvasSize);

  function frame() {
    // Sample is no longer the active page.
    if (!pageState.active) return;

    const currentWidth = canvas.clientWidth * devicePixelRatio;
    const currentHeight = canvas.clientHeight * devicePixelRatio;

    // The canvas size is animating via CSS. When it changes, the MSAA
    // render target must be reallocated and the canvas backing store
    // resized to match the computed CSS size.
    // FIX: also allocate when renderTarget is still undefined, so the very
    // first frame can never begin a render pass with an undefined view
    // (the original only allocated after a size *change*).
    if (
      currentWidth &&
      currentHeight &&
      (renderTarget === undefined ||
        currentWidth !== canvas.width ||
        currentHeight !== canvas.height)
    ) {
      // Destroy the previous render target before replacing it to avoid
      // leaking GPU memory.
      if (renderTarget !== undefined) {
        renderTarget.destroy();
      }

      // Setting the canvas width and height automatically resizes the
      // textures returned by context.getCurrentTexture().
      canvas.width = currentWidth;
      canvas.height = currentHeight;

      // Recreate the multisampled render target at the new canvas size.
      renderTarget = device.createTexture({
        size: [canvas.width, canvas.height],
        sampleCount,
        format: presentationFormat,
        usage: GPUTextureUsage.RENDER_ATTACHMENT,
      });
      renderTargetView = renderTarget.createView();
    }

    const commandEncoder = device.createCommandEncoder();
    const renderPassDescriptor: GPURenderPassDescriptor = {
      colorAttachments: [
        {
          // Draw into the MSAA target...
          view: renderTargetView,
          // ...and resolve the samples into the canvas's current texture.
          resolveTarget: context.getCurrentTexture().createView(),
          clearValue: { r: 0.2, g: 0.2, b: 0.2, a: 1.0 },
          loadOp: 'clear',
          storeOp: 'store',
        },
      ],
    };

    const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
    passEncoder.setPipeline(pipeline);
    passEncoder.draw(3);
    passEncoder.end();
    device.queue.submit([commandEncoder.finish()]);

    requestAnimationFrame(frame);
  }
  requestAnimationFrame(frame);
};
顶点着色器
// Vertex stage: selects one of three hard-coded clip-space positions
// by the built-in vertex index; no vertex buffers are bound.
@vertex
fn main(
  @builtin(vertex_index) VertexIndex : u32
) -> @builtin(position) vec4<f32> {
  // Triangle corners: top-center, bottom-left, bottom-right.
  var positions = array<vec2<f32>, 3>(
    vec2(0.0, 0.5),
    vec2(-0.5, -0.5),
    vec2(0.5, -0.5)
  );
  return vec4<f32>(positions[VertexIndex], 0.0, 1.0);
}
片元着色器
// Fragment stage: writes opaque red to color attachment 0.
@fragment
fn main() -> @location(0) vec4<f32> {
  return vec4<f32>(1.0, 0.0, 0.0, 1.0);
}
动画样式
/* Oscillates the element's CSS width between 10px and 100% while the
   height stays fixed at 600px (from/to are aliases for 0%/100%). */
@keyframes animated-size {
  from {
    width: 10px;
    height: 600px;
  }
  50% {
    width: 100%;
    height: 600px;
  }
  to {
    width: 10px;
    height: 600px;
  }
}
/* Applied to the canvas so its CSS size animates indefinitely; the JS
   render loop resizes the backing store to follow. */
.animatedCanvasSize {
  /* Shorthand for: name 'animated-size', 3s duration, ease timing,
     infinite iteration count — identical to the four longhands. */
  animation: animated-size 3s ease infinite;
}
总结步骤:
- 整体步骤与上一个示例一致,不同点在于将目标纹理的创建放在每一帧渲染中,使渲染目标纹理的大小与 Canvas 的大小同步变化。
网友评论