Because WebGPU's rendering calls must happen in a fixed order, it helps to wrap the drawing workflow into a small set of functions that are easy to call: an initialization function initWebGPU, a pipeline setup function initPipeline, and a draw function draw.
WebGPU Initialization
The initWebGPU function takes a canvas element and returns the device, the drawing context, the preferred canvas format, and the canvas size. It gathers the boilerplate that every WebGPU setup repeats into one place.
async function initWebGPU(canvas) {
  // WebGPU is only exposed as navigator.gpu in supporting browsers
  if (!navigator.gpu)
    throw new Error('Not Support WebGPU')
  const adapter = await navigator.gpu.requestAdapter({
    powerPreference: 'high-performance'
  })
  if (!adapter)
    throw new Error('No Adapter Found')
  const device = await adapter.requestDevice()
  const context = canvas.getContext('webgpu')
  // Preferred texture format for this platform (typically 'bgra8unorm' or 'rgba8unorm')
  const format = navigator.gpu.getPreferredCanvasFormat()
  const size = { width: canvas.width, height: canvas.height }
  // Bind the device and format to the canvas context before any drawing
  context.configure({
    device: device,
    format: format,
  })
  return { device, context, format, size }
}
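As a usage note, a caller that prefers to degrade gracefully rather than let the error propagate can wrap the helper in a try/catch. A minimal sketch (the tryInit helper and the console fallback are illustrative, not part of the code above):
async function tryInit(canvas) {
  try {
    return await initWebGPU(canvas)
  } catch (e) {
    // WebGPU missing or no adapter found: report it and let the caller fall back
    console.warn('WebGPU unavailable:', e.message)
    return null
  }
}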
Pipeline Initialization
initPipeline takes a few configuration parameters and builds a render pipeline from them.
async function initPipeline(device, format, cellShaderModule, vertexBufferLayout) {
  const descriptor = {
    label: "Cell pipeline",
    layout: "auto",
    vertex: {
      module: cellShaderModule,
      entryPoint: "vertexMain",
      buffers: [vertexBufferLayout]
    },
    fragment: {
      module: cellShaderModule,
      entryPoint: "fragmentMain",
      targets: [{
        format: format
      }]
    }
  }
  // createRenderPipelineAsync is the non-blocking variant of createRenderPipeline,
  // which matches this function's async signature
  return await device.createRenderPipelineAsync(descriptor)
}
Draw Function
The draw function wraps the otherwise verbose drawing flow: upload the vertex data, record a render pass, and submit the command buffer.
function draw(device, context, pipeline, vertices) {
  // GPU-side buffer for the vertex data; COPY_DST lets writeBuffer fill it
  const vertexBuffer = device.createBuffer({
    label: "Cell vertices",
    size: vertices.byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
  });
  device.queue.writeBuffer(vertexBuffer, 0, vertices);
  const commandEncoder = device.createCommandEncoder()
  // Render into the texture currently backing the canvas
  const view = context.getCurrentTexture().createView()
  const renderPassDescriptor = {
    colorAttachments: [
      {
        view: view,
        clearValue: { r: 0, g: 0, b: 0, a: 1.0 },
        loadOp: 'clear',
        storeOp: 'store'
      }
    ]
  }
  const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor)
  passEncoder.setPipeline(pipeline)
  passEncoder.setVertexBuffer(0, vertexBuffer);
  // Two floats per vertex, so the vertex count is half the array length
  passEncoder.draw(vertices.length / 2);
  passEncoder.end()
  device.queue.submit([commandEncoder.finish()])
}
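Note that draw allocates a fresh vertex buffer on every call, which is fine for a one-off render but wasteful when redrawing repeatedly. A possible refactor, sketched here with the hypothetical helpers createVertexBuffer and drawWithBuffer (not part of the code above), is to upload the vertices once and reuse the buffer:
function createVertexBuffer(device, vertices) {
  const vertexBuffer = device.createBuffer({
    label: "Cell vertices",
    size: vertices.byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
  })
  device.queue.writeBuffer(vertexBuffer, 0, vertices)
  return vertexBuffer
}
function drawWithBuffer(device, context, pipeline, vertexBuffer, vertexCount) {
  const commandEncoder = device.createCommandEncoder()
  const passEncoder = commandEncoder.beginRenderPass({
    colorAttachments: [{
      view: context.getCurrentTexture().createView(),
      clearValue: { r: 0, g: 0, b: 0, a: 1.0 },
      loadOp: 'clear',
      storeOp: 'store'
    }]
  })
  passEncoder.setPipeline(pipeline)
  passEncoder.setVertexBuffer(0, vertexBuffer)
  passEncoder.draw(vertexCount) // caller passes vertices.length / 2
  passEncoder.end()
  device.queue.submit([commandEncoder.finish()])
}
The caller would compute the vertex count once and then call drawWithBuffer on every redraw with the same buffer.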
Running
Prepare the vertex data, the buffer layout, and the shaders, then call the wrapped functions in order to render the geometry.
// Two triangles that together cover a square centred on the origin
const vertices = new Float32Array([
  -0.8, -0.8,
   0.8, -0.8,
   0.8,  0.8,

  -0.8, -0.8,
   0.8,  0.8,
  -0.8,  0.8,
]);
const vertexBufferLayout = {
  arrayStride: 8, // 2 floats per vertex * 4 bytes per float
  attributes: [{
    format: "float32x2",
    offset: 0,
    shaderLocation: 0, // Position, see vertex shader
  }],
};
async function run() {
  const canvas = document.querySelector('canvas')
  if (!canvas)
    throw new Error('No Canvas')
  const { device, context, format } = await initWebGPU(canvas)
  const cellShaderModule = device.createShaderModule({
    label: 'Cell shader',
    code: `
      @vertex
      fn vertexMain(@location(0) pos: vec2f) ->
        @builtin(position) vec4f {
        return vec4f(pos, 0, 1);
      }
      @fragment
      fn fragmentMain() -> @location(0) vec4f {
        return vec4f(1, 0, 0, 1);
      }
    `
  });
  const pipeline = await initPipeline(device, format, cellShaderModule, vertexBufferLayout)
  // Initial draw, then redraw whenever the window is resized
  draw(device, context, pipeline, vertices)
  window.addEventListener('resize', () => {
    draw(device, context, pipeline, vertices)
  })
}
run()
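One caveat about the resize listener in run: it only redraws, it does not change the canvas's drawing-buffer size, so the output is simply scaled by CSS. If the drawing buffer should follow the displayed size, one option (a sketch, assuming the canvas is styled to fill its container; this listener would replace the one inside run) is to update canvas.width and canvas.height before redrawing, since getCurrentTexture() picks up the canvas's current dimensions:
window.addEventListener('resize', () => {
  // Match the drawing buffer to the displayed size, in device pixels
  canvas.width = Math.floor(canvas.clientWidth * window.devicePixelRatio)
  canvas.height = Math.floor(canvas.clientHeight * window.devicePixelRatio)
  draw(device, context, pipeline, vertices)
})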