简介
现实中看到的物体颜色并不是物体的真实颜色,而是它反射光的颜色。本质上是物体吸收了光的一部分频段,那些不被吸收的频段被反射进入人眼,才被感知为颜色。模拟真实反射光线的原理:光的颜色乘以物体的颜色(对应的 RGB 分量相乘)。
正向光线追踪
- 光源向各个方向发射光线;
- 光线遇到物体表面会弹射;
- 当光线射入空白空间后或者能量几乎被物体吸收后可以直接被忽略;
- 最终到达屏幕面板,进入人眼(相机)的光线的颜色被累积记录在对应像素上;
索引、顶点数据缓冲区
// GPU-side storage for the box mesh: one buffer for vertex attributes,
// one for the triangle index list.
const boxVertexBuffer = device.createBuffer({
  label: 'GPUBuffer store vertex',
  size: box.vertex.byteLength,
  usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST
})
const boxIndexBuffer = device.createBuffer({
  label: 'GPUBuffer store vertex index',
  size: box.index.byteLength,
  usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST
})
const boxBuffer = { vertex: boxVertexBuffer, index: boxIndexBuffer }
// Upload the CPU-side typed arrays into the freshly created buffers.
device.queue.writeBuffer(boxBuffer.vertex, 0, box.vertex)
device.queue.writeBuffer(boxBuffer.index, 0, box.index)
mvp矩阵修改
// Split the mvp matrix: the model-view part goes into a storage buffer
// (one mat4 per instance, read via instance_index in the vertex shader),
// while the projection part is a uniform shared by every instance.
const MAT4X4_BYTES = 4 * 4 * 4 // 16 floats * 4 bytes each
const modelViewBuffer = device.createBuffer({
  label: 'GPUBuffer store n*4x4 matrix',
  // NOTE(review): only room for a single matrix here despite the "n*4x4"
  // label — enlarge when drawing more than one instance. TODO confirm.
  size: MAT4X4_BYTES,
  usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST
})
const projectionBuffer = device.createBuffer({
  label: 'GPUBuffer store 4x4 matrix',
  size: MAT4X4_BYTES,
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
})
// Bind group 0: the two matrix buffers, at bindings 0 (model-view) and
// 1 (projection), matching the vertex shader's @group(0) declarations.
const matrixEntries = [modelViewBuffer, projectionBuffer].map((buffer, binding) => ({
  binding,
  resource: { buffer }
}))
const uniformGroup = device.createBindGroup({
  label: 'Uniform Group with Matrix',
  layout: pipeline.getBindGroupLayout(0),
  entries: matrixEntries
})
光源缓存--环境光、平行光、点光源
// Light-source buffers. FIX: all three debug labels previously said
// 'GPUBuffer store 4x4 matrix' (copy-paste leftovers) even though none of
// them holds a matrix; they now describe the actual contents, which makes
// GPU validation/debugging output meaningful.
// Ambient light buffer
const ambientBuffer = device.createBuffer({
  label: 'GPUBuffer store ambient light',
  size: 1 * 4, // 1 x float32: intensity f32
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
})
// Point light buffer
const pointBuffer = device.createBuffer({
  label: 'GPUBuffer store point light',
  size: 8 * 4, // 8 x float32: position vec4 + 4 configs (intensity, radius, ...)
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
})
// Directional light buffer
const directionalBuffer = device.createBuffer({
  label: 'GPUBuffer store directional light',
  size: 8 * 4, // 8 x float32: direction vec4 + 4 configs (intensity, ...)
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
})
// Bind group 1: the three light buffers at bindings 0/1/2 (ambient, point,
// directional), matching the fragment shader's @group(1) declarations.
const lightEntries = [ambientBuffer, pointBuffer, directionalBuffer].map(
  (buffer, binding) => ({
    binding,
    resource: { buffer }
  })
)
const lightGroup = device.createBindGroup({
  label: 'Uniform Group with matrix',
  layout: pipeline.getBindGroupLayout(1),
  entries: lightEntries
})
编码器->管线设置
// Record the draw: bind group 0 carries the model-view/projection matrices.
passEncoder.setBindGroup(0, pipelineObj.uniformGroup)
// Bind group 1 carries the light-source uniforms.
passEncoder.setBindGroup(1, pipelineObj.lightGroup)
// Vertex attribute data at slot 0.
passEncoder.setVertexBuffer(0, pipelineObj.boxBuffer.vertex)
// 16-bit triangle indices.
passEncoder.setIndexBuffer(pipelineObj.boxBuffer.index, 'uint16')
// Draw one instance using the full index list.
passEncoder.drawIndexed(box.indexCount, 1, 0, 0, 0)
MVP矩阵
// Projection matrix: computed from the current canvas aspect ratio and
// uploaded once here (only the model-view matrix changes per frame).
let aspect = size.width / size.height
const projectionMatrix = getProjectionMatrix(aspect)
device.queue.writeBuffer(pipelineObj.projectionBuffer, 0, projectionMatrix)
// Model matrix: rotation, scale, translation — rebuilt and uploaded each frame.
function frame() {
{
// Animate rotation from wall-clock time.
// NOTE(review): this uses Date.now() while the lighting block uses
// performance.now() — consider one time source for consistency.
const now = Date.now() / 1000
rotation.x = Math.sin(now)
rotation.y = Math.cos(now)
const modelView = getModelViewMatrix(position, rotation, scale)
device.queue.writeBuffer(pipelineObj.modelViewBuffer, 0, modelView)
}
...
}
光源矩阵
// Ambient light: a single intensity value.
const ambient = new Float32Array([0.1])
// Point light layout: [x, y, z, _, intensity, radius, _, _].
// The position (first three floats) is re-animated every frame.
const pointLight = new Float32Array([0, 0, -50, 0, 1, 20, 0, 0])
// Directional light layout: [dx, dy, dz, _, intensity, _, _, _].
const directionalLight = new Float32Array([0, 0, 0, 0, 0.5, 0, 0, 0])
function frame() {
...
// Lights: animate positions/directions, then upload all three buffers.
{
const now = performance.now()
// Point light orbits on a circle of radius 10 in x/y, z oscillating around -60.
pointLight[0] = 10 * Math.sin(now / 1000)
pointLight[1] = 10 * Math.cos(now / 1000)
pointLight[2] = -60 + 10 * Math.cos(now / 1000)
// Directional light direction sweeps in the x/z plane.
directionalLight[0] = Math.sin(now / 1500)
directionalLight[2] = Math.cos(now / 1500)
// Upload the updated light data to the GPU.
device.queue.writeBuffer(pipelineObj.ambientBuffer, 0, ambient)
device.queue.writeBuffer(pipelineObj.pointBuffer, 0, pointLight)
device.queue.writeBuffer(pipelineObj.directionalBuffer, 0, directionalLight)
}
...
}
顶点着色器
// Per-instance model-view matrices (storage buffer, indexed by instance_index)
// plus a projection matrix shared by every instance.
@binding(0) @group(0) var<storage, read> modelViews : array<mat4x4<f32>>;
@group(0) @binding(1) var<uniform> projection : mat4x4<f32>;
struct VertexOutput {
@builtin(position) Position : vec4<f32>,
@location(0) fragPosition: vec3<f32>,
@location(1) fragNormal : vec3<f32>,
@location(2) fragUV : vec2<f32>,
@location(3) fragColor: vec4<f32>
};
@vertex
fn main(
@builtin(instance_index) index : u32,
@location(0) position : vec4<f32>,
@location(1) normal : vec3<f32>,
@location(2) uv : vec2<f32>
) -> VertexOutput {
let modelview = modelViews[index];
let mvp = projection * modelview;
var output : VertexOutput;
output.Position = mvp * position;
// World/view-space position for per-fragment lighting.
output.fragPosition = (modelview * position).xyz;
// NOTE(review): transforming the normal by modelview (w = 0) is only correct
// for uniform scaling; use the inverse-transpose for non-uniform scale.
output.fragNormal = (modelview * vec4<f32>(normal, 0.0)).xyz;
output.fragUV = uv;
// Map position from [-1, 1] to [0, 1] and use it as a debug vertex color.
output.fragColor = 0.5 * (position + vec4<f32>(1.0, 1.0, 1.0, 1.0));
return output;
}
片元着色器
Ambient, Diffuse, Specular, Point, Directional, Spot 环境光,漫反射,镜面反射,点光源,平行光,聚光灯
@group(1) @binding(0) var<uniform> ambientIntensity : f32;
@group(1) @binding(1) var<uniform> pointLight : array<vec4<f32>, 2>;
@group(1) @binding(2) var<uniform> directionLight : array<vec4<f32>, 2>;
// Fragment shader: sums ambient + directional + point (Lambert diffuse)
// contributions, then tints the object color with the accumulated light.
@fragment
fn main(
@location(0) fragPosition: vec3<f32>,
@location(1) fragNormal: vec3<f32>,
@location(2) fragUV: vec2<f32>,
@location(3) fragColor: vec4<f32>
) -> @location(0) vec4<f32> {
let objectColor = fragColor.rgb;
// FIX: interpolated normals are generally not unit length after rasterizer
// interpolation; renormalize before any dot product, otherwise the diffuse
// terms are subtly too dark/bright across the triangle.
let N = normalize(fragNormal);
// All three light sources are white.
let ambientLightColor = vec3(1.0, 1.0, 1.0);
let pointLightColor = vec3(1.0, 1.0, 1.0);
let dirLightColor = vec3(1.0, 1.0, 1.0);
var lightResult = vec3(0.0, 0.0, 0.0);
// Ambient: constant contribution, independent of geometry.
lightResult += ambientLightColor * ambientIntensity;
// Directional light: direction in [0].xyz, intensity in [1][0].
let directionPosition = directionLight[0].xyz;
let directionIntensity: f32 = directionLight[1][0];
let dirDiffuse: f32 = max(dot(normalize(directionPosition), N), 0.0);
lightResult += dirLightColor * directionIntensity * dirDiffuse;
// Point light: position in [0].xyz, intensity in [1][0], radius in [1][1].
let pointPosition = pointLight[0].xyz;
let pointIntensity: f32 = pointLight[1][0];
let pointRadius: f32 = pointLight[1][1];
let L = pointPosition - fragPosition;
let distance = length(L);
if(distance < pointRadius){
let diffuse: f32 = max(dot(normalize(L), N), 0.0);
// Quadratic falloff reaching zero at the light's radius.
let distanceFactor: f32 = pow(1.0 - distance / pointRadius, 2.0);
lightResult += pointLightColor * pointIntensity * diffuse * distanceFactor;
}
return vec4<f32>(objectColor * lightResult, 1.0);
}