Add basic WebGPU game features: geometry, texture, and shader support

SpecialX
2025-11-19 11:15:29 +08:00
parent 4648ca10ad
commit 7d5265bced
6 changed files with 175 additions and 23 deletions

Binary file not shown. Size: 5.8 MiB

src/geometry.ts (new file, +35 lines)

@@ -0,0 +1,35 @@
export class QuadGeometry {
    public positions: number[];
    public colors: number[];
    public texCoords: number[];

    constructor() {
        // Two triangles forming a unit quad centered at the origin.
        this.positions = [
            -0.5, -0.5,
             0.5, -0.5,
            -0.5,  0.5,

            -0.5,  0.5,
             0.5,  0.5,
             0.5, -0.5
        ];
        // Per-vertex RGB colors.
        this.colors = [
            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0,

            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0
        ];
        // Per-vertex texture coordinates (u, v).
        this.texCoords = [
            0.0, 1.0,
            1.0, 1.0,
            0.0, 0.0,

            0.0, 0.0,
            1.0, 0.0,
            1.0, 1.0
        ];
    }
}
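The renderer below uploads these arrays through its private CreateBuffer helper, whose body is unchanged and therefore not part of the hunks in this commit. As a rough sketch of what such a helper typically looks like in WebGPU (an assumption for illustration, not the repository's actual implementation):

// Hypothetical helper: upload a Float32Array into a GPU vertex buffer.
// A common WebGPU pattern, not necessarily the CreateBuffer used in this repo.
function createVertexBuffer(device: GPUDevice, data: Float32Array): GPUBuffer {
    const buffer = device.createBuffer({
        size: data.byteLength,
        usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
        mappedAtCreation: true
    });
    new Float32Array(buffer.getMappedRange()).set(data);
    buffer.unmap();
    return buffer;
}

// Usage with the geometry above:
// const geometry = new QuadGeometry();
// const positionBuffer = createVertexBuffer(device, new Float32Array(geometry.positions));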

View File

@@ -1,11 +1,16 @@
import shaderSource from './shaders/shader.wgsl?raw';
import { QuadGeometry } from './geometry';
import { Texture } from './texture';

class Renderer {
    private device!: GPUDevice;
    private context!: GPUCanvasContext;
    private pipeline!: GPURenderPipeline;
    private postitionBuffer!: GPUBuffer;
    private colorBuffer!: GPUBuffer;
    private texCoordBuffer!: GPUBuffer;
    private textureBindGroup!: GPUBindGroup;
    private testTexture!: Texture;

    public async initialize()
    {
@@ -37,25 +42,18 @@ class Renderer{
            format: navigator.gpu.getPreferredCanvasFormat()
        });

        this.testTexture = await Texture.createTextureFromUrl(this.device, 'assets/UVEditorColorGrid.png');
        this.prepareModel();

        this.postitionBuffer = this.CreateBuffer(new Float32Array([
            -0.5, -0.5,
             0.5, -0.5,
            -0.5,  0.5,
            -0.5,  0.5,
             0.5,  0.5,
             0.5, -0.5
        ]));
        this.colorBuffer = this.CreateBuffer(new Float32Array([
            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0,
            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0
        ]));

        const geometry = new QuadGeometry();
        this.postitionBuffer = this.CreateBuffer(new Float32Array(geometry.positions));
        this.colorBuffer = this.CreateBuffer(new Float32Array(geometry.colors));
        this.texCoordBuffer = this.CreateBuffer(new Float32Array(geometry.texCoords));
    }
private CreateBuffer(data: Float32Array)
@@ -102,12 +100,27 @@ class Renderer{
            }]
        };

        const textureCoordBufferLayout: GPUVertexBufferLayout = {
            arrayStride: 2 * Float32Array.BYTES_PER_ELEMENT,
            attributes: [
                {
                    shaderLocation: 2,
                    offset: 0,
                    format: 'float32x2'
                }
            ],
            stepMode: 'vertex'
        };

        const vertexState: GPUVertexState = {
            module: shaderModule,
            entryPoint: "VertexMain",
            buffers:
                [postitionBufferLayout,
                 colorBufferLayout]
                [
                    postitionBufferLayout,
                    colorBufferLayout,
                    textureCoordBufferLayout
                ]
        };

        const fragmentState: GPUFragmentState = {
@@ -115,18 +128,65 @@ class Renderer{
            entryPoint: "FragmentMain",
            targets: [
                {
                    format: navigator.gpu.getPreferredCanvasFormat()
                    format: navigator.gpu.getPreferredCanvasFormat(),
                    blend: {
                        color: {
                            srcFactor: 'one',
                            dstFactor: 'zero',
                            operation: 'add'
                        },
                        alpha: {
                            srcFactor: 'one',
                            dstFactor: 'zero',
                            operation: 'add'
                        }
                    }
                }
            ]
        };
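Note that the blend state added here uses srcFactor 'one' with dstFactor 'zero', which simply overwrites the destination, the same result as omitting blend entirely. If the texture's alpha channel should eventually show through, a conventional source-over alpha blend would look like the following sketch (an alternative for reference, not what this commit configures):

// Standard "source-over" alpha blending; an alternative to the one/zero
// factors above, not the configuration used in this commit.
const alphaBlend: GPUBlendState = {
    color: {
        srcFactor: 'src-alpha',
        dstFactor: 'one-minus-src-alpha',
        operation: 'add'
    },
    alpha: {
        srcFactor: 'one',
        dstFactor: 'one-minus-src-alpha',
        operation: 'add'
    }
};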
        const textureBindGroupLayout = this.device.createBindGroupLayout({
            entries: [
                {
                    binding: 0,
                    visibility: GPUShaderStage.FRAGMENT,
                    sampler: {}
                },
                {
                    binding: 1,
                    visibility: GPUShaderStage.FRAGMENT,
                    texture: {}
                }
            ]
        });

        const pipelineLayout = this.device.createPipelineLayout({
            bindGroupLayouts: [
                textureBindGroupLayout
            ]
        });

        this.textureBindGroup = this.device.createBindGroup({
            layout: textureBindGroupLayout,
            entries: [
                {
                    binding: 0,
                    resource: this.testTexture.sampler
                },
                {
                    binding: 1,
                    resource: this.testTexture.texture.createView()
                }
            ]
        });

        this.pipeline = this.device.createRenderPipeline({
            vertex: vertexState,
            fragment: fragmentState,
            primitive: {
                topology: 'triangle-list',
            },
            layout: "auto"
            layout: pipelineLayout
        });
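Switching from layout: "auto" to an explicit pipelineLayout lets the bind group be built from textureBindGroupLayout before the pipeline exists. For comparison, with the previous "auto" layout the bind group layout would have to be queried from the finished pipeline, roughly as in this sketch (assumed for illustration, reusing the Texture class from this commit):

// Sketch only: with layout: "auto", the bind group layout is generated from
// the shader and must be fetched from the pipeline after creation.
import { Texture } from './texture';

function createAutoBindGroup(device: GPUDevice, pipeline: GPURenderPipeline, tex: Texture): GPUBindGroup {
    return device.createBindGroup({
        layout: pipeline.getBindGroupLayout(0),
        entries: [
            { binding: 0, resource: tex.sampler },
            { binding: 1, resource: tex.texture.createView() }
        ]
    });
}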
@@ -154,6 +214,8 @@ class Renderer{
        passEncoder.setPipeline(this.pipeline);
        passEncoder.setVertexBuffer(0, this.postitionBuffer);
        passEncoder.setVertexBuffer(1, this.colorBuffer);
        passEncoder.setVertexBuffer(2, this.texCoordBuffer);
        passEncoder.setBindGroup(0, this.textureBindGroup);
        passEncoder.draw(6);
        passEncoder.end();

View File

@@ -1,24 +1,34 @@
struct VertexOut {
    @builtin(position) pos: vec4<f32>,
    @location(0) color: vec4<f32>,
    @location(1) texcoord: vec2<f32>,
}

@vertex
fn VertexMain(
    @location(0) pos: vec2<f32>,
    @location(1) color: vec3<f32>,
    @location(2) texcoord: vec2<f32>,
    @builtin(vertex_index) vertexIndex: u32,
) -> VertexOut
{
    var output: VertexOut;
    output.pos = vec4<f32>(pos, 0.0, 1.0);
    output.color = vec4<f32>(color, 1.0);
    output.texcoord = vec2<f32>(texcoord);
    return output;
}

@group(0) @binding(0)
var texSampler: sampler;
@group(0) @binding(1)
var tex: texture_2d<f32>;

@fragment
fn FragmentMain(fragData: VertexOut) -> @location(0) vec4<f32>
{
    return fragData.color;
    var texColor = textureSample(tex, texSampler, fragData.texcoord);
    return fragData.color * texColor;
}

src/texture.ts (new file, +45 lines)

@@ -0,0 +1,45 @@
export class Texture {
    constructor(public texture: GPUTexture, public sampler: GPUSampler) {}

    public static async createTexture(device: GPUDevice, image: HTMLImageElement): Promise<Texture>
    {
        // Allocate an RGBA texture matching the image dimensions.
        const texture = device.createTexture({
            size: { width: image.width, height: image.height },
            format: "rgba8unorm",
            usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT
        });

        // Decode the image and copy its pixels into the texture.
        const data = await createImageBitmap(image);
        device.queue.copyExternalImageToTexture(
            { source: data },
            { texture: texture },
            { width: image.width, height: image.height }
        );

        const sampler = device.createSampler({
            magFilter: "linear",
            minFilter: "linear",
        });

        return new Texture(texture, sampler);
    }

    public static async createTextureFromUrl(device: GPUDevice, url: string): Promise<Texture>
    {
        const imagePromise = new Promise<HTMLImageElement>((resolve, reject) => {
            const img = new Image();
            img.src = url;
            img.onload = () => resolve(img);
            img.onerror = () => {
                console.error('Failed to load image from url: ' + url);
                reject(new Error('Failed to load image: ' + url));
            };
        });

        const image = await imagePromise;
        return await Texture.createTexture(device, image);
    }
}
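A slightly more compact way to wait for the image, offered here only as an optional alternative to the onload/onerror wiring above, is HTMLImageElement.decode(), which returns a promise:

// Alternative loader sketch using img.decode(); equivalent in effect to the
// promise-wrapped onload/onerror handlers above.
async function loadImage(url: string): Promise<HTMLImageElement> {
    const img = new Image();
    img.src = url;
    await img.decode(); // rejects if the image fails to load or decode
    return img;
}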

View File

@@ -18,7 +18,7 @@
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"erasableSyntaxOnly": false,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
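The erasableSyntaxOnly flag is turned off because the new Texture class declares constructor parameter properties (constructor(public texture: GPUTexture, public sampler: GPUSampler)), which TypeScript rejects under that flag since they are not type-only syntax. If the flag were to stay enabled, the erasable equivalent would declare and assign the fields by hand, for example:

// Erasable-syntax version of the Texture constructor; only needed if
// "erasableSyntaxOnly" were to stay enabled in tsconfig.json.
export class Texture {
    public texture: GPUTexture;
    public sampler: GPUSampler;

    constructor(texture: GPUTexture, sampler: GPUSampler) {
        this.texture = texture;
        this.sampler = sampler;
    }
}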