如果可能的话,如何在没有第三方库的情况下使用纯 WebGPU 渲染文本?
我还在学习 WebGPU,了解得不多。我找到了下面这个解决方案,但它依赖第三方库:
/* Canvas initialization */
const canvas = document.getElementById("canvas") as
HTMLCanvasElement;
/* WebGPU initialization */
const adapter: GPUAdapter = await navigator.gpu.
requestAdapter() as GPUAdapter;
const device: GPUDevice = await adapter.requestDevice() as
GPUDevice;
}
/* Font initialization */
const fontParser = new FontParser(device, "fonts/RobotoRegular.ttf");
/* Text block to render */
const textBlock = new TextBlock(
device, "Hello World", fontParser,
options = {
color = [0.6, 0.5. 0.6, 1.0],
spacing = 2,
width = 2,
size = 4,
isWinding = false
});
/* 1) Initialization */
const renderer = new Renderer(device, canvas, textBlock,
projectionMatrix, viewMatrix, color=[0.2, 0.0, 0.0,1.0]);
/* 2) Preparing data */
const perFrameData = this.renderer.prepare();
/* 3) Rendering */
this.renderer.render(perFrameData);
WebGPU 不直接渲染文本。
无论 API 如何,使用 GPU 渲染文本的常见方法是
1. 将文本放入纹理中,再将该纹理渲染为单个四边形。
2. 将字形放入纹理图集中(即在一张纹理里包含许多不同字形图像)。渲染四边形时,设置纹理坐标以选择您想要显示的字形。如何制作字形纹理图集取决于您:可以离线预先创建;可以在运行时使用 2D 画布 API 创建;也可以用 JavaScript 编写字体渲染代码,读取字体文件并渲染出字形。
这部分取决于用例。像操作系统和浏览器这样的系统需要能够以多种字体和大小以及表情符号显示所有 unicode,因此通常它们使用纹理缓存并根据需要渲染新的字形(并丢弃未使用的字形)
3. 从字体文件生成 SDF(有符号距离场)或类似数据,并在片段着色器中使用该数据,按程序为每个字形四边形生成输出。
3 远远超出了可以轻易解释的范围,而且也相对不常见。 2 和 1 是最常见的,按此顺序,2 是在浏览器中绘制此页面中的文本所使用的技术,至少截至 2023 年
此外,请注意,如果您想包容并支持所有语言,渲染文本可能会非常困难。如果您只关心 ASCII 或其他一些小子集,那么这相对容易。
<canvas></canvas>
<script type="module">
// WebGPU Simple Textured Quad - Import Canvas
// from https://webgpufundamentals.org/webgpu/webgpu-simple-textured-quad-import-canvas.html
import {mat4} from 'https://webgpufundamentals.org/3rdparty/wgpu-matrix.module.js';
// Size of one glyph cell in the atlas, in pixels (sized for a 16px monospace font).
const glyphWidth = 16;
const glyphHeight = 20;
// Atlas canvas is 256px wide, so 256 / 16 = 16 glyph cells per row.
const glyphsAcrossTexture = 16;
/**
 * Render printable ASCII glyphs (codes 33..127) into a 2D canvas to be
 * used as a texture atlas. Glyphs are drawn left-to-right in fixed-size
 * cells (glyphWidth x glyphHeight), wrapping to a new row at the canvas edge.
 * @returns {HTMLCanvasElement} the canvas containing the glyph atlas
 */
function genreateGlyphTextureAtlas() {
  const ctx = document.createElement('canvas').getContext('2d');
  ctx.canvas.width = 256;
  ctx.canvas.height = 128;

  // White glyphs centered in each cell; color is applied later per vertex.
  ctx.font = '16px monospace';
  ctx.textBaseline = 'middle';
  ctx.textAlign = 'center';
  ctx.fillStyle = 'white';

  let penX = 0;
  let penY = 0;
  for (let code = 33; code < 128; ++code) {
    ctx.fillText(
        String.fromCodePoint(code),
        penX + glyphWidth / 2,
        penY + glyphHeight / 2);
    penX += glyphWidth;
    if (penX >= ctx.canvas.width) {
      penX = 0;
      penY += glyphHeight;
    }
  }
  return ctx.canvas;
}
// Entry point: sets up WebGPU, builds a glyph-atlas texture from a 2D canvas,
// generates one textured quad per glyph of "Hello world!", and renders the
// rotating text each animation frame.
async function main() {
// Optional chaining: both resolve to undefined when WebGPU is unavailable.
const adapter = await navigator.gpu?.requestAdapter();
const device = await adapter?.requestDevice();
if (!device) {
fail('need a browser that supports WebGPU');
return;
}
// Get a WebGPU context from the canvas and configure it
const canvas = document.querySelector('canvas');
const context = canvas.getContext('webgpu');
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
context.configure({
device,
format: presentationFormat,
});
// One WGSL module holding both stages: a vertex shader that transforms
// position by a uniform matrix and passes texcoord/color through, and a
// fragment shader that samples the atlas and tints by the vertex color.
const module = device.createShaderModule({
label: 'our hardcoded textured quad shaders',
code: `
struct VSInput {
@location(0) position: vec4f,
@location(1) texcoord: vec2f,
@location(2) color: vec4f,
};
struct VSOutput {
@builtin(position) position: vec4f,
@location(0) texcoord: vec2f,
@location(1) color: vec4f,
};
struct Uniforms {
matrix: mat4x4f,
};
@group(0) @binding(2) var<uniform> uni: Uniforms;
@vertex fn vs(vin: VSInput) -> VSOutput {
var vsOutput: VSOutput;
vsOutput.position = uni.matrix * vin.position;
vsOutput.texcoord = vin.texcoord;
vsOutput.color = vin.color;
return vsOutput;
}
@group(0) @binding(0) var ourSampler: sampler;
@group(0) @binding(1) var ourTexture: texture_2d<f32>;
@fragment fn fs(fsInput: VSOutput) -> @location(0) vec4f {
return textureSample(ourTexture, ourSampler, fsInput.texcoord) * fsInput.color;
}
`,
});
const glyphCanvas = genreateGlyphTextureAtlas();
// so we can see it
document.body.appendChild(glyphCanvas);
glyphCanvas.style.backgroundColor = '#222';
// Fixed-capacity geometry: buffers are sized up front for maxGlyphs quads.
const maxGlyphs = 100;
const floatsPerVertex = 2 + 2 + 4; // 2(pos) + 2(texcoord) + 4(color)
const vertexSize = floatsPerVertex * 4; // 4 bytes each float
const vertsPerGlyph = 6;
const vertexBufferSize = maxGlyphs * vertsPerGlyph * vertexSize;
const vertexBuffer = device.createBuffer({
label: 'vertices',
size: vertexBufferSize,
usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
});
const indexBuffer = device.createBuffer({
label: 'indices',
size: maxGlyphs * vertsPerGlyph * 4,
usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST,
});
// pre fill index buffer with quad indices
// Each quad uses 4 vertices and 6 indices (two triangles: 0-1-2, 2-1-3).
{
const indices = [];
for (let i = 0; i < maxGlyphs; ++i) {
const ndx = i * 4;
indices.push(ndx, ndx + 1, ndx + 2, ndx + 2, ndx + 1, ndx + 3);
}
device.queue.writeBuffer(indexBuffer, 0, new Uint32Array(indices));
}
// Builds 4 vertices per visible glyph of `s`. Each glyph is a 1x1 quad
// advanced 1 unit in x; codes below 33 (e.g. space) advance the pen but
// emit no vertices. Returns the vertex data, a glyph count, and the text
// width in quad units.
function generateGlyphVerticesForText(s, color = [1, 1, 0, 1]) {
const vertexData = new Float32Array(maxGlyphs * floatsPerVertex * vertsPerGlyph);
const glyphUVWidth = glyphWidth / glyphCanvas.width;
const glyphUVheight = glyphHeight / glyphCanvas.height;
let offset = 0;
let x0 = 0;
let x1 = 1;
let y0 = 0;
let y1 = 1;
const addVertex = (x, y, u, v, r, g, b, a) => {
vertexData[offset++] = x;
vertexData[offset++] = y;
vertexData[offset++] = u;
vertexData[offset++] = v;
vertexData[offset++] = r;
vertexData[offset++] = g;
vertexData[offset++] = b;
vertexData[offset++] = a;
};
for (let i = 0; i < s.length; ++i) {
// convert char code to texcoords for glyph texture
// Atlas starts at '!' (33), laid out glyphsAcrossTexture per row.
const c = s.charCodeAt(i) - 33;
if (c >= 0) {
const glyphX = c % glyphsAcrossTexture;
const glyphY = Math.floor(c / glyphsAcrossTexture);
const u0 = (glyphX * glyphWidth) / glyphCanvas.width;
const v1 = (glyphY * glyphHeight) / glyphCanvas.height;
const u1 = u0 + glyphUVWidth;
const v0 = v1 + glyphUVheight;
addVertex(x0, y0, u0, v0, ...color);
addVertex(x1, y0, u1, v0, ...color);
addVertex(x0, y1, u0, v1, ...color);
addVertex(x1, y1, u1, v1, ...color);
}
x0 = x0 + 1;
x1 = x0 + 1;
}
// NOTE(review): offset / floatsPerVertex is the number of VERTICES written
// (4 per glyph), not glyphs; drawIndexed(numGlyphs * 6) below therefore
// issues 4x the indices needed. The surplus quads read zero-filled vertex
// data (degenerate, alpha 0) so nothing visible is drawn, but the naming
// is misleading — verify against the original sample before changing.
return {
vertexData,
numGlyphs: offset / floatsPerVertex,
width: x0,
};
}
const { vertexData, numGlyphs, width } = generateGlyphVerticesForText('Hello world!');
device.queue.writeBuffer(vertexBuffer, 0, vertexData);
// Pipeline: interleaved pos/texcoord/color vertex layout, premultiplied-
// alpha style blending (one, one-minus-src-alpha) so transparent atlas
// texels don't overwrite the background.
const pipeline = device.createRenderPipeline({
label: 'hardcoded textured quad pipeline',
layout: 'auto',
vertex: {
module,
entryPoint: 'vs',
buffers: [
{
arrayStride: vertexSize,
attributes: [
{shaderLocation: 0, offset: 0, format: 'float32x2'}, // position
{shaderLocation: 1, offset: 8, format: 'float32x2'}, // texcoord
{shaderLocation: 2, offset: 16, format: 'float32x4'}, // color
],
},
],
},
fragment: {
module,
entryPoint: 'fs',
targets: [
{
format: presentationFormat,
blend: {
color: {
srcFactor: 'one',
dstFactor: 'one-minus-src-alpha',
operation: 'add',
},
alpha: {
srcFactor: 'one',
dstFactor: 'one-minus-src-alpha',
operation: 'add',
},
},
},
],
},
});
// Upload an image-like source (here, the atlas canvas) into a GPU texture.
function copySourceToTexture(device, texture, source, {flipY} = {}) {
device.queue.copyExternalImageToTexture(
{ source, flipY, },
{ texture },
{ width: source.width, height: source.height },
);
}
function createTextureFromSource(device, source, options = {}) {
const texture = device.createTexture({
format: 'rgba8unorm',
size: [source.width, source.height],
usage: GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
copySourceToTexture(device, texture, source, options);
return texture;
}
// NOTE(review): {mips: true} is passed but this helper never creates or
// fills mip levels — the option is silently ignored in this trimmed sample.
const texture = createTextureFromSource(device, glyphCanvas, {mips: true});
// Default sampler = nearest filtering; this is why the result looks blocky
// when the text is scaled (see the discussion after the code).
const sampler = device.createSampler();
// create a buffer for the uniform values
const uniformBufferSize =
16 * 4; // matrix is 16 32bit floats (4bytes each)
const uniformBuffer = device.createBuffer({
label: 'uniforms for quad',
size: uniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
// create a typedarray to hold the values for the uniforms in JavaScript
const kMatrixOffset = 0;
const uniformValues = new Float32Array(uniformBufferSize / 4);
// `matrix` is a view into uniformValues; mat4 calls below write into it.
const matrix = uniformValues.subarray(kMatrixOffset, 16);
// Binding numbers match the WGSL: 0 = sampler, 1 = texture, 2 = uniforms.
const bindGroup = device.createBindGroup({
layout: pipeline.getBindGroupLayout(0),
entries: [
{ binding: 0, resource: sampler },
{ binding: 1, resource: texture.createView() },
{ binding: 2, resource: { buffer: uniformBuffer }},
],
});
const renderPassDescriptor = {
label: 'our basic canvas renderPass',
colorAttachments: [
{
// view: <- to be filled out when we render
clearValue: [0.3, 0.3, 0.3, 1],
loadOp: 'clear',
storeOp: 'store',
},
],
};
// Per-frame callback: rebuilds the view-projection matrix, spins the text
// around Y, centers it, and draws the indexed quads.
function render(time) {
time *= 0.001;
const fov = 60 * Math.PI / 180; // 60 degrees in radians
const aspect = canvas.clientWidth / canvas.clientHeight;
const zNear = 0.001;
const zFar = 50;
const projectionMatrix = mat4.perspective(fov, aspect, zNear, zFar);
const cameraPosition = [0, 0, 5];
const up = [0, 1, 0];
const target = [0, 0, 0];
const viewMatrix = mat4.lookAt(cameraPosition, target, up);
const viewProjectionMatrix = mat4.multiply(projectionMatrix, viewMatrix);
// Get the current texture from the canvas context and
// set it as the texture to render to.
renderPassDescriptor.colorAttachments[0].view =
context.getCurrentTexture().createView();
const encoder = device.createCommandEncoder({
label: 'render quad encoder',
});
const pass = encoder.beginRenderPass(renderPassDescriptor);
pass.setPipeline(pipeline);
// Rotate, then shift left/down by half the text size so it spins centered.
mat4.rotateY(viewProjectionMatrix, time, matrix);
mat4.translate(matrix, [-width / 2, -0.5, 0], matrix);
// copy the values from JavaScript to the GPU
device.queue.writeBuffer(uniformBuffer, 0, uniformValues);
pass.setBindGroup(0, bindGroup);
pass.setVertexBuffer(0, vertexBuffer);
pass.setIndexBuffer(indexBuffer, 'uint32');
pass.drawIndexed(numGlyphs * 6);
pass.end();
const commandBuffer = encoder.finish();
device.queue.submit([commandBuffer]);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
/**
 * Report a fatal startup problem to the user (e.g. no WebGPU support).
 * @param {string} msg - human-readable error description
 */
function fail(msg) {
  // eslint-disable-next-line no-alert
  alert(msg);
}

main();
</script>
上面示例渲染出的文字分辨率低且有块状感,因为采样器使用了 nearest(默认)过滤,而字形图集本身分辨率不高。此外,这个示例也没有处理诸如 \n(换行并转到下一行)之类的排版逻辑。
请注意,可以说,如果可以的话,您应该使用 HTML 在浏览器中呈现文本。这是最简单的方法。换句话说,请考虑您是否确实需要 3d 文本。例如,游戏中的文本通常位于边缘的统计数据中,或者在线游戏的聊天区域中。所有这些在 HTML 中都更容易完成。