Unconditionally create a gpu device

One drawback of the current implementation is that the GPU device can be
unavailable at the time of the first pattern fill, which causes the
GPU-accelerated canvas to be moved to the main thread because of putImageData.

Most of the shading-pattern code will be moved to the GPU, and in order
to avoid creating useless data we have to know whether the GPU is available.

So in this patch we create the GPU device during the worker initialization
and pass a flag to the evaluator so it knows whether the GPU is available.
This commit is contained in:
calixteman 2026-04-05 14:30:18 +02:00
parent dd8f0a327f
commit a9f142c796
No known key found for this signature in database
GPG Key ID: 0C5442631EE0691F
7 changed files with 97 additions and 89 deletions

View File

@ -105,7 +105,7 @@ const DefaultPartialEvaluatorOptions = Object.freeze({
iccUrl: null,
standardFontDataUrl: null,
wasmUrl: null,
prepareWebGPU: null,
hasGPU: false,
});
const PatternType = {
@ -1520,8 +1520,7 @@ class PartialEvaluator {
resources,
this._pdfFunctionFactory,
this.globalColorSpaceCache,
localColorSpaceCache,
this.options.prepareWebGPU
localColorSpaceCache
);
patternIR = shadingFill.getIR();
} catch (reason) {

View File

@ -46,18 +46,24 @@ const ShadingType = {
};
class Pattern {
// eslint-disable-next-line no-unused-private-class-members
static #hasGPU = false;
constructor() {
unreachable("Cannot initialize Pattern.");
}
static setOptions({ hasGPU }) {
this.#hasGPU = hasGPU;
}
static parseShading(
shading,
xref,
res,
pdfFunctionFactory,
globalColorSpaceCache,
localColorSpaceCache,
prepareWebGPU = null
localColorSpaceCache
) {
const dict = shading instanceof BaseStream ? shading.dict : shading;
const type = dict.get("ShadingType");
@ -65,7 +71,6 @@ class Pattern {
try {
switch (type) {
case ShadingType.FUNCTION_BASED:
prepareWebGPU?.();
return new FunctionBasedShading(
dict,
xref,
@ -88,7 +93,6 @@ class Pattern {
case ShadingType.LATTICE_FORM_MESH:
case ShadingType.COONS_PATCH_MESH:
case ShadingType.TENSOR_PATCH_MESH:
prepareWebGPU?.();
return new MeshShading(
shading,
xref,

View File

@ -27,6 +27,7 @@ import { JpegStream } from "./jpeg_stream.js";
import { JpxImage } from "./jpx.js";
import { MissingDataException } from "./core_utils.js";
import { OperatorList } from "./operator_list.js";
import { Pattern } from "./pattern.js";
import { PDFDocument } from "./document.js";
import { PDFFunctionFactory } from "./function.js";
import { Stream } from "./stream.js";
@ -73,19 +74,6 @@ class BasePdfManager {
evaluatorOptions.isImageDecoderSupported &&=
FeatureTest.isImageDecoderSupported;
// Set up a one-shot callback so evaluators can notify the main thread that
// WebGPU-acceleratable content was found. The flag ensures the message is
// sent at most once per document.
if (evaluatorOptions.enableWebGPU) {
let prepareWebGPUSent = false;
evaluatorOptions.prepareWebGPU = () => {
if (!prepareWebGPUSent) {
prepareWebGPUSent = true;
handler.send("PrepareWebGPU", null);
}
};
}
delete evaluatorOptions.enableWebGPU;
this.evaluatorOptions = Object.freeze(evaluatorOptions);
// Initialize image-options once per document.
@ -99,6 +87,7 @@ class BasePdfManager {
CmykICCBasedCS.setOptions(options);
JBig2CCITTFaxWasmImage.setOptions(options);
PDFFunctionFactory.setOptions(options);
Pattern.setOptions(options);
}
get docId() {

View File

@ -75,7 +75,7 @@ import { DOMCanvasFactory } from "./canvas_factory.js";
import { DOMFilterFactory } from "./filter_factory.js";
import { getNetworkStream } from "display-network_stream";
import { GlobalWorkerOptions } from "./worker_options.js";
import { initWebGPUMesh } from "./webgpu_mesh.js";
import { initGPU } from "./webgpu.js";
import { MathClamp } from "../shared/math_clamp.js";
import { Metadata } from "./metadata.js";
import { OptionalContentConfig } from "./optional_content_config.js";
@ -323,6 +323,9 @@ function getDocument(src = {}) {
: DOMBinaryDataFactory);
const enableHWA = src.enableHWA === true;
const enableWebGPU = src.enableWebGPU === true;
// Start GPU initialization immediately so it runs in parallel with the
// worker bootstrap; the resolved boolean is forwarded to the worker.
const gpuPromise = enableWebGPU ? initGPU() : Promise.resolve(false);
const useWasm = src.useWasm !== false;
const pagesMapper = src.pagesMapper || new PagesMapper();
@ -405,7 +408,7 @@ function getDocument(src = {}) {
iccUrl,
standardFontDataUrl,
wasmUrl,
enableWebGPU,
hasGPU: false, // Set below.
},
};
const transportParams = {
@ -419,8 +422,8 @@ function getDocument(src = {}) {
},
};
worker.promise
.then(function () {
Promise.all([worker.promise, gpuPromise])
.then(function ([, hasGPU]) {
if (task.destroyed) {
throw new Error("Loading aborted");
}
@ -428,6 +431,8 @@ function getDocument(src = {}) {
throw new Error("Worker was destroyed");
}
docParams.evaluatorOptions.hasGPU = hasGPU;
const workerIdPromise = worker.messageHandler.sendWithPromise(
"GetDocRequest",
docParams,
@ -2843,13 +2848,6 @@ class WorkerTransport {
this.#onProgress(data);
});
messageHandler.on("PrepareWebGPU", () => {
if (this.destroyed) {
return;
}
initWebGPUMesh();
});
if (typeof PDFJSDev === "undefined" || !PDFJSDev.test("MOZCENTRAL")) {
messageHandler.on("FetchBinaryData", async data => {
if (this.destroyed) {

View File

@ -13,7 +13,7 @@
* limitations under the License.
*/
import { drawMeshWithGPU, isWebGPUMeshReady } from "./webgpu_mesh.js";
import { drawMeshWithGPU, isGPUReady, loadMeshShader } from "./webgpu.js";
import {
FormatError,
info,
@ -441,6 +441,9 @@ class MeshShadingPattern extends BaseShadingPattern {
this._bbox = IR[6];
this._background = IR[7];
this.matrix = null;
// Pre-compile the mesh pipeline now that we know GPU-renderable content
// is present; no-op if the GPU is not available or already compiled.
loadMeshShader();
}
_createMeshCanvas(combinedScale, backgroundColor, canvasFactory) {
@ -486,7 +489,7 @@ class MeshShadingPattern extends BaseShadingPattern {
const paddedHeight = height + BORDER_SIZE * 2;
const tmpCanvas = canvasFactory.create(paddedWidth, paddedHeight);
if (isWebGPUMeshReady()) {
if (isGPUReady()) {
tmpCanvas.context.drawImage(
drawMeshWithGPU(
this._figures,

View File

@ -20,7 +20,7 @@ import { MeshFigureType } from "../shared/util.js";
// applies the affine transform supplied via a uniform buffer to map them
// to NDC (X: -1..1 left→right, Y: -1..1 bottom→top).
// Colors are delivered as unorm8x4 (r,g,b,_) and passed through as-is.
const WGSL = /* wgsl */ `
const MESH_WGSL = /* wgsl */ `
struct Uniforms {
offsetX : f32,
offsetY : f32,
@ -65,12 +65,12 @@ fn fs_main(in : VertexOutput) -> @location(0) vec4<f32> {
}
`;
class WebGPUMesh {
class WebGPU {
#initPromise = null;
#device = null;
#pipeline = null;
#meshPipeline = null;
// Format chosen to match the OffscreenCanvas swapchain on this device.
#preferredFormat = null;
@ -85,57 +85,62 @@ class WebGPUMesh {
return false;
}
this.#preferredFormat = navigator.gpu.getPreferredCanvasFormat();
const device = (this.#device = await adapter.requestDevice());
const shaderModule = device.createShaderModule({ code: WGSL });
this.#pipeline = device.createRenderPipeline({
layout: "auto",
vertex: {
module: shaderModule,
entryPoint: "vs_main",
buffers: [
{
// Buffer 0: PDF content-space coords, 2 × float32 per vertex.
arrayStride: 2 * 4,
attributes: [
{ shaderLocation: 0, offset: 0, format: "float32x2" },
],
},
{
// Buffer 1: vertex colors, 4 × unorm8 per vertex (r, g, b, _).
arrayStride: 4,
attributes: [
{ shaderLocation: 1, offset: 0, format: "unorm8x4" },
],
},
],
},
fragment: {
module: shaderModule,
entryPoint: "fs_main",
// Use the canvas-preferred format so the OffscreenCanvas swapchain
// and the pipeline output format always agree.
targets: [{ format: this.#preferredFormat }],
},
primitive: { topology: "triangle-list" },
});
this.#device = await adapter.requestDevice();
return true;
} catch {
return false;
}
}
/**
* Start GPU initialization.
* @returns {Promise<boolean>} true when a GPU device is available.
*/
init() {
if (this.#initPromise === null) {
this.#initPromise = this.#initGPU();
}
return (this.#initPromise ||= this.#initGPU());
}
get isReady() {
return this.#device !== null;
}
/**
* Compile (and cache) the Gouraud-mesh pipeline.
*/
loadMeshShader() {
if (!this.#device || this.#meshPipeline) {
return;
}
const shaderModule = this.#device.createShaderModule({ code: MESH_WGSL });
this.#meshPipeline = this.#device.createRenderPipeline({
layout: "auto",
vertex: {
module: shaderModule,
entryPoint: "vs_main",
buffers: [
{
// Buffer 0: PDF content-space coords, 2 × float32 per vertex.
arrayStride: 2 * 4,
attributes: [{ shaderLocation: 0, offset: 0, format: "float32x2" }],
},
{
// Buffer 1: vertex colors, 4 × unorm8 per vertex (r, g, b, _).
arrayStride: 4,
attributes: [{ shaderLocation: 1, offset: 0, format: "unorm8x4" }],
},
],
},
fragment: {
module: shaderModule,
entryPoint: "fs_main",
// Use the canvas-preferred format so the OffscreenCanvas swapchain
// and the pipeline output format always agree.
targets: [{ format: this.#preferredFormat }],
},
primitive: { topology: "triangle-list" },
});
}
/**
* Build flat Float32Array (positions) and Uint8Array (colors) vertex
* streams for non-indexed triangle-list rendering.
@ -248,6 +253,9 @@ class WebGPUMesh {
paddedHeight,
borderSize
) {
// Lazily compile the mesh pipeline the first time we need to draw.
this.loadMeshShader();
const device = this.#device;
const { offsetX, offsetY, scaleX, scaleY } = context;
const { posData, colData, vertexCount } = this.#buildVertexStreams(
@ -294,7 +302,7 @@ class WebGPUMesh {
);
const bindGroup = device.createBindGroup({
layout: this.#pipeline.getBindGroupLayout(0),
layout: this.#meshPipeline.getBindGroupLayout(0),
entries: [{ binding: 0, resource: { buffer: uniformBuffer } }],
});
@ -330,7 +338,7 @@ class WebGPUMesh {
],
});
if (vertexCount > 0) {
renderPass.setPipeline(this.#pipeline);
renderPass.setPipeline(this.#meshPipeline);
renderPass.setBindGroup(0, bindGroup);
renderPass.setVertexBuffer(0, posBuffer);
renderPass.setVertexBuffer(1, colBuffer);
@ -351,14 +359,25 @@ class WebGPUMesh {
}
}
const _webGPUMesh = new WebGPUMesh();
const _webGPU = new WebGPU();
function initWebGPUMesh() {
_webGPUMesh.init();
/**
* Start GPU initialization as early as possible.
* @returns {Promise<boolean>} true if a GPU device was acquired.
*/
function initGPU() {
return _webGPU.init();
}
function isWebGPUMeshReady() {
return _webGPUMesh.isReady;
function isGPUReady() {
return _webGPU.isReady;
}
/**
* Pre-compile the Gouraud-mesh WGSL pipeline.
*/
function loadMeshShader() {
_webGPU.loadMeshShader();
}
function drawMeshWithGPU(
@ -369,7 +388,7 @@ function drawMeshWithGPU(
paddedHeight,
borderSize
) {
return _webGPUMesh.draw(
return _webGPU.draw(
figures,
context,
backgroundColor,
@ -379,4 +398,4 @@ function drawMeshWithGPU(
);
}
export { drawMeshWithGPU, initWebGPUMesh, isWebGPUMeshReady };
export { drawMeshWithGPU, initGPU, isGPUReady, loadMeshShader };

View File

@ -197,12 +197,8 @@ describe("api", function () {
expect(loadingTask).toBeInstanceOf(PDFDocumentLoadingTask);
// This can be somewhat random -- we cannot guarantee perfect
// 'Terminate' message to the worker before/after setting up pdfManager.
const destroyed = loadingTask._worker.promise.then(() =>
loadingTask.destroy()
);
await destroyed;
expect(true).toEqual(true);
await loadingTask._worker.promise.then(() => loadingTask.destroy());
await loadingTask.promise.catch(() => {});
});
it("creates pdf doc from TypedArray", async function () {