Refactor to improve performance around image masks (bug 1941562)

* add support for directly writing masks into `rgbaBuf` if their size matches
* add support for writing into `rgbaBuf` to `resizeImageMask` to avoid extra
  allocs/copies
* respect `actualHeight` to avoid unnecessary work on non-emitted rows
* mark more operations as `internal`

This changes the path for what I believe is the common case for masks:
a mask to add transparency to the accompanying opaque image, both being
equal in size.
The other paths are not meaningfully changed.
That increases my confidence as these new paths can be easily tested
with a PNG with transparency.
This commit is contained in:
Titus Wormer 2026-03-17 10:04:33 +01:00
parent 777251da85
commit e914326346
No known key found for this signature in database
GPG Key ID: E6E581152ED04E2E

View File

@ -35,47 +35,46 @@ import { JpxImage } from "./jpx.js";
import { Name } from "./primitives.js";
/**
* Resizes an image mask with 1 component.
* @param {TypedArray} src - The source buffer.
* @param {number} bpc - Number of bits per component.
* @param {number} w1 - Original width.
* @param {number} h1 - Original height.
* @param {number} w2 - New width.
* @param {number} h2 - New height.
* @returns {TypedArray} The resized image mask buffer.
* Configuration for {@linkcode PDFImage.fillGrayBuffer}.
*
* @typedef FillGrayBufferOptions
* @property {number} [destWidth]
* Destination width; defaults to the source image width (no resampling).
* @property {number} [destHeight]
* Destination height; defaults to the source image height (no resampling).
* @property {boolean} [invertOutput=false]
* Whether to invert the output values (as in `x = 255 - x`).
* @property {number} [maxRows]
* Maximum number of destination rows to write.
* @property {number} [offset=0]
* Index in the destination buffer at which to start writing.
* @property {number} [stride=1]
* Step size between consecutive elements.
*/
/**
 * Resizes an image mask with 1 component using nearest-neighbour sampling.
 *
 * @param {TypedArray} src - The source buffer (`w1 * h1` samples).
 * @param {number} bpc - Number of bits per component; selects the element
 *   width of the destination typed array (<=8, <=16, else 32 bits).
 * @param {number} w1 - Original width.
 * @param {number} h1 - Original height.
 * @param {number} w2 - New width.
 * @param {number} h2 - New height.
 * @returns {TypedArray} The resized image mask buffer (`w2 * h2` samples).
 */
function resizeImageMask(src, bpc, w1, h1, w2, h2) {
  const length = w2 * h2;
  let dest;
  if (bpc <= 8) {
    dest = new Uint8Array(length);
  } else if (bpc <= 16) {
    dest = new Uint16Array(length);
  } else {
    dest = new Uint32Array(length);
  }
  const xRatio = w1 / w2;
  const yRatio = h1 / h2;

  // Precompute the nearest source column for every destination column so the
  // inner loop does no multiplication or flooring. Use Uint32Array (not
  // Uint16Array) so indices do not wrap for source widths >= 2^16.
  const xScaled = new Uint32Array(w2);
  for (let i = 0; i < w2; i++) {
    xScaled[i] = Math.floor(i * xRatio);
  }

  let newIndex = 0;
  for (let i = 0; i < h2; i++) {
    // Offset of the nearest source scanline for this destination row.
    const py = Math.floor(i * yRatio) * w1;
    for (let j = 0; j < w2; j++) {
      dest[newIndex++] = src[py + xScaled[j]];
    }
  }
  return dest;
}
/**
* Configuration for {@linkcode FillMaskAlphaCallback} functions.
*
* @typedef FillMaskAlphaOptions
* @property {number} maxRows
* Maximum number of image rows to write; defaults to the full image height.
* @property {number} offset
* Index in the destination buffer at which to start writing.
* @property {number} stride
* Step size between consecutive elements.
*/
/**
* Fills the alpha values for the mask.
*
* @callback FillMaskAlphaCallback
* @param {Uint8ClampedArray} buffer
* Buffer to write the alpha values to.
* @param {FillMaskAlphaOptions} options
* Configuration for filling the alpha values.
* @return {Promise<undefined> | undefined | void}
* Optional promise that resolves when the alpha values have been filled.
*/
class PDFImage {
constructor({
@ -242,6 +241,8 @@ class PDFImage {
this.jpxDecoderOptions.isIndexedColormap =
this.colorSpace.name === "Indexed";
}
} else {
this.numComps = 1;
}
this.decode = dict.getArray("D", "Decode");
@ -599,67 +600,64 @@ class PDFImage {
'PDFImage.fillOpacity: Unsupported "rgbaBuf" type.'
);
}
const smask = this.smask;
const mask = this.mask;
let alphaBuf, sw, sh, i, ii, j;
/** @type {FillMaskAlphaCallback} */
let apply;
if (smask) {
sw = smask.width;
sh = smask.height;
alphaBuf = new Uint8ClampedArray(sw * sh);
await smask.fillGrayBuffer(alphaBuf);
if (sw !== width || sh !== height) {
alphaBuf = resizeImageMask(alphaBuf, smask.bpc, sw, sh, width, height);
}
} else if (mask) {
if (mask instanceof PDFImage) {
sw = mask.width;
sh = mask.height;
alphaBuf = new Uint8ClampedArray(sw * sh);
mask.numComps = 1;
await mask.fillGrayBuffer(alphaBuf);
// Need to invert values in rgbaBuf
for (i = 0, ii = sw * sh; i < ii; ++i) {
alphaBuf[i] = 255 - alphaBuf[i];
}
if (sw !== width || sh !== height) {
alphaBuf = resizeImageMask(alphaBuf, mask.bpc, sw, sh, width, height);
}
} else if (Array.isArray(mask)) {
if (this.smask) {
apply = (buffer, options) =>
this.smask.fillGrayBuffer(buffer, {
...options,
destWidth: width,
destHeight: height,
});
} else if (this.mask) {
if (this.mask instanceof PDFImage) {
// Single mask.
apply = (buffer, options) =>
this.mask.fillGrayBuffer(buffer, {
...options,
invertOutput: true,
destWidth: width,
destHeight: height,
});
} else if (Array.isArray(this.mask)) {
// Color key mask: if any of the components are outside the range
// then they should be painted.
alphaBuf = new Uint8ClampedArray(width * height);
const numComps = this.numComps;
for (i = 0, ii = width * height; i < ii; ++i) {
let opacity = 0;
const imageOffset = i * numComps;
for (j = 0; j < numComps; ++j) {
const color = image[imageOffset + j];
const maskOffset = j * 2;
if (color < mask[maskOffset] || color > mask[maskOffset + 1]) {
opacity = 255;
break;
apply = (buffer, { maxRows, offset, stride }) => {
for (let i = 0, ii = width * maxRows; i < ii; ++i) {
let opacity = 0;
const imageOffset = i * this.numComps;
for (let j = 0; j < this.numComps; ++j) {
const color = image[imageOffset + j];
const maskOffset = j * 2;
if (
color < this.mask[maskOffset] ||
color > this.mask[maskOffset + 1]
) {
opacity = 255;
break;
}
}
buffer[i * stride + offset] = opacity;
}
alphaBuf[i] = opacity;
}
};
} else {
throw new FormatError("Unknown mask format.");
}
}
if (alphaBuf) {
for (i = 0, j = 3, ii = width * actualHeight; i < ii; ++i, j += 4) {
rgbaBuf[j] = alphaBuf[i];
}
} else {
// No mask.
for (i = 0, j = 3, ii = width * actualHeight; i < ii; ++i, j += 4) {
rgbaBuf[j] = 255;
}
apply = (buffer, { maxRows, offset, stride }) => {
for (let i = 0, ii = width * maxRows; i < ii; ++i) {
buffer[i * stride + offset] = 255;
}
};
}
await apply(rgbaBuf, {
maxRows: actualHeight,
offset: 3,
stride: 4,
});
}
undoPreblend(buffer, width, height) {
@ -722,7 +720,7 @@ class PDFImage {
imgData.kind = ImageKind.RGBA_32BPP;
const imgArray = (imgData.data = await this.getImageBytes(
originalHeight * originalWidth * 4,
{}
{ internal: isOffscreenCanvasSupported && mustBeResized }
));
if (isOffscreenCanvasSupported) {
@ -769,7 +767,9 @@ class PDFImage {
if (image) {
return image;
}
const data = await this.getImageBytes(originalHeight * rowBytes, {});
const data = await this.getImageBytes(originalHeight * rowBytes, {
internal: isOffscreenCanvasSupported && mustBeResized,
});
if (isOffscreenCanvasSupported) {
if (mustBeResized) {
return ImageResizer.createImage(
@ -835,6 +835,7 @@ class PDFImage {
drawWidth,
drawHeight,
forceRGBA: true,
internal: true,
});
return this.createBitmap(
ImageKind.RGBA_32BPP,
@ -855,6 +856,7 @@ class PDFImage {
drawWidth,
drawHeight,
forceRGB: true,
internal: mustBeResized,
});
if (mustBeResized) {
// The image is too big so we resize it.
@ -950,7 +952,32 @@ class PDFImage {
return imgData;
}
async fillGrayBuffer(buffer) {
/**
* Fills `buffer` with decoded grayscale values from the image.
*
* When `destWidth`/`destHeight` match the source image dimensions (or are
* omitted), pixels are copied in a single linear pass, with no resampling
* and no extra allocation.
* When they differ, nearest-neighbour resampling is used, sampling decoded
* pixels directly from the `comps` array with no intermediate buffer.
*
* @param {Uint8ClampedArray} buffer
* Buffer to fill with grayscale values.
* @param {FillGrayBufferOptions} [options]
* Configuration (optional).
* @returns {Promise<undefined>}
* Promise that resolves to `undefined`.
*/
async fillGrayBuffer(
buffer,
{
destWidth,
destHeight,
invertOutput,
maxRows,
offset = 0,
stride = 1,
} = {}
) {
if (typeof PDFJSDev === "undefined" || PDFJSDev.test("TESTING")) {
assert(
buffer instanceof Uint8ClampedArray,
@ -964,31 +991,75 @@ class PDFImage {
);
}
const width = this.width;
const height = this.height;
const srcWidth = this.width;
const srcHeight = this.height;
const bpc = this.bpc;
// rows start at byte boundary
const rowBytes = (width * numComps * bpc + 7) >> 3;
const imgArray = await this.getImageBytes(height * rowBytes, {
const rowBytes = (srcWidth * numComps * bpc + 7) >> 3;
const imgArray = await this.getImageBytes(srcHeight * rowBytes, {
internal: true,
});
const comps = this.getComponents(imgArray);
let i, length;
const resolvedDestWidth = destWidth ?? srcWidth;
const resolvedDestHeight = destHeight ?? srcHeight;
const needsResampling =
resolvedDestWidth !== srcWidth || resolvedDestHeight !== srcHeight;
const rows =
maxRows === undefined
? resolvedDestHeight
: Math.min(resolvedDestHeight, maxRows);
let outputWidth = srcWidth;
let yRatio = 0;
let xScaled = null;
if (needsResampling) {
outputWidth = resolvedDestWidth;
yRatio = srcHeight / resolvedDestHeight;
const xRatio = srcWidth / resolvedDestWidth;
xScaled = new Uint32Array(resolvedDestWidth);
for (let i = 0; i < resolvedDestWidth; i++) {
xScaled[i] = Math.floor(i * xRatio);
}
}
const mask = invertOutput ? 0xff : 0;
if (bpc === 1) {
// inline decoding (= inversion) for 1 bpc images
length = width * height;
if (this.needsDecode) {
// invert and scale to {0, 255}
for (i = 0; i < length; ++i) {
buffer[i] = (comps[i] - 1) & 255;
if (xScaled) {
const xMap = xScaled;
let destIndex = offset;
if (this.needsDecode) {
for (let row = 0; row < rows; row++) {
const py = Math.floor(row * yRatio) * srcWidth;
for (let col = 0; col < outputWidth; col++) {
buffer[destIndex] = ((comps[py + xMap[col]] - 1) & 255) ^ mask;
destIndex += stride;
}
}
} else {
for (let row = 0; row < rows; row++) {
const py = Math.floor(row * yRatio) * srcWidth;
for (let col = 0; col < outputWidth; col++) {
buffer[destIndex] = (-comps[py + xMap[col]] & 255) ^ mask;
destIndex += stride;
}
}
}
} else {
// scale to {0, 255}
for (i = 0; i < length; ++i) {
buffer[i] = -comps[i] & 255;
const length = outputWidth * rows;
if (this.needsDecode) {
// invert and scale to {0, 255}
for (let i = 0; i < length; ++i) {
buffer[i * stride + offset] = ((comps[i] - 1) & 255) ^ mask;
}
} else {
// scale to {0, 255}
for (let i = 0; i < length; ++i) {
buffer[i * stride + offset] = (-comps[i] & 255) ^ mask;
}
}
}
return;
@ -997,11 +1068,23 @@ class PDFImage {
if (this.needsDecode) {
this.decodeBuffer(comps);
}
length = width * height;
// we aren't using a colorspace so we need to scale the value
const scale = 255 / ((1 << bpc) - 1);
for (i = 0; i < length; ++i) {
buffer[i] = scale * comps[i];
if (xScaled) {
const xMap = xScaled;
let destIndex = offset;
for (let row = 0; row < rows; row++) {
const py = Math.floor(row * yRatio) * srcWidth;
for (let col = 0; col < outputWidth; col++) {
buffer[destIndex] = (scale * comps[py + xMap[col]]) ^ mask;
destIndex += stride;
}
}
} else {
const length = outputWidth * rows;
for (let i = 0; i < length; ++i) {
buffer[i * stride + offset] = (scale * comps[i]) ^ mask;
}
}
}
@ -1068,14 +1151,16 @@ class PDFImage {
this.jpxDecoderOptions
);
// If imageBytes came from a DecodeStream, we're safe to transfer it
// (and thus detach its underlying buffer) because it will constitute
// the entire DecodeStream's data. But if it came from a Stream, we
// need to copy it because it'll only be a portion of the Stream's
// data, and the rest will be read later on.
if (internal || this.image instanceof DecodeStream) {
// Internal callers never transfer/return raw bytes out of the worker,
// and DecodeStream-backed bytes are self-contained for the decode.
return imageBytes;
}
// Stream-backed image data can be a subarray into shared stream storage,
// so returning it directly would risk detaching/mutating bytes that
// subsequent stream reads still need.
// Always return a fresh copy.
assert(
imageBytes instanceof Uint8Array,
'PDFImage.getImageBytes: Unsupported "imageBytes" type.'