69 changes: 54 additions & 15 deletions src/core/GltfLoader.ts
@@ -142,21 +142,26 @@ export interface GltfLoaderOptions {
*/
resolveUri?: (uri: string) => Promise<ArrayBuffer>;
/**
* Maximum accepted size (in bytes) for a plain JSON glTF payload. This limit is
* enforced at two points:
* Maximum accepted raw byte length for a JSON glTF payload (plain `.gltf`) or
* the JSON chunk of a GLB file, checked before UTF-8 decoding.
*
* 1. On the raw `ArrayBuffer` byte length before decoding.
* 2. On the approximate in-memory UTF-16 footprint of the decoded string
* (`text.length * 2`) before `JSON.parse` is called.
* Defaults to 64 MiB. Raise this value only when loading unusually large assets.
*/
maxJsonBufferBytes?: number;
/**
* Maximum accepted approximate in-memory UTF-16 heap footprint of the decoded
* JSON string (`text.length * 2`), checked before `JSON.parse` is called.
* Enforced for both plain `.gltf` payloads and the JSON chunk of GLB files.
*
* Because a JavaScript string stores each code unit as two bytes (UTF-16), a
* 27-byte ASCII JSON payload produces a string whose heap footprint is
* approximately 54 bytes. Callers should set this limit to at least twice the
* expected source byte size when dealing with predominantly ASCII content.
* A 64 MiB ASCII JSON buffer decodes to a string with a ~128 MiB heap footprint.
* Setting this separately from `maxJsonBufferBytes` lets callers tune raw-byte
* and heap-footprint limits independently.
*
* Defaults to 64 MiB. Raise this value only when loading unusually large assets.
* Defaults to twice `maxJsonBufferBytes` (128 MiB). Raise this value only when
* loading unusually large assets.
*/
maxJsonBufferBytes?: number;
maxJsonStringBytes?: number;
/**
* When `true`, each VEC3 normal vector is re-normalized to unit length after
* loading. Useful when the source asset was exported with non-unit normals.
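As a usage sketch of the two limits documented in this hunk (illustrative only: the 32 MiB figure is arbitrary and the `parseWithLimits` wrapper is hypothetical; `parseContainer` and the option names are the ones added by this diff):

```ts
import { parseContainer } from '../src/core/GltfLoader'; // import path assumed

const MIB = 1024 * 1024;

// Cap the raw .gltf / GLB JSON chunk at 32 MiB and the decoded UTF-16 string
// at 64 MiB. Omitting maxJsonStringBytes would default it to 2x the buffer limit.
export function parseWithLimits(buffer: ArrayBuffer) {
  return parseContainer(buffer, {
    maxJsonBufferBytes: 32 * MIB, // raw byte length, checked before decoding
    maxJsonStringBytes: 64 * MIB, // ~2 bytes per UTF-16 code unit, checked before JSON.parse
  });
}
```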
@@ -250,16 +255,28 @@ function wrapGltfError(prefix: string, cause: unknown): Error {
*/
export function parseContainer(
buffer: ArrayBuffer,
options?: Pick<GltfLoaderOptions, 'maxJsonBufferBytes'>,
options?: Pick<GltfLoaderOptions, 'maxJsonBufferBytes' | 'maxJsonStringBytes'>,
): {
json: GltfAsset;
binChunk: ArrayBuffer | undefined;
} {
const header = new DataView(buffer);
const maxJsonBufferBytes = options?.maxJsonBufferBytes ?? MAX_JSON_BUFFER_BYTES;
const maxJsonStringBytes = options?.maxJsonStringBytes ?? maxJsonBufferBytes * 2;

if (!Number.isFinite(maxJsonBufferBytes) || maxJsonBufferBytes < 0) {
throw new RangeError(
`maxJsonBufferBytes must be a finite non-negative number (got ${maxJsonBufferBytes}).`,
);
}
if (!Number.isFinite(maxJsonStringBytes) || maxJsonStringBytes < 0) {
throw new RangeError(
`maxJsonStringBytes must be a finite non-negative number (got ${maxJsonStringBytes}).`,
);
}

if (buffer.byteLength >= 12 && header.getUint32(0, true) === GLB_MAGIC) {
return parseGlb(buffer);
return parseGlb(buffer, maxJsonBufferBytes, maxJsonStringBytes);
}

// Treat the whole buffer as UTF-8 JSON
@@ -271,10 +288,10 @@ export function parseContainer(
}
const text = UTF8_DECODER.decode(buffer);
// Approximate heap usage of the decoded UTF-16 string (2 bytes per code unit)
if (text.length * 2 > maxJsonBufferBytes) {
if (text.length * 2 > maxJsonStringBytes) {
throw new Error(
`JSON glTF string too large (~${text.length * 2} UTF-16 bytes). ` +
`Maximum supported decoded size is ${maxJsonBufferBytes} bytes.`,
`Maximum supported decoded size is ${maxJsonStringBytes} bytes.`,
);
}
const json = safeParseGltfJson(text);
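The plain-JSON path now has two distinct guards, both rejecting with an `Error` whose message contains "too large"; a minimal sketch of how a caller might surface that (the wrapper and logging are illustrative, the message fragments come from this diff):

```ts
function tryParseGltf(buffer: ArrayBuffer) {
  const MIB = 1024 * 1024;
  try {
    return parseContainer(buffer, { maxJsonBufferBytes: 8 * MIB });
  } catch (err) {
    if (err instanceof Error && /too large/.test(err.message)) {
      // Raw-byte guard ("payload too large") or decoded-string guard ("string too large").
      console.warn('Rejected oversized glTF JSON:', err.message);
      return undefined;
    }
    throw err; // malformed GLB headers, invalid JSON, etc. propagate unchanged
  }
}
```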
@@ -284,7 +301,11 @@ export function parseContainer(
/**
* Parse a GLB (binary glTF) container according to the glTF 2.0 spec §5.
*/
function parseGlb(buffer: ArrayBuffer): {
function parseGlb(
buffer: ArrayBuffer,
maxJsonBufferBytes: number,
maxJsonStringBytes: number,
): {
json: GltfAsset;
binChunk: ArrayBuffer | undefined;
} {
@@ -304,11 +325,29 @@ function parseGlb(buffer: ArrayBuffer): {
if (chunkLength === 0) {
throw new Error(`Invalid chunk length: ${chunkLength}`);
}
if (offset + 8 + chunkLength > buffer.byteLength) {
throw new Error(
`GLB chunk at offset ${offset} extends beyond end of file ` +
`(chunk needs ${offset + 8 + chunkLength} bytes, file is ${buffer.byteLength} bytes).`,
);
}
const chunkType = view.getUint32(offset + 4, true);
const chunkData = buffer.slice(offset + 8, offset + 8 + chunkLength);

if (chunkType === GLB_CHUNK_JSON) {
if (chunkData.byteLength > maxJsonBufferBytes) {
throw new Error(
`GLB JSON chunk too large (${chunkData.byteLength} bytes). ` +
`Maximum supported size is ${maxJsonBufferBytes} bytes.`,
);
}
const text = UTF8_DECODER.decode(chunkData);
if (text.length * 2 > maxJsonStringBytes) {
throw new Error(
`GLB JSON chunk string too large (~${text.length * 2} UTF-16 bytes). ` +
`Maximum supported decoded size is ${maxJsonStringBytes} bytes.`,
);
}
json = safeParseGltfJson(text);
} else if (chunkType === GLB_CHUNK_BIN) {
binChunk = chunkData;
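For orientation, a minimal sketch of assembling a single-chunk GLB that exercises the bounds check and the JSON-chunk guards above (the magic and chunk-type constants follow the glTF 2.0 spec and match the hand-built header in the tests below; the helper name is illustrative):

```ts
function buildJsonOnlyGlb(doc: object): ArrayBuffer {
  const jsonBytes = new TextEncoder().encode(JSON.stringify(doc));
  const padded = (jsonBytes.length + 3) & ~3;   // chunk data is padded to a 4-byte boundary
  const total = 12 + 8 + padded;                // GLB header + chunk header + chunk data
  const buf = new ArrayBuffer(total);
  const view = new DataView(buf);
  view.setUint32(0, 0x46546c67, true);          // magic "glTF"
  view.setUint32(4, 2, true);                   // container version 2
  view.setUint32(8, total, true);               // total length
  view.setUint32(12, padded, true);             // JSON chunk length
  view.setUint32(16, 0x4e4f534a, true);         // chunk type "JSON"
  new Uint8Array(buf, 20).fill(0x20);           // pad with spaces per spec
  new Uint8Array(buf, 20, jsonBytes.length).set(jsonBytes);
  return buf;
}

// Routed through parseGlb via the magic check; assumes safeParseGltfJson
// accepts a minimal { asset: { version: '2.0' } } document.
const { json } = parseContainer(buildJsonOnlyGlb({ asset: { version: '2.0' } }));
```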
31 changes: 16 additions & 15 deletions src/core/ShaderCache.ts
@@ -79,6 +79,19 @@ export class ShaderCache {
return `fnv1a-shader-${primary}-${secondary}`;
}

/**
* Throws a {@link RangeError} if `key` is defined and exceeds
* {@link MAX_KEY_LENGTH}. Call this at the top of every public method that
* accepts an optional explicit key.
*/
private static assertKeyLength(key: string | undefined): void {
if (key !== undefined && key.length > ShaderCache.MAX_KEY_LENGTH) {
throw new RangeError(
`ShaderCache: explicit key exceeds maximum length of ${ShaderCache.MAX_KEY_LENGTH} characters.`,
);
}
}

/** key → compiled WebGLShader */
private readonly shaders: Map<string, WebGLShader> = new Map();

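A brief illustration of the centralized guard (the concrete `MAX_KEY_LENGTH` value and the `ShaderCache` constructor signature are not visible in this diff, so both are assumptions here):

```ts
declare const gl: WebGL2RenderingContext;  // any real WebGL context

const cache = new ShaderCache(gl);         // constructor signature assumed
const longKey = 'k'.repeat(100_000);       // comfortably past any plausible MAX_KEY_LENGTH

try {
  // All three public entry points (getShader, getProgram, getProgramKey) now
  // fail through the same assertKeyLength helper with an identical message.
  cache.getProgramKey('void main() {}', 'void main() {}', longKey);
} catch (err) {
  console.error(err); // RangeError: ShaderCache: explicit key exceeds maximum length of ... characters.
}
```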
@@ -111,11 +124,7 @@ export class ShaderCache {
* @param key Optional cache key. Defaults to an FNV-1a hash of the source string.
*/
getShader(type: number, source: string, key?: string): WebGLShader {
if (key !== undefined && key.length > ShaderCache.MAX_KEY_LENGTH) {
throw new RangeError(
`ShaderCache: explicit key exceeds maximum length of ${ShaderCache.MAX_KEY_LENGTH} characters.`,
);
}
ShaderCache.assertKeyLength(key);
const cacheKey = key ?? ShaderCache.hashShaderSource(source);
const existing = this.shaders.get(cacheKey);
if (existing) return existing;
@@ -136,11 +145,7 @@ export class ShaderCache {
getProgram(vertexSource: string, fragmentSource: string, key?: string): WebGLProgram {
// Explicit-key path: bypass auto-hashing entirely.
if (key !== undefined) {
if (key.length > ShaderCache.MAX_KEY_LENGTH) {
throw new RangeError(
`ShaderCache: explicit key exceeds maximum length of ${ShaderCache.MAX_KEY_LENGTH} characters.`,
);
}
ShaderCache.assertKeyLength(key);
const existing = this.programs.get(key);
if (existing !== undefined) return existing;
return this.compileAndCache(vertexSource, fragmentSource, key, undefined);
@@ -259,11 +264,7 @@ export class ShaderCache {
*/
getProgramKey(vertexSource: string, fragmentSource: string, key?: string): string {
if (key !== undefined) {
if (key.length > ShaderCache.MAX_KEY_LENGTH) {
throw new RangeError(
`ShaderCache: explicit key exceeds maximum length of ${ShaderCache.MAX_KEY_LENGTH} characters.`,
);
}
ShaderCache.assertKeyLength(key);
return key;
}
const hashKey = ShaderCache.hashSources(vertexSource, fragmentSource);
91 changes: 84 additions & 7 deletions tests/gltf.test.ts
@@ -173,16 +173,15 @@ describe('parseContainer', () => {
expect(() => parseContainer(oversized)).toThrow(/payload too large/);
});

it('rejects decoded JSON string exceeding maxJsonBufferBytes before JSON.parse is called', () => {
it('rejects decoded JSON string exceeding maxJsonStringBytes before JSON.parse is called', () => {
const json = JSON.stringify(minimalGltf()); // pure ASCII: buf.byteLength == json.length
const buf = new TextEncoder().encode(json).buffer as ArrayBuffer;
const parseSpy = vi.spyOn(JSON, 'parse');
try {
// maxJsonBufferBytes == json.length: the byte check passes (buf.byteLength <= limit),
// but the decoded-string guard fires because text.length * 2 == json.length * 2 > json.length.
// JSON.parse must never be reached.
// maxJsonStringBytes less than text.length * 2: the byte check passes, but
// the decoded-string guard fires. JSON.parse must never be reached.
try {
parseContainer(buf, { maxJsonBufferBytes: json.length });
parseContainer(buf, { maxJsonStringBytes: json.length });
} catch {
// expected — the decoded-string guard fired
}
@@ -191,8 +190,40 @@ describe('parseContainer', () => {
parseSpy.mockRestore();
}

// With maxJsonBufferBytes == json.length * 2, both guards pass and parsing succeeds.
expect(() => parseContainer(buf, { maxJsonBufferBytes: json.length * 2 })).not.toThrow();
// With maxJsonStringBytes >= text.length * 2, both guards pass and parsing succeeds.
expect(() => parseContainer(buf, { maxJsonStringBytes: json.length * 2 })).not.toThrow();
});
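The helpers referenced throughout these tests (`minimalGltf`, `jsonToBuffer`, `buildGlb`) are defined elsewhere in the test file and not shown in this diff; a rough sketch of what the first two plausibly look like, for orientation only:

```ts
// Assumed shapes -- the real definitions live outside the hunks in this diff.
function minimalGltf() {
  return { asset: { version: '2.0' } }; // smallest document the parser is assumed to accept
}

function jsonToBuffer(doc: object): ArrayBuffer {
  return new TextEncoder().encode(JSON.stringify(doc)).buffer as ArrayBuffer;
}
```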

it('maxJsonBufferBytes and maxJsonStringBytes are independent limits', () => {
const json = JSON.stringify(minimalGltf()); // pure ASCII
const buf = new TextEncoder().encode(json).buffer as ArrayBuffer;
// Buffer within maxJsonBufferBytes but string too large:
expect(() =>
parseContainer(buf, { maxJsonBufferBytes: json.length, maxJsonStringBytes: json.length }),
).toThrow(/string too large/);
// Buffer too large regardless of string limit:
expect(() =>
parseContainer(buf, { maxJsonBufferBytes: json.length - 1 }),
).toThrow(/payload too large/);
});
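The two limits diverge most for non-ASCII content, where UTF-8 byte length and UTF-16 heap footprint are no longer related by a simple factor of two; a small worked example (illustrative only, not part of the test suite):

```ts
const ascii = '{"a":"xxxx"}';        // 12 UTF-8 bytes -> 12 code units -> ~24 bytes of string heap
const cjk = '{"a":"文字列データ"}';   // 26 UTF-8 bytes -> 14 code units -> ~28 bytes of string heap

for (const s of [ascii, cjk]) {
  const bytes = new TextEncoder().encode(s).length; // compared against maxJsonBufferBytes
  const utf16 = s.length * 2;                       // compared against maxJsonStringBytes
  console.log({ bytes, utf16 });
}
```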

it('rejects GLB JSON chunk exceeding maxJsonBufferBytes', () => {
const glb = buildGlb(minimalGltf());
// Read the first chunk's length directly from the GLB header (bytes 12-15, little-endian).
// This is the actual on-disk byte length of the JSON chunk, independent of re-stringify.
const jsonChunkBytes = new DataView(glb).getUint32(12, true);
expect(() =>
parseContainer(glb, { maxJsonBufferBytes: jsonChunkBytes - 1 }),
).toThrow(/GLB JSON chunk too large/);
});

it('rejects GLB JSON chunk decoded string exceeding maxJsonStringBytes', () => {
const glb = buildGlb(minimalGltf());
// text.length * 2 for a pure-ASCII JSON string equals json.length * 2.
// Setting maxJsonStringBytes to 0 ensures the guard fires.
expect(() =>
parseContainer(glb, { maxJsonStringBytes: 0 }),
).toThrow(/GLB JSON chunk string too large/);
});

it('parses GLB container with JSON + BIN chunks', () => {
@@ -237,6 +268,52 @@ describe('parseContainer', () => {
expect(() => parseContainer(glb)).toThrow(/Invalid chunk length/);
});

it('throws when GLB chunk extends beyond end of file', () => {
// Build a GLB with a JSON chunk header that claims more bytes than remain.
// Header: magic(4) + version(4) + totalLength(4) = 12 bytes, then chunkLen(4) + chunkType(4)
// and 8 unused bytes bring the buffer to 28 bytes total, but chunkLen is set to 100 so
// offset + 8 + 100 > 28 and the bounds check fires.
const glb = new ArrayBuffer(28);
const view = new DataView(glb);
view.setUint32(0, 0x46546C67, true); // magic
view.setUint32(4, 2, true); // version 2
view.setUint32(8, 28, true); // total length
view.setUint32(12, 100, true); // chunk length — exceeds file
view.setUint32(16, 0x4E4F534A, true); // JSON chunk type

expect(() => parseContainer(glb)).toThrow(/extends beyond end of file/);
});

it('throws RangeError when maxJsonBufferBytes is NaN', () => {
const buf = jsonToBuffer(minimalGltf());
expect(() => parseContainer(buf, { maxJsonBufferBytes: NaN })).toThrow(RangeError);
expect(() => parseContainer(buf, { maxJsonBufferBytes: NaN })).toThrow(
/maxJsonBufferBytes must be a finite non-negative number/,
);
});

it('throws RangeError when maxJsonBufferBytes is Infinity', () => {
const buf = jsonToBuffer(minimalGltf());
expect(() => parseContainer(buf, { maxJsonBufferBytes: Infinity })).toThrow(RangeError);
});

it('throws RangeError when maxJsonBufferBytes is negative', () => {
const buf = jsonToBuffer(minimalGltf());
expect(() => parseContainer(buf, { maxJsonBufferBytes: -1 })).toThrow(RangeError);
});

it('throws RangeError when maxJsonStringBytes is NaN', () => {
const buf = jsonToBuffer(minimalGltf());
expect(() => parseContainer(buf, { maxJsonStringBytes: NaN })).toThrow(RangeError);
expect(() => parseContainer(buf, { maxJsonStringBytes: NaN })).toThrow(
/maxJsonStringBytes must be a finite non-negative number/,
);
});

it('throws RangeError when maxJsonStringBytes is negative', () => {
const buf = jsonToBuffer(minimalGltf());
expect(() => parseContainer(buf, { maxJsonStringBytes: -1 })).toThrow(RangeError);
});

it('throws when GLB has no JSON chunk', () => {
// Build a GLB header with no chunks following
const glb = new ArrayBuffer(12);