From 2d7862a35860edcecaa2aa8e6e11737b9bd8a013 Mon Sep 17 00:00:00 2001
From: Jeremy Maitin-Shepard
Date: Fri, 19 Apr 2024 11:53:47 -0700
Subject: [PATCH] fix: fix lazy loading of JavaScript and WebAssembly modules

Previously, a number of JavaScript and WebAssembly modules were
incorrectly loaded eagerly when not actually needed.
---
 src/async_computation/decode_gzip.ts |  3 +--
 src/async_computation/obj_mesh.ts    |  2 +-
 src/async_computation/vtk_mesh.ts    |  2 +-
 src/mesh/draco/index.ts              | 37 +++++++++++++++++-----------
 src/sliceview/compresso/index.ts     | 35 +++++++++++++++-----------
 src/sliceview/png/index.ts           | 35 +++++++++++++++-----------
 src/util/gzip.ts                     |  6 ++----
 7 files changed, 69 insertions(+), 51 deletions(-)

diff --git a/src/async_computation/decode_gzip.ts b/src/async_computation/decode_gzip.ts
index f7ae456d7..68eeabd70 100644
--- a/src/async_computation/decode_gzip.ts
+++ b/src/async_computation/decode_gzip.ts
@@ -14,11 +14,10 @@
  * limitations under the License.
  */

-import pako from "pako";
 import { decodeGzip } from "#src/async_computation/decode_gzip_request.js";
 import { registerAsyncComputation } from "#src/async_computation/handler.js";

 registerAsyncComputation(decodeGzip, async (data: Uint8Array) => {
-  const result = pako.inflate(data);
+  const result = (await import("pako")).inflate(data);
   return { value: result, transfer: [result.buffer] };
 });
diff --git a/src/async_computation/obj_mesh.ts b/src/async_computation/obj_mesh.ts
index be1d5e44c..68646946d 100644
--- a/src/async_computation/obj_mesh.ts
+++ b/src/async_computation/obj_mesh.ts
@@ -24,7 +24,7 @@ import { Uint32ArrayBuilder } from "#src/util/uint32array_builder.js";
 registerAsyncComputation(
   parseOBJFromArrayBuffer,
   async (buffer: ArrayBuffer) => {
-    buffer = maybeDecompressGzip(buffer);
+    buffer = await maybeDecompressGzip(buffer);
     let text = new TextDecoder().decode(buffer);
     // Strip comments
     text = text.replace(/#.*/g, "");
diff --git a/src/async_computation/vtk_mesh.ts b/src/async_computation/vtk_mesh.ts
index 4031a6534..761d930c4 100644
--- a/src/async_computation/vtk_mesh.ts
+++ b/src/async_computation/vtk_mesh.ts
@@ -22,7 +22,7 @@ import { maybeDecompressGzip } from "#src/util/gzip.js";
 registerAsyncComputation(
   parseVTKFromArrayBuffer,
   async (buffer: ArrayBuffer) => {
-    const mesh = parseVTK(maybeDecompressGzip(buffer));
+    const mesh = parseVTK(await maybeDecompressGzip(buffer));
     return {
       value: { data: mesh, size: getTriangularMeshSize(mesh) },
       transfer: [
diff --git a/src/mesh/draco/index.ts b/src/mesh/draco/index.ts
index b0d086b82..c1f14bf15 100644
--- a/src/mesh/draco/index.ts
+++ b/src/mesh/draco/index.ts
@@ -60,26 +60,33 @@ const libraryEnv = {
     throw `proc exit: ${code}`;
   },
 };
-const dracoModulePromise = (async () => {
-  const m = (wasmModule = (
-    await WebAssembly.instantiateStreaming(
-      fetch(new URL("./neuroglancer_draco.wasm", import.meta.url)),
-      {
-        env: libraryEnv,
-        wasi_snapshot_preview1: libraryEnv,
-      },
-    )
-  ).instance);
-  (m.exports._initialize as Function)();
-  return m;
-})();
+let dracoModulePromise: Promise<WebAssembly.Instance> | undefined;
+
+function getDracoModulePromise() {
+  if (dracoModulePromise === undefined) {
+    dracoModulePromise = (async () => {
+      const m = (wasmModule = (
+        await WebAssembly.instantiateStreaming(
+          fetch(new URL("./neuroglancer_draco.wasm", import.meta.url)),
+          {
+            env: libraryEnv,
+            wasi_snapshot_preview1: libraryEnv,
+          },
+        )
+      ).instance);
+      (m.exports._initialize as Function)();
+      return m;
+    })();
+  }
+  return dracoModulePromise;
+}

 export async function decodeDracoPartitioned(
   buffer: Uint8Array,
   vertexQuantizationBits: number,
   partition: boolean,
 ): Promise {
-  const m = await dracoModulePromise;
+  const m = await getDracoModulePromise();
   const offset = (m.exports.malloc as Function)(buffer.byteLength);
   const heap = new Uint8Array((m.exports.memory as WebAssembly.Memory).buffer);
   heap.set(buffer, offset);
@@ -101,7 +108,7 @@ export async function decodeDracoPartitioned(
 }

 export async function decodeDraco(buffer: Uint8Array): Promise {
-  const m = await dracoModulePromise;
+  const m = await getDracoModulePromise();
   const offset = (m.exports.malloc as Function)(buffer.byteLength);
   const heap = new Uint8Array((m.exports.memory as WebAssembly.Memory).buffer);
   heap.set(buffer, offset);
diff --git a/src/sliceview/compresso/index.ts b/src/sliceview/compresso/index.ts
index c33a4224c..3ab4100a3 100644
--- a/src/sliceview/compresso/index.ts
+++ b/src/sliceview/compresso/index.ts
@@ -21,19 +21,26 @@ const libraryEnv = {
   },
 };

-const compressoModulePromise = (async () => {
-  const m = (
-    await WebAssembly.instantiateStreaming(
-      fetch(new URL("./compresso.wasm", import.meta.url)),
-      {
-        env: libraryEnv,
-        wasi_snapshot_preview1: libraryEnv,
-      },
-    )
-  ).instance;
-  (m.exports._initialize as Function)();
-  return m;
-})();
+let compressoModulePromise: Promise<WebAssembly.Instance> | undefined;
+
+function getCompressoModulePromise() {
+  if (compressoModulePromise === undefined) {
+    compressoModulePromise = (async () => {
+      const m = (
+        await WebAssembly.instantiateStreaming(
+          fetch(new URL("./compresso.wasm", import.meta.url)),
+          {
+            env: libraryEnv,
+            wasi_snapshot_preview1: libraryEnv,
+          },
+        )
+      ).instance;
+      (m.exports._initialize as Function)();
+      return m;
+    })();
+  }
+  return compressoModulePromise;
+}

 // not a full implementation of read header, just the parts we need
 function readHeader(buffer: Uint8Array): {
@@ -69,7 +76,7 @@
 export async function decompressCompresso(
   buffer: Uint8Array,
 ): Promise {
-  const m = await compressoModulePromise;
+  const m = await getCompressoModulePromise();
   const { sx, sy, sz, dataWidth } = readHeader(buffer);
   const voxels = sx * sy * sz;

diff --git a/src/sliceview/png/index.ts b/src/sliceview/png/index.ts
index 020640c25..86a25797f 100644
--- a/src/sliceview/png/index.ts
+++ b/src/sliceview/png/index.ts
@@ -23,19 +23,26 @@ const libraryEnv = {
   },
 };

-const pngModulePromise = (async () => {
-  const m = (
-    await WebAssembly.instantiateStreaming(
-      fetch(new URL("./libpng.wasm", import.meta.url)),
-      {
-        env: libraryEnv,
-        wasi_snapshot_preview1: libraryEnv,
-      },
-    )
-  ).instance;
-  (m.exports._initialize as Function)();
-  return m;
-})();
+let pngModulePromise: Promise<WebAssembly.Instance> | undefined;
+
+function getPngModulePromise() {
+  if (pngModulePromise === undefined) {
+    pngModulePromise = (async () => {
+      const m = (
+        await WebAssembly.instantiateStreaming(
+          fetch(new URL("./libpng.wasm", import.meta.url)),
+          {
+            env: libraryEnv,
+            wasi_snapshot_preview1: libraryEnv,
+          },
+        )
+      ).instance;
+      (m.exports._initialize as Function)();
+      return m;
+    })();
+  }
+  return pngModulePromise;
+}

 enum PngColorSpace {
   GRAYSCALE = 0,
@@ -169,7 +176,7 @@ export async function decompressPng(
   bytesPerPixel: number,
   convertToGrayscale: boolean,
 ): Promise {
-  const m = await pngModulePromise;
+  const m = await getPngModulePromise();
   let { sx, sy, dataWidth, numChannels } = readHeader(buffer);

   if (convertToGrayscale) {
diff --git a/src/util/gzip.ts b/src/util/gzip.ts
index a653a7d2b..35ab3cfad 100644
--- a/src/util/gzip.ts
+++ b/src/util/gzip.ts
@@ -14,8 +14,6 @@
  * limitations under the License.
  */

-import pako from "pako";
-
 /**
  * Detects gzip format based on the 2 magic bytes at the start.
  */
@@ -27,7 +25,7 @@ export function isGzipFormat(data: ArrayBufferView) {
 /**
  * Decompress `data` if it is in gzip format, otherwise just return it.
  */
-export function maybeDecompressGzip(data: ArrayBuffer | ArrayBufferView) {
+export async function maybeDecompressGzip(data: ArrayBuffer | ArrayBufferView) {
   let byteView: Uint8Array;
   if (data instanceof ArrayBuffer) {
     byteView = new Uint8Array(data);
@@ -35,7 +33,7 @@
     byteView = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
   }
   if (isGzipFormat(byteView)) {
-    return pako.inflate(byteView);
+    return (await import("pako")).inflate(byteView);
   }
   return byteView;
 }
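
The lazy-initialization pattern applied to each WebAssembly module above can be summarized in the following standalone TypeScript sketch. It is illustrative only, not code from the patch: the names modulePromise, getExampleModule, inflateGzip, and example.wasm, and the empty libraryEnv import object, are placeholder assumptions.

// Before: top-level instantiation starts fetching and compiling the .wasm
// file as soon as the module is imported, even if it is never used:
//
//   const modulePromise = WebAssembly.instantiateStreaming(
//     fetch(new URL("./example.wasm", import.meta.url)), imports);
//
// After: a memoized getter defers that work to the first call and caches the
// promise so the fetch/compile happens at most once.
const libraryEnv = {}; // placeholder import object; the real files pass WASI stubs

let modulePromise: Promise<WebAssembly.Instance> | undefined;

function getExampleModule(): Promise<WebAssembly.Instance> {
  if (modulePromise === undefined) {
    modulePromise = (async () => {
      const { instance } = await WebAssembly.instantiateStreaming(
        fetch(new URL("./example.wasm", import.meta.url)),
        { env: libraryEnv, wasi_snapshot_preview1: libraryEnv },
      );
      // WASI reactor modules export _initialize, which must run once.
      (instance.exports._initialize as Function)();
      return instance;
    })();
  }
  return modulePromise;
}

// JavaScript dependencies get the same treatment through dynamic import(),
// which lets the bundler emit pako as a separate chunk that is downloaded
// only when gzip-compressed data is actually encountered.
async function inflateGzip(data: Uint8Array): Promise<Uint8Array> {
  return (await import("pako")).inflate(data);
}

Callers simply await the getter, for example const m = await getExampleModule(), mirroring how decodeDraco, decompressCompresso, and decompressPng call their respective get*ModulePromise() helpers in the patch.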