Skip to content

Commit

Permalink
OffscreenCanvas (#127)
Browse files Browse the repository at this point in the history
  • Loading branch information
wcandillon authored Sep 24, 2024
1 parent 30a3ac8 commit 920aed3
Show file tree
Hide file tree
Showing 17 changed files with 279 additions and 135 deletions.
13 changes: 10 additions & 3 deletions apps/paper/src/Tests.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,19 @@

import React, { useEffect, useState } from "react";
import { Dimensions, Text, View, Image } from "react-native";
import "react-native-wgpu";
import { GPUOffscreenCanvas } from "react-native-wgpu";
import { mat4, vec3, mat3 } from "wgpu-matrix";

import { useClient } from "./useClient";
import { cubeVertexArray } from "./components/cube";
import { redFragWGSL, triangleVertWGSL } from "./Triangle/triangle";
import { NativeDrawingContext } from "./components/NativeDrawingContext";
import type { AssetProps } from "./components/useAssets";
import { Texture } from "./components/Texture";

export const CI = process.env.CI === "true";

const { width } = Dimensions.get("window");
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

const useWebGPU = () => {
const [adapter, setAdapter] = useState<GPUAdapter | null>(null);
Expand Down Expand Up @@ -42,7 +42,13 @@ export const Tests = ({ assets: { di3D, saturn, moon } }: AssetProps) => {
client.onmessage = (e) => {
const tree = JSON.parse(e.data);
if (tree.code) {
const ctx = new NativeDrawingContext(device, 1024, 1024);
const canvas = new GPUOffscreenCanvas(1024, 1024);
const ctx = canvas.getContext("webgpu")!;
ctx.configure({
device,
format: presentationFormat,
alphaMode: "premultiplied",
});
const result = eval(
`(function Main() {
return (${tree.code})(this.ctx);
Expand All @@ -67,6 +73,7 @@ export const Tests = ({ assets: { di3D, saturn, moon } }: AssetProps) => {
redFragWGSL,
},
ctx,
canvas: ctx.canvas,
mat4,
vec3,
mat3,
Expand Down
11 changes: 0 additions & 11 deletions apps/paper/src/components/DrawingContext.ts

This file was deleted.

52 changes: 0 additions & 52 deletions apps/paper/src/components/NativeDrawingContext.ts

This file was deleted.

3 changes: 2 additions & 1 deletion packages/webgpu/.eslintrc
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"ignorePatterns": ["**/*/components/meshes"],
"rules": {
"no-bitwise": "off",
"@typescript-eslint/no-require-imports": "off"
"@typescript-eslint/no-require-imports": "off",
"no-dupe-class-members": "off"
}
}
158 changes: 158 additions & 0 deletions packages/webgpu/src/Offscreen.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Minimal OffscreenCanvas implementation backed by a WebGPU context.
 * Used for headless rendering (e.g. reference tests) with pixel readback
 * via getImageData(). Only the "webgpu" context type is supported.
 */
export class GPUOffscreenCanvas implements OffscreenCanvas {
  width: number;
  height: number;
  oncontextlost: ((this: OffscreenCanvas, ev: Event) => any) | null = null;
  oncontextrestored: ((this: OffscreenCanvas, ev: Event) => any) | null = null;

  private context: GPUOffscreenCanvasContext;

  constructor(width: number, height: number) {
    this.width = width;
    this.height = height;
    this.context = new GPUOffscreenCanvasContext(this);
  }

  convertToBlob(_options?: ImageEncodeOptions): Promise<Blob> {
    // Not needed for the headless test use case.
    throw new Error("Method not implemented.");
  }

  // Overloaded method signatures — the supported "webgpu" case comes first,
  // the remaining overloads exist only to satisfy the OffscreenCanvas interface.
  getContext(contextId: "webgpu"): GPUCanvasContext | null;
  getContext(
    contextId: "2d",
    options?: any,
  ): OffscreenCanvasRenderingContext2D | null;
  getContext(
    contextId: "bitmaprenderer",
    options?: any,
  ): ImageBitmapRenderingContext | null;
  getContext(contextId: "webgl", options?: any): WebGLRenderingContext | null;
  getContext(contextId: "webgl2", options?: any): WebGL2RenderingContext | null;
  getContext(
    contextId: OffscreenRenderingContextId,
    options?: any,
  ): OffscreenRenderingContext | null;
  getContext(
    contextId: unknown,
    _options?: any,
  ): OffscreenRenderingContext | GPUCanvasContext | null {
    if (contextId === "webgpu") {
      return this.context;
    }
    // Other context types (2d, webgl, …) are intentionally unsupported.
    return null;
  }

  transferToImageBitmap(): ImageBitmap {
    // Not needed for the headless test use case.
    throw new Error("Method not implemented.");
  }

  addEventListener<K extends keyof OffscreenCanvasEventMap>(
    _type: K,
    _listener: (this: OffscreenCanvas, ev: OffscreenCanvasEventMap[K]) => any,
    _options?: boolean | AddEventListenerOptions,
  ): void {
    throw new Error("Method not implemented.");
  }

  removeEventListener<K extends keyof OffscreenCanvasEventMap>(
    _type: K,
    _listener: (this: OffscreenCanvas, ev: OffscreenCanvasEventMap[K]) => any,
    _options?: boolean | EventListenerOptions,
  ): void {
    throw new Error("Method not implemented.");
  }

  dispatchEvent(_event: Event): boolean {
    throw new Error("Method not implemented.");
  }

  /**
   * Copies the backing texture to a mappable buffer and resolves with the raw
   * pixel bytes (4 bytes per pixel) plus dimensions and format.
   * Requires the context to have been configured first.
   */
  getImageData() {
    const device = this.context.getDevice();
    const texture = this.context.getTexture();
    const commandEncoder = device.createCommandEncoder();
    // WebGPU requires bytesPerRow in copyTextureToBuffer to be a multiple of
    // 256; pad each row up and strip the padding again after mapping so the
    // readback works for any width, not just widths divisible by 64.
    const unpaddedBytesPerRow = this.width * 4;
    const bytesPerRow = Math.ceil(unpaddedBytesPerRow / 256) * 256;
    const buffer = device.createBuffer({
      size: bytesPerRow * this.height,
      usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
    });
    commandEncoder.copyTextureToBuffer(
      { texture: texture },
      { buffer: buffer, bytesPerRow },
      [this.width, this.height],
    );
    device.queue.submit([commandEncoder.finish()]);

    return buffer.mapAsync(GPUMapMode.READ).then(() => {
      const mapped = new Uint8Array(buffer.getMappedRange());
      // Copy row by row, dropping the per-row alignment padding.
      const data: number[] = [];
      for (let row = 0; row < this.height; row++) {
        const start = row * bytesPerRow;
        for (let i = 0; i < unpaddedBytesPerRow; i++) {
          data.push(mapped[start + i]);
        }
      }
      buffer.unmap();
      return {
        data,
        width: this.width,
        height: this.height,
        // NOTE(review): reports the platform's preferred format; assumes the
        // context was configured with that same format — confirm at call sites.
        format: navigator.gpu.getPreferredCanvasFormat(),
      };
    });
  }
}

/**
 * GPUCanvasContext implementation backed by a plain GPUTexture instead of a
 * presentable surface. configure() allocates the texture; getCurrentTexture()
 * hands it out for rendering; the texture is also readable (COPY_SRC) so the
 * owning canvas can read pixels back.
 */
class GPUOffscreenCanvasContext implements GPUCanvasContext {
  __brand = "GPUCanvasContext" as const;

  // Backing texture format; replaced by the caller's format in configure().
  private textureFormat: GPUTextureFormat = "bgra8unorm";
  private texture: GPUTexture | null = null;
  private device: GPUDevice | null = null;

  constructor(public readonly canvas: OffscreenCanvas) {}

  /** Returns the configured device; throws if configure() was not called. */
  getDevice() {
    if (!this.device) {
      throw new Error("Device is not configured.");
    }
    return this.device;
  }

  /** Returns the backing texture; throws if configure() was not called. */
  getTexture() {
    if (!this.texture) {
      throw new Error("Texture is not configured");
    }
    return this.texture;
  }

  configure(config: GPUCanvasConfiguration) {
    // Honor the caller-requested format — previously it was silently ignored
    // and the texture was always created as bgra8unorm.
    this.textureFormat = config.format;
    // Release any texture from a prior configure() call to avoid leaking it.
    if (this.texture) {
      this.texture.destroy();
    }
    this.device = config.device;
    this.texture = config.device.createTexture({
      size: [this.canvas.width, this.canvas.height],
      format: this.textureFormat,
      usage:
        GPUTextureUsage.RENDER_ATTACHMENT |
        GPUTextureUsage.COPY_SRC |
        GPUTextureUsage.TEXTURE_BINDING,
    });
    return undefined;
  }

  unconfigure() {
    if (this.texture) {
      this.texture.destroy();
      // Clear state so later getCurrentTexture()/getDevice() calls fail
      // loudly instead of returning a destroyed texture or stale device.
      this.texture = null;
    }
    this.device = null;
    return undefined;
  }

  getCurrentTexture(): GPUTexture {
    if (!this.texture) {
      throw new Error("Texture is not configured");
    }
    return this.texture;
  }
}
8 changes: 4 additions & 4 deletions packages/webgpu/src/__tests__/ExternalTexture.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { checkImage, client, encodeImage } from "./setup";
describe("External Textures", () => {
it("Simple (1)", async () => {
const result = await client.eval(
({ gpu, device, ctx, urls: { fTexture } }) => {
({ gpu, device, ctx, canvas, urls: { fTexture } }) => {
const module = device.createShaderModule({
label: "our hardcoded textured quad shaders",
code: /* wgsl */ `
Expand Down Expand Up @@ -131,7 +131,7 @@ describe("External Textures", () => {
device.queue.submit([commandBuffer]);
}
render();
return ctx.getImageData();
return canvas.getImageData();
});
});
});
Expand All @@ -143,7 +143,7 @@ describe("External Textures", () => {
});
it("Simple (2)", async () => {
const result = await client.eval(
({ gpu, device, ctx, urls: { fTexture } }) => {
({ gpu, device, ctx, canvas, urls: { fTexture } }) => {
const module = device.createShaderModule({
label: "our hardcoded textured quad shaders",
code: /* wgsl */ `
Expand Down Expand Up @@ -271,7 +271,7 @@ describe("External Textures", () => {
device.queue.submit([commandBuffer]);
}
render();
return ctx.getImageData();
return canvas.getImageData();
});
});
});
Expand Down
10 changes: 8 additions & 2 deletions packages/webgpu/src/__tests__/Texture.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,13 @@ describe("Texture", () => {
});
it("Create texture and reads it", async () => {
const result = await client.eval(
({ device, shaders: { triangleVertWGSL, redFragWGSL }, gpu, ctx }) => {
({
device,
shaders: { triangleVertWGSL, redFragWGSL },
gpu,
ctx,
canvas,
}) => {
const pipeline = device.createRenderPipeline({
layout: "auto",
vertex: {
Expand Down Expand Up @@ -182,7 +188,7 @@ describe("Texture", () => {
passEncoder.end();

device.queue.submit([commandEncoder.finish()]);
return ctx.getImageData();
return canvas.getImageData();
},
);
const image = encodeImage(result);
Expand Down
11 changes: 0 additions & 11 deletions packages/webgpu/src/__tests__/components/DrawingContext.ts

This file was deleted.

Loading

0 comments on commit 920aed3

Please sign in to comment.