feat(ext/canvas): OffscreenCanvas #23773

Draft
wants to merge 14 commits into base: main
implement further
crowlKats committed May 10, 2024
commit 591d28751f9c7c3a096071e9e2140bcccd4d633c
9 changes: 8 additions & 1 deletion ext/canvas/01_image.js
@@ -351,4 +351,11 @@ function getBitmapData(imageBitmap) {

internals.getBitmapData = getBitmapData;

export { _bitmapData, _width, _height, _detached, createImageBitmap, ImageBitmap };
export {
_bitmapData,
_detached,
_height,
_width,
createImageBitmap,
ImageBitmap,
};
239 changes: 203 additions & 36 deletions ext/canvas/02_canvas.js
@@ -1,12 +1,25 @@
import webidl from "ext:deno_webidl/00_webidl.js";
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

import * as webidl from "ext:deno_webidl/00_webidl.js";
import { DOMException } from "ext:deno_web/01_dom_exception.js";
import { op_image_encode_png } from "ext:core/ops";
import { _width, _height, _bitmapData, _detached, ImageBitmap } from "ext:deno_canvas/01_image.js";
import { core, primordials } from "ext:core/mod.js";
import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
const { _width, _height, _bitmapData, _detached, ImageBitmap } = core
.createLazyLoader("ext:deno_canvas/01_image.js")();
import { loadWebGPU } from "ext:deno_webgpu/00_init.js";
const {
ObjectPrototypeIsPrototypeOf,
Symbol,
SymbolFor,
} = primordials;
import { assert } from "ext:deno_web/00_infra.js";

const _context = Symbol("[[context]]");
const _canvasBitmap = Symbol("[[canvasBitmap]]");
const _contextMode = Symbol("[[contextMode]]");
class OffscreenCanvas extends EventTarget {
[_bitmapData];
[_canvasBitmap];

[_width];
get width() {
@@ -32,23 +45,39 @@ class OffscreenCanvas extends EventTarget {
const prefix = "Failed to construct 'OffscreenCanvas'";
webidl.requiredArguments(arguments.length, 2, prefix);

width = webidl.converters["unsigned long long"](width, prefix, "Argument 1", {
enforceRange: true,
});
height = webidl.converters["unsigned long long"](height, prefix, "Argument 2", {
enforceRange: true,
});
width = webidl.converters["unsigned long long"](
width,
prefix,
"Argument 1",
{
enforceRange: true,
},
);
height = webidl.converters["unsigned long long"](
height,
prefix,
"Argument 2",
{
enforceRange: true,
},
);

this[_width] = width;
this[_height] = height;
this[_bitmapData] = new Uint8Array(width * height * 3);
this[_canvasBitmap] = {
data: new Uint8Array(width * height * 3),
};
}

getContext(contextId, options = null) {
webidl.assertBranded(this, OffscreenCanvasPrototype);
const prefix = "Failed to call 'getContext' on 'OffscreenCanvas'";
webidl.requiredArguments(arguments.length, 1, prefix);
contextId = webidl.converters.OffscreenRenderingContextId(contextId, prefix, "Argument 1");
contextId = webidl.converters.OffscreenRenderingContextId(
contextId,
prefix,
"Argument 1",
);
options = webidl.converters.any(options);

if (webidl.type(options) !== "Object") {
@@ -58,10 +87,11 @@ class OffscreenCanvas extends EventTarget {
if (contextId === "bitmaprenderer") {
switch (this[_contextMode]) {
case null: {
const settings = webidl.converters.ImageBitmapRenderingContextSettings(options, prefix, "Argument 2");
const settings = webidl.converters
.ImageBitmapRenderingContextSettings(options, prefix, "Argument 2");
const context = webidl.createBranded(ImageBitmapRenderingContext);
context[_canvas] = this;
context[_bitmapData] = this[_bitmapData];
context[_canvasBitmap] = this[_canvasBitmap];
setOutputBitmap(context);
context[_alpha] = settings.alpha;

@@ -79,8 +109,11 @@
} else if (contextId === "webgpu") {
switch (this[_contextMode]) {
case null: {
// TODO Let context be the result of following the instructions given in WebGPU's Canvas Rendering section. [WEBGPU]
this[_contextMode] = "bitmaprenderer";
const context = webidl.createBranded(GPUCanvasContext);
context[_canvas] = this;
// TODO: Replace the drawing buffer of context.

this[_contextMode] = "webgpu";
this[_context] = context;
return context;
}
@@ -92,7 +125,10 @@
}
}
} else {
throw new DOMException(`Context '${contextId}' not implemented`, "NotSupportedError");
throw new DOMException(
`Context '${contextId}' not implemented`,
"NotSupportedError",
);
}
}

@@ -101,37 +137,50 @@
// TODO: If the value of this OffscreenCanvas object's [[Detached]] internal slot is set to true, then throw an "InvalidStateError" DOMException.

if (!this[_contextMode]) {
throw new DOMException("Cannot get bitmap from canvas without a context", "InvalidStateError");
throw new DOMException(
"Cannot get bitmap from canvas without a context",
"InvalidStateError",
);
}

let image = webidl.createBranded(ImageBitmap);
image[_bitmapData] = this[_bitmapData];
image[_bitmapData] = this[_canvasBitmap].data;
image[_width] = this[_width];
image[_height] = this[_height];
this[_bitmapData] = new Uint8Array(this[_bitmapData].length);
this[_canvasBitmap].data = new Uint8Array(this[_canvasBitmap].data.length);
return image;
}

convertToBlob(options = {}) {
webidl.assertBranded(this, OffscreenCanvasPrototype);
const prefix = "Failed to call 'getContext' on 'OffscreenCanvas'";
options = webidl.converters.ImageEncodeOptions(options, prefix, "Argument 1");
options = webidl.converters.ImageEncodeOptions(
options,
prefix,
"Argument 1",
);

// TODO: If the value of this OffscreenCanvas object's [[Detached]] internal slot is set to true, then return a promise rejected with an "InvalidStateError" DOMException.

if (this[_bitmapData].length === 0) {
if (this[_canvasBitmap].data.length === 0) {
throw new DOMException("", "DOMException");
}

const png = op_image_encode_png(this[_bitmapData], this[_width], this[_height]);
const png = op_image_encode_png(
this[_canvasBitmap].data,
this[_width],
this[_height],
);

if (!png) {
throw new DOMException("", "EncodingError");
}

return Promise.resolve(new Blob([png], {
type: "image/png",
}));
return Promise.resolve(
new Blob([png], {
type: "image/png",
}),
);
}
}
const OffscreenCanvasPrototype = OffscreenCanvas.prototype;
@@ -161,49 +210,166 @@ class ImageBitmapRenderingContext {
setOutputBitmap(this);
} else {
if (bitmap[_detached]) {
throw new DOMException("bitmap cannot be used as it has been detached", "InvalidStateError");
throw new DOMException(
"bitmap cannot be used as it has been detached",
"InvalidStateError",
);
}
setOutputBitmap(this, bitmap);
bitmap[_detached] = true;
bitmap[_bitmapData] = null;
bitmap[_canvasBitmap] = null;
}
}
}
const ImageBitmapRenderingContextPrototype = ImageBitmapRenderingContext.prototype;
const ImageBitmapRenderingContextPrototype =
ImageBitmapRenderingContext.prototype;

function setOutputBitmap(context, bitmap) {
if (!bitmap) {
context[_bitmapMode] = "blank";
context[_bitmapData] = new Uint8Array(context[_canvas][_bitmapData].length);
context[_canvasBitmap].data = new Uint8Array(
context[_canvasBitmap].data.length,
);
} else {
context[_bitmapMode] = "valid";
context[_bitmapData] = bitmap[_bitmapData];
context[_canvasBitmap].data = bitmap[_bitmapData];
}
}

const _configuration = Symbol("[[configuration]]");
const _textureDescriptor = Symbol("[[textureDescriptor]]");
const _currentTexture = Symbol("[[currentTexture]]");
const _drawingBuffer = Symbol("[[drawingBuffer]]");
class GPUCanvasContext {
[_configuration];
/** @type {GPUTexture | undefined} */
[_currentTexture];

[_canvas];
get canvas() {
webidl.assertBranded(this, GPUCanvasContextPrototype);
return this[_canvas];
}

constructor() {
webidl.illegalConstructor();
}

configure(configuration) {
webidl.assertBranded(this, GPUCanvasContextPrototype);
const prefix = "Failed to execute 'configure' on 'GPUCanvasContext'";
webidl.requiredArguments(arguments.length, 1, prefix);
configuration = webidl.converters.GPUCanvasConfiguration(configuration, {
prefix,
context: "Argument 1",
});

const descriptor = getTextureDescriptorForCanvasAndConfiguration(
this[_canvas],
configuration,
);
this[_configuration] = configuration;
this[_textureDescriptor] = descriptor;

// TODO: Replace the drawing buffer of this, which resets this.[[drawingBuffer]] with a bitmap with the new format and tags.
}

unconfigure() {
webidl.assertBranded(this, GPUCanvasContextPrototype);

this[_configuration] = null;
this[_textureDescriptor] = null;
replaceDrawingBuffer(this);
}

getCurrentTexture() {
webidl.assertBranded(this, GPUCanvasContextPrototype);

if (!this[_configuration]) {
throw new DOMException(
"The context was not configured",
"InvalidStateError",
);
}
assert(this[_textureDescriptor]);

if (!this[_currentTexture]) {
replaceDrawingBuffer(this);

this[_currentTexture] = this[_configuration].device.createTexture(
this[_textureDescriptor],
);
// TODO: except with the GPUTexture's underlying storage pointing to this.[[drawingBuffer]].
}

return this[_currentTexture];
}

[SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) {
return inspect(
createFilteredInspectProxy({
object: this,
evaluate: ObjectPrototypeIsPrototypeOf(GPUCanvasContextPrototype, this),
keys: [
"canvas",
],
}),
inspectOptions,
);
}
}
const GPUCanvasContextPrototype = GPUCanvasContext.prototype;

function getTextureDescriptorForCanvasAndConfiguration(canvas, configuration) {
return {
label: "GPUCanvasContextConfigurationTexture",
size: [canvas[_width], canvas[_height], 1],
format: configuration.format,
usage: configuration.usage,
viewFormats: configuration.viewFormats,
};
}

function replaceDrawingBuffer(context) {
expireCurrentTexture(context);

// TODO
}

function expireCurrentTexture(context) {
if (context[_currentTexture]) {
context[_currentTexture].destroy();
context[_currentTexture] = null;
}
}

// ENUM: OffscreenRenderingContextId
webidl.converters["OffscreenRenderingContextId"] = webidl.createEnumConverter(
"OffscreenRenderingContextId",
[
"2d", "bitmaprenderer", "webgl", "webgl2", "webgpu"
],
["bitmaprenderer", "webgpu"],
);

// DICT: ImageEncodeOptions
const dictImageEncodeOptions = [
{ key: "type", converter: webidl.converters.DOMString, defaultValue: "image/png" },
{
key: "type",
converter: webidl.converters.DOMString,
defaultValue: "image/png",
},
{
key: "quality",
converter: webidl.converters["unrestricted double"],
}
},
];
webidl.converters["ImageEncodeOptions"] = webidl
.createDictionaryConverter(
"ImageEncodeOptions",
dictImageEncodeOptions,
);

webidl.converters["ImageBitmap?"] = webidl.createNullableConverter(webidl.converters["ImageBitmap"]);
webidl.converters["ImageBitmap?"] = webidl.createNullableConverter(
webidl.createInterfaceConverter("ImageBitmap", ImageBitmap.prototype),
);

// DICT: ImageBitmapRenderingContextSettings
const dictImageBitmapRenderingContextSettings = [
@@ -215,3 +381,4 @@ webidl.converters["ImageBitmapRenderingContextSettings"] = webidl
dictImageBitmapRenderingContextSettings,
);

export { OffscreenCanvas };
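
For orientation, here is a minimal, hypothetical user-level sketch of the surface this commit implements. It assumes `OffscreenCanvas` and `navigator.gpu` are exposed as globals to user code (that wiring is outside this diff), and it only exercises the `bitmaprenderer` and `webgpu` context paths added here. Note that the WebGPU drawing-buffer plumbing is still marked TODO in this commit, so `getCurrentTexture()` is not yet backed by the canvas bitmap.

```js
// Hypothetical usage sketch; not part of this diff.

// bitmaprenderer path: obtain an ImageBitmap from one canvas and
// transfer it into another, then encode the result as a PNG Blob.
const source = new OffscreenCanvas(256, 256);
source.getContext("bitmaprenderer"); // a context mode is required before transferToImageBitmap
const bitmap = source.transferToImageBitmap(); // source gets a fresh blank bitmap

const canvas = new OffscreenCanvas(256, 256);
const ctx = canvas.getContext("bitmaprenderer");
ctx.transferFromImageBitmap(bitmap); // detaches `bitmap`

const png = await canvas.convertToBlob(); // defaults to { type: "image/png" }
console.log(png.type, png.size);

// webgpu path: configure the context and grab the current texture.
const gpuCanvas = new OffscreenCanvas(256, 256);
const gpuCtx = gpuCanvas.getContext("webgpu");
const adapter = await navigator.gpu.requestAdapter();
const device = await adapter.requestDevice();
gpuCtx.configure({ device, format: "bgra8unorm" });
const texture = gpuCtx.getCurrentTexture(); // expired/destroyed on unconfigure()
```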