From 606f5af69ab5b87d5ed1629dec7f3220b5468116 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 21 Aug 2023 18:08:38 +0530 Subject: [PATCH 01/60] 1.36.1 (#20221) Bumped versions for 1.36.1 (Recover the 1.36.1 commit) Co-authored-by: littledivy --- .github/workflows/ci.generate.ts | 2 +- .github/workflows/ci.yml | 8 +++--- Cargo.lock | 46 ++++++++++++++++---------------- Cargo.toml | 44 +++++++++++++++--------------- Releases.md | 23 ++++++++++++++++ bench_util/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- cli/deno_std.rs | 2 +- cli/napi/sym/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 +- ext/webstorage/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- 29 files changed, 97 insertions(+), 74 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 63a89bf9f42ab9..cf9ec8ac167749 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. -const cacheVersion = 48; +const cacheVersion = 49; const Runners = (() => { const ubuntuRunner = "ubuntu-22.04"; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9fe9d490936463..945a5eed5a8dc4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -311,8 +311,8 @@ jobs: path: |- ~/.cargo/registry/index ~/.cargo/registry/cache - key: '48-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' - restore-keys: '48-cargo-home-${{ matrix.os }}' + key: '49-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + restore-keys: '49-cargo-home-${{ matrix.os }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -324,7 +324,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '48-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '49-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -612,7 +612,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '48-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '49-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index b5997d082339ed..3d9c9eab18d9e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -791,7 +791,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5" [[package]] name = "deno" -version = "1.36.0" +version = "1.36.1" dependencies = [ "async-trait", "atty", @@ -939,7 +939,7 
@@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.108.0" +version = "0.109.0" dependencies = [ "bencher", "deno_core", @@ -949,7 +949,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.108.0" +version = "0.109.0" dependencies = [ "async-trait", "deno_core", @@ -959,7 +959,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.46.0" +version = "0.47.0" dependencies = [ "async-trait", "deno_core", @@ -990,7 +990,7 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.114.0" +version = "0.115.0" dependencies = [ "deno_core", ] @@ -1023,7 +1023,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.128.0" +version = "0.129.0" dependencies = [ "aes", "aes-gcm", @@ -1092,7 +1092,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.138.0" +version = "0.139.0" dependencies = [ "bytes", "data-url", @@ -1108,7 +1108,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.101.0" +version = "0.102.0" dependencies = [ "deno_core", "dlopen", @@ -1124,7 +1124,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.24.0" +version = "0.25.0" dependencies = [ "async-trait", "deno_core", @@ -1164,7 +1164,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.109.0" +version = "0.110.0" dependencies = [ "async-compression", "async-trait", @@ -1200,7 +1200,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.24.0" +version = "0.25.0" dependencies = [ "async-trait", "deno_core", @@ -1213,7 +1213,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.22.0" +version = "0.23.0" dependencies = [ "anyhow", "async-trait", @@ -1271,7 +1271,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.44.0" +version = "0.45.0" dependencies = [ "deno_core", "libloading", @@ -1279,7 +1279,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.106.0" +version = "0.107.0" dependencies = [ "deno_core", "deno_tls", @@ -1295,7 +1295,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.51.0" +version = "0.52.0" dependencies = [ "aes", "brotli", @@ -1391,7 +1391,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.122.0" +version = "0.123.0" dependencies = [ "atty", "console_static_text", @@ -1475,7 +1475,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.101.0" +version = "0.102.0" dependencies = [ "deno_core", "once_cell", @@ -1489,7 +1489,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.114.0" +version = "0.115.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1500,7 +1500,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.145.0" +version = "0.146.0" dependencies = [ "async-trait", "base64-simd", @@ -1519,7 +1519,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.114.0" +version = "0.115.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1527,7 +1527,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.119.0" +version = "0.120.0" dependencies = [ "bytes", "deno_core", @@ -1544,7 +1544,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.109.0" +version = "0.110.0" dependencies = [ "deno_core", "deno_web", @@ -3181,7 +3181,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.44.0" +version = "0.45.0" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.32", diff --git a/Cargo.toml b/Cargo.toml index 7dba722d89bd6b..d404b34462b2d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,9 +43,9 @@ deno_ast = { 
version = "0.27.0", features = ["transpiling"] } deno_core = "0.200.0" -deno_runtime = { version = "0.122.0", path = "./runtime" } -napi_sym = { version = "0.44.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.108.0", path = "./bench_util" } +deno_runtime = { version = "0.123.0", path = "./runtime" } +napi_sym = { version = "0.45.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.109.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.15.0" deno_media_type = { version = "0.1.1", features = ["module_specifier"] } @@ -53,25 +53,25 @@ deno_npm = "0.11.0" deno_semver = "0.3.0" # exts -deno_broadcast_channel = { version = "0.108.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.46.0", path = "./ext/cache" } -deno_console = { version = "0.114.0", path = "./ext/console" } -deno_crypto = { version = "0.128.0", path = "./ext/crypto" } -deno_fetch = { version = "0.138.0", path = "./ext/fetch" } -deno_ffi = { version = "0.101.0", path = "./ext/ffi" } -deno_fs = { version = "0.24.0", path = "./ext/fs" } -deno_http = { version = "0.109.0", path = "./ext/http" } -deno_io = { version = "0.24.0", path = "./ext/io" } -deno_net = { version = "0.106.0", path = "./ext/net" } -deno_node = { version = "0.51.0", path = "./ext/node" } -deno_kv = { version = "0.22.0", path = "./ext/kv" } -deno_tls = { version = "0.101.0", path = "./ext/tls" } -deno_url = { version = "0.114.0", path = "./ext/url" } -deno_web = { version = "0.145.0", path = "./ext/web" } -deno_webidl = { version = "0.114.0", path = "./ext/webidl" } -deno_websocket = { version = "0.119.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.109.0", path = "./ext/webstorage" } -deno_napi = { version = "0.44.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.109.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.47.0", path = "./ext/cache" } +deno_console = { version = "0.115.0", path = "./ext/console" } +deno_crypto = { version = "0.129.0", path = "./ext/crypto" } +deno_fetch = { version = "0.139.0", path = "./ext/fetch" } +deno_ffi = { version = "0.102.0", path = "./ext/ffi" } +deno_fs = { version = "0.25.0", path = "./ext/fs" } +deno_http = { version = "0.110.0", path = "./ext/http" } +deno_io = { version = "0.25.0", path = "./ext/io" } +deno_net = { version = "0.107.0", path = "./ext/net" } +deno_node = { version = "0.52.0", path = "./ext/node" } +deno_kv = { version = "0.23.0", path = "./ext/kv" } +deno_tls = { version = "0.102.0", path = "./ext/tls" } +deno_url = { version = "0.115.0", path = "./ext/url" } +deno_web = { version = "0.146.0", path = "./ext/web" } +deno_webidl = { version = "0.115.0", path = "./ext/webidl" } +deno_websocket = { version = "0.120.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.110.0", path = "./ext/webstorage" } +deno_napi = { version = "0.45.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" diff --git a/Releases.md b/Releases.md index 2cfb37ad416e8b..0dceb9c4af45a3 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,29 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.36.1 / 2023.08.10 + +- feat(unstable): rename `deno_modules` to `vendor` (#20065) +- fix(ext/abort): trigger AbortSignal events in correct order (#20095) +- fix(ext/file): resolve unresolved Promise in Blob.stream (#20039) +- fix(ext/http): serveHttp brotli compression level should be fastest (#20058) +- fix(ext/http): 
unify default gzip compression level (#20050) +- fix(ext/timers): some timers are not resolved (#20055) +- fix(fmt): do not insert expr stmt leading semi-colon in do while stmt body + (#20093) +- fix(node): polyfill process.title (#20044) +- fix(node): repl._builtinLibs (#20046) +- fix(node/async_hooks): don't pop async context frame if stack if empty + (#20077) +- fix(test): handle ASCII escape chars in test name (#20081) +- fix(test): make test runner work when global setTimeout is replaced (#20052) +- fix(test): use only a single timeout for op sanitizers (#20042) +- fix(unstable): vendor cache override should handle forbidden windows directory + names (#20069) +- fix(unstable): vendor cache should support adding files to hashed directories + (#20070) +- perf(ext/headers): use .push loop instead of spread operator (#20108) + ### 1.36.0 / 2023.08.03 - feat(bench): add BenchContext::start() and BenchContext::end() (#18734) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index ace55526ef272f..5100e44ccc911f 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.108.0" +version = "0.109.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 97cfef0a6956a0..0879b45e82df5b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.36.0" +version = "1.36.1" authors.workspace = true default-run = "deno" edition.workspace = true diff --git a/cli/deno_std.rs b/cli/deno_std.rs index b7cf25ff0f01b0..bbef0b1c66a330 100644 --- a/cli/deno_std.rs +++ b/cli/deno_std.rs @@ -2,4 +2,4 @@ // WARNING: Ensure this is the only deno_std version reference as this // is automatically updated by the version bump workflow. 
-pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.197.0/"; +pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.198.0/"; diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index 3c49403b88bc3f..a00f7a732a6dd0 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.44.0" +version = "0.45.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index d28759eb4fcab2..5d46a6597d3ffe 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.108.0" +version = "0.109.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 76567ea4a237ba..aad31ce024c0e9 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.46.0" +version = "0.47.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index d42ef47a2c6037..a2e13afc32b740 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.114.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 119c489ffa944e..ea9962e66c2059 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.128.0" +version = "0.129.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 5d6b07af674782..1a28f0c74c22a3 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.138.0" +version = "0.139.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 1e1d29243c9333..2e557092360b32 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.101.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 484285f858281d..9a2d175ce6e169 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.24.0" +version = "0.25.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index f310e1cf19f1c8..5d17a561ea16c7 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.109.0" +version = "0.110.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 0c9dedec1e0661..0b7e7839e17577 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.24.0" +version = "0.25.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index e819796eee9122..7866605c0580d4 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = 
"0.22.0" +version = "0.23.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index aeae702c9175e4..b8868473a7881e 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.44.0" +version = "0.45.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index f6036db2e2d5f0..a0bb2f557ff0fe 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.106.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 9f4ba46948ded7..717fd438cd1d29 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.51.0" +version = "0.52.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 068cfb62dda0f6..4c67c1edff91f8 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.101.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index de76572e13b55e..e847a3d3e938d2 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.114.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index 7de084d7f8c747..dbc2df8c09d785 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.145.0" +version = "0.146.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 91e58be0030a5e..a84f5986ae9767 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.114.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 21434cf3da6773..d45ac341d4619e 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.119.0" +version = "0.120.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 0461e74e37e13b..40ac41cfb598ad 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.109.0" +version = "0.110.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index a7463920a2ca2a..e7ef6e862e4dea 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.122.0" +version = "0.123.0" authors.workspace = true edition.workspace = true license.workspace = true From 28142a74dd513c13a8b2ae1534d4765c8a2c932e Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Thu, 10 Aug 2023 19:41:09 +0200 Subject: [PATCH 02/60] perf(ext/headers): optimize getHeader using for loop (#20115) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit This PR optimizes the `getHeader` function by replacing `.filter` and `.map` with a `for` loop **this patch** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.0 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------- ----------------------------- Headers.get 132.2 ns/iter 7,564,093.4 (125.81 ns … 147.66 ns) 133.79 ns 144.92 ns 145.36 ns ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.0 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------- ----------------------------- Headers.get 191.48 ns/iter 5,222,523.6 (182.75 ns … 212.22 ns) 193.5 ns 205.96 ns 211.51 ns ``` ```js const headers = new Headers({ "Content-Type": "application/json", "Date": "Thu, 10 Aug 2023 07:45:10 GMT", "X-Deno": "Deno", "Powered-By": "Deno", "Content-Encoding": "gzip", "Set-Cookie": "__Secure-ID=123; Secure; Domain=example.com", "Content-Length": "150", "Vary": "Accept-Encoding, Accept, X-Requested-With", }); Deno.bench("Headers.get", () => { const i = headers.get("x-deno"); }); ``` --- ext/fetch/20_headers.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 45bd29ad3ef047..fabd39c0e89026 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -22,12 +22,10 @@ import { const primordials = globalThis.__bootstrap.primordials; const { ArrayIsArray, - ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeSort, ArrayPrototypeJoin, ArrayPrototypeSplice, - ArrayPrototypeFilter, ObjectEntries, ObjectHasOwn, RegExpPrototypeExec, @@ -162,13 +160,13 @@ function appendHeader(headers, name, value) { */ function getHeader(list, name) { const lowercaseName = byteLowerCase(name); - const entries = ArrayPrototypeMap( - ArrayPrototypeFilter( - list, - (entry) => byteLowerCase(entry[0]) === lowercaseName, - ), - (entry) => entry[1], - ); + const entries = []; + for (let i = 0; i < list.length; i++) { + if (byteLowerCase(list[i][0]) === lowercaseName) { + ArrayPrototypePush(entries, list[i][1]); + } + } + if (entries.length === 0) { return null; } else { From 1a1283fa4d1a88f9e9d2f55ec3fd1d4dd2dd4d8b Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Thu, 10 Aug 2023 19:45:55 +0200 Subject: [PATCH 03/60] perf(ext/http): use ServeHandlerInfo class instead of object literal (#20122) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR improves performance of `Deno.Serve` when providing `info` argument by creating `ServeHandlerInfo` class instead of creating an object literal with a getter on every request. 
```js Deno.serve((_req, info) => new Response(info.remoteAddr.transport) }); ``` ### Benchmarks ``` wrk -d 10s --latency http://127.0.0.1:4500 Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 42.34us 16.30us 1.66ms 95.88% Req/Sec 118.17k 2.95k 127.38k 76.73% Latency Distribution 50% 38.00us 75% 41.00us 90% 56.00us 99% 83.00us 2375298 requests in 10.10s, 319.40MB read Requests/sec: 235177.04 Transfer/sec: 31.62MB ``` **main** ``` wrk -d 10s --latency http://127.0.0.1:4500 Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 78.86us 211.06us 3.58ms 96.52% Req/Sec 105.90k 4.35k 117.41k 78.22% Latency Distribution 50% 41.00us 75% 53.00us 90% 62.00us 99% 1.18ms 2127534 requests in 10.10s, 286.09MB read Requests/sec: 210647.49 Transfer/sec: 28.33MB ``` ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.0 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 -------------------------------------------------------------------------- ----------------------------- new ServeHandlerInfo 3.43 ns/iter 291,508,889.3 (3.07 ns … 12.21 ns) 3.42 ns 3.84 ns 3.87 ns {} with getter 133.84 ns/iter 7,471,528.9 (92.9 ns … 458.95 ns) 132.45 ns 364.96 ns 429.43 ns ``` ---- ### Drawbacks: `.remoteAddr` is now not enumerable ``` ServeHandlerInfo {} ``` vs ``` { remoteAddr: [Getter] } ``` It'll break any code trying to iterate through `info` keys (Doubt there's anyone doing it though) ```js Deno.serve((req, info) => { console.log(Object.keys(info).length === 0) // true; return new Response("yes"); }); --- ext/http/00_serve.js | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index af4353e0eb4dd9..3447f48e21415d 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -369,6 +369,16 @@ class CallbackContext { } } +class ServeHandlerInfo { + #inner = null; + constructor(inner) { + this.#inner = inner; + } + get remoteAddr() { + return this.#inner.remoteAddr; + } +} + function fastSyncResponseOrStream(req, respBody) { if (respBody === null || respBody === undefined) { // Don't set the body @@ -535,11 +545,10 @@ function mapToCallback(context, callback, onError) { if (hasOneCallback) { response = await callback(request); } else { - response = await callback(request, { - get remoteAddr() { - return innerRequest.remoteAddr; - }, - }); + response = await callback( + request, + new ServeHandlerInfo(innerRequest), + ); } } else { response = await callback(); From 5515cee53927fb1976657b36a6b760ebee7fbfca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 10 Aug 2023 20:19:20 +0200 Subject: [PATCH 04/60] fix(node): don't print warning on process.dlopen.flags (#20124) Closes https://github.com/denoland/deno/issues/20075 --- ext/node/polyfills/process.ts | 9 ++++----- test_napi/common.js | 4 +++- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index 2f1c2968f91a34..4c375760d504d6 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -292,11 +292,10 @@ function _kill(pid: number, sig: number): number { } } -// TODO(bartlomieju): flags is currently not supported. 
-export function dlopen(module, filename, flags) { - if (typeof flags !== "undefined") { - warnNotImplemented("process.dlopen doesn't support 'flags' argument"); - } +export function dlopen(module, filename, _flags) { + // NOTE(bartlomieju): _flags is currently ignored, but we don't warn for it + // as it makes DX bad, even though it might not be needed: + // https://github.com/denoland/deno/issues/20075 Module._extensions[".node"](module, filename); return module; } diff --git a/test_napi/common.js b/test_napi/common.js index 5ad0e9cf3c8439..ede045666984bb 100644 --- a/test_napi/common.js +++ b/test_napi/common.js @@ -21,6 +21,8 @@ export function loadTestLibrary() { // Internal, used in ext/node const module = {}; - process.dlopen(module, specifier); + // Pass some flag, it should be ignored, but make sure it doesn't print + // warnings. + process.dlopen(module, specifier, 0); return module.exports; } From d70c43f3eba00cfc32cf1e931677f19a0932cbe1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 10 Aug 2023 23:35:01 +0200 Subject: [PATCH 05/60] perf(http): use Cow<[u8]> for setting header (#20112) --- ext/http/http_next.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 3c3724924c6268..2e9b315ca14341 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -385,18 +385,23 @@ pub fn op_http_read_request_body( state.resource_table.add_rc(body_resource) } -#[op2] +#[op2(fast)] pub fn op_http_set_response_header( #[smi] slab_id: SlabId, - #[serde] name: ByteString, - #[serde] value: ByteString, + #[string(onebyte)] name: Cow<[u8]>, + #[string(onebyte)] value: Cow<[u8]>, ) { let mut http = slab_get(slab_id); let resp_headers = http.response().headers_mut(); // These are valid latin-1 strings let name = HeaderName::from_bytes(&name).unwrap(); - // SAFETY: These are valid latin-1 strings - let value = unsafe { HeaderValue::from_maybe_shared_unchecked(value) }; + let value = match value { + Cow::Borrowed(bytes) => HeaderValue::from_bytes(bytes).unwrap(), + // SAFETY: These are valid latin-1 strings + Cow::Owned(bytes_vec) => unsafe { + HeaderValue::from_maybe_shared_unchecked(bytes::Bytes::from(bytes_vec)) + }, + }; resp_headers.append(name, value); } From 8e060e7da8a34265597d23dd4c4d45348ffaf4f9 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 11 Aug 2023 13:04:23 +0530 Subject: [PATCH 06/60] fix(node): object keys in publicEncrypt (#20128) Fixes https://github.com/denoland/deno/issues/19935 --- .../unit_node/crypto/crypto_cipher_test.ts | 15 ++++++ ext/node/polyfills/internal/crypto/cipher.ts | 48 +++++++++++++++++-- ext/node/polyfills/internal/crypto/keys.ts | 2 +- 3 files changed, 60 insertions(+), 5 deletions(-) diff --git a/cli/tests/unit_node/crypto/crypto_cipher_test.ts b/cli/tests/unit_node/crypto/crypto_cipher_test.ts index a8a5130cf15d57..417f5035cc1e90 100644 --- a/cli/tests/unit_node/crypto/crypto_cipher_test.ts +++ b/cli/tests/unit_node/crypto/crypto_cipher_test.ts @@ -33,6 +33,21 @@ Deno.test({ }, }); +Deno.test({ + name: "rsa public encrypt (options) and private decrypt", + fn() { + const encrypted = crypto.publicEncrypt( + { key: Buffer.from(rsaPublicKey) }, + input, + ); + const decrypted = crypto.privateDecrypt( + Buffer.from(rsaPrivateKey), + Buffer.from(encrypted), + ); + assertEquals(decrypted, input); + }, +}); + Deno.test({ name: "rsa private encrypt and private decrypt", fn() { diff --git a/ext/node/polyfills/internal/crypto/cipher.ts 
b/ext/node/polyfills/internal/crypto/cipher.ts index e954562c58cc8e..3a8b41f0697aed 100644 --- a/ext/node/polyfills/internal/crypto/cipher.ts +++ b/ext/node/polyfills/internal/crypto/cipher.ts @@ -13,13 +13,26 @@ import { Buffer } from "node:buffer"; import { notImplemented } from "ext:deno_node/_utils.ts"; import type { TransformOptions } from "ext:deno_node/_stream.d.ts"; import { Transform } from "ext:deno_node/_stream.mjs"; -import { KeyObject } from "./keys.ts"; +import { + getArrayBufferOrView, + KeyObject, +} from "ext:deno_node/internal/crypto/keys.ts"; import type { BufferEncoding } from "ext:deno_node/_global.d.ts"; import type { BinaryLike, Encoding, } from "ext:deno_node/internal/crypto/types.ts"; import { getDefaultEncoding } from "ext:deno_node/internal/crypto/util.ts"; +import { + isAnyArrayBuffer, + isArrayBufferView, +} from "ext:deno_node/internal/util/types.ts"; + +function isStringOrBuffer(val) { + return typeof val === "string" || + isArrayBufferView(val) || + isAnyArrayBuffer(val); +} const { ops, encode } = globalThis.__bootstrap.core; @@ -355,24 +368,51 @@ export function privateEncrypt( privateKey: ArrayBufferView | string | KeyObject, buffer: ArrayBufferView | string | KeyObject, ): Buffer { + const { data } = prepareKey(privateKey); const padding = privateKey.padding || 1; - return ops.op_node_private_encrypt(privateKey, buffer, padding); + + buffer = getArrayBufferOrView(buffer, "buffer"); + return ops.op_node_private_encrypt(data, buffer, padding); } export function privateDecrypt( privateKey: ArrayBufferView | string | KeyObject, buffer: ArrayBufferView | string | KeyObject, ): Buffer { + const { data } = prepareKey(privateKey); const padding = privateKey.padding || 1; - return ops.op_node_private_decrypt(privateKey, buffer, padding); + + buffer = getArrayBufferOrView(buffer, "buffer"); + return ops.op_node_private_decrypt(data, buffer, padding); } export function publicEncrypt( publicKey: ArrayBufferView | string | KeyObject, buffer: ArrayBufferView | string | KeyObject, ): Buffer { + const { data } = prepareKey(publicKey); const padding = publicKey.padding || 1; - return ops.op_node_public_encrypt(publicKey, buffer, padding); + + buffer = getArrayBufferOrView(buffer, "buffer"); + return ops.op_node_public_encrypt(data, buffer, padding); +} + +function prepareKey(key) { + // TODO(@littledivy): handle these cases + // - node KeyObject + // - web CryptoKey + if (isStringOrBuffer(key)) { + return { data: getArrayBufferOrView(key, "key") }; + } else if (typeof key == "object") { + const { key: data, encoding } = key; + if (!isStringOrBuffer(data)) { + throw new TypeError("Invalid key type"); + } + + return { data: getArrayBufferOrView(data, "key", encoding) }; + } + + throw new TypeError("Invalid key type"); } export function publicDecrypt() { diff --git a/ext/node/polyfills/internal/crypto/keys.ts b/ext/node/polyfills/internal/crypto/keys.ts index f6e5cbb86c4911..be85b44a3e7454 100644 --- a/ext/node/polyfills/internal/crypto/keys.ts +++ b/ext/node/polyfills/internal/crypto/keys.ts @@ -39,7 +39,7 @@ import { forgivingBase64UrlEncode as encodeToBase64Url, } from "ext:deno_web/00_infra.js"; -const getArrayBufferOrView = hideStackFrames( +export const getArrayBufferOrView = hideStackFrames( ( buffer, name, From 3615afa21747d851acbb0759daefd8f9c240bfd6 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 11 Aug 2023 17:12:35 +0530 Subject: [PATCH 07/60] fix(ext/node): support dictionary option in zlib init (#20035) Fixes 
https://github.com/denoland/deno/issues/19540 --- cli/tests/unit_node/zlib_test.ts | 18 ++++++++++++++++++ ext/node/ops/zlib/mod.rs | 8 ++++++-- ext/node/polyfills/_zlib.mjs | 15 +++++++++++---- ext/node/polyfills/_zlib_binding.mjs | 2 +- 4 files changed, 36 insertions(+), 7 deletions(-) diff --git a/cli/tests/unit_node/zlib_test.ts b/cli/tests/unit_node/zlib_test.ts index 96d392d1db2ce7..6b09c50b0853e5 100644 --- a/cli/tests/unit_node/zlib_test.ts +++ b/cli/tests/unit_node/zlib_test.ts @@ -11,6 +11,7 @@ import { brotliDecompressSync, createBrotliCompress, createBrotliDecompress, + createDeflate, } from "node:zlib"; import { Buffer } from "node:buffer"; import { createReadStream, createWriteStream } from "node:fs"; @@ -60,3 +61,20 @@ Deno.test("brotli compression", async () => { // pass } }); + +Deno.test( + "zlib create deflate with dictionary", + { sanitizeResources: false }, + async () => { + const promise = deferred(); + const handle = createDeflate({ + dictionary: Buffer.alloc(0), + }); + + handle.on("close", () => promise.resolve()); + handle.end(); + handle.destroy(); + + await promise; + }, +); diff --git a/ext/node/ops/zlib/mod.rs b/ext/node/ops/zlib/mod.rs index 3d58d16f9476c1..2ddf6f2cd43b03 100644 --- a/ext/node/ops/zlib/mod.rs +++ b/ext/node/ops/zlib/mod.rs @@ -344,7 +344,7 @@ pub fn op_zlib_init( window_bits: i32, mem_level: i32, strategy: i32, - dictionary: Option<&[u8]>, + dictionary: &[u8], ) -> Result { let resource = zlib(state, handle)?; let mut zlib = resource.inner.borrow_mut(); @@ -373,7 +373,11 @@ pub fn op_zlib_init( zlib.init_stream()?; - zlib.dictionary = dictionary.map(|buf| buf.to_vec()); + zlib.dictionary = if !dictionary.is_empty() { + Some(dictionary.to_vec()) + } else { + None + }; Ok(zlib.err) } diff --git a/ext/node/polyfills/_zlib.mjs b/ext/node/polyfills/_zlib.mjs index f7336580007a62..a66ab6d0460da7 100644 --- a/ext/node/polyfills/_zlib.mjs +++ b/ext/node/polyfills/_zlib.mjs @@ -11,6 +11,10 @@ import util from "node:util"; import { ok as assert } from "node:assert"; import { zlib as zlibConstants } from "ext:deno_node/internal_binding/constants.ts"; import { nextTick } from "ext:deno_node/_next_tick.ts"; +import { + isAnyArrayBuffer, + isArrayBufferView, +} from "ext:deno_node/internal/util/types.ts"; var kRangeErrorMessage = "Cannot create final Buffer. It would be larger " + "than 0x" + kMaxLength.toString(16) + " bytes"; @@ -321,9 +325,12 @@ function Zlib(opts, mode) { } } - if (opts.dictionary) { - if (!Buffer.isBuffer(opts.dictionary)) { - throw new Error("Invalid dictionary: it should be a Buffer instance"); + let dictionary = opts.dictionary; + if (dictionary !== undefined && !isArrayBufferView(dictionary)) { + if (isAnyArrayBuffer(dictionary)) { + dictionary = Buffer.from(dictionary); + } else { + throw new TypeError("Invalid dictionary"); } } @@ -354,7 +361,7 @@ function Zlib(opts, mode) { level, opts.memLevel || zlibConstants.Z_DEFAULT_MEMLEVEL, strategy, - opts.dictionary, + dictionary, ); this._buffer = Buffer.allocUnsafe(this._chunkSize); diff --git a/ext/node/polyfills/_zlib_binding.mjs b/ext/node/polyfills/_zlib_binding.mjs index a04e7fed75d057..0b155cfd51657a 100644 --- a/ext/node/polyfills/_zlib_binding.mjs +++ b/ext/node/polyfills/_zlib_binding.mjs @@ -149,7 +149,7 @@ class Zlib { windowBits, memLevel, strategy, - dictionary, + dictionary ?? 
new Uint8Array(0), ); if (err != Z_OK) { From dd3d0f6d0be9da3b97b8cc556e5372e1c3e53efd Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 11 Aug 2023 17:27:41 +0530 Subject: [PATCH 08/60] fix(node): implement TLSSocket._start (#20120) Closes https://github.com/denoland/deno/issues/19983 Closes https://github.com/denoland/deno/issues/18303 Closes https://github.com/denoland/deno/issues/16681 Closes https://github.com/denoland/deno/issues/19978 --- cli/tests/unit_node/tls_test.ts | 32 +++++++++++++++++++ ext/node/polyfills/_tls_wrap.ts | 16 ++++++++-- .../polyfills/internal_binding/stream_wrap.ts | 17 +++++++++- 3 files changed, 62 insertions(+), 3 deletions(-) diff --git a/cli/tests/unit_node/tls_test.ts b/cli/tests/unit_node/tls_test.ts index 79c1e634ca0cb4..7a270c60b3fa4d 100644 --- a/cli/tests/unit_node/tls_test.ts +++ b/cli/tests/unit_node/tls_test.ts @@ -56,6 +56,38 @@ Connection: close await serve; }); +// https://github.com/denoland/deno/pull/20120 +Deno.test("tls.connect mid-read tcp->tls upgrade", async () => { + const ctl = new AbortController(); + const serve = serveTls(() => new Response("hello"), { + port: 8443, + key, + cert, + signal: ctl.signal, + }); + + await delay(200); + + const conn = tls.connect({ + host: "localhost", + port: 8443, + secureContext: { + ca: rootCaCert, + // deno-lint-ignore no-explicit-any + } as any, + }); + + conn.setEncoding("utf8"); + conn.write(`GET / HTTP/1.1\nHost: www.google.com\n\n`); + + conn.on("data", (_) => { + conn.destroy(); + ctl.abort(); + }); + + await serve; +}); + Deno.test("tls.createServer creates a TLS server", async () => { const p = deferred(); const server = tls.createServer( diff --git a/ext/node/polyfills/_tls_wrap.ts b/ext/node/polyfills/_tls_wrap.ts index 39df239d0d2867..416bd4136f8f31 100644 --- a/ext/node/polyfills/_tls_wrap.ts +++ b/ext/node/polyfills/_tls_wrap.ts @@ -26,6 +26,11 @@ import { import { EventEmitter } from "node:events"; import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; import { nextTick } from "ext:deno_node/_next_tick.ts"; +import { kHandle } from "ext:deno_node/internal/stream_base_commons.ts"; +import { + isAnyArrayBuffer, + isArrayBufferView, +} from "ext:deno_node/internal/util/types.ts"; const kConnectOptions = Symbol("connect-options"); const kIsVerified = Symbol("verified"); @@ -71,7 +76,11 @@ export class TLSSocket extends net.Socket { [kPendingSession]: any; [kConnectOptions]: any; ssl: any; - _start: any; + + _start() { + this[kHandle].afterConnect(); + } + constructor(socket: any, opts: any = kEmptyObject) { const tlsOptions = { ...opts }; @@ -84,6 +93,9 @@ export class TLSSocket extends net.Socket { let caCerts = tlsOptions?.secureContext?.ca; if (typeof caCerts === "string") caCerts = [caCerts]; + else if (isArrayBufferView(caCerts) || isAnyArrayBuffer(caCerts)) { + caCerts = [new TextDecoder().decode(caCerts)]; + } tlsOptions.caCerts = caCerts; super({ @@ -139,9 +151,9 @@ export class TLSSocket extends net.Socket { handle.afterConnect = async (req: any, status: number) => { try { const conn = await Deno.startTls(handle[kStreamBaseField], options); + handle[kStreamBaseField] = conn; tlssock.emit("secure"); tlssock.removeListener("end", onConnectEnd); - handle[kStreamBaseField] = conn; } catch { // TODO(kt3k): Handle this } diff --git a/ext/node/polyfills/internal_binding/stream_wrap.ts b/ext/node/polyfills/internal_binding/stream_wrap.ts index 528dd7c3f265a4..66ebbe682dfadb 100644 --- a/ext/node/polyfills/internal_binding/stream_wrap.ts +++ 
b/ext/node/polyfills/internal_binding/stream_wrap.ts @@ -314,9 +314,16 @@ export class LibuvStreamWrap extends HandleWrap { let buf = BUF; let nread: number | null; + const ridBefore = this[kStreamBaseField]!.rid; try { nread = await this[kStreamBaseField]!.read(buf); } catch (e) { + // Try to read again if the underlying stream resource + // changed. This can happen during TLS upgrades (eg. STARTTLS) + if (ridBefore != this[kStreamBaseField]!.rid) { + return this.#read(); + } + if ( e instanceof Deno.errors.Interrupted || e instanceof Deno.errors.BadResource @@ -365,15 +372,23 @@ export class LibuvStreamWrap extends HandleWrap { async #write(req: WriteWrap, data: Uint8Array) { const { byteLength } = data; + const ridBefore = this[kStreamBaseField]!.rid; + + let nwritten = 0; try { // TODO(crowlKats): duplicate from runtime/js/13_buffer.js - let nwritten = 0; while (nwritten < data.length) { nwritten += await this[kStreamBaseField]!.write( data.subarray(nwritten), ); } } catch (e) { + // Try to read again if the underlying stream resource + // changed. This can happen during TLS upgrades (eg. STARTTLS) + if (ridBefore != this[kStreamBaseField]!.rid) { + return this.#write(req, data.subarray(nwritten)); + } + let status: number; // TODO(cmorten): map err to status codes From bf9ddb7c2e9978af27ba517b391b03db94da46c0 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Sat, 12 Aug 2023 18:41:07 +0200 Subject: [PATCH 09/60] perf(ext/request): optimize Request constructor (#20141) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR optimizes `Request` constructor when `init` is not empty. This path is also used by `fetch` when `options` argument is used ```js fetch("https://deno.land", { method: "POST", body: 'land' }); ``` - Removed 3 extra calls to `headerListFromHeaders` - Avoid `Object.keys` & `headerList` clone if `init.headers` is set - Only empty `headersList` (`.splice`) if it's not already empty. 
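As a rough illustration of the bullet points above (not the actual implementation — the real change is in the `23_request.js` diff further down), the header handling now looks roughly like this. `headerList` and `init` mirror the names used in the patch; `fillHeadersFromInit` is a made-up name for this sketch:

```js
// Hedged sketch: copy the existing header list only when init.headers is
// absent, and clear the list only when it actually has entries.
function fillHeadersFromInit(headerList, init) {
  const headers = init.headers ?? headerList.slice(0, headerList.length);
  if (headerList.length !== 0) {
    headerList.splice(0, headerList.length);
  }
  return headers; // the caller then fills the Headers object from this
}
```
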
## Benchmarks **this patch** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------------------- ----------------------------- Request without headers 1.86 µs/iter 536,440.7 (1.67 µs … 2.76 µs) 1.89 µs 2.76 µs 2.76 µs Request with headers 1.96 µs/iter 509,440.5 (1.83 µs … 2.17 µs) 1.99 µs 2.17 µs 2.17 µs ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------------------- ----------------------------- Request without headers 1.96 µs/iter 510,201.5 (1.81 µs … 2.64 µs) 2 µs 2.64 µs 2.64 µs Request with headers 2.03 µs/iter 493,526.6 (1.84 µs … 2.31 µs) 2.08 µs 2.31 µs 2.31 µs ``` ```js Deno.bench("Request without headers", () => { const r = new Request("https://deno.land", { method: "POST", body: '{"foo": "bar"}', }); }); Deno.bench("Request with headers", () => { const r = new Request("https://deno.land", { method: "POST", body: '{"foo": "bar"}', headers: { "Content-Type": "application/json", }, }); }); ``` --- ext/fetch/23_request.js | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index afd3c1c50c77c0..cfdce01d3ea6bb 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -366,20 +366,16 @@ class Request { this[_headers] = headersFromHeaderList(request.headerList, "request"); // 32. - if (ObjectKeys(init).length > 0) { - let headers = ArrayPrototypeSlice( - headerListFromHeaders(this[_headers]), + if (init.headers || ObjectKeys(init).length > 0) { + const headerList = headerListFromHeaders(this[_headers]); + const headers = init.headers ?? ArrayPrototypeSlice( + headerList, 0, - headerListFromHeaders(this[_headers]).length, + headerList.length, ); - if (init.headers !== undefined) { - headers = init.headers; + if (headerList.length !== 0) { + ArrayPrototypeSplice(headerList, 0, headerList.length); } - ArrayPrototypeSplice( - headerListFromHeaders(this[_headers]), - 0, - headerListFromHeaders(this[_headers]).length, - ); fillHeaders(this[_headers], headers); } From aaa3608cc15208fcf7dfc9285cdd31ba1958ec9b Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Sat, 12 Aug 2023 18:42:06 +0200 Subject: [PATCH 10/60] perf(ext/headers): cache iterableHeaders for immutable Headers (#20132) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR caches `_iterableHeaders` for immutable `Headers` increasing the performance of `fetch` & server if headers are iterated. Should close #19466 I only cached immutable headers to address this comment https://github.com/denoland/deno/issues/19466#issuecomment-1589892373 since I didn't find any occurrence of header mutation on immutable headers. We can discuss caching for non-immutable, but I think this is a great first step. ## BENCHMARK ### Server ```js const addr = Deno.args[0] ?? 
"127.0.0.1:4500"; const [hostname, port] = addr.split(":"); const { serve } = Deno; serve({ hostname, port: Number(port), reusePort: true }, (req) => { const headers = [...req.headers]; // req.headers are immutable, cannot set/append/delete return new Response("ok"); }); ``` Used `wrk` with 5 headers ``` wrk -d 10s --latency -H "X-Deno: true" -H "Accept: application/json" -H "X-Foo: bar" -H "User-Agent: wrk" -H "Accept-Encoding: gzip, br" http://127.0.0.1:4500 ``` **This patch** ``` Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 70.18us 22.89us 679.00us 81.37% Req/Sec 71.55k 9.69k 82.18k 89.60% Latency Distribution 50% 59.00us 75% 89.00us 90% 98.00us 99% 159.00us 1437891 requests in 10.10s, 193.35MB read Requests/sec: 142369.83 Transfer/sec: 19.14MB ``` **main** ``` Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 112.78us 36.47us 2.09ms 77.99% Req/Sec 44.30k 1.65k 49.14k 74.26% Latency Distribution 50% 99.00us 75% 136.00us 90% 162.00us 99% 213.00us 890588 requests in 10.10s, 118.91MB read Requests/sec: 88176.37 Transfer/sec: 11.77MB ``` ### fetch ```js const res = await fetch('http://127.0.0.1:4500'); Deno.bench("Headers iterator", () => { const i = [...res.headers]; // res.headers are immutable, cannot set/append/delete }); ``` **this patch** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ---------------------------------------------------------------------- ----------------------------- Headers iterator 329.5 ns/iter 3,034,909.0 (318.55 ns … 364.34 ns) 331.1 ns 355.72 ns 364.34 ns ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ---------------------------------------------------------------------- ----------------------------- Headers iterator 2.59 µs/iter 386,372.1 (2.56 µs … 2.68 µs) 2.59 µs 2.68 µs 2.68 µs ``` --- ext/fetch/20_headers.js | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index fabd39c0e89026..9e8f994fed0bf5 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -44,6 +44,7 @@ const { const _headerList = Symbol("header list"); const _iterableHeaders = Symbol("iterable headers"); +const _iterableHeadersCache = Symbol("iterable headers cache"); const _guard = Symbol("guard"); /** @@ -229,6 +230,13 @@ class Headers { get [_iterableHeaders]() { const list = this[_headerList]; + if ( + this[_guard] === "immutable" && + this[_iterableHeadersCache] !== undefined + ) { + return this[_iterableHeadersCache]; + } + // The order of steps are not similar to the ones suggested by the // spec but produce the same result. 
const headers = {}; @@ -264,7 +272,7 @@ class Headers { ArrayPrototypePush(entries, cookies[i]); } - return ArrayPrototypeSort( + ArrayPrototypeSort( entries, (a, b) => { const akey = a[0]; @@ -274,6 +282,10 @@ class Headers { return 0; }, ); + + this[_iterableHeadersCache] = entries; + + return entries; } /** @param {HeadersInit} [init] */ From d0525dd692e7771d5742202db623e0f4a4fe750c Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Sat, 12 Aug 2023 18:42:23 +0200 Subject: [PATCH 11/60] perf(ext/headers): use regex.test instead of .exec (#20125) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR improves the performance of `Headers.get` by using `Regex.test` instead of `.exec`. Also replaced the `Map` used for caching with an object which is a bit faster **This patch** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------------- ----------------------------- Headers.get 124.71 ns/iter 8,018,687.3 (115.11 ns … 265.66 ns) 126.05 ns 136.12 ns 142.37 ns ``` **1.36.1** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.0 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ----------------------------------------------------------------------- ----------------------------- Headers.get 218.91 ns/iter 4,568,172.3 (165.37 ns … 264.44 ns) 241.62 ns 260.94 ns 262.67 ns ``` ```js const headers = new Headers({ "Content-Type": "application/json", "Date": "Thu, 10 Aug 2023 07:45:10 GMT", "X-Deno": "Deno", "Powered-By": "Deno", "Content-Encoding": "gzip", "Set-Cookie": "__Secure-ID=123; Secure; Domain=example.com", "Content-Length": "150", "Vary": "Accept-Encoding, Accept, X-Requested-With", }); Deno.bench("Headers.get", () => { headers.get("x-deno"); }); ``` --- ext/fetch/20_headers.js | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 9e8f994fed0bf5..929eaf816a5c90 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -28,12 +28,7 @@ const { ArrayPrototypeSplice, ObjectEntries, ObjectHasOwn, - RegExpPrototypeExec, - SafeMap, - MapPrototypeGet, - MapPrototypeHas, - MapPrototypeSet, - MapPrototypeClear, + RegExpPrototypeTest, Symbol, SymbolFor, SymbolIterator, @@ -102,19 +97,23 @@ function checkForInvalidValueChars(value) { return true; } -const HEADER_NAME_CACHE = new SafeMap(); +let HEADER_NAME_CACHE = {}; +let HEADER_CACHE_SIZE = 0; const HEADER_NAME_CACHE_SIZE_BOUNDARY = 4096; function checkHeaderNameForHttpTokenCodePoint(name) { - if (MapPrototypeHas(HEADER_NAME_CACHE, name)) { - return MapPrototypeGet(HEADER_NAME_CACHE, name); + const fromCache = HEADER_NAME_CACHE[name]; + if (fromCache !== undefined) { + return fromCache; } - const valid = RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) !== null; + const valid = RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name); - if (HEADER_NAME_CACHE.size > HEADER_NAME_CACHE_SIZE_BOUNDARY) { - MapPrototypeClear(HEADER_NAME_CACHE); + if (HEADER_CACHE_SIZE > HEADER_NAME_CACHE_SIZE_BOUNDARY) { + HEADER_NAME_CACHE = {}; + HEADER_CACHE_SIZE = 0; } - MapPrototypeSet(HEADER_NAME_CACHE, name, valid); + HEADER_CACHE_SIZE++; + HEADER_NAME_CACHE[name] = valid; return valid; } From 4e08665973106e1219659c275ef31cc5f3759f7f Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Sat, 12 Aug 2023 20:29:00 +0200 
Subject: [PATCH 12/60] perf(ext/request): optimize validate and normalize HTTP method (#20143) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR optimizes `Request` constructor init method step. It doubles the speed for known lowercased methods. I also added `PATCH` to known methods **this patch** ``` benchmark time (avg) iter/s (min … max) p75 p99 p995 ---------------------------------------------------------------------------- ----------------------------- method: GET 1.49 µs/iter 669,336.9 (1.35 µs … 2.02 µs) 1.54 µs 2.02 µs 2.02 µs method: PATCH 1.85 µs/iter 540,921.5 (1.65 µs … 2.02 µs) 1.91 µs 2.02 µs 2.02 µs method: get 1.49 µs/iter 669,067.9 (1.28 µs … 1.69 µs) 1.55 µs 1.69 µs 1.69 µs ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ---------------------------------------------------------------------------- ----------------------------- method: GET 1.5 µs/iter 665,232.3 (1.3 µs … 2.02 µs) 1.54 µs 2.02 µs 2.02 µs method: PATCH 2.47 µs/iter 404,052.7 (2.06 µs … 4.05 µs) 2.51 µs 4.05 µs 4.05 µs method: get 3 µs/iter 333,277.2 (2.72 µs … 4.04 µs) 3.05 µs 4.04 µs 4.04 µs ``` ```js Deno.bench("method: GET", () => { const r = new Request("https://deno.land", { method: "GET", }); }); Deno.bench("method: PATCH", () => { const r = new Request("https://deno.land", { method: "PATCH", body: '{"foo": "bar"}', }); }); Deno.bench("method: get", () => { const r = new Request("https://deno.land", { method: "get", }); }); ``` --- ext/fetch/23_request.js | 45 ++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index cfdce01d3ea6bb..5232cc13c96c97 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -202,31 +202,29 @@ function cloneInnerRequest(request, skipBody = false) { }; } -/** - * @param {string} m - * @returns {boolean} - */ -function isKnownMethod(m) { - return ( - m === "DELETE" || - m === "GET" || - m === "HEAD" || - m === "OPTIONS" || - m === "POST" || - m === "PUT" - ); -} +// method => normalized method +const KNOWN_METHODS = { + "DELETE": "DELETE", + "delete": "DELETE", + "GET": "GET", + "get": "GET", + "HEAD": "HEAD", + "head": "HEAD", + "OPTIONS": "OPTIONS", + "options": "OPTIONS", + "PATCH": "PATCH", + "patch": "PATCH", + "POST": "POST", + "post": "POST", + "PUT": "PUT", + "put": "PUT", +}; + /** * @param {string} m * @returns {string} */ function validateAndNormalizeMethod(m) { - // Fast path for well-known methods - if (isKnownMethod(m)) { - return m; - } - - // Regular path if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, m) === null) { throw new TypeError("Method is not valid."); } @@ -325,9 +323,10 @@ class Request { // 25. if (init.method !== undefined) { - let method = init.method; - method = validateAndNormalizeMethod(method); - request.method = method; + const method = init.method; + // fast path: check for known methods + request.method = KNOWN_METHODS[method] ?? + validateAndNormalizeMethod(method); } // 26. 
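A minimal sketch of the fast path introduced in the patch above (illustrative only — just a few entries are shown, `normalizeMethod` is a made-up wrapper name, and the fallback stands in for the real regex-based `validateAndNormalizeMethod` slow path):

```js
// A plain object maps both lowercase and uppercase spellings straight to the
// normalized method name, so known methods skip regex validation entirely.
const KNOWN_METHODS = { GET: "GET", get: "GET", POST: "POST", post: "POST" };

function normalizeMethod(m) {
  // Stand-in slow path; the real code validates with a token regex first.
  return KNOWN_METHODS[m] ?? m.toUpperCase();
}

console.log(normalizeMethod("get")); // "GET"
console.log(normalizeMethod("PATCH")); // "PATCH"
```
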
From 3abe95baf4309f7020b3bc82d26acd261d365ead Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sat, 12 Aug 2023 13:04:45 -0600 Subject: [PATCH 13/60] chore: deno_core -> 0.201.0 (#20135) --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 3 +-- cli/tests/testdata/node/test.out | 2 +- cli/tests/testdata/run/heapstats.js.out | 4 ++-- ext/io/fs.rs | 12 ++++-------- tools/wpt/expectation.json | 8 ++++++-- 6 files changed, 22 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d9c9eab18d9e5..31f069f0aee977 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,9 +997,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.200.0" +version = "0.202.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8ba264b90ceb6e95b39d82e674d8ecae86ca012f900338ea50d1a077d9d75fd" +checksum = "e9d4f3ad9c2861e0bb8745e1f228aaee04782a9ab6a3c3bbb887e60d7faf087a" dependencies = [ "anyhow", "bytes", @@ -1370,9 +1370,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.78.0" +version = "0.80.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffd1c83b1fd465ee0156f2917c9af9ca09fe2bf54052a2cae1a8dcbc7b89aefc" +checksum = "abb6a1ceabfbab1c29b32872e68ec994d393b58ccdf12a835d150199555496f3" dependencies = [ "deno-proc-macro-rules", "lazy-regex", @@ -4474,9 +4474,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.111.0" +version = "0.113.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "309b3060a9627882514f3a3ce3cc08ceb347a76aeeadc58f138c3f189cf88b71" +checksum = "3fa7b3ecd650d790ff8781402d0704d35a2f51c3bec87fe92d43eea6d371f05d" dependencies = [ "bytes", "derive_more", @@ -5903,9 +5903,9 @@ dependencies = [ [[package]] name = "v8" -version = "0.74.3" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eedac634b8dd39b889c5b62349cbc55913780226239166435c5cf66771792ea" +checksum = "f9be435abe79a8427b0969f1ac0a3c0e91644235f68a3de5da4a27ec69666985" dependencies = [ "bitflags 1.3.2", "fslock", diff --git a/Cargo.toml b/Cargo.toml index d404b34462b2d5..83495e13e57d40 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,10 +38,9 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.74.1", default-features = false } deno_ast = { version = "0.27.0", features = ["transpiling"] } -deno_core = "0.200.0" +deno_core = "0.202.0" deno_runtime = { version = "0.123.0", path = "./runtime" } napi_sym = { version = "0.45.0", path = "./cli/napi/sym" } diff --git a/cli/tests/testdata/node/test.out b/cli/tests/testdata/node/test.out index 8b7f0780f38ca3..3c54a15e891aae 100644 --- a/cli/tests/testdata/node/test.out +++ b/cli/tests/testdata/node/test.out @@ -147,7 +147,7 @@ error: Error: rejected from reject fail at [WILDCARD] ./node/test.js (uncaught error) -error: Error: rejected from unhandled rejection fail +error: (in promise) Error: rejected from unhandled rejection fail Promise.reject(new Error("rejected from unhandled rejection fail")); ^ at [WILDCARD] diff --git a/cli/tests/testdata/run/heapstats.js.out b/cli/tests/testdata/run/heapstats.js.out index 9542663331f730..b75a755f8e9bfe 100644 --- a/cli/tests/testdata/run/heapstats.js.out +++ b/cli/tests/testdata/run/heapstats.js.out @@ -1,2 +1,2 @@ -Allocated: 8MB -Freed: -8MB +Allocated: 4MB +Freed: -4MB diff --git a/ext/io/fs.rs b/ext/io/fs.rs index 1ebe0e7c1f0f3c..3c98bf861c698a 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -170,14 +170,10 @@ impl 
FsStat { pub trait File { fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult; async fn read(self: Rc, limit: usize) -> FsResult { - let vec = vec![0; limit]; - let buf = BufMutView::from(vec); - let (nread, buf) = self.read_byob(buf).await?; - let mut vec = buf.unwrap_vec(); - if vec.len() != nread { - vec.truncate(nread); - } - Ok(BufView::from(vec)) + let buf = BufMutView::new(limit); + let (nread, mut buf) = self.read_byob(buf).await?; + buf.truncate(nread); + Ok(buf.into_view()) } async fn read_byob( self: Rc, diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index b75183d89d90cf..289698c1e6c529 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -2807,8 +2807,12 @@ }, "class-string-interface.any.html": true, "class-string-interface.any.worker.html": true, - "class-string-iterator-prototype-object.any.html": true, - "class-string-iterator-prototype-object.any.worker.html": true, + "class-string-iterator-prototype-object.any.html": [ + "Object.prototype.toString applied after deleting @@toStringTag" + ], + "class-string-iterator-prototype-object.any.worker.html": [ + "Object.prototype.toString applied after deleting @@toStringTag" + ], "class-string-named-properties-object.window.html": false, "global-immutable-prototype.any.html": [ "Setting to a different prototype" From 337535f556ea539642e36e284cefb3b21d5f39b9 Mon Sep 17 00:00:00 2001 From: Kira Date: Sun, 13 Aug 2023 04:04:17 +0200 Subject: [PATCH 14/60] fix(cli) error gracefully when script arg is not present and `--v8-flags` is present in `deno run` (#20145) Fix #20022, fix #19627 (duplicate) #17333 upgraded clap from version 3.1 to version 4. clap version 3.2.0 (intentionally) broke a behavior that deno was relying on to make `deno run --v8-flags=--help` work without specifying a file, see clap-rs/clap#3793. The workaround was to make the script argument required _unless_ `--v8-flags` is present. This broke the expectation that all successfully parsed `run` commands have the script argument set, leading to the panic on `matches.remove_many::("script_arg").unwrap()`. Clap, as far as I was able to find out, does not currently offer a neat solution to this problem. This PR adds logic to create and return a custom clap error when a parsed run command does not have the script argument. I added an appropriate test. 
--- cli/args/flags.rs | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/cli/args/flags.rs b/cli/args/flags.rs index fdfb65f6295a7f..bd3740bdb54c33 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -831,7 +831,7 @@ pub fn flags_from_vec(args: Vec) -> clap::error::Result { "lint" => lint_parse(&mut flags, &mut m), "lsp" => lsp_parse(&mut flags, &mut m), "repl" => repl_parse(&mut flags, &mut m), - "run" => run_parse(&mut flags, &mut m), + "run" => run_parse(&mut flags, &mut m, app)?, "task" => task_parse(&mut flags, &mut m), "test" => test_parse(&mut flags, &mut m), "types" => types_parse(&mut flags, &mut m), @@ -3245,10 +3245,22 @@ fn repl_parse(flags: &mut Flags, matches: &mut ArgMatches) { ); } -fn run_parse(flags: &mut Flags, matches: &mut ArgMatches) { +fn run_parse( + flags: &mut Flags, + matches: &mut ArgMatches, + app: Command, +) -> clap::error::Result<()> { runtime_args_parse(flags, matches, true, true); - let mut script_arg = matches.remove_many::("script_arg").unwrap(); + let mut script_arg = + matches.remove_many::("script_arg").ok_or_else(|| { + let mut app = app; + let subcommand = &mut app.find_subcommand_mut("run").unwrap(); + subcommand.error( + clap::error::ErrorKind::MissingRequiredArgument, + "[SCRIPT_ARG] may only be omitted with --v8-flags=--help", + ) + })?; let script = script_arg.next().unwrap(); flags.argv.extend(script_arg); @@ -3259,6 +3271,8 @@ fn run_parse(flags: &mut Flags, matches: &mut ArgMatches) { script, watch: watch_arg_parse_with_paths(matches), }); + + Ok(()) } fn task_parse(flags: &mut Flags, matches: &mut ArgMatches) { @@ -3996,6 +4010,12 @@ mod tests { ..Flags::default() } ); + + let r = flags_from_vec(svec!["deno", "run", "--v8-flags=--expose-gc"]); + assert!(r + .unwrap_err() + .to_string() + .contains("[SCRIPT_ARG] may only be omitted with --v8-flags=--help")); } #[test] From 049aebee34449e78c4c2174b374398df63e9d4b1 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Mon, 14 Aug 2023 15:14:02 +0200 Subject: [PATCH 15/60] perf(ext/node): cache `IncomingMessageForServer.headers` (#20147) This PR adds caching to node's `req.headers` ```js import express from "npm:express"; const app = express(); app.get("/", function (req, res) { const ua = req.header("User-Agent"); const auth = req.header("Authorization"); const type = req.header("Content-Type"); const ip = req.header("X-Forwarded-For"); res.end(); }); app.listen(3000); ``` **this PR** ``` wrk -d 10s --latency http://127.0.0.1:3000 Running 10s test @ http://127.0.0.1:3000 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 155.64us 152.14us 5.74ms 97.39% Req/Sec 35.00k 1.97k 39.10k 80.69% Latency Distribution 50% 123.00us 75% 172.00us 90% 214.00us 99% 563.00us 703420 requests in 10.10s, 50.31MB read Requests/sec: 69648.45 Transfer/sec: 4.98MB ``` **main** ``` wrk -d 10s --latency http://127.0.0.1:3000 Running 10s test @ http://127.0.0.1:3000 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 217.95us 786.89us 26.26ms 98.23% Req/Sec 32.32k 2.54k 37.19k 87.13% Latency Distribution 50% 130.00us 75% 191.00us 90% 232.00us 99% 1.88ms 649411 requests in 10.10s, 46.45MB read Requests/sec: 64300.44 Transfer/sec: 4.60MB ``` --- ext/node/polyfills/http.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index b34c45f9c9586b..2d80c2cd9924a8 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -1438,6 +1438,7 @@ 
export class ServerResponse extends NodeWritable { // TODO(@AaronO): optimize export class IncomingMessageForServer extends NodeReadable { #req: Request; + #headers: Record; url: string; method: string; // Polyfills part of net.Socket object. @@ -1484,7 +1485,10 @@ export class IncomingMessageForServer extends NodeReadable { } get headers() { - return Object.fromEntries(this.#req.headers.entries()); + if (!this.#headers) { + this.#headers = Object.fromEntries(this.#req.headers.entries()); + } + return this.#headers; } get upgrade(): boolean { From e77d55839d0df36f9eac644b61b3795a0e80ae6b Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Mon, 14 Aug 2023 21:13:55 +0200 Subject: [PATCH 16/60] perf(ext/headers): optimize headers iterable (#20155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes more optimizations to headers iterable by removing `ObjectEntries` which was consistently prominent in the flame graph when benchmarking an express server. **this PR** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ------------------------------------------------------------------ ----------------------------- headers iter 9.6 µs/iter 104,134.1 (8.74 µs … 131.31 µs) 9.47 µs 12.61 µs 17.81 µs ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ------------------------------------------------------------------ ----------------------------- headers iter 12.87 µs/iter 77,675.9 (11.97 µs … 132.34 µs) 12.76 µs 16.49 µs 26.4 µs ``` ```js const headers = new Headers({ "Content-Type": "application/json", "X-Content-Type": "application/json", "Date": "Thu, 14 Aug 2023 17:45:10 GMT", "X-Deno": "Deno", "Powered-By": "Deno", "Content-Encoding": "gzip", "Set-Cookie": "__Secure-ID=123; Secure; Domain=example.com", "Content-Length": "150", "Vary": "Accept-Encoding, Accept, X-Requested-With", }); Deno.bench('headers iter', () => { [...headers] }) ``` --- ext/fetch/20_headers.js | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 929eaf816a5c90..4e1729e01ca12f 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -26,7 +26,6 @@ const { ArrayPrototypeSort, ArrayPrototypeJoin, ArrayPrototypeSplice, - ObjectEntries, ObjectHasOwn, RegExpPrototypeTest, Symbol, @@ -238,8 +237,8 @@ class Headers { // The order of steps are not similar to the ones suggested by the // spec but produce the same result. - const headers = {}; - const cookies = []; + const seenHeaders = {}; + const entries = []; for (let i = 0; i < list.length; ++i) { const entry = list[i]; const name = byteLowerCase(entry[0]); @@ -250,27 +249,25 @@ class Headers { // so must be given to the user as multiple headers. // The else block of the if statement is spec compliant again. if (name === "set-cookie") { - ArrayPrototypePush(cookies, [name, value]); + ArrayPrototypePush(entries, [name, value]); } else { // The following code has the same behaviour as getHeader() // at the end of loop. But it avoids looping through the entire // list to combine multiple values with same header name. It // instead gradually combines them as they are found. 
- let header = headers[name]; - if (header && header.length > 0) { - header += "\x2C\x20" + value; + const seenHeaderIndex = seenHeaders[name]; + if (seenHeaderIndex !== undefined) { + const entryValue = entries[seenHeaderIndex][1]; + entries[seenHeaderIndex][1] = entryValue.length > 0 + ? entryValue + "\x2C\x20" + value + : value; } else { - header = value; + seenHeaders[name] = entries.length; // store header index in entries array + ArrayPrototypePush(entries, [name, value]); } - headers[name] = header; } } - const entries = ObjectEntries(headers); - for (let i = 0; i < cookies.length; ++i) { - ArrayPrototypePush(entries, cookies[i]); - } - ArrayPrototypeSort( entries, (a, b) => { From 4001fad25fd421a8f5d53895361ff9326f7aabcd Mon Sep 17 00:00:00 2001 From: Evan <96965321+0xIchigo@users.noreply.github.com> Date: Mon, 14 Aug 2023 20:11:12 -0400 Subject: [PATCH 17/60] fix(ext/net): implement a graceful error on an invalid SSL certificate (#20157) The goal of this PR is to address issue #19520 where Deno panics when encountering an invalid SSL certificate. This PR achieves that goal by removing an `.expect()` statement and implementing a match statement on `tsl_config` (found in [/ext/net/ops_tsl.rs](https://github.com/denoland/deno/blob/e071382768fa57b5288a6a5ba90e73bf5870b169/ext/net/ops_tls.rs#L1058)) to check whether the desired configuration is valid --------- Co-authored-by: Matt Mastracci --- cli/tests/testdata/tls/invalid.crt | 3 +++ cli/tests/testdata/tls/invalid.key | 3 +++ cli/tests/unit/tls_test.ts | 28 ++++++++++++++++++++++++++++ ext/net/ops_tls.rs | 8 +++++++- 4 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 cli/tests/testdata/tls/invalid.crt create mode 100644 cli/tests/testdata/tls/invalid.key diff --git a/cli/tests/testdata/tls/invalid.crt b/cli/tests/testdata/tls/invalid.crt new file mode 100644 index 00000000000000..688e32ede7d079 --- /dev/null +++ b/cli/tests/testdata/tls/invalid.crt @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- +INVALID +-----END CERTIFICATE----- diff --git a/cli/tests/testdata/tls/invalid.key b/cli/tests/testdata/tls/invalid.key new file mode 100644 index 00000000000000..b57bc2f68e0caa --- /dev/null +++ b/cli/tests/testdata/tls/invalid.key @@ -0,0 +1,3 @@ +-----BEGIN PRIVATE KEY----- +INVALID +-----END PRIVATE KEY----- diff --git a/cli/tests/unit/tls_test.ts b/cli/tests/unit/tls_test.ts index 1f0702f623f2e8..8162c53b564bc3 100644 --- a/cli/tests/unit/tls_test.ts +++ b/cli/tests/unit/tls_test.ts @@ -1491,3 +1491,31 @@ Deno.test({ }); listener.close(); }); + +Deno.test( + { permissions: { net: true, read: true } }, + function listenTLSInvalidCert() { + assertThrows(() => { + Deno.listenTls({ + hostname: "localhost", + port: 3500, + certFile: "cli/tests/testdata/tls/invalid.crt", + keyFile: "cli/tests/testdata/tls/localhost.key", + }); + }, Deno.errors.InvalidData); + }, +); + +Deno.test( + { permissions: { net: true, read: true } }, + function listenTLSInvalidKey() { + assertThrows(() => { + Deno.listenTls({ + hostname: "localhost", + port: 3500, + certFile: "cli/tests/testdata/tls/localhost.crt", + keyFile: "cli/tests/testdata/tls/invalid.key", + }); + }, Deno.errors.InvalidData); + }, +); diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index ac9c80f7a5cc3a..7b1cb4e0a5ca8b 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -1055,7 +1055,13 @@ where .with_safe_defaults() .with_no_client_auth() .with_single_cert(cert_chain, key_der) - .expect("invalid key or certificate"); + .map_err(|e| { + custom_error( + "InvalidData", 
+ format!("Error creating TLS certificate: {:?}", e), + ) + })?; + if let Some(alpn_protocols) = args.alpn_protocols { tls_config.alpn_protocols = alpn_protocols.into_iter().map(|s| s.into_bytes()).collect(); From 6a5b02bc791a4f492ee9991bf4622c04a80cc891 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 15 Aug 2023 09:10:54 +0200 Subject: [PATCH 18/60] fix(require): use canonicalized path for loading content (#20133) --- ext/node/polyfills/01_require.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 092e7490a39c1c..eb845f23755c39 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -833,7 +833,9 @@ Module._resolveFilename = function ( isMain, parentPath, ); - if (filename) return filename; + if (filename) { + return ops.op_require_real_path(filename); + } const requireStack = []; for (let cursor = parent; cursor; cursor = moduleParentCache.get(cursor)) { ArrayPrototypePush(requireStack, cursor.filename || cursor.id); @@ -891,7 +893,7 @@ Module.prototype.load = function (filename) { ); } - Module._extensions[extension](this, filename); + Module._extensions[extension](this, this.filename); this.loaded = true; // TODO: do caching From 7895d154c37addadf3f900060956da6d76f9067b Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Tue, 15 Aug 2023 09:21:02 +0200 Subject: [PATCH 19/60] fix(ext/fetch): clone second branch chunks in Body.clone() (#20057) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes `Body.clone()` spec compliant: https://fetch.spec.whatwg.org/#concept-body-clone > 1, Let « out1, out2 » be the result of [teeing](https://streams.spec.whatwg.org/#readablestream-tee) body’s [stream](https://fetch.spec.whatwg.org/#concept-body-stream). > ... > To tee a [ReadableStream](https://streams.spec.whatwg.org/#readablestream) stream, return ? [ReadableStreamTee](https://streams.spec.whatwg.org/#readable-stream-tee)(stream, true). 
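As a quick illustration of why the clone path cannot reuse a plain `tee()`, the snippet below (a standalone sketch, not the internal `readableStreamTee` implementation) shows that ordinary teeing hands the same chunk object to both branches, so mutations through one body would be observable through its clone; teeing with `cloneForBranch2 = true` gives the second branch `structuredClone`'d copies instead.

```js
// Plain ReadableStream#tee() corresponds to cloneForBranch2 = false: both
// branches receive a reference to the very same chunk object.
const stream = new ReadableStream({
  start(controller) {
    controller.enqueue(new Uint8Array([1, 2, 3]));
    controller.close();
  },
});

const [branch1, branch2] = stream.tee();
const chunk1 = (await branch1.getReader().read()).value;
const chunk2 = (await branch2.getReader().read()).value;

console.log(chunk1 === chunk2); // true — the same object in both branches
chunk1[0] = 99;
console.log(chunk2[0]); // 99 — the "other" branch observes the mutation

// Body.clone() therefore tees with cloneForBranch2 = true, so the second
// branch gets structuredClone'd chunks and the two bodies stay independent.
```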
--- Closes #10994 --- ext/fetch/22_body.js | 3 ++- ext/web/06_streams.js | 21 +++++++++++++++++++-- tools/wpt/expectation.json | 32 ++------------------------------ 3 files changed, 23 insertions(+), 33 deletions(-) diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 9fe00b14458033..644b9f76f8874a 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -33,6 +33,7 @@ import { readableStreamCollectIntoUint8Array, readableStreamDisturb, ReadableStreamPrototype, + readableStreamTee, readableStreamThrowIfErrored, } from "ext:deno_web/06_streams.js"; const primordials = globalThis.__bootstrap.primordials; @@ -194,7 +195,7 @@ class InnerBody { * @returns {InnerBody} */ clone() { - const { 0: out1, 1: out2 } = this.stream.tee(); + const { 0: out1, 1: out2 } = readableStreamTee(this.stream, true); this.streamOrStatic = out1; const second = new InnerBody(out2); second.source = core.deserialize(core.serialize(this.source)); diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index beab2ec1228c4a..01f84aa2cfa486 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -9,6 +9,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; +import { structuredClone } from "ext:deno_web/02_structured_clone.js"; import { AbortSignalPrototype, add, @@ -2847,9 +2848,24 @@ function readableStreamDefaultTee(stream, cloneForBranch2) { queueMicrotask(() => { readAgain = false; const value1 = value; - const value2 = value; + let value2 = value; - // TODO(lucacasonato): respect clonedForBranch2. + if (canceled2 === false && cloneForBranch2 === true) { + try { + value2 = structuredClone(value2); + } catch (cloneError) { + readableStreamDefaultControllerError( + branch1[_controller], + cloneError, + ); + readableStreamDefaultControllerError( + branch2[_controller], + cloneError, + ); + cancelPromise.resolve(readableStreamCancel(stream, cloneError)); + return; + } + } if (canceled1 === false) { readableStreamDefaultControllerEnqueue( @@ -6464,6 +6480,7 @@ export { readableStreamForRidUnrefableRef, readableStreamForRidUnrefableUnref, ReadableStreamPrototype, + readableStreamTee, readableStreamThrowIfErrored, TransformStream, TransformStreamDefaultController, diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 289698c1e6c529..86153084193107 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -4382,36 +4382,8 @@ "response-from-stream.any.worker.html": true, "response-cancel-stream.any.html": true, "response-cancel-stream.any.worker.html": true, - "response-clone.any.html": [ - "Check response clone use structureClone for teed ReadableStreams (Int8Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Int16Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Int32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (ArrayBufferchunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint8Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint8ClampedArraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint16Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (BigInt64Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (BigUint64Arraychunk)", - "Check response clone use 
structureClone for teed ReadableStreams (Float32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Float64Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (DataViewchunk)" - ], - "response-clone.any.worker.html": [ - "Check response clone use structureClone for teed ReadableStreams (Int8Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Int16Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Int32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (ArrayBufferchunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint8Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint8ClampedArraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint16Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Uint32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (BigInt64Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (BigUint64Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Float32Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (Float64Arraychunk)", - "Check response clone use structureClone for teed ReadableStreams (DataViewchunk)" - ], + "response-clone.any.html": true, + "response-clone.any.worker.html": true, "response-consume-empty.any.html": [ "Consume empty FormData response body as text" ], From 578936b4909a08ed28611961c2cbccabcd522a35 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Tue, 15 Aug 2023 16:59:35 +0200 Subject: [PATCH 20/60] perf(ext/node): optimize http headers (#20163) This PR optimizes Node's `IncomingMessageForServer.headers` by replacing `Object.fromEntries()` with a loop and `headers.entries` with `headersEntries` which returns the internal array directly instead of an iterator ## Benchmarks Using `wrk` with 5 headers ``` wrk -d 10s --latency -H "X-Deno: true" -H "Accept: application/json" -H "X-Foo: bar" -H "User-Agent: wrk" -H "Accept-Encoding: gzip, br" http://127.0.0.1:3000 ``` **this PR** ``` Running 10s test @ http://127.0.0.1:3000 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 167.53us 136.89us 2.75ms 97.33% Req/Sec 31.98k 1.38k 36.39k 70.30% Latency Distribution 50% 134.00us 75% 191.00us 90% 234.00us 99% 544.00us 642548 requests in 10.10s, 45.96MB read Requests/sec: 63620.36 Transfer/sec: 4.55MB ``` **main** ``` Running 10s test @ http://127.0.0.1:3000 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 181.31us 132.54us 3.79ms 97.13% Req/Sec 29.21k 1.45k 32.93k 79.21% Latency Distribution 50% 148.00us 75% 198.00us 90% 261.00us 99% 545.00us 586939 requests in 10.10s, 41.98MB read Requests/sec: 58114.01 Transfer/sec: 4.16MB ``` ```js import express from "npm:express"; const app = express(); app.get("/", function (req, res) { req.headers; res.end(); }); app.listen(3000); ``` --- ext/fetch/20_headers.js | 9 +++++++++ ext/node/polyfills/http.ts | 8 +++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 4e1729e01ca12f..39127b1ecf2e2b 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -508,6 +508,14 @@ function guardFromHeaders(headers) { return headers[_guard]; } +/** + * @param {Headers} headers + * @returns {[string, string][]} + 
*/ +function headersEntries(headers) { + return headers[_iterableHeaders]; +} + export { fillHeaders, getDecodeSplitHeader, @@ -515,5 +523,6 @@ export { guardFromHeaders, headerListFromHeaders, Headers, + headersEntries, headersFromHeaderList, }; diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 2d80c2cd9924a8..609a046ac833e2 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -55,6 +55,7 @@ import { import { getTimerDuration } from "ext:deno_node/internal/timers.mjs"; import { serve, upgradeHttpRaw } from "ext:deno_http/00_serve.js"; import { createHttpClient } from "ext:deno_fetch/22_http_client.js"; +import { headersEntries } from "ext:deno_fetch/20_headers.js"; import { timerId } from "ext:deno_web/03_abort_signal.js"; import { clearTimeout as webClearTimeout } from "ext:deno_web/02_timers.js"; import { TcpConn } from "ext:deno_net/01_net.js"; @@ -1486,7 +1487,12 @@ export class IncomingMessageForServer extends NodeReadable { get headers() { if (!this.#headers) { - this.#headers = Object.fromEntries(this.#req.headers.entries()); + this.#headers = {}; + const entries = headersEntries(this.#req.headers); + for (let i = 0; i < entries.length; i++) { + const entry = entries[i]; + this.#headers[entry[0]] = entry[1]; + } } return this.#headers; } From 3ba09eacd2e46e7c823524c309032b21f57381dc Mon Sep 17 00:00:00 2001 From: await-ovo <13152410380@163.com> Date: Tue, 15 Aug 2023 23:04:36 +0800 Subject: [PATCH 21/60] fix(runtime): navigator.userAgent in web worker (#20129) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes https://github.com/denoland/deno/issues/20079 --------- Co-authored-by: Bartek Iwańczuk --- cli/tests/testdata/workers/test.ts | 20 +++++++++++ .../testdata/workers/worker_navigator.ts | 11 ++++++ runtime/js/98_global_scope.js | 36 +++++++++++-------- runtime/js/99_main.js | 3 +- tools/wpt/expectation.json | 9 ++++- 5 files changed, 63 insertions(+), 16 deletions(-) create mode 100644 cli/tests/testdata/workers/worker_navigator.ts diff --git a/cli/tests/testdata/workers/test.ts b/cli/tests/testdata/workers/test.ts index 506bb4c312064e..90cc6a649a277e 100644 --- a/cli/tests/testdata/workers/test.ts +++ b/cli/tests/testdata/workers/test.ts @@ -119,6 +119,26 @@ Deno.test({ }, }); +Deno.test({ + name: "worker navigator", + fn: async function () { + const workerOptions: WorkerOptions = { type: "module" }; + const w = new Worker( + import.meta.resolve("./worker_navigator.ts"), + workerOptions, + ); + + const promise = deferred(); + w.onmessage = (e) => { + promise.resolve(e.data); + }; + + w.postMessage("Hello, world!"); + assertEquals(await promise, "string, object, string, number"); + w.terminate(); + }, +}); + Deno.test({ name: "worker fetch API", fn: async function () { diff --git a/cli/tests/testdata/workers/worker_navigator.ts b/cli/tests/testdata/workers/worker_navigator.ts new file mode 100644 index 00000000000000..bd364a8f947e27 --- /dev/null +++ b/cli/tests/testdata/workers/worker_navigator.ts @@ -0,0 +1,11 @@ +onmessage = function () { + postMessage( + [ + typeof navigator.language, + typeof navigator.languages, + typeof navigator.userAgent, + typeof navigator.hardwareConcurrency, + ].join(", "), + ); + close(); +}; diff --git a/runtime/js/98_global_scope.js b/runtime/js/98_global_scope.js index 1084f5c2482965..c916ef819f7944 100644 --- a/runtime/js/98_global_scope.js +++ b/runtime/js/98_global_scope.js @@ -225,21 +225,29 @@ ObjectDefineProperties(WorkerNavigator.prototype, { 
webidl.assertBranded(this, WorkerNavigatorPrototype); return numCpus; }, - language: { - configurable: true, - enumerable: true, - get() { - webidl.assertBranded(this, WorkerNavigatorPrototype); - return language; - }, + }, + userAgent: { + configurable: true, + enumerable: true, + get() { + webidl.assertBranded(this, WorkerNavigatorPrototype); + return userAgent; + }, + }, + language: { + configurable: true, + enumerable: true, + get() { + webidl.assertBranded(this, WorkerNavigatorPrototype); + return language; }, - languages: { - configurable: true, - enumerable: true, - get() { - webidl.assertBranded(this, WorkerNavigatorPrototype); - return [language]; - }, + }, + languages: { + configurable: true, + enumerable: true, + get() { + webidl.assertBranded(this, WorkerNavigatorPrototype); + return [language]; }, }, }); diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index dc68396469c1c7..c8fdabc258e1c1 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -567,7 +567,7 @@ function bootstrapWorkerRuntime( 10: pid, 11: target, 12: v8Version, - // 13: userAgent, + 13: userAgent, // 14: inspectFlag, 15: enableTestingFeaturesFlag, } = runtimeOptions; @@ -633,6 +633,7 @@ function bootstrapWorkerRuntime( location.setLocationHref(location_); setNumCpus(cpuCount); + setUserAgent(userAgent); setLanguage(locale); globalThis.pollForMessages = pollForMessages; diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 86153084193107..326b7f55d3b9fe 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -6177,7 +6177,14 @@ "taintEnabled", "oscpu" ], - "navigator.any.worker.html": false, + "navigator.any.worker.html": [ + "appCodeName", + "appName", + "appVersion", + "platform", + "product", + "userAgent value" + ], "per-global.window.html": false } } From 851e08bd1fff078f4c5587fc30f4c90ec8842678 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 15 Aug 2023 13:36:36 -0600 Subject: [PATCH 22/60] feat(ext/node): eagerly bootstrap node (#20153) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit To fix bugs around detection of when node emulation is required, we will just eagerly initialize it. 
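Eager bootstrap would normally mean paying Node's stdio setup cost on every start, which is why the checklist below makes `process.stdin`/`stdout`/`stderr` lazy. A rough standalone sketch of that lazy-Proxy pattern follows, with a hypothetical `makeLazy` helper standing in for the `makeLazyStream` proxy added in this patch:

```js
// Defer constructing an expensive object until something actually touches it.
// `makeLazy` is an illustrative helper, not the internal implementation.
function makeLazy(factory) {
  let instance;
  const real = () => instance ??= factory();
  return new Proxy({}, {
    get: (_t, prop) => Reflect.get(real(), prop),
    set: (_t, prop, value) => Reflect.set(real(), prop, value),
    has: (_t, prop) => Reflect.has(real(), prop),
    ownKeys: () => Reflect.ownKeys(real()),
    getOwnPropertyDescriptor: (_t, prop) =>
      Reflect.getOwnPropertyDescriptor(real(), prop),
  });
}

const lazyStdout = makeLazy(() => {
  console.log("constructing the real stream now");
  return { write: (s) => s.length };
});

// No construction cost has been paid yet; the first access pays it once.
console.log(lazyStdout.write("hello")); // logs the construction line, then 5
```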
The improvements we make to reduce the impact of the startup time: - [x] Process stdin/stdout/stderr are lazily created - [x] node.js global proxy no longer allocates on each access check - [x] Process checks for `beforeExit` listeners before doing expensive shutdown work - [x] Process should avoid adding global event handlers until listeners are added Benchmarking this PR (`89de7e1ff`) vs main (`41cad2179`) ``` 12:36 $ third_party/prebuilt/mac/hyperfine --warmup 100 -S none './deno-41cad2179 run ./empty.js' './deno-89de7e1ff run ./empty.js' Benchmark 1: ./deno-41cad2179 run ./empty.js Time (mean ± σ): 24.3 ms ± 1.6 ms [User: 16.2 ms, System: 6.0 ms] Range (min … max): 21.1 ms … 29.1 ms 115 runs Benchmark 2: ./deno-89de7e1ff run ./empty.js Time (mean ± σ): 24.0 ms ± 1.4 ms [User: 16.3 ms, System: 5.6 ms] Range (min … max): 21.3 ms … 28.6 ms 126 runs ``` Fixes https://github.com/denoland/deno/issues/20142 Fixes https://github.com/denoland/deno/issues/15826 Fixes https://github.com/denoland/deno/issues/20028 --- cli/factory.rs | 10 -- cli/npm/resolvers/mod.rs | 5 - cli/standalone/mod.rs | 10 -- cli/tools/repl/session.rs | 14 -- cli/worker.rs | 78 ++-------- ext/node/global.rs | 6 +- ext/node/lib.rs | 24 --- ext/node/polyfills/02_init.js | 4 + ext/node/polyfills/process.ts | 266 +++++++++++++++++++++++++--------- ext/node/resolution.rs | 7 +- runtime/js/99_main.js | 19 ++- runtime/ops/worker_host.rs | 25 ---- runtime/web_worker.rs | 36 +---- runtime/worker.rs | 14 +- runtime/worker_bootstrap.rs | 204 +++++++++++--------------- 15 files changed, 323 insertions(+), 399 deletions(-) diff --git a/cli/factory.rs b/cli/factory.rs index 9c553620dfb713..6a99bb2da94604 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -44,7 +44,6 @@ use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; -use crate::worker::HasNodeSpecifierChecker; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; @@ -623,7 +622,6 @@ impl CliFactory { StorageKeyResolver::from_options(&self.options), self.npm_resolver().await?.clone(), node_resolver.clone(), - Box::new(CliHasNodeSpecifierChecker(self.graph_container().clone())), self.blob_store().clone(), Box::new(CliModuleLoaderFactory::new( &self.options, @@ -683,11 +681,3 @@ impl CliFactory { }) } } - -struct CliHasNodeSpecifierChecker(Arc); - -impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { - fn has_node_specifier(&self) -> bool { - self.0.graph().has_node_specifier - } -} diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 9ae84d7f90810b..1b3f57c9a3cb5a 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -179,11 +179,6 @@ impl CliNpmResolver { specifier.as_ref().starts_with(root_dir_url.as_str()) } - /// If the resolver has resolved any npm packages. - pub fn has_packages(&self) -> bool { - self.resolution.has_packages() - } - /// Adds package requirements to the resolver and ensures everything is setup. 
pub async fn add_package_reqs( &self, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 92aba6d294ce5d..f6c489487da229 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -26,7 +26,6 @@ use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; -use crate::worker::HasNodeSpecifierChecker; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; use deno_core::anyhow::Context; @@ -266,14 +265,6 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { } } -struct StandaloneHasNodeSpecifierChecker; - -impl HasNodeSpecifierChecker for StandaloneHasNodeSpecifierChecker { - fn has_node_specifier(&self) -> bool { - false - } -} - struct StandaloneRootCertStoreProvider { ca_stores: Option>, ca_data: Option, @@ -438,7 +429,6 @@ pub async fn run( StorageKeyResolver::empty(), npm_resolver.clone(), node_resolver, - Box::new(StandaloneHasNodeSpecifierChecker), Default::default(), Box::new(module_loader_factory), root_cert_store_provider, diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 4a30c93c4406e8..9261299dfa49e4 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -25,7 +25,6 @@ use deno_core::serde_json; use deno_core::serde_json::Value; use deno_core::LocalInspectorSession; use deno_graph::source::Resolver; -use deno_runtime::deno_node; use deno_runtime::worker::MainWorker; use deno_semver::npm::NpmPackageReqReference; use once_cell::sync::Lazy; @@ -123,7 +122,6 @@ struct TsEvaluateResponse { } pub struct ReplSession { - has_node_modules_dir: bool, npm_resolver: Arc, resolver: Arc, pub worker: MainWorker, @@ -131,7 +129,6 @@ pub struct ReplSession { pub context_id: u64, pub language_server: ReplLanguageServer, pub notifications: Rc>>, - has_initialized_node_runtime: bool, referrer: ModuleSpecifier, } @@ -183,14 +180,12 @@ impl ReplSession { .unwrap(); let mut repl_session = ReplSession { - has_node_modules_dir: cli_options.has_node_modules_dir(), npm_resolver, resolver, worker, session, context_id, language_server, - has_initialized_node_runtime: false, referrer, notifications: Rc::new(RefCell::new(notification_rx)), }; @@ -515,15 +510,6 @@ impl ReplSession { let has_node_specifier = resolved_imports.iter().any(|url| url.scheme() == "node"); if !npm_imports.is_empty() || has_node_specifier { - if !self.has_initialized_node_runtime { - deno_node::initialize_runtime( - &mut self.worker.js_runtime, - self.has_node_modules_dir, - None, - )?; - self.has_initialized_node_runtime = true; - } - self.npm_resolver.add_package_reqs(&npm_imports).await?; // prevent messages in the repl about @types/node not being cached diff --git a/cli/worker.rs b/cli/worker.rs index a712dc9c643ca0..b451cdbed5c7fe 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -8,7 +8,6 @@ use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; -use deno_core::futures::task::LocalFutureObj; use deno_core::futures::FutureExt; use deno_core::located_script_name; use deno_core::parking_lot::Mutex; @@ -32,7 +31,6 @@ use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::inspector_server::InspectorServer; use deno_runtime::ops::worker_host::CreateWebWorkerCb; -use deno_runtime::ops::worker_host::WorkerEventCb; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::web_worker::WebWorker; use 
deno_runtime::web_worker::WebWorkerOptions; @@ -97,7 +95,6 @@ struct SharedWorkerState { storage_key_resolver: StorageKeyResolver, npm_resolver: Arc, node_resolver: Arc, - has_node_specifier_checker: Box, blob_store: Arc, broadcast_channel: InMemoryBroadcastChannel, shared_array_buffer_store: SharedArrayBufferStore, @@ -110,11 +107,7 @@ struct SharedWorkerState { } impl SharedWorkerState { - pub fn should_initialize_node_runtime(&self) -> bool { - self.npm_resolver.has_packages() - || self.has_node_specifier_checker.has_node_specifier() - || self.options.is_npm_main - } + // Currently empty } pub struct CliMainWorker { @@ -140,7 +133,6 @@ impl CliMainWorker { log::debug!("main_module {}", self.main_module); if self.is_main_cjs { - self.initialize_main_module_for_node()?; deno_node::load_cjs_module( &mut self.worker.js_runtime, &self.main_module.to_file_path().unwrap().to_string_lossy(), @@ -266,22 +258,9 @@ impl CliMainWorker { &mut self, id: ModuleId, ) -> Result<(), AnyError> { - if self.shared.should_initialize_node_runtime() { - self.initialize_main_module_for_node()?; - } self.worker.evaluate_module(id).await } - fn initialize_main_module_for_node(&mut self) -> Result<(), AnyError> { - deno_node::initialize_runtime( - &mut self.worker.js_runtime, - self.shared.options.has_node_modules_dir, - self.shared.options.maybe_binary_npm_command_name.as_deref(), - )?; - - Ok(()) - } - pub async fn maybe_setup_coverage_collector( &mut self, ) -> Result, AnyError> { @@ -312,7 +291,6 @@ impl CliMainWorkerFactory { storage_key_resolver: StorageKeyResolver, npm_resolver: Arc, node_resolver: Arc, - has_node_specifier_checker: Box, blob_store: Arc, module_loader_factory: Box, root_cert_store_provider: Arc, @@ -327,7 +305,6 @@ impl CliMainWorkerFactory { storage_key_resolver, npm_resolver, node_resolver, - has_node_specifier_checker, blob_store, broadcast_channel: Default::default(), shared_array_buffer_store: Default::default(), @@ -404,10 +381,6 @@ impl CliMainWorkerFactory { let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); - let web_worker_preload_module_cb = - create_web_worker_preload_module_callback(shared); - let web_worker_pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(shared.clone()); let maybe_storage_key = shared .storage_key_resolver @@ -448,6 +421,11 @@ impl CliMainWorkerFactory { unstable: shared.options.unstable, user_agent: version::get_user_agent().to_string(), inspect: shared.options.is_inspecting, + has_node_modules_dir: shared.options.has_node_modules_dir, + maybe_binary_npm_command_name: shared + .options + .maybe_binary_npm_command_name + .clone(), }, extensions, startup_snapshot: crate::js::deno_isolate_init(), @@ -461,8 +439,6 @@ impl CliMainWorkerFactory { source_map_getter: maybe_source_map_getter, format_js_error_fn: Some(Arc::new(format_js_error)), create_web_worker_cb, - web_worker_preload_module_cb, - web_worker_pre_execute_module_cb, maybe_inspector_server, should_break_on_first_statement: shared.options.inspect_brk, should_wait_for_inspector_session: shared.options.inspect_wait, @@ -555,38 +531,6 @@ impl CliMainWorkerFactory { } } -// TODO(bartlomieju): this callback could have default value -// and not be required -fn create_web_worker_preload_module_callback( - _shared: &Arc, -) -> Arc { - Arc::new(move |worker| { - let fut = async move { Ok(worker) }; - LocalFutureObj::new(Box::new(fut)) - }) -} - -fn create_web_worker_pre_execute_module_callback( - shared: Arc, -) -> Arc { - Arc::new(move |mut worker| { - 
let shared = shared.clone(); - let fut = async move { - // this will be up to date after pre-load - if shared.should_initialize_node_runtime() { - deno_node::initialize_runtime( - &mut worker.js_runtime, - shared.options.has_node_modules_dir, - None, - )?; - } - - Ok(worker) - }; - LocalFutureObj::new(Box::new(fut)) - }) -} - fn create_web_worker_callback( shared: Arc, stdio: deno_runtime::deno_io::Stdio, @@ -602,9 +546,6 @@ fn create_web_worker_callback( shared.module_loader_factory.create_source_map_getter(); let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); - let preload_module_cb = create_web_worker_preload_module_callback(&shared); - let pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(shared.clone()); let extensions = ops::cli_exts(shared.npm_resolver.clone()); @@ -636,6 +577,11 @@ fn create_web_worker_callback( unstable: shared.options.unstable, user_agent: version::get_user_agent().to_string(), inspect: shared.options.is_inspecting, + has_node_modules_dir: shared.options.has_node_modules_dir, + maybe_binary_npm_command_name: shared + .options + .maybe_binary_npm_command_name + .clone(), }, extensions, startup_snapshot: crate::js::deno_isolate_init(), @@ -646,8 +592,6 @@ fn create_web_worker_callback( root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), seed: shared.options.seed, create_web_worker_cb, - preload_module_cb, - pre_execute_module_cb, format_js_error_fn: Some(Arc::new(format_js_error)), source_map_getter: maybe_source_map_getter, module_loader, diff --git a/ext/node/global.rs b/ext/node/global.rs index 78e009971d460c..52c1b6bb91b97e 100644 --- a/ext/node/global.rs +++ b/ext/node/global.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::mem::MaybeUninit; use std::rc::Rc; use deno_core::v8; @@ -266,13 +267,14 @@ fn current_mode(scope: &mut v8::HandleScope) -> Mode { let Some(v8_string) = v8::StackTrace::current_script_name_or_source_url(scope) else { return Mode::Deno; }; - let string = v8_string.to_rust_string_lossy(scope); let op_state = deno_core::JsRuntime::op_state_from(scope); let op_state = op_state.borrow(); let Some(node_resolver) = op_state.try_borrow::>() else { return Mode::Deno; }; - if node_resolver.in_npm_package_with_cache(string) { + let mut buffer = [MaybeUninit::uninit(); 2048]; + let str = v8_string.to_rust_cow_lossy(scope, &mut buffer); + if node_resolver.in_npm_package_with_cache(str) { Mode::Node } else { Mode::Deno diff --git a/ext/node/lib.rs b/ext/node/lib.rs index e2643a84faa2bc..c7d617666fcc79 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -8,7 +8,6 @@ use std::rc::Rc; use deno_core::error::AnyError; use deno_core::located_script_name; use deno_core::op; -use deno_core::serde_json; use deno_core::serde_v8; use deno_core::url::Url; #[allow(unused_imports)] @@ -558,29 +557,6 @@ deno_core::extension!(deno_node, }, ); -pub fn initialize_runtime( - js_runtime: &mut JsRuntime, - uses_local_node_modules_dir: bool, - maybe_binary_command_name: Option<&str>, -) -> Result<(), AnyError> { - let argv0 = if let Some(binary_command_name) = maybe_binary_command_name { - serde_json::to_string(binary_command_name)? 
- } else { - "undefined".to_string() - }; - let source_code = format!( - r#"(function loadBuiltinNodeModules(usesLocalNodeModulesDir, argv0) {{ - Deno[Deno.internal].node.initialize( - usesLocalNodeModulesDir, - argv0 - ); - }})({uses_local_node_modules_dir}, {argv0});"#, - ); - - js_runtime.execute_script(located_script_name!(), source_code.into())?; - Ok(()) -} - pub fn load_cjs_module( js_runtime: &mut JsRuntime, module: &str, diff --git a/ext/node/polyfills/02_init.js b/ext/node/polyfills/02_init.js index d73d5d822a4cd2..e3061c95d6e5f5 100644 --- a/ext/node/polyfills/02_init.js +++ b/ext/node/polyfills/02_init.js @@ -7,6 +7,10 @@ const requireImpl = internals.requireImpl; import { nodeGlobals } from "ext:deno_node/00_globals.js"; import "node:module"; +globalThis.nodeBootstrap = function (usesLocalNodeModulesDir, argv0) { + initialize(usesLocalNodeModulesDir, argv0); +}; + let initialized = false; function initialize( diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index 4c375760d504d6..c7c22b562d2f77 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -33,6 +33,8 @@ export { _nextTick as nextTick, chdir, cwd, env, version, versions }; import { createWritableStdioStream, initStdin, + Readable, + Writable, } from "ext:deno_node/_process/streams.mjs"; import { enableNextTick, @@ -52,15 +54,42 @@ export let platform = ""; // TODO(kt3k): This should be set at start up time export let pid = 0; -// TODO(kt3k): Give better types to stdio objects -// deno-lint-ignore no-explicit-any -let stderr = null as any; -// deno-lint-ignore no-explicit-any -let stdin = null as any; -// deno-lint-ignore no-explicit-any -let stdout = null as any; +// We want streams to be as lazy as possible, but we cannot export a getter in a module. To +// work around this we make these proxies that eagerly instantiate the underlying object on +// first access of any property/method. 
+function makeLazyStream(objectFactory: () => T): T { + return new Proxy({}, { + get: function (_, prop, receiver) { + // deno-lint-ignore no-explicit-any + return Reflect.get(objectFactory() as any, prop, receiver); + }, + has: function (_, prop) { + // deno-lint-ignore no-explicit-any + return Reflect.has(objectFactory() as any, prop); + }, + ownKeys: function (_) { + // deno-lint-ignore no-explicit-any + return Reflect.ownKeys(objectFactory() as any); + }, + set: function (_, prop, value, receiver) { + // deno-lint-ignore no-explicit-any + return Reflect.set(objectFactory() as any, prop, value, receiver); + }, + getPrototypeOf: function (_) { + // deno-lint-ignore no-explicit-any + return Reflect.getPrototypeOf(objectFactory() as any); + }, + getOwnPropertyDescriptor(_, prop) { + // deno-lint-ignore no-explicit-any + return Reflect.getOwnPropertyDescriptor(objectFactory() as any, prop); + }, + }) as T; +} + +export let stderr = makeLazyStream(getStderr); +export let stdin = makeLazyStream(getStdin); +export let stdout = makeLazyStream(getStdout); -export { stderr, stdin, stdout }; import { getBinding } from "ext:deno_node/internal_binding/mod.ts"; import * as constants from "ext:deno_node/internal_binding/constants.ts"; import * as uv from "ext:deno_node/internal_binding/uv.ts"; @@ -605,13 +634,19 @@ class Process extends EventEmitter { memoryUsage = memoryUsage; /** https://nodejs.org/api/process.html#process_process_stderr */ - stderr = stderr; + get stderr(): Writable { + return getStderr(); + } /** https://nodejs.org/api/process.html#process_process_stdin */ - stdin = stdin; + get stdin(): Readable { + return getStdin(); + } /** https://nodejs.org/api/process.html#process_process_stdout */ - stdout = stdout; + get stdout(): Writable { + return getStdout(); + } /** https://nodejs.org/api/process.html#process_process_version */ version = version; @@ -704,6 +739,115 @@ addReadOnlyProcessAlias("throwDeprecation", "--throw-deprecation"); export const removeListener = process.removeListener; export const removeAllListeners = process.removeAllListeners; +let unhandledRejectionListenerCount = 0; +let uncaughtExceptionListenerCount = 0; +let beforeExitListenerCount = 0; +let exitListenerCount = 0; + +process.on("newListener", (event: string) => { + switch (event) { + case "unhandledRejection": + unhandledRejectionListenerCount++; + break; + case "uncaughtException": + uncaughtExceptionListenerCount++; + break; + case "beforeExit": + beforeExitListenerCount++; + break; + case "exit": + exitListenerCount++; + break; + default: + return; + } + synchronizeListeners(); +}); + +process.on("removeListener", (event: string) => { + switch (event) { + case "unhandledRejection": + unhandledRejectionListenerCount--; + break; + case "uncaughtException": + uncaughtExceptionListenerCount--; + break; + case "beforeExit": + beforeExitListenerCount--; + break; + case "exit": + exitListenerCount--; + break; + default: + return; + } + synchronizeListeners(); +}); + +function processOnError(event: ErrorEvent) { + if (process.listenerCount("uncaughtException") > 0) { + event.preventDefault(); + } + + uncaughtExceptionHandler(event.error, "uncaughtException"); +} + +function processOnBeforeUnload(event: Event) { + process.emit("beforeExit", process.exitCode || 0); + processTicksAndRejections(); + if (core.eventLoopHasMoreWork()) { + event.preventDefault(); + } +} + +function processOnUnload() { + if (!process._exiting) { + process._exiting = true; + process.emit("exit", process.exitCode || 0); + } +} + +function 
synchronizeListeners() { + // Install special "unhandledrejection" handler, that will be called + // last. + if ( + unhandledRejectionListenerCount > 0 || uncaughtExceptionListenerCount > 0 + ) { + internals.nodeProcessUnhandledRejectionCallback = (event) => { + if (process.listenerCount("unhandledRejection") === 0) { + // The Node.js default behavior is to raise an uncaught exception if + // an unhandled rejection occurs and there are no unhandledRejection + // listeners. + + event.preventDefault(); + uncaughtExceptionHandler(event.reason, "unhandledRejection"); + return; + } + + event.preventDefault(); + process.emit("unhandledRejection", event.reason, event.promise); + }; + } else { + internals.nodeProcessUnhandledRejectionCallback = undefined; + } + + if (uncaughtExceptionListenerCount > 0) { + globalThis.addEventListener("error", processOnError); + } else { + globalThis.removeEventListener("error", processOnError); + } + if (beforeExitListenerCount > 0) { + globalThis.addEventListener("beforeunload", processOnBeforeUnload); + } else { + globalThis.removeEventListener("beforeunload", processOnBeforeUnload); + } + if (exitListenerCount > 0) { + globalThis.addEventListener("unload", processOnUnload); + } else { + globalThis.removeEventListener("unload", processOnUnload); + } +} + // Should be called only once, in `runtime/js/99_main.js` when the runtime is // bootstrapped. internals.__bootstrapNodeProcess = function ( @@ -748,68 +892,52 @@ internals.__bootstrapNodeProcess = function ( core.setMacrotaskCallback(runNextTicks); enableNextTick(); - // Install special "unhandledrejection" handler, that will be called - // last. - internals.nodeProcessUnhandledRejectionCallback = (event) => { - if (process.listenerCount("unhandledRejection") === 0) { - // The Node.js default behavior is to raise an uncaught exception if - // an unhandled rejection occurs and there are no unhandledRejection - // listeners. 
- if (process.listenerCount("uncaughtException") === 0) { - throw event.reason; - } - - event.preventDefault(); - uncaughtExceptionHandler(event.reason, "unhandledRejection"); - return; - } - - event.preventDefault(); - process.emit("unhandledRejection", event.reason, event.promise); - }; - - globalThis.addEventListener("error", (event) => { - if (process.listenerCount("uncaughtException") > 0) { - event.preventDefault(); - } - - uncaughtExceptionHandler(event.error, "uncaughtException"); - }); - - globalThis.addEventListener("beforeunload", (e) => { - process.emit("beforeExit", process.exitCode || 0); - processTicksAndRejections(); - if (core.eventLoopHasMoreWork()) { - e.preventDefault(); - } - }); - - globalThis.addEventListener("unload", () => { - if (!process._exiting) { - process._exiting = true; - process.emit("exit", process.exitCode || 0); - } - }); - - // Initializes stdin - stdin = process.stdin = initStdin(); - - /** https://nodejs.org/api/process.html#process_process_stderr */ - stderr = process.stderr = createWritableStdioStream( - io.stderr, - "stderr", - ); - - /** https://nodejs.org/api/process.html#process_process_stdout */ - stdout = process.stdout = createWritableStdioStream( - io.stdout, - "stdout", - ); - process.setStartTime(Date.now()); // @ts-ignore Remove setStartTime and #startTime is not modifiable delete process.setStartTime; delete internals.__bootstrapNodeProcess; }; +// deno-lint-ignore no-explicit-any +let stderr_ = null as any; +// deno-lint-ignore no-explicit-any +let stdin_ = null as any; +// deno-lint-ignore no-explicit-any +let stdout_ = null as any; + +function getStdin(): Readable { + if (!stdin_) { + stdin_ = initStdin(); + stdin = stdin_; + Object.defineProperty(process, "stdin", { get: () => stdin_ }); + } + return stdin_; +} + +/** https://nodejs.org/api/process.html#process_process_stdout */ +function getStdout(): Writable { + if (!stdout_) { + stdout_ = createWritableStdioStream( + io.stdout, + "stdout", + ); + stdout = stdout_; + Object.defineProperty(process, "stdout", { get: () => stdout_ }); + } + return stdout_; +} + +/** https://nodejs.org/api/process.html#process_process_stderr */ +function getStderr(): Writable { + if (!stderr_) { + stderr_ = createWritableStdioStream( + io.stderr, + "stderr", + ); + stderr = stderr_; + Object.defineProperty(process, "stderr", { get: () => stderr_ }); + } + return stderr_; +} + export default process; diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 4c43fcbad49fe2..20501b0f1cf420 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::borrow::Cow; use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; @@ -128,10 +129,10 @@ impl NodeResolver { self.npm_resolver.in_npm_package(specifier) } - pub fn in_npm_package_with_cache(&self, specifier: String) -> bool { + pub fn in_npm_package_with_cache(&self, specifier: Cow) -> bool { let mut cache = self.in_npm_package_cache.lock(); - if let Some(result) = cache.get(&specifier) { + if let Some(result) = cache.get(specifier.as_ref()) { return *result; } @@ -141,7 +142,7 @@ impl NodeResolver { } else { false }; - cache.insert(specifier, result); + cache.insert(specifier.into_owned(), result); result } diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index c8fdabc258e1c1..fdd82862c7b0d2 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -438,6 +438,7 @@ function bootstrapMainRuntime(runtimeOptions) { if (hasBootstrapped) { throw new Error("Worker runtime already bootstrapped"); } + const nodeBootstrap = globalThis.nodeBootstrap; const { 0: args, @@ -456,6 +457,8 @@ function bootstrapMainRuntime(runtimeOptions) { 13: userAgent, 14: inspectFlag, // 15: enableTestingFeaturesFlag + 16: hasNodeModulesDir, + 17: maybeBinaryNpmCommandName, } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -464,12 +467,13 @@ function bootstrapMainRuntime(runtimeOptions) { // Remove bootstrapping data from the global scope delete globalThis.__bootstrap; delete globalThis.bootstrap; + delete globalThis.nodeBootstrap; hasBootstrapped = true; // If the `--location` flag isn't set, make `globalThis.location` `undefined` and // writable, so that they can mock it themselves if they like. If the flag was // set, define `globalThis.location`, using the provided value. - if (location_ === undefined) { + if (location_ == null) { mainRuntimeGlobalProperties.location = { writable: true, }; @@ -542,6 +546,10 @@ function bootstrapMainRuntime(runtimeOptions) { ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); util.log("args", args); + + if (nodeBootstrap) { + nodeBootstrap(hasNodeModulesDir, maybeBinaryNpmCommandName); + } } function bootstrapWorkerRuntime( @@ -553,6 +561,8 @@ function bootstrapWorkerRuntime( throw new Error("Worker runtime already bootstrapped"); } + const nodeBootstrap = globalThis.nodeBootstrap; + const { 0: args, 1: cpuCount, @@ -570,6 +580,8 @@ function bootstrapWorkerRuntime( 13: userAgent, // 14: inspectFlag, 15: enableTestingFeaturesFlag, + 16: hasNodeModulesDir, + 17: maybeBinaryNpmCommandName, } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -580,6 +592,7 @@ function bootstrapWorkerRuntime( // Remove bootstrapping data from the global scope delete globalThis.__bootstrap; delete globalThis.bootstrap; + delete globalThis.nodeBootstrap; hasBootstrapped = true; if (unstableFlag) { @@ -649,6 +662,10 @@ function bootstrapWorkerRuntime( // Setup `Deno` global - we're actually overriding already // existing global `Deno` with `Deno` namespace from "./deno.ts". 
ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); + + if (nodeBootstrap) { + nodeBootstrap(hasNodeModulesDir, maybeBinaryNpmCommandName); + } } globalThis.bootstrap = { diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index f96ae38e8a8986..9bfbd9d10f0146 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -13,7 +13,6 @@ use crate::web_worker::WorkerControlEvent; use crate::web_worker::WorkerId; use crate::worker::FormatJsErrorFn; use deno_core::error::AnyError; -use deno_core::futures::future::LocalFutureObj; use deno_core::op; use deno_core::serde::Deserialize; use deno_core::CancelFuture; @@ -40,10 +39,6 @@ pub type CreateWebWorkerCb = dyn Fn(CreateWebWorkerArgs) -> (WebWorker, Sendable + Sync + Send; -pub type WorkerEventCb = dyn Fn(WebWorker) -> LocalFutureObj<'static, Result> - + Sync - + Send; - /// A holder for callback that is used to create a new /// WebWorker. It's a struct instead of a type alias /// because `GothamState` used in `OpState` overrides @@ -54,12 +49,6 @@ struct CreateWebWorkerCbHolder(Arc); #[derive(Clone)] struct FormatJsErrorFnHolder(Option>); -#[derive(Clone)] -struct PreloadModuleCbHolder(Arc); - -#[derive(Clone)] -struct PreExecuteModuleCbHolder(Arc); - pub struct WorkerThread { worker_handle: WebWorkerHandle, cancel_handle: Rc, @@ -98,8 +87,6 @@ deno_core::extension!( ], options = { create_web_worker_cb: Arc, - preload_module_cb: Arc, - pre_execute_module_cb: Arc, format_js_error_fn: Option>, }, state = |state, options| { @@ -109,12 +96,6 @@ deno_core::extension!( let create_web_worker_cb_holder = CreateWebWorkerCbHolder(options.create_web_worker_cb); state.put::(create_web_worker_cb_holder); - let preload_module_cb_holder = - PreloadModuleCbHolder(options.preload_module_cb); - state.put::(preload_module_cb_holder); - let pre_execute_module_cb_holder = - PreExecuteModuleCbHolder(options.pre_execute_module_cb); - state.put::(pre_execute_module_cb_holder); let format_js_error_fn_holder = FormatJsErrorFnHolder(options.format_js_error_fn); state.put::(format_js_error_fn_holder); @@ -174,10 +155,6 @@ fn op_create_worker( let worker_id = state.take::(); let create_web_worker_cb = state.take::(); state.put::(create_web_worker_cb.clone()); - let preload_module_cb = state.take::(); - state.put::(preload_module_cb.clone()); - let pre_execute_module_cb = state.take::(); - state.put::(pre_execute_module_cb.clone()); let format_js_error_fn = state.take::(); state.put::(format_js_error_fn.clone()); state.put::(worker_id.next().unwrap()); @@ -221,8 +198,6 @@ fn op_create_worker( worker, module_specifier, maybe_source_code, - preload_module_cb.0, - pre_execute_module_cb.0, format_js_error_fn.0, ) })?; diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index a3b93836c3039f..0c4e95140438a7 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -338,8 +338,6 @@ pub struct WebWorkerOptions { pub module_loader: Rc, pub npm_resolver: Option>, pub create_web_worker_cb: Arc, - pub preload_module_cb: Arc, - pub pre_execute_module_cb: Arc, pub format_js_error_fn: Option>, pub source_map_getter: Option>, pub worker_type: WebWorkerType, @@ -460,8 +458,6 @@ impl WebWorker { ops::runtime::deno_runtime::init_ops_and_esm(main_module.clone()), ops::worker_host::deno_worker_host::init_ops_and_esm( options.create_web_worker_cb.clone(), - options.preload_module_cb.clone(), - options.pre_execute_module_cb.clone(), options.format_js_error_fn.clone(), ), ops::fs_events::deno_fs_events::init_ops_and_esm(), @@ -600,7 
+596,7 @@ impl WebWorker { .unwrap() .into(); bootstrap_fn - .call(scope, undefined.into(), &[args.into(), name_str, id_str]) + .call(scope, undefined.into(), &[args, name_str, id_str]) .unwrap(); } // TODO(bartlomieju): this could be done using V8 API, without calling `execute_script`. @@ -782,11 +778,9 @@ fn print_worker_error( /// This function should be called from a thread dedicated to this worker. // TODO(bartlomieju): check if order of actions is aligned to Worker spec pub fn run_web_worker( - worker: WebWorker, + mut worker: WebWorker, specifier: ModuleSpecifier, mut maybe_source_code: Option, - preload_module_cb: Arc, - pre_execute_module_cb: Arc, format_js_error_fn: Option>, ) -> Result<(), AnyError> { let name = worker.name.to_string(); @@ -796,20 +790,6 @@ pub fn run_web_worker( let fut = async move { let internal_handle = worker.internal_handle.clone(); - let result = (preload_module_cb)(worker).await; - - let mut worker = match result { - Ok(worker) => worker, - Err(e) => { - print_worker_error(&e, &name, format_js_error_fn.as_deref()); - internal_handle - .post_event(WorkerControlEvent::TerminalError(e)) - .expect("Failed to post message to host"); - - // Failure to execute script is a terminal error, bye, bye. - return Ok(()); - } - }; // Execute provided source code immediately let result = if let Some(source_code) = maybe_source_code.take() { @@ -821,18 +801,6 @@ pub fn run_web_worker( // script instead of module match worker.preload_main_module(&specifier).await { Ok(id) => { - worker = match (pre_execute_module_cb)(worker).await { - Ok(worker) => worker, - Err(e) => { - print_worker_error(&e, &name, format_js_error_fn.as_deref()); - internal_handle - .post_event(WorkerControlEvent::TerminalError(e)) - .expect("Failed to post message to host"); - - // Failure to execute script is a terminal error, bye, bye. - return Ok(()); - } - }; worker.start_polling_for_messages(); worker.execute_main_module(id).await } diff --git a/runtime/worker.rs b/runtime/worker.rs index 060f5537bdc4fb..5eefd5fa8951bd 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -178,8 +178,6 @@ pub struct WorkerOptions { pub npm_resolver: Option>, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, - pub web_worker_preload_module_cb: Arc, - pub web_worker_pre_execute_module_cb: Arc, pub format_js_error_fn: Option>, /// Source map reference for errors. 
@@ -221,12 +219,6 @@ pub struct WorkerOptions { impl Default for WorkerOptions { fn default() -> Self { Self { - web_worker_preload_module_cb: Arc::new(|_| { - unimplemented!("web workers are not supported") - }), - web_worker_pre_execute_module_cb: Arc::new(|_| { - unimplemented!("web workers are not supported") - }), create_web_worker_cb: Arc::new(|_| { unimplemented!("web workers are not supported") }), @@ -362,8 +354,6 @@ impl MainWorker { ops::runtime::deno_runtime::init_ops_and_esm(main_module.clone()), ops::worker_host::deno_worker_host::init_ops_and_esm( options.create_web_worker_cb.clone(), - options.web_worker_preload_module_cb.clone(), - options.web_worker_pre_execute_module_cb.clone(), options.format_js_error_fn.clone(), ), ops::fs_events::deno_fs_events::init_ops_and_esm(), @@ -478,9 +468,7 @@ impl MainWorker { let bootstrap_fn = self.bootstrap_fn_global.take().unwrap(); let bootstrap_fn = v8::Local::new(scope, bootstrap_fn); let undefined = v8::undefined(scope); - bootstrap_fn - .call(scope, undefined.into(), &[args.into()]) - .unwrap(); + bootstrap_fn.call(scope, undefined.into(), &[args]).unwrap(); } /// See [JsRuntime::execute_script](deno_core::JsRuntime::execute_script) diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 9627281a68ba17..0f533344f9d881 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -2,6 +2,8 @@ use deno_core::v8; use deno_core::ModuleSpecifier; +use serde::Serialize; +use std::cell::RefCell; use std::thread; use crate::colors; @@ -55,6 +57,8 @@ pub struct BootstrapOptions { pub unstable: bool, pub user_agent: String, pub inspect: bool, + pub has_node_modules_dir: bool, + pub maybe_binary_npm_command_name: Option, } impl Default for BootstrapOptions { @@ -80,135 +84,91 @@ impl Default for BootstrapOptions { unstable: Default::default(), inspect: Default::default(), args: Default::default(), + has_node_modules_dir: Default::default(), + maybe_binary_npm_command_name: None, } } } +/// This is a struct that we use to serialize the contents of the `BootstrapOptions` +/// struct above to a V8 form. While `serde_v8` is not as fast as hand-coding this, +/// it's "fast enough" while serializing a large tuple like this that it doesn't appear +/// on flamegraphs. +/// +/// Note that a few fields in here are derived from the process and environment and +/// are not sourced from the underlying `BootstrapOptions`. +/// +/// Keep this in sync with `99_main.js`. +#[derive(Serialize)] +struct BootstrapV8<'a>( + // args + &'a Vec, + // cpu_count + i32, + // log_level + i32, + // runtime_version + &'a str, + // locale + &'a str, + // location + Option<&'a str>, + // no_color + bool, + // is_tty + bool, + // ts_version + &'a str, + // unstable + bool, + // process_id + i32, + // env!("TARGET") + &'a str, + // v8_version + &'a str, + // user_agent + &'a str, + // inspect + bool, + // enable_testing_features + bool, + // has_node_modules_dir + bool, + // maybe_binary_npm_command_name + Option<&'a str>, +); + impl BootstrapOptions { + /// Return the v8 equivalent of this structure. 
pub fn as_v8<'s>( &self, scope: &mut v8::HandleScope<'s>, - ) -> v8::Local<'s, v8::Array> { - let array = v8::Array::new(scope, 16); - - { - let args = v8::Array::new(scope, self.args.len() as i32); - for (idx, arg) in self.args.iter().enumerate() { - let arg_str = v8::String::new(scope, arg).unwrap(); - args.set_index(scope, idx as u32, arg_str.into()); - } - array.set_index(scope, 0, args.into()); - } - - { - let val = v8::Integer::new(scope, self.cpu_count as i32); - array.set_index(scope, 1, val.into()); - } - - { - let val = v8::Integer::new(scope, self.log_level as i32); - array.set_index(scope, 2, val.into()); - } - - { - let val = v8::String::new_from_one_byte( - scope, - self.runtime_version.as_bytes(), - v8::NewStringType::Internalized, - ) - .unwrap(); - array.set_index(scope, 3, val.into()); - } - - { - let val = v8::String::new_from_one_byte( - scope, - self.locale.as_bytes(), - v8::NewStringType::Normal, - ) - .unwrap(); - array.set_index(scope, 4, val.into()); - } - - { - let val: v8::Local = if let Some(location) = &self.location { - v8::String::new(scope, location.as_str()).unwrap().into() - } else { - v8::undefined(scope).into() - }; - - array.set_index(scope, 5, val); - } - - { - let val = v8::Boolean::new(scope, self.no_color); - array.set_index(scope, 6, val.into()); - } - - { - let val = v8::Boolean::new(scope, self.is_tty); - array.set_index(scope, 7, val.into()); - } - - { - let val = v8::String::new_from_one_byte( - scope, - self.ts_version.as_bytes(), - v8::NewStringType::Normal, - ) - .unwrap(); - array.set_index(scope, 8, val.into()); - } - - { - let val = v8::Boolean::new(scope, self.unstable); - array.set_index(scope, 9, val.into()); - } - - { - let val = v8::Integer::new(scope, std::process::id() as i32); - array.set_index(scope, 10, val.into()); - } - - { - let val = v8::String::new_external_onebyte_static( - scope, - env!("TARGET").as_bytes(), - ) - .unwrap(); - array.set_index(scope, 11, val.into()); - } - - { - let val = v8::String::new_from_one_byte( - scope, - deno_core::v8_version().as_bytes(), - v8::NewStringType::Normal, - ) - .unwrap(); - array.set_index(scope, 12, val.into()); - } - - { - let val = v8::String::new_from_one_byte( - scope, - self.user_agent.as_bytes(), - v8::NewStringType::Normal, - ) - .unwrap(); - array.set_index(scope, 13, val.into()); - } - - { - let val = v8::Boolean::new(scope, self.inspect); - array.set_index(scope, 14, val.into()); - } - - { - let val = v8::Boolean::new(scope, self.enable_testing_features); - array.set_index(scope, 15, val.into()); - } - - array + ) -> v8::Local<'s, v8::Value> { + let scope = RefCell::new(scope); + let ser = deno_core::serde_v8::Serializer::new(&scope); + + let bootstrap = BootstrapV8( + &self.args, + self.cpu_count as _, + self.log_level as _, + &self.runtime_version, + &self.locale, + self.location.as_ref().map(|l| l.as_str()), + self.no_color, + self.is_tty, + &self.ts_version, + self.unstable, + std::process::id() as _, + env!("TARGET"), + deno_core::v8_version(), + &self.user_agent, + self.inspect, + self.enable_testing_features, + self.has_node_modules_dir, + self.maybe_binary_npm_command_name.as_deref(), + ); + + bootstrap.serialize(ser).unwrap() } } From 346a3360b2d549e40b33b5441bbd5d600bfe67ab Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 15 Aug 2023 16:30:33 -0600 Subject: [PATCH 23/60] fix(runtime): use host header for inspector websocket URL (#20171) If a `host` header is specified, use that for the generated websocket URLs. 
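Conceptually, the rewrite amounts to the sketch below (the real change lives in `runtime/inspector_server.rs` and is written in Rust; the function and parameter names here are illustrative only):

```js
// Sketch: derive the advertised websocket host from the request's Host
// header when present, falling back to the address the inspector listens on.
function webSocketDebuggerUrl(listenHost, hostHeader, uuid) {
  let host = listenHost;
  if (hostHeader) {
    try {
      // Round-tripping through URL both validates the header and normalizes
      // the host:port pair (IPv6 literals keep their brackets).
      const url = new URL(`http://${hostHeader}`);
      host = url.port ? `${url.hostname}:${url.port}` : url.hostname;
    } catch {
      // An unparsable Host header keeps the listen address.
    }
  }
  return `ws://${host}/ws/${uuid}`;
}
```

For example, a request carrying `Host: some.random.host:1234` is reported as `ws://some.random.host:1234/ws/<uuid>`, which is what the new integration test below asserts.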
Fixes #20087 --- cli/tests/integration/inspector_tests.rs | 38 +++++++++++++++++++----- runtime/inspector_server.rs | 37 ++++++++++++++++------- 2 files changed, 56 insertions(+), 19 deletions(-) diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index 6b2deb0bff9fc1..79422ee5a89742 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -9,6 +9,8 @@ use deno_runtime::deno_fetch::reqwest; use fastwebsockets::FragmentCollector; use fastwebsockets::Frame; use fastwebsockets::WebSocket; +use http::header::HOST; +use hyper::header::HeaderValue; use hyper::upgrade::Upgraded; use hyper::Body; use hyper::Request; @@ -704,14 +706,34 @@ async fn inspector_json() { let mut url = ws_url.clone(); let _ = url.set_scheme("http"); url.set_path("/json"); - let resp = reqwest::get(url).await.unwrap(); - assert_eq!(resp.status(), reqwest::StatusCode::OK); - let endpoint_list: Vec = - serde_json::from_str(&resp.text().await.unwrap()).unwrap(); - let matching_endpoint = endpoint_list - .iter() - .find(|e| e["webSocketDebuggerUrl"] == ws_url.as_str()); - assert!(matching_endpoint.is_some()); + let client = reqwest::Client::new(); + + // Ensure that the webSocketDebuggerUrl matches the host header + for (host, expected) in [ + (None, ws_url.as_str()), + (Some("some.random.host"), "ws://some.random.host/"), + (Some("some.random.host:1234"), "ws://some.random.host:1234/"), + (Some("[::1]:1234"), "ws://[::1]:1234/"), + ] { + let mut req = reqwest::Request::new(reqwest::Method::GET, url.clone()); + if let Some(host) = host { + req + .headers_mut() + .insert(HOST, HeaderValue::from_static(host)); + } + let resp = client.execute(req).await.unwrap(); + assert_eq!(resp.status(), reqwest::StatusCode::OK); + let endpoint_list: Vec = + serde_json::from_str(&resp.text().await.unwrap()).unwrap(); + let matching_endpoint = endpoint_list.iter().find(|e| { + e["webSocketDebuggerUrl"] + .as_str() + .unwrap() + .contains(expected) + }); + assert!(matching_endpoint.is_some()); + } + child.kill().unwrap(); } diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs index 330e91c3a8563e..70dda9832ee120 100644 --- a/runtime/inspector_server.rs +++ b/runtime/inspector_server.rs @@ -16,6 +16,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::task::spawn; +use deno_core::url::Url; use deno_core::InspectorMsg; use deno_core::InspectorSessionProxy; use deno_core::JsRuntime; @@ -189,11 +190,12 @@ fn handle_ws_request( fn handle_json_request( inspector_map: Rc>>, + host: Option, ) -> http::Result> { let data = inspector_map .borrow() .values() - .map(|info| info.get_json_metadata()) + .map(move |info| info.get_json_metadata(&host)) .collect::>(); http::Response::builder() .status(http::StatusCode::OK) @@ -224,7 +226,7 @@ async fn server( .map(|info| { eprintln!( "Debugger listening on {}", - info.get_websocket_debugger_url() + info.get_websocket_debugger_url(&info.host.to_string()) ); eprintln!("Visit chrome://inspect to connect to the debugger."); if info.wait_for_session { @@ -258,6 +260,17 @@ async fn server( future::ok::<_, Infallible>(hyper::service::service_fn( move |req: http::Request| { future::ready({ + // If the host header can make a valid URL, use it + let host = req + .headers() + .get("host") + .and_then(|host| host.to_str().ok()) + .and_then(|host| Url::parse(&format!("http://{host}")).ok()) + .and_then(|url| match (url.host(), url.port()) { + (Some(host), 
Some(port)) => Some(format!("{host}:{port}")), + (Some(host), None) => Some(format!("{host}")), + _ => None, + }); match (req.method(), req.uri().path()) { (&http::Method::GET, path) if path.starts_with("/ws/") => { handle_ws_request(req, Rc::clone(&inspector_map)) @@ -266,10 +279,10 @@ async fn server( handle_json_version_request(json_version_response.clone()) } (&http::Method::GET, "/json") => { - handle_json_request(Rc::clone(&inspector_map)) + handle_json_request(Rc::clone(&inspector_map), host) } (&http::Method::GET, "/json/list") => { - handle_json_request(Rc::clone(&inspector_map)) + handle_json_request(Rc::clone(&inspector_map), host) } _ => http::Response::builder() .status(http::StatusCode::NOT_FOUND) @@ -381,27 +394,29 @@ impl InspectorInfo { } } - fn get_json_metadata(&self) -> Value { + fn get_json_metadata(&self, host: &Option) -> Value { + let host_listen = format!("{}", self.host); + let host = host.as_ref().unwrap_or(&host_listen); json!({ "description": "deno", - "devtoolsFrontendUrl": self.get_frontend_url(), + "devtoolsFrontendUrl": self.get_frontend_url(host), "faviconUrl": "https://deno.land/favicon.ico", "id": self.uuid.to_string(), "title": self.get_title(), "type": "node", "url": self.url.to_string(), - "webSocketDebuggerUrl": self.get_websocket_debugger_url(), + "webSocketDebuggerUrl": self.get_websocket_debugger_url(host), }) } - pub fn get_websocket_debugger_url(&self) -> String { - format!("ws://{}/ws/{}", &self.host, &self.uuid) + pub fn get_websocket_debugger_url(&self, host: &str) -> String { + format!("ws://{}/ws/{}", host, &self.uuid) } - fn get_frontend_url(&self) -> String { + fn get_frontend_url(&self, host: &str) -> String { format!( "devtools://devtools/bundled/js_app.html?ws={}/ws/{}&experiments=true&v8only=true", - &self.host, &self.uuid + host, &self.uuid ) } From 08b9f920f67d0de9516221d64e64cbb150a605e2 Mon Sep 17 00:00:00 2001 From: Evan <96965321+0xIchigo@users.noreply.github.com> Date: Wed, 16 Aug 2023 05:28:49 -0400 Subject: [PATCH 24/60] fix(ext/node): allow for the reassignment of userInfo() on Windows (#20165) The goal of this PR is to address issue #20106 where a `TypeError` occurs when the variables `uid` and `gid` from `userInfo()` in `node:os` are reassigned if the user is on Windows. Both `uid` and `gid` are marked as `const` therefore producing a `TypeError` when the two are reassigned. 
This PR achieves that goal by marking `uid` and `gid` as `let` --- cli/tests/node_compat/test/parallel/test-os.js | 3 +-- ext/node/polyfills/os.ts | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/cli/tests/node_compat/test/parallel/test-os.js b/cli/tests/node_compat/test/parallel/test-os.js index 09d97222c58552..d8425d0d52b52d 100644 --- a/cli/tests/node_compat/test/parallel/test-os.js +++ b/cli/tests/node_compat/test/parallel/test-os.js @@ -216,7 +216,6 @@ if (common.isWindows && process.env.USERPROFILE) { process.env.HOME = home; } -/* TODO(kt3k): Enable this test const pwd = os.userInfo(); is.object(pwd); const pwdBuf = os.userInfo({ encoding: 'buffer' }); @@ -245,7 +244,7 @@ is.string(pwd.username); assert.ok(pwd.homedir.includes(path.sep)); assert.strictEqual(pwd.username, pwdBuf.username.toString('utf8')); assert.strictEqual(pwd.homedir, pwdBuf.homedir.toString('utf8')); -*/ + assert.strictEqual(`${os.hostname}`, os.hostname()); assert.strictEqual(`${os.homedir}`, os.homedir()); diff --git a/ext/node/polyfills/os.ts b/ext/node/polyfills/os.ts index a874c942cbc3c2..c552b5a0ad11c1 100644 --- a/ext/node/polyfills/os.ts +++ b/ext/node/polyfills/os.ts @@ -320,8 +320,8 @@ export function uptime(): number { export function userInfo( options: UserInfoOptions = { encoding: "utf-8" }, ): UserInfo { - const uid = Deno.uid(); - const gid = Deno.gid(); + let uid = Deno.uid(); + let gid = Deno.gid(); if (isWindows) { uid = -1; From 75ea2c1b20c89edd4310793572dca4a9e934cc95 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Wed, 16 Aug 2023 12:58:03 +0200 Subject: [PATCH 25/60] perf(ext/urlpattern): optimize URLPattern.exec (#20170) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR optimizes `URLPattern.exec` - Use component keys from constructor instead of calling it on every `.exec`. AFAIK keys should always be `protocol`,`username`,`password`,`hostname`,`port`,`pathname`,`search`,`hash`. Haven't looked much into it but I think it's safe to define these outside the constructor as well. - Add a fast path for `/^$/u` (default regexp) and empty input - Replaced `ArrayPrototypeMap` & `ObjectFromEntries` with a `for` loop. 
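In other words, the hot loop becomes roughly the following (a simplified sketch of the patch below, not the exact code):

```js
const EMPTY_MATCH = [""];

// A component whose pattern compiled to /^$/u can only ever match the empty
// string, so an empty input can skip the RegExp exec call entirely.
function execComponent(component, input) {
  if (component.matchOnEmptyInput && input === "") {
    return EMPTY_MATCH; // fast path
  }
  return component.regexp.exec(input);
}

// Group extraction with a plain for loop instead of map + Object.fromEntries.
function extractGroups(component, match) {
  const groups = {};
  const groupList = component.groupNameList;
  for (let i = 0; i < groupList.length; ++i) {
    groups[groupList[i]] = match[i + 1] ?? "";
  }
  return groups;
}
```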
**this PR** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 --------------------------------------------------------------- ----------------------------- exec 1 2.17 µs/iter 461,022.8 (2.14 µs … 2.27 µs) 2.18 µs 2.27 µs 2.27 µs exec 2 4.13 µs/iter 242,173.4 (4.08 µs … 4.27 µs) 4.15 µs 4.27 µs 4.27 µs exec 3 2.55 µs/iter 391,508.1 (2.53 µs … 2.68 µs) 2.56 µs 2.68 µs 2.68 µs ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 --------------------------------------------------------------- ----------------------------- exec 1 2.45 µs/iter 408,092.4 (2.41 µs … 2.55 µs) 2.46 µs 2.55 µs 2.55 µs exec 2 4.41 µs/iter 226,706.0 (3.49 µs … 399.56 µs) 4.39 µs 5.49 µs 6.07 µs exec 3 2.99 µs/iter 334,833.4 (2.94 µs … 3.21 µs) 2.99 µs 3.21 µs 3.21 µs ``` --- ext/url/01_urlpattern.js | 51 +++++++++++++++++++++++++--------------- 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/ext/url/01_urlpattern.js b/ext/url/01_urlpattern.js index 04bb50fd7f42a6..e6d21e49de60a6 100644 --- a/ext/url/01_urlpattern.js +++ b/ext/url/01_urlpattern.js @@ -12,10 +12,7 @@ const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; const primordials = globalThis.__bootstrap.primordials; const { - ArrayPrototypeMap, ArrayPrototypePop, - ObjectFromEntries, - ObjectKeys, RegExpPrototypeExec, RegExpPrototypeTest, SafeRegExp, @@ -24,6 +21,7 @@ const { TypeError, } = primordials; +const EMPTY_MATCH = [""]; const _components = Symbol("components"); /** @@ -37,6 +35,16 @@ const _components = Symbol("components"); * @property {Component} search * @property {Component} hash */ +const COMPONENTS_KEYS = [ + "protocol", + "username", + "password", + "hostname", + "port", + "pathname", + "search", + "hash", +]; /** * @typedef Component @@ -64,19 +72,20 @@ class URLPattern { const components = ops.op_urlpattern_parse(input, baseURL); - const keys = ObjectKeys(components); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; + for (let i = 0; i < COMPONENTS_KEYS.length; ++i) { + const key = COMPONENTS_KEYS[i]; try { components[key].regexp = new SafeRegExp( components[key].regexpString, "u", ); + // used for fast path + components[key].matchOnEmptyInput = + components[key].regexpString === "^$"; } catch (e) { throw new TypeError(`${prefix}: ${key} is invalid; ${e.message}`); } } - this[_components] = components; } @@ -144,9 +153,8 @@ class URLPattern { const values = res[0]; - const keys = ObjectKeys(values); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; + for (let i = 0; i < COMPONENTS_KEYS.length; ++i) { + const key = COMPONENTS_KEYS[i]; if (!RegExpPrototypeTest(this[_components][key].regexp, values[key])) { return false; } @@ -185,21 +193,26 @@ class URLPattern { /** @type {URLPatternResult} */ const result = { inputs }; - const keys = ObjectKeys(values); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; + for (let i = 0; i < COMPONENTS_KEYS.length; ++i) { + const key = COMPONENTS_KEYS[i]; /** @type {Component} */ const component = this[_components][key]; const input = values[key]; - const match = RegExpPrototypeExec(component.regexp, input); + + const match = component.matchOnEmptyInput && input === "" + ? 
EMPTY_MATCH // fast path + : RegExpPrototypeExec(component.regexp, input); + if (match === null) { return null; } - const groupEntries = ArrayPrototypeMap( - component.groupNameList, - (name, i) => [name, match[i + 1] ?? ""], - ); - const groups = ObjectFromEntries(groupEntries); + + const groups = {}; + const groupList = component.groupNameList; + for (let i = 0; i < groupList.length; ++i) { + groups[groupList[i]] = match[i + 1] ?? ""; + } + result[key] = { input, groups, From 3dab9ead279edf4fac869ca7c6ae0f9c284ecde9 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Wed, 16 Aug 2023 14:02:15 +0200 Subject: [PATCH 26/60] fix: release ReadeableStream in fetch (#17365) Fixes #16648 --------- Co-authored-by: Aapo Alasuutari --- cli/tests/unit/fetch_test.ts | 27 +++++++++++++++++++++++++++ ext/fetch/26_fetch.js | 1 + 2 files changed, 28 insertions(+) diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index 204a159a7d44f5..83386d2ee52d7a 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -1951,6 +1951,33 @@ Deno.test( }, ); +Deno.test( + { permissions: { net: true } }, + async function fetchRequestBodyEmptyStream() { + const body = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([])); + controller.close(); + }, + }); + + await assertRejects( + async () => { + const controller = new AbortController(); + const promise = fetch("http://localhost:4545/echo_server", { + body, + method: "POST", + signal: controller.signal, + }); + controller.abort(); + await promise; + }, + DOMException, + "The signal has been aborted", + ); + }, +); + Deno.test("Request with subarray TypedArray body", async () => { const body = new Uint8Array([1, 2, 3, 4, 5]).subarray(1); const req = new Request("https://example.com", { method: "POST", body }); diff --git a/ext/fetch/26_fetch.js b/ext/fetch/26_fetch.js index 6be63d077cc136..311a197a8772e2 100644 --- a/ext/fetch/26_fetch.js +++ b/ext/fetch/26_fetch.js @@ -268,6 +268,7 @@ async function mainFetch(req, recursive, terminator) { } } WeakMapPrototypeDelete(requestBodyReaders, req); + reader.releaseLock(); core.tryClose(requestBodyRid); })(); } From 3b4efd339da777771dc5561cb70b0e10303794f2 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Thu, 17 Aug 2023 10:35:18 +0200 Subject: [PATCH 27/60] perf(ext/event): optimize Event constructor (#20181) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR optimizes `Event` constructor - ~Added a fast path for empty `eventInitDict`~ Removed `EventInit` dictionary converter - Don't make `isTrusted` a [LegacyUnforgeable](https://webidl.spec.whatwg.org/#LegacyUnforgeable) property. Doing so makes it non-spec compliant but calling `Object/Reflect.defineProperty` on the constructor is a big bottleneck. Node did the same a few months ago https://github.com/nodejs/node/pull/46974. In my opinion, the performance gains are worth deviating from the spec for a browser-related property. 
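The spec deviation boils down to where the `isTrusted` getter is installed; a minimal sketch of the two approaches (class names here are illustrative, not the actual implementation):

```js
// Before: [LegacyUnforgeable] semantics, one defineProperty call per instance.
class EventPerInstance {
  constructor() {
    Object.defineProperty(this, "isTrusted", {
      enumerable: true,
      get() { return false; },
    });
  }
}

// After: the getter lives on the prototype, defined once at startup,
// so constructing an event no longer pays the defineProperty cost.
class EventOnPrototype {}
Object.defineProperty(EventOnPrototype.prototype, "isTrusted", {
  enumerable: true,
  get() { return false; },
});
```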
**This PR** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ------------------------------------------------------------------------------- ----------------------------- event constructor no init 36.69 ns/iter 27,257,504.6 (33.36 ns … 42.45 ns) 37.71 ns 39.61 ns 40.07 ns event constructor 36.7 ns/iter 27,246,776.6 (33.35 ns … 56.03 ns) 37.73 ns 40.14 ns 41.74 ns ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 ------------------------------------------------------------------------------- ----------------------------- event constructor no init 380.48 ns/iter 2,628,275.8 (366.66 ns … 399.39 ns) 384.58 ns 398.27 ns 399.39 ns event constructor 480.33 ns/iter 2,081,882.6 (466.67 ns … 503.47 ns) 484.27 ns 501.28 ns 503.47 ns ``` ```js Deno.bench("event constructor no init", () => { const event = new Event("foo"); }); Deno.bench("event constructor", () => { const event = new Event("foo", { bubbles: true, cancelable: false }); }); ``` towards https://github.com/denoland/deno/issues/20167 --- cli/tests/unit/event_test.ts | 15 +------ ext/web/02_event.js | 78 ++++++++++++++---------------------- tools/wpt/expectation.json | 4 +- 3 files changed, 34 insertions(+), 63 deletions(-) diff --git a/cli/tests/unit/event_test.ts b/cli/tests/unit/event_test.ts index d81023da1359ba..88ff803af7df85 100644 --- a/cli/tests/unit/event_test.ts +++ b/cli/tests/unit/event_test.ts @@ -1,5 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -import { assert, assertEquals, assertStringIncludes } from "./test_util.ts"; +import { assertEquals, assertStringIncludes } from "./test_util.ts"; Deno.test(function eventInitializedWithType() { const type = "click"; @@ -80,19 +80,6 @@ Deno.test(function eventInitializedWithNonStringType() { assertEquals(event.cancelable, false); }); -// ref https://github.com/web-platform-tests/wpt/blob/master/dom/events/Event-isTrusted.any.js -Deno.test(function eventIsTrusted() { - const desc1 = Object.getOwnPropertyDescriptor(new Event("x"), "isTrusted"); - assert(desc1); - assertEquals(typeof desc1.get, "function"); - - const desc2 = Object.getOwnPropertyDescriptor(new Event("x"), "isTrusted"); - assert(desc2); - assertEquals(typeof desc2!.get, "function"); - - assertEquals(desc1!.get, desc2!.get); -}); - Deno.test(function eventInspectOutput() { // deno-lint-ignore no-explicit-any const cases: Array<[any, (event: any) => string]> = [ diff --git a/ext/web/02_event.js b/ext/web/02_event.js index 859da2121787c1..d59a897a62672c 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -122,20 +122,6 @@ const isTrusted = ObjectGetOwnPropertyDescriptor({ }, }, "isTrusted").get; -webidl.converters.EventInit = webidl.createDictionaryConverter("EventInit", [{ - key: "bubbles", - defaultValue: false, - converter: webidl.converters.boolean, -}, { - key: "cancelable", - defaultValue: false, - converter: webidl.converters.boolean, -}, { - key: "composed", - defaultValue: false, - converter: webidl.converters.boolean, -}]); - const _attributes = Symbol("[[attributes]]"); const _canceledFlag = Symbol("[[canceledFlag]]"); const _stopPropagationFlag = Symbol("[[stopPropagationFlag]]"); @@ -161,36 +147,7 @@ class Event { this[_isTrusted] = false; this[_path] = []; - if (!eventInitDict[_skipInternalInit]) { - webidl.requiredArguments( - arguments.length, - 1, - "Failed 
to construct 'Event'", - ); - type = webidl.converters.DOMString( - type, - "Failed to construct 'Event'", - "Argument 1", - ); - const eventInit = webidl.converters.EventInit( - eventInitDict, - "Failed to construct 'Event'", - "Argument 2", - ); - this[_attributes] = { - type, - ...eventInit, - currentTarget: null, - eventPhase: Event.NONE, - target: null, - timeStamp: DateNow(), - }; - // [LegacyUnforgeable] - ReflectDefineProperty(this, "isTrusted", { - enumerable: true, - get: isTrusted, - }); - } else { + if (eventInitDict?.[_skipInternalInit]) { this[_attributes] = { type, data: eventInitDict.data ?? null, @@ -202,10 +159,30 @@ class Event { target: null, timeStamp: 0, }; - // TODO(@littledivy): Not spec compliant but performance is hurt badly - // for users of `_skipInternalInit`. - this.isTrusted = false; + return; } + + webidl.requiredArguments( + arguments.length, + 1, + "Failed to construct 'Event'", + ); + type = webidl.converters.DOMString( + type, + "Failed to construct 'Event'", + "Argument 1", + ); + + this[_attributes] = { + type, + bubbles: !!eventInitDict.bubbles, + cancelable: !!eventInitDict.cancelable, + composed: !!eventInitDict.composed, + currentTarget: null, + eventPhase: Event.NONE, + target: null, + timeStamp: DateNow(), + }; } [SymbolFor("Deno.privateCustomInspect")](inspect) { @@ -435,6 +412,13 @@ class Event { } } +// Not spec compliant. The spec defines it as [LegacyUnforgeable] +// but doing so has a big performance hit +ReflectDefineProperty(Event.prototype, "isTrusted", { + enumerable: true, + get: isTrusted, +}); + function defineEnumerableProps( Ctor, props, diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 326b7f55d3b9fe..c3deef9bd12048 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -2229,8 +2229,8 @@ ], "AddEventListenerOptions-signal.any.html": true, "AddEventListenerOptions-signal.any.worker.html": true, - "Event-isTrusted.any.html": true, - "Event-isTrusted.any.worker.html": true, + "Event-isTrusted.any.html": false, + "Event-isTrusted.any.worker.html": false, "EventTarget-add-remove-listener.any.html": true, "EventTarget-add-remove-listener.any.worker.html": true, "EventTarget-addEventListener.any.html": true, From 15f57a8535aef0455436b9e3aecbb880cf4ec968 Mon Sep 17 00:00:00 2001 From: Heyang Zhou Date: Thu, 17 Aug 2023 18:53:55 +0800 Subject: [PATCH 28/60] fix(ext/kv): retry transaction on `SQLITE_BUSY` errors (#20189) Properly handle the `SQLITE_BUSY` error code by retrying the transaction. Also wraps database initialization logic in a transaction to protect against incomplete/concurrent initializations. Fixes https://github.com/denoland/deno/issues/20116. 
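The retry strategy is easiest to see as a small conceptual sketch; the actual implementation is the Rust `sqlite_retry_loop` added below, and this JavaScript version only illustrates the control flow under the assumption that the busy condition can be detected from the error:

```js
// Conceptual sketch: retry the whole transaction while SQLite reports that
// another connection holds the lock, sleeping a small random interval
// (roughly 5-20 ms in the patch) between attempts.
async function withSqliteRetry(runTransaction, isBusyError) {
  while (true) {
    try {
      return await runTransaction();
    } catch (err) {
      if (isBusyError(err)) {
        const jitterMs = 5 + Math.floor(Math.random() * 15);
        await new Promise((resolve) => setTimeout(resolve, jitterMs));
        continue;
      }
      throw err;
    }
  }
}
```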
--- Cargo.lock | 1 + cli/tests/unit/kv_test.ts | 50 +++++++++++++++ ext/kv/Cargo.toml | 1 + ext/kv/sqlite.rs | 128 +++++++++++++++++++++++++++----------- 4 files changed, 144 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 31f069f0aee977..944694a6d73e0b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1220,6 +1220,7 @@ dependencies = [ "base64 0.13.1", "deno_core", "hex", + "log", "num-bigint", "rand", "rusqlite", diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 3081917da4fd5b..74a8ed6b340863 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -1756,3 +1756,53 @@ dbTest("atomic operation is exposed", (db) => { const ao = db.atomic(); assert(ao instanceof Deno.AtomicOperation); }); + +Deno.test({ + name: "racy open", + async fn() { + for (let i = 0; i < 100; i++) { + const filename = await Deno.makeTempFile({ prefix: "racy_open_db" }); + try { + const [db1, db2, db3] = await Promise.all([ + Deno.openKv(filename), + Deno.openKv(filename), + Deno.openKv(filename), + ]); + db1.close(); + db2.close(); + db3.close(); + } finally { + await Deno.remove(filename); + } + } + }, +}); + +Deno.test({ + name: "racy write", + async fn() { + const filename = await Deno.makeTempFile({ prefix: "racy_write_db" }); + const concurrency = 20; + const iterations = 5; + try { + const dbs = await Promise.all( + Array(concurrency).fill(0).map(() => Deno.openKv(filename)), + ); + try { + for (let i = 0; i < iterations; i++) { + await Promise.all( + dbs.map((db) => db.atomic().sum(["counter"], 1n).commit()), + ); + } + assertEquals( + ((await dbs[0].get(["counter"])).value as Deno.KvU64).value, + BigInt(concurrency * iterations), + ); + } finally { + dbs.forEach((db) => db.close()); + } + } finally { + await Deno.remove(filename); + } + }, +}); diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 7866605c0580d4..645689b74bd201 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -19,6 +19,7 @@ async-trait.workspace = true base64.workspace = true deno_core.workspace = true hex.workspace = true +log.workspace = true num-bigint.workspace = true rand.workspace = true rusqlite.workspace = true diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index 808bf9b4f84108..aea438d2dfa7ab 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -3,6 +3,7 @@ use std::borrow::Cow; use std::cell::Cell; use std::cell::RefCell; +use std::future::Future; use std::marker::PhantomData; use std::path::Path; use std::path::PathBuf; @@ -21,6 +22,7 @@ use deno_core::task::spawn; use deno_core::task::spawn_blocking; use deno_core::AsyncRefCell; use deno_core::OpState; +use rand::Rng; use rusqlite::params; use rusqlite::OpenFlags; use rusqlite::OptionalExtension; @@ -165,28 +167,41 @@ impl DatabaseHandler for SqliteDbHandler

{ } } - let default_storage_dir = self.default_storage_dir.clone(); - let conn = spawn_blocking(move || { - let conn = match (path.as_deref(), &default_storage_dir) { - (Some(":memory:"), _) | (None, None) => { - rusqlite::Connection::open_in_memory()? - } - (Some(path), _) => { - let flags = - OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); - rusqlite::Connection::open_with_flags(path, flags)? - } - (None, Some(path)) => { - std::fs::create_dir_all(path)?; - let path = path.join("kv.sqlite3"); - rusqlite::Connection::open(&path)? - } - }; + let conn = sqlite_retry_loop(|| { + let path = path.clone(); + let default_storage_dir = self.default_storage_dir.clone(); + async move { + spawn_blocking(move || { + let conn = match (path.as_deref(), &default_storage_dir) { + (Some(":memory:"), _) | (None, None) => { + rusqlite::Connection::open_in_memory()? + } + (Some(path), _) => { + let flags = + OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); + rusqlite::Connection::open_with_flags(path, flags)? + } + (None, Some(path)) => { + std::fs::create_dir_all(path)?; + let path = path.join("kv.sqlite3"); + rusqlite::Connection::open(path)? + } + }; - conn.pragma_update(None, "journal_mode", "wal")?; - conn.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; + conn.pragma_update(None, "journal_mode", "wal")?; + + Ok::<_, AnyError>(conn) + }) + .await + .unwrap() + } + }) + .await?; + let conn = Rc::new(AsyncRefCell::new(Cell::new(Some(conn)))); + SqliteDb::run_tx(conn.clone(), |tx| { + tx.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; - let current_version: usize = conn + let current_version: usize = tx .query_row( "select version from migration_state where k = 0", [], @@ -198,21 +213,22 @@ impl DatabaseHandler for SqliteDbHandler

{ for (i, migration) in MIGRATIONS.iter().enumerate() { let version = i + 1; if version > current_version { - conn.execute_batch(migration)?; - conn.execute( + tx.execute_batch(migration)?; + tx.execute( "replace into migration_state (k, version) values(?, ?)", [&0, &version], )?; } } - Ok::<_, AnyError>(conn) + tx.commit()?; + + Ok(()) }) - .await - .unwrap()?; + .await?; Ok(SqliteDb { - conn: Rc::new(AsyncRefCell::new(Cell::new(Some(conn)))), + conn, queue: OnceCell::new(), }) } @@ -223,11 +239,48 @@ pub struct SqliteDb { queue: OnceCell, } +async fn sqlite_retry_loop>>( + mut f: impl FnMut() -> Fut, +) -> Result { + loop { + match f().await { + Ok(x) => return Ok(x), + Err(e) => { + if let Some(x) = e.downcast_ref::() { + if x.sqlite_error_code() == Some(rusqlite::ErrorCode::DatabaseBusy) { + log::debug!("kv: Database is busy, retrying"); + tokio::time::sleep(Duration::from_millis( + rand::thread_rng().gen_range(5..20), + )) + .await; + continue; + } + } + return Err(e); + } + } + } +} + impl SqliteDb { async fn run_tx( conn: Rc>>>, f: F, ) -> Result + where + F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + + Clone + + Send + + 'static, + R: Send + 'static, + { + sqlite_retry_loop(|| Self::run_tx_inner(conn.clone(), f.clone())).await + } + + async fn run_tx_inner( + conn: Rc>>>, + f: F, + ) -> Result where F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + Send @@ -579,9 +632,10 @@ impl Database for SqliteDb { requests: Vec, _options: SnapshotReadOptions, ) -> Result, AnyError> { + let requests = Arc::new(requests); Self::run_tx(self.conn.clone(), move |tx| { let mut responses = Vec::with_capacity(requests.len()); - for request in requests { + for request in &*requests { let mut stmt = tx.prepare_cached(if request.reverse { STATEMENT_KV_RANGE_SCAN_REVERSE } else { @@ -622,9 +676,10 @@ impl Database for SqliteDb { &self, write: AtomicWrite, ) -> Result, AnyError> { + let write = Arc::new(write); let (has_enqueues, commit_result) = Self::run_tx(self.conn.clone(), move |tx| { - for check in write.checks { + for check in &write.checks { let real_versionstamp = tx .prepare_cached(STATEMENT_KV_POINT_GET_VERSION_ONLY)? .query_row([check.key.as_slice()], |row| row.get(0)) @@ -639,10 +694,10 @@ impl Database for SqliteDb { .prepare_cached(STATEMENT_INC_AND_GET_DATA_VERSION)? .query_row([], |row| row.get(0))?; - for mutation in write.mutations { - match mutation.kind { + for mutation in &write.mutations { + match &mutation.kind { MutationKind::Set(value) => { - let (value, encoding) = encode_value(&value); + let (value, encoding) = encode_value(value); let changed = tx .prepare_cached(STATEMENT_KV_POINT_SET)? 
.execute(params![mutation.key, &value, &encoding, &version])?; @@ -659,7 +714,7 @@ impl Database for SqliteDb { &tx, &mutation.key, "sum", - &operand, + operand, version, |a, b| a.wrapping_add(b), )?; @@ -669,7 +724,7 @@ impl Database for SqliteDb { &tx, &mutation.key, "min", - &operand, + operand, version, |a, b| a.min(b), )?; @@ -679,7 +734,7 @@ impl Database for SqliteDb { &tx, &mutation.key, "max", - &operand, + operand, version, |a, b| a.max(b), )?; @@ -693,12 +748,13 @@ impl Database for SqliteDb { .as_millis() as u64; let has_enqueues = !write.enqueues.is_empty(); - for enqueue in write.enqueues { + for enqueue in &write.enqueues { let id = Uuid::new_v4().to_string(); let backoff_schedule = serde_json::to_string( &enqueue .backoff_schedule - .or_else(|| Some(DEFAULT_BACKOFF_SCHEDULE.to_vec())), + .as_deref() + .or_else(|| Some(&DEFAULT_BACKOFF_SCHEDULE[..])), )?; let keys_if_undelivered = serde_json::to_string(&enqueue.keys_if_undelivered)?; From 32947e5ea56fb22f28713ed8798c43ce0e232c94 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Thu, 17 Aug 2023 07:52:37 -0600 Subject: [PATCH 29/60] feat(ext/web): resourceForReadableStream (#20180) Extracted from fast streams work. This is a resource wrapper for `ReadableStream`, allowing us to treat all `ReadableStream` instances as resources, and remove special paths in both `fetch` and `serve`. Performance with a ReadableStream response yields ~18% improvement: ``` return new Response(new ReadableStream({ start(controller) { controller.enqueue(new Uint8Array([104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100])); controller.close(); } }) ``` This patch: ``` 12:36 $ third_party/prebuilt/mac/wrk http://localhost:8080 Running 10s test @ http://localhost:8080 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 99.96us 100.03us 6.65ms 98.84% Req/Sec 47.73k 2.43k 51.02k 89.11% 959308 requests in 10.10s, 117.10MB read Requests/sec: 94978.71 Transfer/sec: 11.59MB ``` main: ``` Running 10s test @ http://localhost:8080 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 163.03us 685.51us 19.73ms 99.27% Req/Sec 39.50k 3.98k 66.11k 95.52% 789582 requests in 10.10s, 82.83MB read Requests/sec: 78182.65 Transfer/sec: 8.20MB ``` --- Cargo.lock | 2 + cli/tests/integration/js_unit_tests.rs | 1 + cli/tests/unit/serve_test.ts | 36 +-- cli/tests/unit/streams_test.ts | 299 +++++++++++++++++++++++++ cli/tests/unit_node/http_test.ts | 3 +- ext/http/00_serve.js | 154 ++----------- ext/http/http_next.rs | 48 ++-- ext/http/lib.rs | 1 - ext/http/response_body.rs | 83 ++----- ext/web/06_streams.js | 83 ++++++- ext/web/Cargo.toml | 2 + ext/web/lib.rs | 7 + ext/web/stream_resource.rs | 274 ++++++++++++++++++++++ 13 files changed, 743 insertions(+), 250 deletions(-) create mode 100644 cli/tests/unit/streams_test.ts create mode 100644 ext/web/stream_resource.rs diff --git a/Cargo.lock b/Cargo.lock index 944694a6d73e0b..6cdb4f551f4b21 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1505,6 +1505,7 @@ version = "0.146.0" dependencies = [ "async-trait", "base64-simd", + "bytes", "deno_bench_util", "deno_console", "deno_core", @@ -1512,6 +1513,7 @@ dependencies = [ "deno_webidl", "encoding_rs", "flate2", + "futures", "serde", "tokio", "uuid", diff --git a/cli/tests/integration/js_unit_tests.rs b/cli/tests/integration/js_unit_tests.rs index c8039f89c846ca..f54280b23cf012 100644 --- a/cli/tests/integration/js_unit_tests.rs +++ b/cli/tests/integration/js_unit_tests.rs @@ -78,6 +78,7 @@ util::unit_test_factory!( signal_test, stat_test, 
stdio_test, + streams_test, structured_clone_test, symlink_test, sync_test, diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 68d03e84638ea2..f0a5b430be9681 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -693,24 +693,30 @@ function createStreamTest(count: number, delay: number, action: string) { onError: createOnErrorCb(ac), }); - await listeningPromise; - const resp = await fetch(`http://127.0.0.1:${servePort}/`); - const text = await resp.text(); + try { + await listeningPromise; + const resp = await fetch(`http://127.0.0.1:${servePort}/`); + if (action == "Throw") { + try { + await resp.text(); + fail(); + } catch (_) { + // expected + } + } else { + const text = await resp.text(); - ac.abort(); - await server.finished; - let expected = ""; - if (action == "Throw" && count < 2 && delay < 1000) { - // NOTE: This is specific to the current implementation. In some cases where a stream errors, we - // don't send the first packet. - expected = ""; - } else { - for (let i = 0; i < count; i++) { - expected += `a${i}`; + let expected = ""; + for (let i = 0; i < count; i++) { + expected += `a${i}`; + } + + assertEquals(text, expected); } + } finally { + ac.abort(); + await server.finished; } - - assertEquals(text, expected); }); } diff --git a/cli/tests/unit/streams_test.ts b/cli/tests/unit/streams_test.ts new file mode 100644 index 00000000000000..4a573c9344644f --- /dev/null +++ b/cli/tests/unit/streams_test.ts @@ -0,0 +1,299 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { fail } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; +import { assertEquals, Deferred, deferred } from "./test_util.ts"; + +const { + core, + resourceForReadableStream, + // @ts-expect-error TypeScript (as of 3.7) does not support indexing namespaces by symbol +} = Deno[Deno.internal]; + +const LOREM = + "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; + +// Hello world, with optional close +// deno-lint-ignore no-explicit-any +function helloWorldStream(close?: boolean, completion?: Deferred) { + return new ReadableStream({ + start(controller) { + controller.enqueue("hello, world"); + if (close == true) { + controller.close(); + } + }, + cancel(reason) { + completion?.resolve(reason); + }, + }).pipeThrough(new TextEncoderStream()); +} + +// Hello world, with optional close +function errorStream(type: "string" | "controller" | "TypeError") { + return new ReadableStream({ + start(controller) { + controller.enqueue("hello, world"); + }, + pull(controller) { + if (type == "string") { + throw "Uh oh (string)!"; + } + if (type == "TypeError") { + throw TypeError("Uh oh (TypeError)!"); + } + controller.error("Uh oh (controller)!"); + }, + }).pipeThrough(new TextEncoderStream()); +} + +// Long stream with Lorem Ipsum text. +function longStream() { + return new ReadableStream({ + start(controller) { + for (let i = 0; i < 4; i++) { + setTimeout(() => { + controller.enqueue(LOREM); + if (i == 3) { + controller.close(); + } + }, i * 100); + } + }, + }).pipeThrough(new TextEncoderStream()); +} + +// Empty stream, closes either immediately or on a call to pull. 
+function emptyStream(onPull: boolean) { + return new ReadableStream({ + start(controller) { + if (!onPull) { + controller.close(); + } + }, + pull(controller) { + if (onPull) { + controller.close(); + } + }, + }).pipeThrough(new TextEncoderStream()); +} + +// Include an empty chunk +function emptyChunkStream() { + return new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1])); + controller.enqueue(new Uint8Array([])); + controller.enqueue(new Uint8Array([2])); + controller.close(); + }, + }); +} + +// Creates a stream with the given number of packets, a configurable delay between packets, and a final +// action (either "Throw" or "Close"). +function makeStreamWithCount( + count: number, + delay: number, + action: "Throw" | "Close", +): ReadableStream { + function doAction(controller: ReadableStreamDefaultController, i: number) { + if (i == count) { + if (action == "Throw") { + controller.error(new Error("Expected error!")); + } else { + controller.close(); + } + } else { + controller.enqueue(String.fromCharCode("a".charCodeAt(0) + i)); + + if (delay == 0) { + doAction(controller, i + 1); + } else { + setTimeout(() => doAction(controller, i + 1), delay); + } + } + } + + return new ReadableStream({ + start(controller) { + if (delay == 0) { + doAction(controller, 0); + } else { + setTimeout(() => doAction(controller, 0), delay); + } + }, + }).pipeThrough(new TextEncoderStream()); +} + +// Normal stream operation +Deno.test(async function readableStream() { + const rid = resourceForReadableStream(helloWorldStream()); + const buffer = new Uint8Array(1024); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 12); + core.ops.op_close(rid); +}); + +// Close the stream after reading everything +Deno.test(async function readableStreamClose() { + const cancel = deferred(); + const rid = resourceForReadableStream(helloWorldStream(false, cancel)); + const buffer = new Uint8Array(1024); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 12); + core.ops.op_close(rid); + assertEquals(await cancel, undefined); +}); + +// Close the stream without reading everything +Deno.test(async function readableStreamClosePartialRead() { + const cancel = deferred(); + const rid = resourceForReadableStream(helloWorldStream(false, cancel)); + const buffer = new Uint8Array(5); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 5); + core.ops.op_close(rid); + assertEquals(await cancel, undefined); +}); + +// Close the stream without reading anything +Deno.test(async function readableStreamCloseWithoutRead() { + const cancel = deferred(); + const rid = resourceForReadableStream(helloWorldStream(false, cancel)); + core.ops.op_close(rid); + assertEquals(await cancel, undefined); +}); + +Deno.test(async function readableStreamPartial() { + const rid = resourceForReadableStream(helloWorldStream()); + const buffer = new Uint8Array(5); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 5); + const buffer2 = new Uint8Array(1024); + const nread2 = await core.ops.op_read(rid, buffer2); + assertEquals(nread2, 7); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamLongReadAll() { + const rid = resourceForReadableStream(longStream()); + const buffer = await core.ops.op_read_all(rid); + assertEquals(buffer.length, LOREM.length * 4); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamLongByPiece() { + const rid = resourceForReadableStream(longStream()); + let total = 0; + 
for (let i = 0; i < 100; i++) { + const length = await core.ops.op_read(rid, new Uint8Array(16)); + total += length; + if (length == 0) { + break; + } + } + assertEquals(total, LOREM.length * 4); + core.ops.op_close(rid); +}); + +for ( + const type of [ + "string", + "TypeError", + "controller", + ] as ("string" | "TypeError" | "controller")[] +) { + Deno.test(`readableStreamError_${type}`, async function () { + const rid = resourceForReadableStream(errorStream(type)); + assertEquals(12, await core.ops.op_read(rid, new Uint8Array(16))); + try { + await core.ops.op_read(rid, new Uint8Array(1)); + fail(); + } catch (e) { + assertEquals(e.message, `Uh oh (${type})!`); + } + core.ops.op_close(rid); + }); +} + +Deno.test(async function readableStreamEmptyOnStart() { + const rid = resourceForReadableStream(emptyStream(true)); + const buffer = new Uint8Array(1024); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 0); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamEmptyOnPull() { + const rid = resourceForReadableStream(emptyStream(false)); + const buffer = new Uint8Array(1024); + const nread = await core.ops.op_read(rid, buffer); + assertEquals(nread, 0); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamEmptyReadAll() { + const rid = resourceForReadableStream(emptyStream(false)); + const buffer = await core.ops.op_read_all(rid); + assertEquals(buffer.length, 0); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamWithEmptyChunk() { + const rid = resourceForReadableStream(emptyChunkStream()); + const buffer = await core.ops.op_read_all(rid); + assertEquals(buffer, new Uint8Array([1, 2])); + core.ops.op_close(rid); +}); + +Deno.test(async function readableStreamWithEmptyChunkOneByOne() { + const rid = resourceForReadableStream(emptyChunkStream()); + assertEquals(1, await core.ops.op_read(rid, new Uint8Array(1))); + assertEquals(1, await core.ops.op_read(rid, new Uint8Array(1))); + assertEquals(0, await core.ops.op_read(rid, new Uint8Array(1))); + core.ops.op_close(rid); +}); + +for (const count of [0, 1, 2, 3]) { + for (const delay of [0, 1, 10]) { + // Creating a stream that errors in start will throw + if (delay > 0) { + createStreamTest(count, delay, "Throw"); + } + createStreamTest(count, delay, "Close"); + } +} + +function createStreamTest( + count: number, + delay: number, + action: "Throw" | "Close", +) { + Deno.test(`streamCount${count}Delay${delay}${action}`, async () => { + let rid; + try { + rid = resourceForReadableStream( + makeStreamWithCount(count, delay, action), + ); + for (let i = 0; i < count; i++) { + const buffer = new Uint8Array(1); + await core.ops.op_read(rid, buffer); + } + if (action == "Throw") { + try { + const buffer = new Uint8Array(1); + assertEquals(1, await core.ops.op_read(rid, buffer)); + fail(); + } catch (e) { + // We expect this to be thrown + assertEquals(e.message, "Expected error!"); + } + } else { + const buffer = new Uint8Array(1); + assertEquals(0, await core.ops.op_read(rid, buffer)); + } + } finally { + core.ops.op_close(rid); + } + }); +} diff --git a/cli/tests/unit_node/http_test.ts b/cli/tests/unit_node/http_test.ts index a361ff25711e7c..706c672f14a01c 100644 --- a/cli/tests/unit_node/http_test.ts +++ b/cli/tests/unit_node/http_test.ts @@ -141,8 +141,7 @@ Deno.test("[node/http] chunked response", async () => { } }); -// TODO(kt3k): This test case exercises the workaround for https://github.com/denoland/deno/issues/17194 -// This should be removed when 
#17194 is resolved. +// Test empty chunks: https://github.com/denoland/deno/issues/17194 Deno.test("[node/http] empty chunk in the middle of response", async () => { const promise = deferred(); diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 3447f48e21415d..265b797066fbd6 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -30,9 +30,9 @@ import { import { Deferred, getReadableStreamResourceBacking, - readableStreamClose, readableStreamForRid, ReadableStreamPrototype, + resourceForReadableStream, } from "ext:deno_web/06_streams.js"; import { listen, TcpConn } from "ext:deno_net/01_net.js"; import { listenTls } from "ext:deno_net/02_tls.js"; @@ -41,10 +41,6 @@ const { Error, ObjectPrototypeIsPrototypeOf, PromisePrototypeCatch, - SafeSet, - SafeSetIterator, - SetPrototypeAdd, - SetPrototypeDelete, Symbol, SymbolFor, TypeError, @@ -61,7 +57,6 @@ const { op_http_set_promise_complete, op_http_set_response_body_bytes, op_http_set_response_body_resource, - op_http_set_response_body_stream, op_http_set_response_body_text, op_http_set_response_header, op_http_set_response_headers, @@ -339,7 +334,6 @@ class InnerRequest { class CallbackContext { abortController; - responseBodies; scheme; fallbackHost; serverRid; @@ -352,7 +346,6 @@ class CallbackContext { { once: true }, ); this.abortController = new AbortController(); - this.responseBodies = new SafeSet(); this.serverRid = args[0]; this.scheme = args[1]; this.fallbackHost = args[2]; @@ -379,23 +372,24 @@ class ServeHandlerInfo { } } -function fastSyncResponseOrStream(req, respBody) { +function fastSyncResponseOrStream(req, respBody, status) { if (respBody === null || respBody === undefined) { // Don't set the body - return null; + op_http_set_promise_complete(req, status); + return; } const stream = respBody.streamOrStatic; const body = stream.body; if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, body)) { - op_http_set_response_body_bytes(req, body); - return null; + op_http_set_response_body_bytes(req, body, status); + return; } if (typeof body === "string") { - op_http_set_response_body_text(req, body); - return null; + op_http_set_response_body_text(req, body, status); + return; } // At this point in the response it needs to be a stream @@ -408,115 +402,16 @@ function fastSyncResponseOrStream(req, respBody) { req, resourceBacking.rid, resourceBacking.autoClose, + status, + ); + } else { + const rid = resourceForReadableStream(stream); + op_http_set_response_body_resource( + req, + rid, + true, + status, ); - return null; - } - - return stream; -} - -async function asyncResponse(responseBodies, req, status, stream) { - const reader = stream.getReader(); - let responseRid; - let closed = false; - let timeout; - - try { - // IMPORTANT: We get a performance boost from this optimization, but V8 is very - // sensitive to the order and structure. Benchmark any changes to this code. - - // Optimize for streams that are done in zero or one packets. We will not - // have to allocate a resource in this case. - const { value: value1, done: done1 } = await reader.read(); - if (done1) { - closed = true; - // Exit 1: no response body at all, extreme fast path - // Reader will be closed by finally block - return; - } - - // The second value cannot block indefinitely, as someone may be waiting on a response - // of the first packet that may influence this packet. We set this timeout arbitrarily to 250ms - // and we race it. 
- let timeoutPromise; - timeout = setTimeout(() => { - responseRid = op_http_set_response_body_stream(req); - SetPrototypeAdd(responseBodies, responseRid); - op_http_set_promise_complete(req, status); - // TODO(mmastrac): if this promise fails before we get to the await below, it crashes - // the process with an error: - // - // 'Uncaught (in promise) BadResource: failed to write'. - // - // To avoid this, we're going to swallow errors here and allow the code later in the - // file to re-throw them in a way that doesn't appear to be an uncaught promise rejection. - timeoutPromise = PromisePrototypeCatch( - core.writeAll(responseRid, value1), - () => null, - ); - }, 250); - const { value: value2, done: done2 } = await reader.read(); - - if (timeoutPromise) { - await timeoutPromise; - if (done2) { - closed = true; - // Exit 2(a): read 2 is EOS, and timeout resolved. - // Reader will be closed by finally block - // Response stream will be closed by finally block. - return; - } - - // Timeout resolved, value1 written but read2 is not EOS. Carry value2 forward. - } else { - clearTimeout(timeout); - timeout = undefined; - - if (done2) { - // Exit 2(b): read 2 is EOS, and timeout did not resolve as we read fast enough. - // Reader will be closed by finally block - // No response stream - closed = true; - op_http_set_response_body_bytes(req, value1); - return; - } - - responseRid = op_http_set_response_body_stream(req); - SetPrototypeAdd(responseBodies, responseRid); - op_http_set_promise_complete(req, status); - // Write our first packet - await core.writeAll(responseRid, value1); - } - - await core.writeAll(responseRid, value2); - while (true) { - const { value, done } = await reader.read(); - if (done) { - closed = true; - break; - } - await core.writeAll(responseRid, value); - } - } catch (error) { - closed = true; - try { - await reader.cancel(error); - } catch { - // Pass - } - } finally { - if (!closed) { - readableStreamClose(reader); - } - if (timeout !== undefined) { - clearTimeout(timeout); - } - if (responseRid) { - core.tryClose(responseRid); - SetPrototypeDelete(responseBodies, responseRid); - } else { - op_http_set_promise_complete(req, status); - } } } @@ -528,7 +423,6 @@ async function asyncResponse(responseBodies, req, status, stream) { * This function returns a promise that will only reject in the case of abnormal exit. 
*/ function mapToCallback(context, callback, onError) { - const responseBodies = context.responseBodies; const signal = context.abortController.signal; const hasCallback = callback.length > 0; const hasOneCallback = callback.length === 1; @@ -591,15 +485,7 @@ function mapToCallback(context, callback, onError) { } } - // Attempt to respond quickly to this request, otherwise extract the stream - const stream = fastSyncResponseOrStream(req, inner.body); - if (stream !== null) { - // Handle the stream asynchronously - await asyncResponse(responseBodies, req, status, stream); - } else { - op_http_set_promise_complete(req, status); - } - + fastSyncResponseOrStream(req, inner.body, status); innerRequest?.close(); }; } @@ -755,10 +641,6 @@ function serveHttpOn(context, callback) { } PromisePrototypeCatch(callback(req), promiseErrorHandler); } - - for (const streamRid of new SafeSetIterator(context.responseBodies)) { - core.tryClose(streamRid); - } })(); return { diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 2e9b315ca14341..60ef83b0f71982 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -10,7 +10,6 @@ use crate::request_properties::HttpPropertyExtractor; use crate::response_body::Compression; use crate::response_body::ResponseBytes; use crate::response_body::ResponseBytesInner; -use crate::response_body::V8StreamHttpResponseBody; use crate::slab::slab_drop; use crate::slab::slab_get; use crate::slab::slab_init; @@ -30,6 +29,7 @@ use deno_core::task::JoinHandle; use deno_core::v8; use deno_core::AsyncRefCell; use deno_core::AsyncResult; +use deno_core::BufView; use deno_core::ByteString; use deno_core::CancelFuture; use deno_core::CancelHandle; @@ -573,6 +573,7 @@ fn ensure_vary_accept_encoding(hmap: &mut HeaderMap) { fn set_response( slab_id: SlabId, length: Option, + status: u16, response_fn: impl FnOnce(Compression) -> ResponseBytesInner, ) { let mut http = slab_get(slab_id); @@ -583,7 +584,14 @@ fn set_response( length, response.headers_mut(), ); - response.body_mut().initialize(response_fn(compression)) + response.body_mut().initialize(response_fn(compression)); + + // The Javascript code should never provide a status that is invalid here (see 23_response.js), so we + // will quitely ignore invalid values. + if let Ok(code) = StatusCode::from_u16(status) { + *response.status_mut() = code; + } + http.complete(); } #[op2(fast)] @@ -592,6 +600,7 @@ pub fn op_http_set_response_body_resource( #[smi] slab_id: SlabId, #[smi] stream_rid: ResourceId, auto_close: bool, + status: u16, ) -> Result<(), AnyError> { // If the stream is auto_close, we will hold the last ref to it until the response is complete. let resource = if auto_close { @@ -603,6 +612,7 @@ pub fn op_http_set_response_body_resource( set_response( slab_id, resource.size_hint().1.map(|s| s as usize), + status, move |compression| { ResponseBytesInner::from_resource(compression, resource, auto_close) }, @@ -611,43 +621,35 @@ pub fn op_http_set_response_body_resource( Ok(()) } -#[op2(fast)] -#[smi] -pub fn op_http_set_response_body_stream( - state: &mut OpState, - #[smi] slab_id: SlabId, -) -> Result { - // TODO(mmastrac): what should this channel size be? 
- let (tx, rx) = tokio::sync::mpsc::channel(1); - set_response(slab_id, None, |compression| { - ResponseBytesInner::from_v8(compression, rx) - }); - - Ok(state.resource_table.add(V8StreamHttpResponseBody::new(tx))) -} - #[op2(fast)] pub fn op_http_set_response_body_text( #[smi] slab_id: SlabId, #[string] text: String, + status: u16, ) { if !text.is_empty() { - set_response(slab_id, Some(text.len()), |compression| { + set_response(slab_id, Some(text.len()), status, |compression| { ResponseBytesInner::from_vec(compression, text.into_bytes()) }); + } else { + op_http_set_promise_complete::call(slab_id, status); } } -#[op2(fast)] +// Skipping `fast` because we prefer an owned buffer here. +#[op2] pub fn op_http_set_response_body_bytes( #[smi] slab_id: SlabId, - #[buffer] buffer: &[u8], + #[buffer] buffer: JsBuffer, + status: u16, ) { if !buffer.is_empty() { - set_response(slab_id, Some(buffer.len()), |compression| { - ResponseBytesInner::from_slice(compression, buffer) + set_response(slab_id, Some(buffer.len()), status, |compression| { + ResponseBytesInner::from_bufview(compression, BufView::from(buffer)) }); - }; + } else { + op_http_set_promise_complete::call(slab_id, status); + } } #[op2(async)] diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 8060b5a1e89eee..e0c5c89d02595d 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -115,7 +115,6 @@ deno_core::extension!( http_next::op_http_set_promise_complete, http_next::op_http_set_response_body_bytes, http_next::op_http_set_response_body_resource, - http_next::op_http_set_response_body_stream, http_next::op_http_set_response_body_text, http_next::op_http_set_response_header, http_next::op_http_set_response_headers, diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs index 3697b2732f0f07..bd9d6f43326e1a 100644 --- a/ext/http/response_body.rs +++ b/ext/http/response_body.rs @@ -1,5 +1,4 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use std::borrow::Cow; use std::cell::RefCell; use std::future::Future; use std::io::Write; @@ -11,18 +10,12 @@ use brotli::enc::encode::BrotliEncoderParameter; use brotli::ffi::compressor::BrotliEncoderState; use bytes::Bytes; use bytes::BytesMut; -use deno_core::error::bad_resource; use deno_core::error::AnyError; use deno_core::futures::ready; use deno_core::futures::FutureExt; -use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::BufView; -use deno_core::CancelHandle; -use deno_core::CancelTryFuture; -use deno_core::RcRef; use deno_core::Resource; -use deno_core::WriteOutcome; use flate2::write::GzEncoder; use http::HeaderMap; use hyper1::body::Body; @@ -126,8 +119,8 @@ pub enum Compression { pub enum ResponseStream { /// A resource stream, piped in fast mode. Resource(ResourceBodyAdapter), - /// A JS-backed stream, written in JS and transported via pipe. 
- V8Stream(tokio::sync::mpsc::Receiver), + #[cfg(test)] + TestChannel(tokio::sync::mpsc::Receiver), } #[derive(Default)] @@ -217,13 +210,6 @@ impl ResponseBytesInner { } } - pub fn from_v8( - compression: Compression, - rx: tokio::sync::mpsc::Receiver, - ) -> Self { - Self::from_stream(compression, ResponseStream::V8Stream(rx)) - } - pub fn from_resource( compression: Compression, stm: Rc, @@ -235,12 +221,12 @@ impl ResponseBytesInner { ) } - pub fn from_slice(compression: Compression, bytes: &[u8]) -> Self { + pub fn from_bufview(compression: Compression, buf: BufView) -> Self { match compression { Compression::GZip => { let mut writer = GzEncoder::new(Vec::new(), flate2::Compression::fast()); - writer.write_all(bytes).unwrap(); + writer.write_all(&buf).unwrap(); Self::Bytes(BufView::from(writer.finish().unwrap())) } Compression::Brotli => { @@ -251,11 +237,11 @@ impl ResponseBytesInner { // (~4MB) let mut writer = brotli::CompressorWriter::new(Vec::new(), 65 * 1024, 6, 22); - writer.write_all(bytes).unwrap(); + writer.write_all(&buf).unwrap(); writer.flush().unwrap(); Self::Bytes(BufView::from(writer.into_inner())) } - _ => Self::Bytes(BufView::from(bytes.to_vec())), + _ => Self::Bytes(buf), } } @@ -368,14 +354,16 @@ impl PollFrame for ResponseStream { ) -> std::task::Poll { match &mut *self { ResponseStream::Resource(res) => Pin::new(res).poll_frame(cx), - ResponseStream::V8Stream(res) => Pin::new(res).poll_frame(cx), + #[cfg(test)] + ResponseStream::TestChannel(rx) => Pin::new(rx).poll_frame(cx), } } fn size_hint(&self) -> SizeHint { match self { ResponseStream::Resource(res) => res.size_hint(), - ResponseStream::V8Stream(res) => res.size_hint(), + #[cfg(test)] + ResponseStream::TestChannel(_) => SizeHint::default(), } } } @@ -414,6 +402,7 @@ impl PollFrame for ResourceBodyAdapter { } } +#[cfg(test)] impl PollFrame for tokio::sync::mpsc::Receiver { fn poll_frame( mut self: Pin<&mut Self>, @@ -761,52 +750,6 @@ impl PollFrame for BrotliResponseStream { } } -/// A response body object that can be passed to V8. This body will feed byte buffers to a channel which -/// feed's hyper's HTTP response. 
-pub struct V8StreamHttpResponseBody( - AsyncRefCell>>, - CancelHandle, -); - -impl V8StreamHttpResponseBody { - pub fn new(sender: tokio::sync::mpsc::Sender) -> Self { - Self(AsyncRefCell::new(Some(sender)), CancelHandle::default()) - } -} - -impl Resource for V8StreamHttpResponseBody { - fn name(&self) -> Cow { - "responseBody".into() - } - - fn write( - self: Rc, - buf: BufView, - ) -> AsyncResult { - let cancel_handle = RcRef::map(&self, |this| &this.1); - Box::pin( - async move { - let nwritten = buf.len(); - - let res = RcRef::map(self, |this| &this.0).borrow().await; - if let Some(tx) = res.as_ref() { - tx.send(buf) - .await - .map_err(|_| bad_resource("failed to write"))?; - Ok(WriteOutcome::Full { nwritten }) - } else { - Err(bad_resource("failed to write")) - } - } - .try_or_cancel(cancel_handle), - ) - } - - fn close(self: Rc) { - self.1.cancel(); - } -} - #[cfg(test)] mod tests { use super::*; @@ -892,7 +835,7 @@ mod tests { expected.extend(v); } let (tx, rx) = tokio::sync::mpsc::channel(1); - let underlying = ResponseStream::V8Stream(rx); + let underlying = ResponseStream::TestChannel(rx); let mut resp = GZipResponseStream::new(underlying); let handle = tokio::task::spawn(async move { for chunk in v { @@ -934,7 +877,7 @@ mod tests { expected.extend(v); } let (tx, rx) = tokio::sync::mpsc::channel(1); - let underlying = ResponseStream::V8Stream(rx); + let underlying = ResponseStream::TestChannel(rx); let mut resp = BrotliResponseStream::new(underlying); let handle = tokio::task::spawn(async move { for chunk in v { diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index 01f84aa2cfa486..0849d221d11885 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -1,4 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +// deno-lint-ignore-file camelcase // @ts-check /// @@ -7,7 +8,17 @@ /// const core = globalThis.Deno.core; -const ops = core.ops; +const internals = globalThis.__bootstrap.internals; +const { + op_arraybuffer_was_detached, + op_transfer_arraybuffer, + op_readable_stream_resource_allocate, + op_readable_stream_resource_get_sink, + op_readable_stream_resource_write_error, + op_readable_stream_resource_write_buf, + op_readable_stream_resource_close, + op_readable_stream_resource_await_close, +} = core.ensureFastOps(); import * as webidl from "ext:deno_webidl/00_webidl.js"; import { structuredClone } from "ext:deno_web/02_structured_clone.js"; import { @@ -61,6 +72,7 @@ const { SafeWeakMap, // TODO(lucacasonato): add SharedArrayBuffer to primordials // SharedArrayBufferPrototype, + String, Symbol, SymbolAsyncIterator, SymbolIterator, @@ -218,7 +230,7 @@ function isDetachedBuffer(O) { return false; } return ArrayBufferPrototypeGetByteLength(O) === 0 && - ops.op_arraybuffer_was_detached(O); + op_arraybuffer_was_detached(O); } /** @@ -244,7 +256,7 @@ function canTransferArrayBuffer(O) { * @returns {ArrayBufferLike} */ function transferArrayBuffer(O) { - return ops.op_transfer_arraybuffer(O); + return op_transfer_arraybuffer(O); } /** @@ -695,6 +707,68 @@ function isReadableStreamDisturbed(stream) { return stream[_disturbed]; } +/** + * Create a new resource that wraps a ReadableStream. The resource will support + * read operations, and those read operations will be fed by the output of the + * ReadableStream source. 
+ * @param {ReadableStream} stream + * @returns {number} + */ +function resourceForReadableStream(stream) { + const reader = acquireReadableStreamDefaultReader(stream); + + // Allocate the resource + const rid = op_readable_stream_resource_allocate(); + + // Close the Reader we get from the ReadableStream when the resource is closed, ignoring any errors + PromisePrototypeCatch( + PromisePrototypeThen( + op_readable_stream_resource_await_close(rid), + () => reader.cancel(), + ), + () => {}, + ); + + // The ops here look like op_write_all/op_close, but we're not actually writing to a + // real resource. + (async () => { + try { + // This allocation is freed in the finally block below, guaranteeing it won't leak + const sink = op_readable_stream_resource_get_sink(rid); + try { + while (true) { + let value; + try { + const read = await reader.read(); + value = read.value; + if (read.done) { + break; + } + } catch (err) { + const message = err.message; + if (message) { + await op_readable_stream_resource_write_error(sink, err.message); + } else { + await op_readable_stream_resource_write_error(sink, String(err)); + } + break; + } + // If the chunk has non-zero length, write it + if (value.length > 0) { + await op_readable_stream_resource_write_buf(sink, value); + } + } + } finally { + op_readable_stream_resource_close(sink); + } + } catch (err) { + // Something went terribly wrong with this stream -- log and continue + console.error("Unexpected internal error on stream", err); + } + })(); + return rid; +} + const DEFAULT_CHUNK_SIZE = 64 * 1024; // 64 KiB // A finalization registry to clean up underlying resources that are GC'ed. @@ -6454,6 +6528,8 @@ webidl.converters.StreamPipeOptions = webidl { key: "signal", converter: webidl.converters.AbortSignal }, ]); +internals.resourceForReadableStream = resourceForReadableStream; + export { // Non-Public _state, @@ -6482,6 +6558,7 @@ export { ReadableStreamPrototype, readableStreamTee, readableStreamThrowIfErrored, + resourceForReadableStream, TransformStream, TransformStreamDefaultController, WritableStream, diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index dbc2df8c09d785..b923bc95ef6402 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -16,9 +16,11 @@ path = "lib.rs" [dependencies] async-trait.workspace = true base64-simd = "0.8" +bytes.workspace = true deno_core.workspace = true encoding_rs.workspace = true flate2.workspace = true +futures.workspace = true serde = "1.0.149" tokio.workspace = true uuid = { workspace = true, features = ["serde"] } diff --git a/ext/web/lib.rs b/ext/web/lib.rs index 374815804cc673..88937efb2cac38 100644 --- a/ext/web/lib.rs +++ b/ext/web/lib.rs @@ -4,6 +4,7 @@ mod blob; mod compression; mod hr_timer_lock; mod message_port; +mod stream_resource; mod timers; use deno_core::error::range_error; @@ -90,6 +91,12 @@ deno_core::extension!(deno_web, op_cancel_handle, op_sleep, op_transfer_arraybuffer, + stream_resource::op_readable_stream_resource_allocate, + stream_resource::op_readable_stream_resource_get_sink, + stream_resource::op_readable_stream_resource_write_error, + stream_resource::op_readable_stream_resource_write_buf, + stream_resource::op_readable_stream_resource_close, + stream_resource::op_readable_stream_resource_await_close, ], esm = [ "00_infra.js", diff --git a/ext/web/stream_resource.rs b/ext/web/stream_resource.rs new file mode 100644 index 00000000000000..4c2a7564832f31 --- /dev/null +++ b/ext/web/stream_resource.rs @@ -0,0 +1,274 @@ +// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. +use deno_core::anyhow::Error; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::op2; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::CancelFuture; +use deno_core::CancelHandle; +use deno_core::JsBuffer; +use deno_core::OpState; +use deno_core::RcLike; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use futures::stream::Peekable; +use futures::Stream; +use futures::StreamExt; +use std::borrow::Cow; +use std::cell::RefCell; +use std::ffi::c_void; +use std::future::Future; +use std::pin::Pin; +use std::rc::Rc; +use std::task::Context; +use std::task::Poll; +use std::task::Waker; +use tokio::sync::mpsc::Receiver; +use tokio::sync::mpsc::Sender; + +type SenderCell = RefCell>>>; + +// This indirection allows us to more easily integrate the fast streams work at a later date +#[repr(transparent)] +struct ChannelStreamAdapter(C); + +impl Stream for ChannelStreamAdapter +where + C: ChannelBytesRead, +{ + type Item = Result; + fn poll_next( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + self.0.poll_recv(cx) + } +} + +pub trait ChannelBytesRead: Unpin + 'static { + fn poll_recv( + &mut self, + cx: &mut Context<'_>, + ) -> Poll>>; +} + +impl ChannelBytesRead for tokio::sync::mpsc::Receiver> { + fn poll_recv( + &mut self, + cx: &mut Context<'_>, + ) -> Poll>> { + self.poll_recv(cx) + } +} + +#[allow(clippy::type_complexity)] +struct ReadableStreamResource { + reader: AsyncRefCell< + Peekable>>>, + >, + cancel_handle: CancelHandle, + data: ReadableStreamResourceData, +} + +impl ReadableStreamResource { + pub fn cancel_handle(self: &Rc) -> impl RcLike { + RcRef::map(self, |s| &s.cancel_handle).clone() + } + + async fn read(self: Rc, limit: usize) -> Result { + let cancel_handle = self.cancel_handle(); + let peekable = RcRef::map(self, |this| &this.reader); + let mut peekable = peekable.borrow_mut().await; + match Pin::new(&mut *peekable) + .peek_mut() + .or_cancel(cancel_handle) + .await? 
+ { + None => Ok(BufView::empty()), + // Take the actual error since we only have a reference to it + Some(Err(_)) => Err(peekable.next().await.unwrap().err().unwrap()), + Some(Ok(bytes)) => { + if bytes.len() <= limit { + // We can safely take the next item since we peeked it + return peekable.next().await.unwrap(); + } + // The remainder of the bytes after we split it is still left in the peek buffer + let ret = bytes.split_to(limit); + Ok(ret) + } + } + } +} + +impl Resource for ReadableStreamResource { + fn name(&self) -> Cow { + Cow::Borrowed("readableStream") + } + + fn read(self: Rc, limit: usize) -> AsyncResult { + Box::pin(ReadableStreamResource::read(self, limit)) + } +} + +// TODO(mmastrac): Move this to deno_core +#[derive(Clone, Debug, Default)] +pub struct CompletionHandle { + inner: Rc>, +} + +#[derive(Debug, Default)] +struct CompletionHandleInner { + complete: bool, + success: bool, + waker: Option, +} + +impl CompletionHandle { + pub fn complete(&self, success: bool) { + let mut mut_self = self.inner.borrow_mut(); + mut_self.complete = true; + mut_self.success = success; + if let Some(waker) = mut_self.waker.take() { + drop(mut_self); + waker.wake(); + } + } +} + +impl Future for CompletionHandle { + type Output = bool; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let mut mut_self = self.inner.borrow_mut(); + if mut_self.complete { + return std::task::Poll::Ready(mut_self.success); + } + + mut_self.waker = Some(cx.waker().clone()); + std::task::Poll::Pending + } +} + +fn sender_closed() -> Error { + type_error("sender closed") +} + +/// Allocate a resource that wraps a ReadableStream. +#[op2(fast)] +#[smi] +pub fn op_readable_stream_resource_allocate(state: &mut OpState) -> ResourceId { + let (tx, rx) = tokio::sync::mpsc::channel(1); + let tx = RefCell::new(Some(tx)); + let completion = CompletionHandle::default(); + let tx = Box::new(tx); + let resource = ReadableStreamResource { + cancel_handle: Default::default(), + reader: AsyncRefCell::new(ChannelStreamAdapter(rx).peekable()), + data: ReadableStreamResourceData { + tx: Box::into_raw(tx), + completion, + }, + }; + state.resource_table.add(resource) +} + +#[op2(fast)] +pub fn op_readable_stream_resource_get_sink( + state: &mut OpState, + #[smi] rid: ResourceId, +) -> *const c_void { + let Ok(resource) = state.resource_table.get::(rid) else { + return std::ptr::null(); + }; + resource.data.tx as _ +} + +fn get_sender(sender: *const c_void) -> Option>> { + // SAFETY: We know this is a valid v8::External + unsafe { + (sender as *const SenderCell) + .as_ref() + .and_then(|r| r.borrow_mut().as_ref().cloned()) + } +} + +fn drop_sender(sender: *const c_void) { + // SAFETY: We know this is a valid v8::External + unsafe { + assert!(!sender.is_null()); + _ = Box::from_raw(sender as *mut SenderCell); + } +} + +#[op2(async)] +pub fn op_readable_stream_resource_write_buf( + sender: *const c_void, + #[buffer] buffer: JsBuffer, +) -> impl Future> { + let sender = get_sender(sender); + async move { + let sender = sender.ok_or_else(sender_closed)?; + sender + .send(Ok(buffer.into())) + .await + .map_err(|_| sender_closed())?; + Ok(()) + } +} + +#[op2(async)] +pub fn op_readable_stream_resource_write_error( + sender: *const c_void, + #[string] error: String, +) -> impl Future> { + let sender = get_sender(sender); + async move { + let sender = sender.ok_or_else(sender_closed)?; + sender + .send(Err(type_error(Cow::Owned(error)))) + .await + .map_err(|_| sender_closed())?; + Ok(()) + } 
+} + +#[op2(fast)] +#[smi] +pub fn op_readable_stream_resource_close(sender: *const c_void) { + drop_sender(sender); +} + +#[op2(async)] +pub fn op_readable_stream_resource_await_close( + state: &mut OpState, + #[smi] rid: ResourceId, +) -> impl Future { + let completion = state + .resource_table + .get::(rid) + .ok() + .map(|r| r.data.completion.clone()); + + async move { + if let Some(completion) = completion { + completion.await; + } + } +} + +struct ReadableStreamResourceData { + tx: *const SenderCell, + completion: CompletionHandle, +} + +impl Drop for ReadableStreamResourceData { + fn drop(&mut self) { + self.completion.complete(true); + } +} From 1b4c910075e4d16c0904a27cf0d1cdb9e7ded4a5 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 17 Aug 2023 10:39:06 -0400 Subject: [PATCH 30/60] fix(npm): do not panic providing file url to require.resolve paths (#20182) Closes #19922 --- cli/tests/integration/npm_tests.rs | 10 ++++++++++ .../npm/require_resolve_url/package.json | 7 +++++++ .../npm/require_resolve_url/url_paths.out | 2 ++ .../npm/require_resolve_url/url_paths.ts | 12 ++++++++++++ ext/node/ops/require.rs | 18 +++++++++++------- 5 files changed, 42 insertions(+), 7 deletions(-) create mode 100644 cli/tests/testdata/npm/require_resolve_url/package.json create mode 100644 cli/tests/testdata/npm/require_resolve_url/url_paths.out create mode 100644 cli/tests/testdata/npm/require_resolve_url/url_paths.ts diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs index fb8d88dcac5ca7..f27befe5476248 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -2155,3 +2155,13 @@ itest!(check_package_file_dts_dmts_dcts { http_server: true, exit_code: 1, }); + +itest!(require_resolve_url_paths { + args: "run -A --quiet --node-modules-dir url_paths.ts", + output: "npm/require_resolve_url/url_paths.out", + envs: env_vars_for_npm_tests_no_sync_download(), + http_server: true, + exit_code: 0, + cwd: Some("npm/require_resolve_url/"), + copy_temp_dir: Some("npm/require_resolve_url/"), +}); diff --git a/cli/tests/testdata/npm/require_resolve_url/package.json b/cli/tests/testdata/npm/require_resolve_url/package.json new file mode 100644 index 00000000000000..814c77a5bfa920 --- /dev/null +++ b/cli/tests/testdata/npm/require_resolve_url/package.json @@ -0,0 +1,7 @@ +{ + "name": "@denotest/example", + "version": "1.0.0", + "dependencies": { + "@denotest/esm-basic": "*" + } +} diff --git a/cli/tests/testdata/npm/require_resolve_url/url_paths.out b/cli/tests/testdata/npm/require_resolve_url/url_paths.out new file mode 100644 index 00000000000000..404d7dcbacfb94 --- /dev/null +++ b/cli/tests/testdata/npm/require_resolve_url/url_paths.out @@ -0,0 +1,2 @@ +file:///[WILDCARD]/npm/require_resolve_url/ +[WILDCARD]require_resolve_url[WILDCARD]node_modules[WILDCARD].deno[WILDCARD]@denotest+esm-basic@1.0.0[WILDCARD]node_modules[WILDCARD]@denotest[WILDCARD]esm-basic[WILDCARD]main.mjs diff --git a/cli/tests/testdata/npm/require_resolve_url/url_paths.ts b/cli/tests/testdata/npm/require_resolve_url/url_paths.ts new file mode 100644 index 00000000000000..7826568fe792f1 --- /dev/null +++ b/cli/tests/testdata/npm/require_resolve_url/url_paths.ts @@ -0,0 +1,12 @@ +import { createRequire } from "node:module"; +const require = createRequire(import.meta.url); + +console.log(getParentUrl()); +console.log(require.resolve("@denotest/esm-basic", { + paths: [getParentUrl()], +})); + +function getParentUrl() { + const fileUrl = import.meta.url; + return 
fileUrl.substring(0, fileUrl.lastIndexOf("/") + 1); +} diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index f91970eefe5271..3b77ff571a52e6 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -95,13 +95,17 @@ where { let fs = state.borrow::(); // Guarantee that "from" is absolute. - let from = deno_core::resolve_path( - &from, - &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?, - ) - .unwrap() - .to_file_path() - .unwrap(); + let from = if from.starts_with("file:///") { + Url::parse(&from)?.to_file_path().unwrap() + } else { + deno_core::resolve_path( + &from, + &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?, + ) + .unwrap() + .to_file_path() + .unwrap() + }; ensure_read_permission::
<P>
(state, &from)?; From 90006b1f2d6cc318307fba23c2dd4df93221dc7b Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 17 Aug 2023 15:46:11 +0100 Subject: [PATCH 31/60] fix(lsp): pass fmt options to completion requests (#20184) Fixes https://github.com/denoland/vscode_deno/issues/856. --- cli/lsp/language_server.rs | 16 ++- cli/lsp/tsc.rs | 198 ++++++++++++++++++++++++++++++++++-- cli/tsc/99_main_compiler.js | 13 +-- cli/tsc/compiler.d.ts | 5 + 4 files changed, 213 insertions(+), 19 deletions(-) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index f0da80bab8dce1..f532430c88c152 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -69,6 +69,7 @@ use super::text; use super::tsc; use super::tsc::Assets; use super::tsc::AssetsSnapshot; +use super::tsc::GetCompletionDetailsArgs; use super::tsc::TsServer; use super::urls; use super::urls::LspClientUrl; @@ -1899,6 +1900,7 @@ impl Inner { line_index.offset_tsc(diagnostic.range.start)? ..line_index.offset_tsc(diagnostic.range.end)?, codes, + (&self.fmt_options.options).into(), ) .await; for action in actions { @@ -2007,7 +2009,11 @@ impl Inner { })?; let combined_code_actions = self .ts_server - .get_combined_code_fix(self.snapshot(), &code_action_data) + .get_combined_code_fix( + self.snapshot(), + &code_action_data, + (&self.fmt_options.options).into(), + ) .await?; if combined_code_actions.commands.is_some() { error!("Deno does not support code actions with commands."); @@ -2047,6 +2053,7 @@ impl Inner { .get_edits_for_refactor( self.snapshot(), action_data.specifier, + (&self.fmt_options.options).into(), line_index.offset_tsc(action_data.range.start)? ..line_index.offset_tsc(action_data.range.end)?, action_data.refactor_name, @@ -2402,6 +2409,7 @@ impl Inner { trigger_character, trigger_kind, }, + (&self.fmt_options.options).into(), ) .await; @@ -2436,9 +2444,13 @@ impl Inner { })?; if let Some(data) = &data.tsc { let specifier = &data.specifier; + let args = GetCompletionDetailsArgs { + format_code_settings: Some((&self.fmt_options.options).into()), + ..data.into() + }; let result = self .ts_server - .get_completion_details(self.snapshot(), data.into()) + .get_completion_details(self.snapshot(), args) .await; match result { Ok(maybe_completion_info) => { diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 75ed8ebe383ea0..99dd92193cb366 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -21,6 +21,7 @@ use super::urls::LspClientUrl; use super::urls::LspUrlMap; use super::urls::INVALID_SPECIFIER; +use crate::args::FmtOptionsConfig; use crate::args::TsConfig; use crate::cache::HttpCache; use crate::lsp::cache::CacheMetadata; @@ -95,6 +96,35 @@ type Request = ( CancellationToken, ); +/// Relevant subset of https://github.com/denoland/deno/blob/80331d1fe5b85b829ac009fdc201c128b3427e11/cli/tsc/dts/typescript.d.ts#L6658. 
+#[derive(Clone, Debug, Default, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FormatCodeSettings { + convert_tabs_to_spaces: Option, + indent_size: Option, + semicolons: Option, +} + +impl From<&FmtOptionsConfig> for FormatCodeSettings { + fn from(config: &FmtOptionsConfig) -> Self { + FormatCodeSettings { + convert_tabs_to_spaces: Some(!config.use_tabs.unwrap_or(false)), + indent_size: Some(config.indent_width.unwrap_or(2)), + semicolons: match config.semi_colons { + Some(false) => Some(SemicolonPreference::Remove), + _ => Some(SemicolonPreference::Insert), + }, + } + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum SemicolonPreference { + Insert, + Remove, +} + #[derive(Clone, Debug)] pub struct TsServer(mpsc::UnboundedSender); @@ -202,9 +232,15 @@ impl TsServer { specifier: ModuleSpecifier, range: Range, codes: Vec, + format_code_settings: FormatCodeSettings, ) -> Vec { - let req = - RequestMethod::GetCodeFixes((specifier, range.start, range.end, codes)); + let req = RequestMethod::GetCodeFixes(( + specifier, + range.start, + range.end, + codes, + format_code_settings, + )); match self.request(snapshot, req).await { Ok(items) => items, Err(err) => { @@ -243,10 +279,12 @@ impl TsServer { &self, snapshot: Arc, code_action_data: &CodeActionData, + format_code_settings: FormatCodeSettings, ) -> Result { let req = RequestMethod::GetCombinedCodeFix(( code_action_data.specifier.clone(), json!(code_action_data.fix_id.clone()), + format_code_settings, )); self.request(snapshot, req).await.map_err(|err| { log::error!("Unable to get combined fix from TypeScript: {}", err); @@ -258,12 +296,14 @@ impl TsServer { &self, snapshot: Arc, specifier: ModuleSpecifier, + format_code_settings: FormatCodeSettings, range: Range, refactor_name: String, action_name: String, ) -> Result { let req = RequestMethod::GetEditsForRefactor(( specifier, + format_code_settings, TextSpan { start: range.start, length: range.end - range.start, @@ -330,8 +370,14 @@ impl TsServer { specifier: ModuleSpecifier, position: u32, options: GetCompletionsAtPositionOptions, + format_code_settings: FormatCodeSettings, ) -> Option { - let req = RequestMethod::GetCompletions((specifier, position, options)); + let req = RequestMethod::GetCompletions(( + specifier, + position, + options, + format_code_settings, + )); match self.request(snapshot, req).await { Ok(maybe_info) => maybe_info, Err(err) => { @@ -3542,6 +3588,8 @@ pub struct GetCompletionDetailsArgs { pub position: u32, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] + pub format_code_settings: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, #[serde(skip_serializing_if = "Option::is_none")] pub preferences: Option, @@ -3557,6 +3605,7 @@ impl From<&CompletionItemData> for GetCompletionDetailsArgs { name: item_data.name.clone(), source: item_data.source.clone(), preferences: None, + format_code_settings: None, data: item_data.data.clone(), } } @@ -3586,15 +3635,30 @@ enum RequestMethod { /// Retrieve the possible refactor info for a range of a file. GetApplicableRefactors((ModuleSpecifier, TextSpan, String)), /// Retrieve the refactor edit info for a range. - GetEditsForRefactor((ModuleSpecifier, TextSpan, String, String)), + GetEditsForRefactor( + ( + ModuleSpecifier, + FormatCodeSettings, + TextSpan, + String, + String, + ), + ), /// Retrieve code fixes for a range of a file with the provided error codes. 
- GetCodeFixes((ModuleSpecifier, u32, u32, Vec)), + GetCodeFixes((ModuleSpecifier, u32, u32, Vec, FormatCodeSettings)), /// Get completion information at a given position (IntelliSense). - GetCompletions((ModuleSpecifier, u32, GetCompletionsAtPositionOptions)), + GetCompletions( + ( + ModuleSpecifier, + u32, + GetCompletionsAtPositionOptions, + FormatCodeSettings, + ), + ), /// Get details about a specific completion entry. GetCompletionDetails(GetCompletionDetailsArgs), /// Retrieve the combined code fixes for a fix id for a module. - GetCombinedCodeFix((ModuleSpecifier, Value)), + GetCombinedCodeFix((ModuleSpecifier, Value, FormatCodeSettings)), /// Get declaration information for a specific position. GetDefinition((ModuleSpecifier, u32)), /// Return diagnostics for given file. @@ -3680,6 +3744,7 @@ impl RequestMethod { }), RequestMethod::GetEditsForRefactor(( specifier, + format_code_settings, span, refactor_name, action_name, @@ -3687,6 +3752,7 @@ impl RequestMethod { "id": id, "method": "getEditsForRefactor", "specifier": state.denormalize_specifier(specifier), + "formatCodeSettings": format_code_settings, "range": { "pos": span.start, "end": span.start + span.length}, "refactorName": refactor_name, "actionName": action_name, @@ -3696,6 +3762,7 @@ impl RequestMethod { start_pos, end_pos, error_codes, + format_code_settings, )) => json!({ "id": id, "method": "getCodeFixes", @@ -3703,25 +3770,37 @@ impl RequestMethod { "startPosition": start_pos, "endPosition": end_pos, "errorCodes": error_codes, + "formatCodeSettings": format_code_settings, }), - RequestMethod::GetCombinedCodeFix((specifier, fix_id)) => json!({ + RequestMethod::GetCombinedCodeFix(( + specifier, + fix_id, + format_code_settings, + )) => json!({ "id": id, "method": "getCombinedCodeFix", "specifier": state.denormalize_specifier(specifier), "fixId": fix_id, + "formatCodeSettings": format_code_settings, }), RequestMethod::GetCompletionDetails(args) => json!({ "id": id, "method": "getCompletionDetails", "args": args }), - RequestMethod::GetCompletions((specifier, position, preferences)) => { + RequestMethod::GetCompletions(( + specifier, + position, + preferences, + format_code_settings, + )) => { json!({ "id": id, "method": "getCompletions", "specifier": state.denormalize_specifier(specifier), "position": position, "preferences": preferences, + "formatCodeSettings": format_code_settings, }) } RequestMethod::GetDefinition((specifier, position)) => json!({ @@ -4589,6 +4668,7 @@ mod tests { trigger_character: Some(".".to_string()), trigger_kind: None, }, + Default::default(), )), Default::default(), ); @@ -4605,6 +4685,7 @@ mod tests { name: "log".to_string(), source: None, preferences: None, + format_code_settings: None, data: None, }), Default::default(), @@ -4700,6 +4781,105 @@ mod tests { ); } + #[test] + fn test_completions_fmt() { + let fixture_a = r#" + console.log(someLongVaria) + "#; + let fixture_b = r#" + export const someLongVariable = 1 + "#; + let line_index = LineIndex::new(fixture_a); + let position = line_index + .offset_tsc(lsp::Position { + line: 1, + character: 33, + }) + .unwrap(); + let temp_dir = TempDir::new(); + let (mut runtime, state_snapshot, _) = setup( + &temp_dir, + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + &[ + ("file:///a.ts", fixture_a, 1, LanguageId::TypeScript), + ("file:///b.ts", fixture_b, 1, LanguageId::TypeScript), + ], + ); + let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + 
let result = request( + &mut runtime, + state_snapshot.clone(), + RequestMethod::GetDiagnostics(vec![specifier.clone()]), + Default::default(), + ); + assert!(result.is_ok()); + let fmt_options_config = FmtOptionsConfig { + semi_colons: Some(false), + ..Default::default() + }; + let result = request( + &mut runtime, + state_snapshot.clone(), + RequestMethod::GetCompletions(( + specifier.clone(), + position, + GetCompletionsAtPositionOptions { + user_preferences: UserPreferences { + allow_incomplete_completions: Some(true), + allow_text_changes_in_new_files: Some(true), + include_completions_for_module_exports: Some(true), + include_completions_with_insert_text: Some(true), + ..Default::default() + }, + ..Default::default() + }, + (&fmt_options_config).into(), + )), + Default::default(), + ) + .unwrap(); + let info: CompletionInfo = serde_json::from_value(result).unwrap(); + let entry = info + .entries + .iter() + .find(|e| &e.name == "someLongVariable") + .unwrap(); + let result = request( + &mut runtime, + state_snapshot, + RequestMethod::GetCompletionDetails(GetCompletionDetailsArgs { + specifier, + position, + name: entry.name.clone(), + source: entry.source.clone(), + preferences: None, + format_code_settings: Some((&fmt_options_config).into()), + data: entry.data.clone(), + }), + Default::default(), + ) + .unwrap(); + let details: CompletionEntryDetails = + serde_json::from_value(result).unwrap(); + let actions = details.code_actions.unwrap(); + let action = actions + .iter() + .find(|a| &a.description == r#"Add import from "./b.ts""#) + .unwrap(); + let changes = action.changes.first().unwrap(); + let change = changes.text_changes.first().unwrap(); + assert_eq!( + change.new_text, + "import { someLongVariable } from \"./b.ts\"\n" + ); + } + #[test] fn test_update_import_statement() { let fixtures = vec![ diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 43a3c3bcf636d3..f2ccec466fac5f 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -1035,10 +1035,8 @@ delete Object.prototype.__proto__; languageService.getEditsForRefactor( request.specifier, { - indentSize: 2, + ...request.formatCodeSettings, indentStyle: ts.IndentStyle.Smart, - semicolons: ts.SemicolonPreference.Insert, - convertTabsToSpaces: true, insertSpaceBeforeAndAfterBinaryOperators: true, insertSpaceAfterCommaDelimiter: true, }, @@ -1060,9 +1058,8 @@ delete Object.prototype.__proto__; request.endPosition, request.errorCodes.map((v) => Number(v)), { - indentSize: 2, + ...request.formatCodeSettings, indentStyle: ts.IndentStyle.Block, - semicolons: ts.SemicolonPreference.Insert, }, { quotePreference: "double", @@ -1080,9 +1077,8 @@ delete Object.prototype.__proto__; }, request.fixId, { - indentSize: 2, + ...request.formatCodeSettings, indentStyle: ts.IndentStyle.Block, - semicolons: ts.SemicolonPreference.Insert, }, { quotePreference: "double", @@ -1100,7 +1096,7 @@ delete Object.prototype.__proto__; request.args.specifier, request.args.position, request.args.name, - {}, + request.args.formatCodeSettings ?? 
{}, request.args.source, request.args.preferences, request.args.data, @@ -1114,6 +1110,7 @@ delete Object.prototype.__proto__; request.specifier, request.position, request.preferences, + request.formatCodeSettings, ), ); } diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts index 66c0946972617a..da713a1bd516d9 100644 --- a/cli/tsc/compiler.d.ts +++ b/cli/tsc/compiler.d.ts @@ -121,6 +121,7 @@ declare global { interface GetEditsForRefactor extends BaseLanguageServerRequest { method: "getEditsForRefactor"; specifier: string; + formatCodeSettings: ts.FormatCodeSettings; range: ts.TextRange; refactorName: string; actionName: string; @@ -132,6 +133,7 @@ declare global { startPosition: number; endPosition: number; errorCodes: string[]; + formatCodeSettings: ts.FormatCodeSettings; } interface GetCombinedCodeFix extends BaseLanguageServerRequest { @@ -139,6 +141,7 @@ declare global { specifier: string; // deno-lint-ignore ban-types fixId: {}; + formatCodeSettings: ts.FormatCodeSettings; } interface GetCompletionDetails extends BaseLanguageServerRequest { @@ -147,6 +150,7 @@ declare global { specifier: string; position: number; name: string; + formatCodeSettings: ts.FormatCodeSettings; source?: string; preferences?: ts.UserPreferences; data?: ts.CompletionEntryData; @@ -158,6 +162,7 @@ declare global { specifier: string; position: number; preferences: ts.GetCompletionsAtPositionOptions; + formatCodeSettings: ts.FormatCodeSettings; } interface GetDiagnosticsRequest extends BaseLanguageServerRequest { From 3bb2f23b60917fc118b913e021932802bfaac59a Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 17 Aug 2023 11:25:00 -0400 Subject: [PATCH 32/60] fix(cli): handle missing `now` field in cache (#20192) Fixes this error message: ``` error: missing field `now` at line 32 column 1 ``` This would occur if someone used an old version of the deno_cache library to cache information in the cache then tried to load it with the latest CLI. Regression in the last patch when migrating to the deno_cache_dir crate. --- Cargo.lock | 4 ++-- cli/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6cdb4f551f4b21..2829a11b718d5d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -971,9 +971,9 @@ dependencies = [ [[package]] name = "deno_cache_dir" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd2de39c0d15e801f5968334998336c159ac7e335f8acddd781982777195152" +checksum = "85e4ee308992ed5bd4977d251c0ce4bdfa4cc59efa4ee93d17ebe46eae1e4563" dependencies = [ "anyhow", "deno_media_type", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 0879b45e82df5b..c2c4b3ede0ce8b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -46,7 +46,7 @@ winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } -deno_cache_dir = "=0.5.0" +deno_cache_dir = "=0.5.2" deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_doc = "=0.64.0" deno_emit = "=0.25.0" From 954188ef79a16b36368ea341210ce417a724e22c Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 17 Aug 2023 12:14:22 -0400 Subject: [PATCH 33/60] fix(unstable): disable importing from the vendor directory (#20067) Some people might get think they need to import from this directory, which could cause confusion and duplicate dependencies. 
Additionally, the `vendor` directory has special behaviour in the language server, so importing from the folder will definitely cause confusion and issues there. --- cli/factory.rs | 12 ++++++-- cli/lsp/documents.rs | 13 ++++++-- cli/resolver.rs | 48 +++++++++++++++++++++++------- cli/tests/integration/lsp_tests.rs | 42 ++++++++++++++++++++++++++ cli/tests/integration/run_tests.rs | 13 ++++++++ cli/tools/vendor/test.rs | 12 +++++--- test_util/src/lsp.rs | 1 + 7 files changed, 121 insertions(+), 20 deletions(-) diff --git a/cli/factory.rs b/cli/factory.rs index 6a99bb2da94604..dbf9bd95ba1c0e 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -38,6 +38,7 @@ use crate::npm::NpmPackageFsResolver; use crate::npm::NpmResolution; use crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; +use crate::resolver::CliGraphResolverOptions; use crate::standalone::DenoCompileBinaryWriter; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; @@ -424,13 +425,18 @@ impl CliFactory { .resolver .get_or_try_init_async(async { Ok(Arc::new(CliGraphResolver::new( - self.options.to_maybe_jsx_import_source_config()?, - self.maybe_import_map().await?.clone(), - self.options.no_npm(), self.npm_api()?.clone(), self.npm_resolution().await?.clone(), self.package_json_deps_provider().clone(), self.package_json_deps_installer().await?.clone(), + CliGraphResolverOptions { + maybe_jsx_import_source_config: self + .options + .to_maybe_jsx_import_source_config()?, + maybe_import_map: self.maybe_import_map().await?.clone(), + maybe_vendor_dir: self.options.vendor_dir_path(), + no_npm: self.options.no_npm(), + }, ))) }) .await diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 439cf547d649cb..8227d2e4c2e823 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -21,6 +21,7 @@ use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; use crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; +use crate::resolver::CliGraphResolverOptions; use crate::util::glob; use crate::util::path::specifier_to_file_path; use crate::util::text_encoding; @@ -1241,13 +1242,19 @@ impl Documents { Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps)); let deps_installer = Arc::new(PackageJsonDepsInstaller::no_op()); self.resolver = Arc::new(CliGraphResolver::new( - maybe_jsx_config, - options.maybe_import_map, - false, options.npm_registry_api, options.npm_resolution, deps_provider, deps_installer, + CliGraphResolverOptions { + maybe_jsx_import_source_config: maybe_jsx_config, + maybe_import_map: options.maybe_import_map, + maybe_vendor_dir: options + .maybe_config_file + .and_then(|c| c.vendor_dir_path()) + .as_ref(), + no_npm: false, + }, )); self.imports = Arc::new( if let Some(Ok(imports)) = diff --git a/cli/resolver.rs b/cli/resolver.rs index 6fa8eaabef234d..f78f31e8dd02c0 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
use deno_core::anyhow::anyhow; +use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_core::futures::future; use deno_core::futures::future::LocalBoxFuture; @@ -16,6 +17,7 @@ use deno_npm::registry::NpmRegistryApi; use deno_runtime::deno_node::is_builtin_node_module; use deno_semver::npm::NpmPackageReq; use import_map::ImportMap; +use std::path::PathBuf; use std::sync::Arc; use crate::args::package_json::PackageJsonDeps; @@ -102,6 +104,7 @@ pub struct CliGraphResolver { mapped_specifier_resolver: MappedSpecifierResolver, maybe_default_jsx_import_source: Option, maybe_jsx_import_source_module: Option, + maybe_vendor_specifier: Option, no_npm: bool, npm_registry_api: Arc, npm_resolution: Arc, @@ -125,8 +128,9 @@ impl Default for CliGraphResolver { maybe_import_map: Default::default(), package_json_deps_provider: Default::default(), }, - maybe_default_jsx_import_source: Default::default(), - maybe_jsx_import_source_module: Default::default(), + maybe_default_jsx_import_source: None, + maybe_jsx_import_source_module: None, + maybe_vendor_specifier: None, no_npm: false, npm_registry_api, npm_resolution, @@ -137,27 +141,37 @@ impl Default for CliGraphResolver { } } +pub struct CliGraphResolverOptions<'a> { + pub maybe_jsx_import_source_config: Option, + pub maybe_import_map: Option>, + pub maybe_vendor_dir: Option<&'a PathBuf>, + pub no_npm: bool, +} + impl CliGraphResolver { pub fn new( - maybe_jsx_import_source_config: Option, - maybe_import_map: Option>, - no_npm: bool, npm_registry_api: Arc, npm_resolution: Arc, package_json_deps_provider: Arc, package_json_deps_installer: Arc, + options: CliGraphResolverOptions, ) -> Self { Self { mapped_specifier_resolver: MappedSpecifierResolver { - maybe_import_map, + maybe_import_map: options.maybe_import_map, package_json_deps_provider, }, - maybe_default_jsx_import_source: maybe_jsx_import_source_config + maybe_default_jsx_import_source: options + .maybe_jsx_import_source_config .as_ref() .and_then(|c| c.default_specifier.clone()), - maybe_jsx_import_source_module: maybe_jsx_import_source_config + maybe_jsx_import_source_module: options + .maybe_jsx_import_source_config .map(|c| c.module), - no_npm, + maybe_vendor_specifier: options + .maybe_vendor_dir + .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()), + no_npm: options.no_npm, npm_registry_api, npm_resolution, package_json_deps_installer, @@ -219,7 +233,7 @@ impl Resolver for CliGraphResolver { referrer: &ModuleSpecifier, ) -> Result { use MappedResolution::*; - match self + let result = match self .mapped_specifier_resolver .resolve(specifier, referrer)? { @@ -232,7 +246,21 @@ impl Resolver for CliGraphResolver { } None => deno_graph::resolve_import(specifier, referrer) .map_err(|err| err.into()), + }; + + // When the user is vendoring, don't allow them to import directly from the vendor/ directory + // as it might cause them confusion or duplicate dependencies. Additionally, this folder has + // special treatment in the language server so it will definitely cause issues/confusion there + // if they do this. + if let Some(vendor_specifier) = &self.maybe_vendor_specifier { + if let Ok(specifier) = &result { + if specifier.as_str().starts_with(vendor_specifier.as_str()) { + bail!("Importing from the vendor directory is not permitted. 
Use a remote specifier instead or disable vendoring."); + } + } } + + result } } diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs index d540c5f37938cd..d2cb3680639a4b 100644 --- a/cli/tests/integration/lsp_tests.rs +++ b/cli/tests/integration/lsp_tests.rs @@ -8958,5 +8958,47 @@ fn lsp_vendor_dir() { ); assert_eq!(diagnostics.all().len(), 2); + // now try doing a relative import into the vendor directory + client.write_notification( + "textDocument/didChange", + json!({ + "textDocument": { + "uri": local_file_uri, + "version": 2 + }, + "contentChanges": [ + { + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 2, "character": 0 }, + }, + "text": "import { returnsHi } from './vendor/subdir/mod1.ts';\nconst test: string = returnsHi();\nconsole.log(test);" + } + ] + }), + ); + + let diagnostics = client.read_diagnostics(); + + assert_eq!( + json!( + diagnostics + .messages_with_file_and_source(local_file_uri.as_str(), "deno") + .diagnostics + ), + json!([ + { + "range": { + "start": { "line": 0, "character": 26 }, + "end": { "line": 0, "character": 51 } + }, + "severity": 1, + "code": "resolver-error", + "source": "deno", + "message": "Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring." + } + ]), + ); + client.shutdown(); } diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index 2ac6ed98518c97..0c39c2b72498af 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -4580,4 +4580,17 @@ console.log(returnsHi());"#, .args("run --vendor http://localhost:4545/subdir/CAPITALS/hello_there.ts") .run() .assert_matches_text("hello there\n"); + + // now try importing directly from the vendor folder + temp_dir.write( + "main.ts", + r#"import { returnsHi } from './vendor/http_localhost_4545/subdir/mod1.ts'; +console.log(returnsHi());"#, + ); + deno_run_cmd + .run() + .assert_matches_text("error: Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring. 
+ at [WILDCARD]/main.ts:1:27 +") + .assert_exit_code(1); } diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs index 0bf6f84f393fd8..c00cc654d78469 100644 --- a/cli/tools/vendor/test.rs +++ b/cli/tools/vendor/test.rs @@ -26,6 +26,7 @@ use crate::cache::ParsedSourceCache; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; use crate::resolver::CliGraphResolver; +use crate::resolver::CliGraphResolverOptions; use super::build::VendorEnvironment; @@ -290,7 +291,7 @@ impl VendorTestBuilder { } fn build_resolver( - jsx_import_source_config: Option, + maybe_jsx_import_source_config: Option, original_import_map: Option, ) -> CliGraphResolver { let npm_registry_api = Arc::new(CliNpmRegistryApi::new_uninitialized()); @@ -300,13 +301,16 @@ fn build_resolver( None, )); CliGraphResolver::new( - jsx_import_source_config, - original_import_map.map(Arc::new), - false, npm_registry_api, npm_resolution, Default::default(), Default::default(), + CliGraphResolverOptions { + maybe_jsx_import_source_config, + maybe_import_map: original_import_map.map(Arc::new), + maybe_vendor_dir: None, + no_npm: false, + }, ) } diff --git a/test_util/src/lsp.rs b/test_util/src/lsp.rs index 949dd25d61ffeb..8a1f24b3282679 100644 --- a/test_util/src/lsp.rs +++ b/test_util/src/lsp.rs @@ -998,6 +998,7 @@ impl CollectedDiagnostics { .unwrap() } + #[track_caller] pub fn messages_with_file_and_source( &self, specifier: &str, From 0199d256385897283908d34a9b2c7bcf3bb6e761 Mon Sep 17 00:00:00 2001 From: Alexander Michaud <131308558+armichaud@users.noreply.github.com> Date: Thu, 17 Aug 2023 17:41:29 -0400 Subject: [PATCH 34/60] fix(test): JUnit reporter includes file, line and column attributes (#20174) Closes #20156 --- cli/tests/integration/test_tests.rs | 5 ++++ cli/tests/testdata/test/pass.junit.out | 38 ++++++++++++++++++++++++++ cli/tools/test/reporters/junit.rs | 23 ++++++++++++---- 3 files changed, 60 insertions(+), 6 deletions(-) create mode 100644 cli/tests/testdata/test/pass.junit.out diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index bcf050adf0fa83..04465dd53ece66 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -267,6 +267,11 @@ itest!(exit_sanitizer { exit_code: 1, }); +itest!(junit { + args: "test --reporter junit test/pass.ts", + output: "test/pass.junit.out", +}); + itest!(clear_timeout { args: "test test/clear_timeout.ts", exit_code: 0, diff --git a/cli/tests/testdata/test/pass.junit.out b/cli/tests/testdata/test/pass.junit.out new file mode 100644 index 00000000000000..b652dbf85a5a0c --- /dev/null +++ b/cli/tests/testdata/test/pass.junit.out @@ -0,0 +1,38 @@ +Check [WILDCARD]/testdata/test/pass.ts + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/cli/tools/test/reporters/junit.rs b/cli/tools/test/reporters/junit.rs index eb6479a59fafd9..a62c32dabb435f 100644 --- a/cli/tools/test/reporters/junit.rs +++ b/cli/tools/test/reporters/junit.rs @@ -40,13 +40,24 @@ impl JunitTestReporter { impl TestReporter for JunitTestReporter { fn report_register(&mut self, description: &TestDescription) { - self.cases.insert( - description.id, - quick_junit::TestCase::new( - description.name.clone(), - quick_junit::TestCaseStatus::skipped(), - ), + let mut case = quick_junit::TestCase::new( + description.name.clone(), + quick_junit::TestCaseStatus::skipped(), ); + let file_name = description.location.file_name.clone(); + let file_name = file_name.strip_prefix("file://").unwrap_or(&file_name); 
+ case + .extra + .insert(String::from("filename"), String::from(file_name)); + case.extra.insert( + String::from("line"), + description.location.line_number.to_string(), + ); + case.extra.insert( + String::from("col"), + description.location.column_number.to_string(), + ); + self.cases.insert(description.id, case); } fn report_plan(&mut self, _plan: &TestPlan) {} From 2150ae7d601f16a6c166702dace7229a3f4b5c1a Mon Sep 17 00:00:00 2001 From: Heyang Zhou Date: Fri, 18 Aug 2023 17:34:16 +0800 Subject: [PATCH 35/60] feat(ext/kv): key expiration (#20091) Co-authored-by: Luca Casonato --- cli/tests/unit/kv_test.ts | 126 +++++++++++++++++++ cli/tsc/dts/lib.deno.unstable.d.ts | 32 ++++- ext/kv/01_db.ts | 37 ++++-- ext/kv/interface.rs | 1 + ext/kv/lib.rs | 8 +- ext/kv/sqlite.rs | 192 ++++++++++++++++++++++------- 6 files changed, 335 insertions(+), 61 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 74a8ed6b340863..438ebd7eecdeef 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -1806,3 +1806,129 @@ Deno.test({ } }, }); + +Deno.test({ + name: "kv expiration", + async fn() { + const filename = await Deno.makeTempFile({ prefix: "kv_expiration_db" }); + try { + await Deno.remove(filename); + } catch { + // pass + } + let db: Deno.Kv | null = null; + + try { + db = await Deno.openKv(filename); + + await db.set(["a"], 1, { expireIn: 1000 }); + await db.set(["b"], 2, { expireIn: 1000 }); + assertEquals((await db.get(["a"])).value, 1); + assertEquals((await db.get(["b"])).value, 2); + + // Value overwrite should also reset expiration + await db.set(["b"], 2, { expireIn: 3600 * 1000 }); + + // Wait for expiration + await sleep(1000); + + // Re-open to trigger immediate cleanup + db.close(); + db = null; + db = await Deno.openKv(filename); + + let ok = false; + for (let i = 0; i < 50; i++) { + await sleep(100); + if ( + JSON.stringify( + (await db.getMany([["a"], ["b"]])).map((x) => x.value), + ) === "[null,2]" + ) { + ok = true; + break; + } + } + + if (!ok) { + throw new Error("Values did not expire"); + } + } finally { + if (db) { + try { + db.close(); + } catch { + // pass + } + } + try { + await Deno.remove(filename); + } catch { + // pass + } + } + }, +}); + +Deno.test({ + name: "kv expiration with atomic", + async fn() { + const filename = await Deno.makeTempFile({ prefix: "kv_expiration_db" }); + try { + await Deno.remove(filename); + } catch { + // pass + } + let db: Deno.Kv | null = null; + + try { + db = await Deno.openKv(filename); + + await db.atomic().set(["a"], 1, { expireIn: 1000 }).set(["b"], 2, { + expireIn: 1000, + }).commit(); + assertEquals((await db.getMany([["a"], ["b"]])).map((x) => x.value), [ + 1, + 2, + ]); + + // Wait for expiration + await sleep(1000); + + // Re-open to trigger immediate cleanup + db.close(); + db = null; + db = await Deno.openKv(filename); + + let ok = false; + for (let i = 0; i < 50; i++) { + await sleep(100); + if ( + JSON.stringify( + (await db.getMany([["a"], ["b"]])).map((x) => x.value), + ) === "[null,null]" + ) { + ok = true; + break; + } + } + + if (!ok) { + throw new Error("Values did not expire"); + } + } finally { + if (db) { + try { + db.close(); + } catch { + // pass + } + } + try { + await Deno.remove(filename); + } catch { + // pass + } + } + }, +}); diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index eb612a655ab0de..70b346a2a7abf6 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1357,7 +1357,13 @@ declare 
namespace Deno { * mutation is applied to the key. * * - `set` - Sets the value of the key to the given value, overwriting any - * existing value. + * existing value. Optionally an `expireIn` option can be specified to + * set a time-to-live (TTL) for the key. The TTL is specified in + * milliseconds, and the key will be deleted from the database at earliest + * after the specified number of milliseconds have elapsed. Once the + * specified duration has passed, the key may still be visible for some + * additional time. If the `expireIn` option is not specified, the key will + * not expire. * - `delete` - Deletes the key from the database. The mutation is a no-op if * the key does not exist. * - `sum` - Adds the given value to the existing value of the key. Both the @@ -1379,7 +1385,7 @@ declare namespace Deno { export type KvMutation = & { key: KvKey } & ( - | { type: "set"; value: unknown } + | { type: "set"; value: unknown; expireIn?: number } | { type: "delete" } | { type: "sum"; value: KvU64 } | { type: "max"; value: KvU64 } @@ -1591,8 +1597,15 @@ declare namespace Deno { /** * Add to the operation a mutation that sets the value of the specified key * to the specified value if all checks pass during the commit. + * + * Optionally an `expireIn` option can be specified to set a time-to-live + * (TTL) for the key. The TTL is specified in milliseconds, and the key will + * be deleted from the database at earliest after the specified number of + * milliseconds have elapsed. Once the specified duration has passed, the + * key may still be visible for some additional time. If the `expireIn` + * option is not specified, the key will not expire. */ - set(key: KvKey, value: unknown): this; + set(key: KvKey, value: unknown, options?: { expireIn?: number }): this; /** * Add to the operation a mutation that deletes the specified key if all * checks pass during the commit. @@ -1721,8 +1734,19 @@ declare namespace Deno { * const db = await Deno.openKv(); * await db.set(["foo"], "bar"); * ``` + * + * Optionally an `expireIn` option can be specified to set a time-to-live + * (TTL) for the key. The TTL is specified in milliseconds, and the key will + * be deleted from the database at earliest after the specified number of + * milliseconds have elapsed. Once the specified duration has passed, the + * key may still be visible for some additional time. If the `expireIn` + * option is not specified, the key will not expire. */ - set(key: KvKey, value: unknown): Promise; + set( + key: KvKey, + value: unknown, + options?: { expireIn?: number }, + ): Promise; /** * Delete the value for the given key from the database. If no value exists diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 8fd1f5997a256d..4cd8744bafe1f3 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -130,12 +130,15 @@ class Kv { }); } - async set(key: Deno.KvKey, value: unknown) { + async set(key: Deno.KvKey, value: unknown, options?: { expireIn?: number }) { value = serializeValue(value); const checks: Deno.AtomicCheck[] = []; + const expireAt = typeof options?.expireIn === "number" + ? 
Date.now() + options.expireIn + : undefined; const mutations = [ - [key, "set", value], + [key, "set", value, expireAt], ]; const versionstamp = await core.opAsync( @@ -152,7 +155,7 @@ class Kv { async delete(key: Deno.KvKey) { const checks: Deno.AtomicCheck[] = []; const mutations = [ - [key, "delete", null], + [key, "delete", null, undefined], ]; const result = await core.opAsync( @@ -318,7 +321,7 @@ class AtomicOperation { #rid: number; #checks: [Deno.KvKey, string | null][] = []; - #mutations: [Deno.KvKey, string, RawValue | null][] = []; + #mutations: [Deno.KvKey, string, RawValue | null, number | undefined][] = []; #enqueues: [Uint8Array, number, Deno.KvKey[], number[] | null][] = []; constructor(rid: number) { @@ -337,6 +340,7 @@ class AtomicOperation { const key = mutation.key; let type: string; let value: RawValue | null; + let expireAt: number | undefined = undefined; switch (mutation.type) { case "delete": type = "delete"; @@ -345,6 +349,10 @@ class AtomicOperation { } break; case "set": + if (typeof mutation.expireIn === "number") { + expireAt = Date.now() + mutation.expireIn; + } + /* falls through */ case "sum": case "min": case "max": @@ -357,33 +365,40 @@ class AtomicOperation { default: throw new TypeError("Invalid mutation type"); } - this.#mutations.push([key, type, value]); + this.#mutations.push([key, type, value, expireAt]); } return this; } sum(key: Deno.KvKey, n: bigint): this { - this.#mutations.push([key, "sum", serializeValue(new KvU64(n))]); + this.#mutations.push([key, "sum", serializeValue(new KvU64(n)), undefined]); return this; } min(key: Deno.KvKey, n: bigint): this { - this.#mutations.push([key, "min", serializeValue(new KvU64(n))]); + this.#mutations.push([key, "min", serializeValue(new KvU64(n)), undefined]); return this; } max(key: Deno.KvKey, n: bigint): this { - this.#mutations.push([key, "max", serializeValue(new KvU64(n))]); + this.#mutations.push([key, "max", serializeValue(new KvU64(n)), undefined]); return this; } - set(key: Deno.KvKey, value: unknown): this { - this.#mutations.push([key, "set", serializeValue(value)]); + set( + key: Deno.KvKey, + value: unknown, + options?: { expireIn?: number }, + ): this { + const expireAt = typeof options?.expireIn === "number" + ? Date.now() + options.expireIn + : undefined; + this.#mutations.push([key, "set", serializeValue(value), expireAt]); return this; } delete(key: Deno.KvKey): this { - this.#mutations.push([key, "delete", null]); + this.#mutations.push([key, "delete", null, undefined]); return this; } diff --git a/ext/kv/interface.rs b/ext/kv/interface.rs index b67ee1243e2219..28b43f8d71acff 100644 --- a/ext/kv/interface.rs +++ b/ext/kv/interface.rs @@ -237,6 +237,7 @@ pub struct KvCheck { pub struct KvMutation { pub key: Vec, pub kind: MutationKind, + pub expire_at: Option, } /// A request to enqueue a message to the database. 
This message is delivered diff --git a/ext/kv/lib.rs b/ext/kv/lib.rs index a781f45795de3b..7164a700bf30f4 100644 --- a/ext/kv/lib.rs +++ b/ext/kv/lib.rs @@ -375,7 +375,7 @@ impl TryFrom for KvCheck { } } -type V8KvMutation = (KvKey, String, Option); +type V8KvMutation = (KvKey, String, Option, Option); impl TryFrom for KvMutation { type Error = AnyError; @@ -396,7 +396,11 @@ impl TryFrom for KvMutation { ))) } }; - Ok(KvMutation { key, kind }) + Ok(KvMutation { + key, + kind, + expire_at: value.3, + }) } } diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index aea438d2dfa7ab..8e37d2c87fd6d0 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -1,7 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::borrow::Cow; -use std::cell::Cell; use std::cell::RefCell; use std::future::Future; use std::marker::PhantomData; @@ -10,10 +9,12 @@ use std::path::PathBuf; use std::rc::Rc; use std::rc::Weak; use std::sync::Arc; +use std::sync::Mutex; use std::time::Duration; use std::time::SystemTime; use async_trait::async_trait; +use deno_core::error::get_custom_error_class; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures; @@ -57,7 +58,7 @@ const STATEMENT_KV_POINT_GET_VALUE_ONLY: &str = const STATEMENT_KV_POINT_GET_VERSION_ONLY: &str = "select version from kv where k = ?"; const STATEMENT_KV_POINT_SET: &str = - "insert into kv (k, v, v_encoding, version) values (:k, :v, :v_encoding, :version) on conflict(k) do update set v = :v, v_encoding = :v_encoding, version = :version"; + "insert into kv (k, v, v_encoding, version, expiration_ms) values (:k, :v, :v_encoding, :version, :expiration_ms) on conflict(k) do update set v = :v, v_encoding = :v_encoding, version = :version, expiration_ms = :expiration_ms"; const STATEMENT_KV_POINT_DELETE: &str = "delete from kv where k = ?"; const STATEMENT_QUEUE_ADD_READY: &str = "insert into queue (ts, id, data, backoff_schedule, keys_if_undelivered) values(?, ?, ?, ?, ?)"; @@ -79,7 +80,7 @@ create table if not exists migration_state( ) "; -const MIGRATIONS: [&str; 2] = [ +const MIGRATIONS: [&str; 3] = [ " create table data_version ( k integer primary key, @@ -112,12 +113,56 @@ create table queue_running( primary key (deadline, id) ); +", + " +alter table kv add column seq integer not null default 0; +alter table data_version add column seq integer not null default 0; +alter table kv add column expiration_ms integer not null default -1; +create index kv_expiration_ms_idx on kv (expiration_ms); ", ]; const DISPATCH_CONCURRENCY_LIMIT: usize = 100; const DEFAULT_BACKOFF_SCHEDULE: [u32; 5] = [100, 1000, 5000, 30000, 60000]; +const ERROR_USING_CLOSED_DATABASE: &str = "Attempted to use a closed database"; + +#[derive(Clone)] +struct ProtectedConn { + guard: Rc>, + conn: Arc>>, +} + +#[derive(Clone)] +struct WeakProtectedConn { + guard: Weak>, + conn: std::sync::Weak>>, +} + +impl ProtectedConn { + fn new(conn: rusqlite::Connection) -> Self { + Self { + guard: Rc::new(AsyncRefCell::new(())), + conn: Arc::new(Mutex::new(Some(conn))), + } + } + + fn downgrade(&self) -> WeakProtectedConn { + WeakProtectedConn { + guard: Rc::downgrade(&self.guard), + conn: Arc::downgrade(&self.conn), + } + } +} + +impl WeakProtectedConn { + fn upgrade(&self) -> Option { + let guard = self.guard.upgrade()?; + let conn = self.conn.upgrade()?; + Some(ProtectedConn { guard, conn }) + } +} + pub struct SqliteDbHandler { pub default_storage_dir: Option, _permissions: PhantomData

, @@ -197,7 +242,7 @@ impl DatabaseHandler for SqliteDbHandler

{ } }) .await?; - let conn = Rc::new(AsyncRefCell::new(Cell::new(Some(conn)))); + let conn = ProtectedConn::new(conn); SqliteDb::run_tx(conn.clone(), |tx| { tx.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; @@ -227,16 +272,35 @@ impl DatabaseHandler for SqliteDbHandler

{ }) .await?; + let expiration_watcher = spawn(watch_expiration(conn.clone())); + Ok(SqliteDb { conn, queue: OnceCell::new(), + expiration_watcher, }) } } pub struct SqliteDb { - conn: Rc>>>, + conn: ProtectedConn, queue: OnceCell, + expiration_watcher: deno_core::task::JoinHandle<()>, +} + +impl Drop for SqliteDb { + fn drop(&mut self) { + self.expiration_watcher.abort(); + + // The above `abort()` operation is asynchronous. It's not + // guaranteed that the sqlite connection will be closed immediately. + // So here we synchronously take the conn mutex and drop the connection. + // + // This blocks the event loop if the connection is still being used, + // but ensures correctness - deleting the database file after calling + // the `close` method will always work. + self.conn.conn.lock().unwrap().take(); + } } async fn sqlite_retry_loop>>( @@ -263,10 +327,7 @@ async fn sqlite_retry_loop>>( } impl SqliteDb { - async fn run_tx( - conn: Rc>>>, - f: F, - ) -> Result + async fn run_tx(conn: ProtectedConn, f: F) -> Result where F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + Clone @@ -277,42 +338,38 @@ impl SqliteDb { sqlite_retry_loop(|| Self::run_tx_inner(conn.clone(), f.clone())).await } - async fn run_tx_inner( - conn: Rc>>>, - f: F, - ) -> Result + async fn run_tx_inner(conn: ProtectedConn, f: F) -> Result where F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + Send + 'static, R: Send + 'static, { - // Transactions need exclusive access to the connection. Wait until - // we can borrow_mut the connection. - let cell = conn.borrow_mut().await; - - // Take the db out of the cell and run the transaction via spawn_blocking. - let mut db = cell.take().unwrap(); - let (result, db) = spawn_blocking(move || { - let result = { - match db.transaction() { - Ok(tx) => f(tx), - Err(e) => Err(e.into()), - } + // `run_tx` runs in an asynchronous context. First acquire the async lock to + // coordinate with other async invocations. + let _guard_holder = conn.guard.borrow_mut().await; + + // Then, take the synchronous lock. This operation is guaranteed to success without waiting, + // unless the database is being closed. + let db = conn.conn.clone(); + spawn_blocking(move || { + let mut db = db.try_lock().ok(); + let Some(db) = db.as_mut().and_then(|x| x.as_mut()) else { + return Err(type_error(ERROR_USING_CLOSED_DATABASE)) }; - (result, db) + let result = match db.transaction() { + Ok(tx) => f(tx), + Err(e) => Err(e.into()), + }; + result }) .await - .unwrap(); - - // Put the db back into the cell. - cell.set(Some(db)); - result + .unwrap() } } pub struct DequeuedMessage { - conn: Weak>>>, + conn: WeakProtectedConn, id: String, payload: Option>, waker_tx: mpsc::Sender<()>, @@ -341,7 +398,20 @@ impl QueueMessageHandle for DequeuedMessage { tx.commit()?; Ok(requeued) }) - .await?; + .await; + let requeued = match requeued { + Ok(x) => x, + Err(e) => { + // Silently ignore the error if the database has been closed + // This message will be delivered on the next run + if get_custom_error_class(&e) == Some("TypeError") + && e.to_string() == ERROR_USING_CLOSED_DATABASE + { + return Ok(()); + } + return Err(e); + } + }; if requeued { // If the message was requeued, wake up the dequeue loop. 
self.waker_tx.send(()).await?; @@ -360,7 +430,7 @@ impl QueueMessageHandle for DequeuedMessage { type DequeueReceiver = mpsc::Receiver<(Vec, String)>; struct SqliteQueue { - conn: Rc>>>, + conn: ProtectedConn, dequeue_rx: Rc>, concurrency_limiter: Arc, waker_tx: mpsc::Sender<()>, @@ -368,7 +438,7 @@ struct SqliteQueue { } impl SqliteQueue { - fn new(conn: Rc>>>) -> Self { + fn new(conn: ProtectedConn) -> Self { let conn_clone = conn.clone(); let (shutdown_tx, shutdown_rx) = watch::channel::<()>(()); let (waker_tx, waker_rx) = mpsc::channel::<()>(1); @@ -406,7 +476,7 @@ impl SqliteQueue { let permit = self.concurrency_limiter.clone().acquire_owned().await?; Ok(DequeuedMessage { - conn: Rc::downgrade(&self.conn), + conn: self.conn.downgrade(), id, payload: Some(payload), waker_tx: self.waker_tx.clone(), @@ -424,7 +494,7 @@ impl SqliteQueue { } async fn dequeue_loop( - conn: Rc>>>, + conn: ProtectedConn, dequeue_tx: mpsc::Sender<(Vec, String)>, mut shutdown_rx: watch::Receiver<()>, mut waker_rx: mpsc::Receiver<()>, @@ -511,7 +581,7 @@ impl SqliteQueue { } async fn get_earliest_ready_ts( - conn: Rc>>>, + conn: ProtectedConn, ) -> Result, AnyError> { SqliteDb::run_tx(conn.clone(), move |tx| { let ts = tx @@ -527,7 +597,7 @@ impl SqliteQueue { } async fn requeue_inflight_messages( - conn: Rc>>>, + conn: ProtectedConn, ) -> Result<(), AnyError> { loop { let done = SqliteDb::run_tx(conn.clone(), move |tx| { @@ -608,7 +678,7 @@ impl SqliteQueue { for key in keys_if_undelivered { let changed = tx .prepare_cached(STATEMENT_KV_POINT_SET)? - .execute(params![key, &data, &VALUE_ENCODING_V8, &version])?; + .execute(params![key, &data, &VALUE_ENCODING_V8, &version, -1i64])?; assert_eq!(changed, 1); } } @@ -623,6 +693,31 @@ impl SqliteQueue { } } +async fn watch_expiration(db: ProtectedConn) { + loop { + // Scan for expired keys + let res = SqliteDb::run_tx(db.clone(), move |tx| { + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + tx.prepare_cached( + "delete from kv where expiration_ms >= 0 and expiration_ms <= ?", + )? + .execute(params![now])?; + tx.commit()?; + Ok(()) + }) + .await; + if let Err(e) = res { + eprintln!("kv: Error in expiration watcher: {}", e); + } + let sleep_duration = + Duration::from_secs_f64(60.0 + rand::thread_rng().gen_range(0.0..30.0)); + tokio::time::sleep(sleep_duration).await; + } +} + #[async_trait(?Send)] impl Database for SqliteDb { type QMH = DequeuedMessage; @@ -698,9 +793,17 @@ impl Database for SqliteDb { match &mutation.kind { MutationKind::Set(value) => { let (value, encoding) = encode_value(value); - let changed = tx - .prepare_cached(STATEMENT_KV_POINT_SET)? 
- .execute(params![mutation.key, &value, &encoding, &version])?; + let changed = + tx.prepare_cached(STATEMENT_KV_POINT_SET)?.execute(params![ + mutation.key, + value, + &encoding, + &version, + mutation + .expire_at + .and_then(|x| i64::try_from(x).ok()) + .unwrap_or(-1i64) + ])?; assert_eq!(changed, 1) } MutationKind::Delete => { @@ -845,7 +948,8 @@ fn mutate_le64( key, &new_value[..], encoding, - new_version + new_version, + -1i64, ])?; assert_eq!(changed, 1); From fc6a537c2b2ab463c0686c249133f7548bff9d54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 18 Aug 2023 13:48:18 +0200 Subject: [PATCH 36/60] fix(node/http): emit error when addr in use (#20200) Closes https://github.com/denoland/deno/issues/20186 --- cli/tests/unit_node/http_test.ts | 26 ++++++++++++++++++++++++++ ext/node/polyfills/http.ts | 30 ++++++++++++++++++------------ 2 files changed, 44 insertions(+), 12 deletions(-) diff --git a/cli/tests/unit_node/http_test.ts b/cli/tests/unit_node/http_test.ts index 706c672f14a01c..9feee02728efb0 100644 --- a/cli/tests/unit_node/http_test.ts +++ b/cli/tests/unit_node/http_test.ts @@ -751,3 +751,29 @@ Deno.test( assertEquals(body, "hello"); }, ); + +Deno.test("[node/http] server emits error if addr in use", async () => { + const promise = deferred(); + const promise2 = deferred(); + + const server = http.createServer(); + server.listen(9001); + + const server2 = http.createServer(); + server2.on("error", (e) => { + promise2.resolve(e); + }); + server2.listen(9001); + + const err = await promise2; + server.close(() => promise.resolve()); + server2.close(); + await promise; + const expectedMsg = Deno.build.os === "windows" + ? "Only one usage of each socket address" + : "Address already in use"; + assert( + err.message.startsWith(expectedMsg), + `Wrong error: ${err.message}`, + ); +}); diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 609a046ac833e2..52aac4caec6102 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -1610,19 +1610,25 @@ export class ServerImpl extends EventEmitter { return; } this.#ac = ac; - this.#server = serve( - { - handler: handler as Deno.ServeHandler, - ...this.#addr, - signal: ac.signal, - // @ts-ignore Might be any without `--unstable` flag - onListen: ({ port }) => { - this.#addr!.port = port; - this.emit("listening"); + try { + this.#server = serve( + { + handler: handler as Deno.ServeHandler, + ...this.#addr, + signal: ac.signal, + // @ts-ignore Might be any without `--unstable` flag + onListen: ({ port }) => { + this.#addr!.port = port; + this.emit("listening"); + }, + ...this._additionalServeOptions?.(), }, - ...this._additionalServeOptions?.(), - }, - ); + ); + } catch (e) { + this.emit("error", e); + return; + } + if (this.#unref) { this.#server.unref(); } From 987c3d6aa8d481b75cb48b291bf16fcb2d2acb46 Mon Sep 17 00:00:00 2001 From: Marcos Casagrande Date: Fri, 18 Aug 2023 14:44:57 +0200 Subject: [PATCH 37/60] perf(ext/event): replace ReflectHas with object lookup (#20190) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR optimizes event dispatch by replacing `ReflectHas` with object lookup. 
I also made `isSlottable` return `false` since AFAIK there aren't any slottables nodes in Deno **This PR** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 --------------------------------------------------------------------- ----------------------------- event dispatch 80.46 ns/iter 12,428,739.4 (73.84 ns … 120.07 ns) 81.82 ns 86.34 ns 91.18 ns ``` **main** ``` cpu: 13th Gen Intel(R) Core(TM) i9-13900H runtime: deno 1.36.1 (x86_64-unknown-linux-gnu) benchmark time (avg) iter/s (min … max) p75 p99 p995 --------------------------------------------------------------------- ----------------------------- event dispatch 102.66 ns/iter 9,741,319.6 (96.66 ns … 132.88 ns) 104.18 ns 114.58 ns 118.45 ns ``` ```js const tg = new EventTarget(); const ev = new Event("foo"); const listener = () => {}; tg.addEventListener("foo", listener); Deno.bench("event dispatch ", () => { tg.dispatchEvent(ev); }); ``` towards https://github.com/denoland/deno/issues/20167 --- ext/web/02_event.js | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/ext/web/02_event.js b/ext/web/02_event.js index d59a897a62672c..4d3f23a022d71c 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -31,7 +31,6 @@ const { ObjectGetOwnPropertyDescriptor, ObjectPrototypeIsPrototypeOf, ReflectDefineProperty, - ReflectHas, SafeArrayIterator, SafeMap, StringPrototypeStartsWith, @@ -108,14 +107,6 @@ function setStopImmediatePropagation( event[_stopImmediatePropagationFlag] = value; } -// Type guards that widen the event type - -function hasRelatedTarget( - event, -) { - return ReflectHas(event, "relatedTarget"); -} - const isTrusted = ObjectGetOwnPropertyDescriptor({ get isTrusted() { return this[_isTrusted]; @@ -501,9 +492,12 @@ function isShadowRoot(nodeImpl) { } function isSlottable( - nodeImpl, + /* nodeImpl, */ ) { - return Boolean(isNode(nodeImpl) && ReflectHas(nodeImpl, "assignedSlot")); + // TODO(marcosc90) currently there aren't any slottables nodes + // https://dom.spec.whatwg.org/#concept-slotable + // return isNode(nodeImpl) && ReflectHas(nodeImpl, "assignedSlot"); + return false; } // DOM Logic functions @@ -546,9 +540,7 @@ function dispatch( setDispatched(eventImpl, true); targetOverride = targetOverride ?? targetImpl; - const eventRelatedTarget = hasRelatedTarget(eventImpl) - ? 
eventImpl.relatedTarget - : null; + const eventRelatedTarget = eventImpl.relatedTarget; let relatedTarget = retarget(eventRelatedTarget, targetImpl); if (targetImpl !== relatedTarget || targetImpl === eventRelatedTarget) { @@ -972,7 +964,7 @@ class EventTarget { const { listeners } = self[eventTargetData]; - if (!(ReflectHas(listeners, type))) { + if (!listeners[type]) { listeners[type] = []; } @@ -1020,7 +1012,7 @@ class EventTarget { ); const { listeners } = self[eventTargetData]; - if (callback !== null && ReflectHas(listeners, type)) { + if (callback !== null && listeners[type]) { listeners[type] = ArrayPrototypeFilter( listeners[type], (listener) => listener.callback !== callback, @@ -1069,7 +1061,7 @@ class EventTarget { } const { listeners } = self[eventTargetData]; - if (!ReflectHas(listeners, event.type)) { + if (!listeners[event.type]) { setTarget(event, this); return true; } From 073df7756c4e15098466b796eef3a457cbf75f23 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 18 Aug 2023 11:01:41 -0400 Subject: [PATCH 38/60] ci: try to fix flaky stdio_streams_are_locked_in_permission_prompt (#20204) --- .../stdio_streams_are_locked_in_permission_prompt/worker.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cli/tests/testdata/run/stdio_streams_are_locked_in_permission_prompt/worker.js b/cli/tests/testdata/run/stdio_streams_are_locked_in_permission_prompt/worker.js index 1e5a632d336ece..287027c83930bd 100644 --- a/cli/tests/testdata/run/stdio_streams_are_locked_in_permission_prompt/worker.js +++ b/cli/tests/testdata/run/stdio_streams_are_locked_in_permission_prompt/worker.js @@ -1 +1,3 @@ -console.log("Are you sure you want to continue?"); +setTimeout(() => { + console.log("Are you sure you want to continue?"); +}, 10); // ensure we don't output too quickly before the permission prompt From 9771a9e825db0744b7c41b643ad114ce4cc2b056 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Sat, 19 Aug 2023 09:56:12 +0530 Subject: [PATCH 39/60] chore: remove third_party submodule (#20201) removes third_party submodule, tools are installed on-demand. - removed `load_test` and websocket benchmark (covered by benchy) - removed node/bun http benchmarks (covered by benchy) - `dlint` & `dprint` downloaded on-demand. - `wrk` & `hyperfine` downloaded before CI benchmark run. Install locally using: `./tools/install_prebuilt.js wrk hyperfine` #### updating dlint/dprint update version in `tools/util.js` and place binary in `denoland/deno_third_party`. 
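
For local development, the following commands mirror what the updated CI and tooling use (taken from the `ci.generate.ts` and `tools/` changes in this patch; paths assume a checkout of this repo, so treat it as a sketch rather than a canonical workflow):

```bash
# wrk and hyperfine are no longer vendored in third_party; fetch the prebuilt binaries on demand
./tools/install_prebuilt.js wrk hyperfine

# format and lint now download dprint/dlint on first use, hence the added --allow-net
deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
```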
--- .github/workflows/ci.generate.ts | 13 +- .github/workflows/ci.yml | 12 +- .gitignore | 1 + .gitmodules | 4 - cli/bench/http.rs | 79 +- cli/bench/http/bun_hono_router.js | 17 - cli/bench/http/bun_http.js | 10 - cli/bench/http/bun_http_send_file.js | 14 - cli/bench/http/node_http.js | 9 - cli/bench/http/node_http_read_headers.js | 10 - cli/bench/http/node_http_read_headers.lua | 5 - cli/bench/http/node_post_bin.js | 18 - cli/bench/http/node_post_bin.lua | 5 - cli/bench/http/node_post_json.js | 18 - cli/bench/http/node_post_json.lua | 3 - cli/bench/http/node_reactdom_ssr.js | 16201 -------------------- cli/bench/http/node_tcp.js | 18 - cli/bench/main.rs | 8 - cli/bench/websocket.rs | 76 - cli/bench/websocket/deno_echo.js | 25 - third_party | 1 - tools/format.js | 6 +- tools/install_prebuilt.js | 8 + tools/lint.js | 15 +- tools/util.js | 93 +- 25 files changed, 132 insertions(+), 16537 deletions(-) delete mode 100644 cli/bench/http/bun_hono_router.js delete mode 100644 cli/bench/http/bun_http.js delete mode 100644 cli/bench/http/bun_http_send_file.js delete mode 100644 cli/bench/http/node_http.js delete mode 100644 cli/bench/http/node_http_read_headers.js delete mode 100644 cli/bench/http/node_http_read_headers.lua delete mode 100644 cli/bench/http/node_post_bin.js delete mode 100644 cli/bench/http/node_post_bin.lua delete mode 100644 cli/bench/http/node_post_json.js delete mode 100644 cli/bench/http/node_post_json.lua delete mode 100644 cli/bench/http/node_reactdom_ssr.js delete mode 100644 cli/bench/http/node_tcp.js delete mode 100644 cli/bench/websocket.rs delete mode 100644 cli/bench/websocket/deno_echo.js delete mode 160000 third_party create mode 100755 tools/install_prebuilt.js diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index cf9ec8ac167749..77e34b102a08d5 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -65,8 +65,8 @@ sudo mount --rbind /sys /sysroot/sys sudo mount --rbind /home /sysroot/home sudo mount -t proc /proc /sysroot/proc -cp third_party/prebuilt/linux64/libdl/libdl.so.2 . -cp third_party/prebuilt/linux64/libdl/libdl.a . +wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.a +wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.so.2 sudo ln -s libdl.so.2 /sysroot/lib/x86_64-linux-gnu/libdl.so sudo ln -s libdl.a /sysroot/lib/x86_64-linux-gnu/libdl.a @@ -106,6 +106,8 @@ CFLAGS=-flto=thin --sysroot=/sysroot __0`, }; +const installBenchTools = "./tools/install_prebuilt.js wrk hyperfine"; + // The Windows builder is a little strange -- there's lots of room on C: and not so much on D: // We'll check out to D:, but then all of our builds should happen on a C:-mapped drive const reconfigureWindowsStorage = { @@ -395,7 +397,6 @@ const ci = { reconfigureWindowsStorage, ...cloneRepoStep, submoduleStep("./test_util/std"), - submoduleStep("./third_party"), { ...submoduleStep("./test_util/wpt"), if: "matrix.wpt", @@ -504,6 +505,8 @@ const ci = { 'if [ "${{ matrix.job }}" == "bench" ]', "then", " node -v", + // Install benchmark tools. 
+ installBenchTools, "fi", ].join("\n"), }, @@ -552,7 +555,7 @@ const ci = { name: "test_format.js", if: "matrix.job == 'lint' && startsWith(matrix.os, 'ubuntu')", run: - "deno run --unstable --allow-write --allow-read --allow-run ./tools/format.js --check", + "deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check", }, { name: "Lint PR title", @@ -567,7 +570,7 @@ const ci = { name: "lint.js", if: "matrix.job == 'lint'", run: - "deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js", + "deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js", }, { name: "node_compat/setup.ts --check", diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 945a5eed5a8dc4..f978163255f256 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -128,9 +128,6 @@ jobs: - name: Clone submodule ./test_util/std run: git submodule update --init --recursive --depth=1 -- ./test_util/std if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - - name: Clone submodule ./third_party - run: git submodule update --init --recursive --depth=1 -- ./third_party - if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Clone submodule ./test_util/wpt run: git submodule update --init --recursive --depth=1 -- ./test_util/wpt if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt)' @@ -252,8 +249,8 @@ jobs: sudo mount --rbind /home /sysroot/home sudo mount -t proc /proc /sysroot/proc - cp third_party/prebuilt/linux64/libdl/libdl.so.2 . - cp third_party/prebuilt/linux64/libdl/libdl.a . + wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.a + wget https://github.com/denoland/deno_third_party/raw/master/prebuilt/linux64/libdl/libdl.so.2 sudo ln -s libdl.so.2 /sysroot/lib/x86_64-linux-gnu/libdl.so sudo ln -s libdl.a /sysroot/lib/x86_64-linux-gnu/libdl.a @@ -303,6 +300,7 @@ jobs: if [ "${{ matrix.job }}" == "bench" ] then node -v + ./tools/install_prebuilt.js wrk hyperfine fi if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Cache Cargo home @@ -332,7 +330,7 @@ jobs: cache-path: ./target - name: test_format.js if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && startsWith(matrix.os, ''ubuntu''))' - run: deno run --unstable --allow-write --allow-read --allow-run ./tools/format.js --check + run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check - name: Lint PR title if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && startsWith(matrix.os, ''ubuntu''))' env: @@ -340,7 +338,7 @@ jobs: run: deno run ./tools/verify_pr_title.js "$PR_TITLE" - name: lint.js if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')' - run: deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js + run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js - name: node_compat/setup.ts --check if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'' && startsWith(matrix.os, ''ubuntu''))' run: deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check diff --git a/.gitignore b/.gitignore index 62bbca261e0c3d..417bed71c8fbca 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ gclient_config.py_entries 
/target/ /std/hash/_wasm/target /tools/wpt/manifest.json +/third_party/ /test_napi/node_modules /test_napi/build /test_napi/third_party_tests/node_modules diff --git a/.gitmodules b/.gitmodules index a81f84fc6f8375..c35696a0525941 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,3 @@ -[submodule "deno_third_party"] - path = third_party - url = https://github.com/denoland/deno_third_party.git - shallow = true [submodule "test_util/std"] path = test_util/std url = https://github.com/denoland/deno_std diff --git a/cli/bench/http.rs b/cli/bench/http.rs index 243ea3154292ba..98fdef3308966b 100644 --- a/cli/bench/http.rs +++ b/cli/bench/http.rs @@ -38,7 +38,6 @@ pub fn benchmark( if path.ends_with(".lua") { continue; } - let name = entry.file_name().into_string().unwrap(); let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap(); let lua_script = http_dir.join(format!("{file_stem}.lua")); @@ -48,65 +47,25 @@ pub fn benchmark( } let port = get_port(); - if name.starts_with("node") { - // node - res.insert( - file_stem.to_string(), - run( - &["node", path, &port.to_string()], - port, - None, - None, - maybe_lua, - )?, - ); - } else if name.starts_with("bun") && !cfg!(target_os = "windows") { - // Bun does not support Windows. - #[cfg(target_arch = "x86_64")] - #[cfg(not(target_vendor = "apple"))] - let bun_exe = test_util::prebuilt_tool_path("bun"); - #[cfg(target_vendor = "apple")] - #[cfg(target_arch = "x86_64")] - let bun_exe = test_util::prebuilt_tool_path("bun-x64"); - #[cfg(target_vendor = "apple")] - #[cfg(target_arch = "aarch64")] - let bun_exe = test_util::prebuilt_tool_path("bun-aarch64"); - #[cfg(target_os = "linux")] - #[cfg(target_arch = "aarch64")] - let bun_exe = test_util::prebuilt_tool_path("bun-aarch64"); - - // bun - res.insert( - file_stem.to_string(), - run( - &[&bun_exe.to_string(), path, &port.to_string()], - port, - None, - None, - maybe_lua, - )?, - ); - } else { - // deno run -A --unstable - res.insert( - file_stem.to_string(), - run( - &[ - deno_exe.as_str(), - "run", - "--allow-all", - "--unstable", - "--enable-testing-features-do-not-use", - path, - &server_addr(port), - ], - port, - None, - None, - maybe_lua, - )?, - ); - } + // deno run -A --unstable + res.insert( + file_stem.to_string(), + run( + &[ + deno_exe.as_str(), + "run", + "--allow-all", + "--unstable", + "--enable-testing-features-do-not-use", + path, + &server_addr(port), + ], + port, + None, + None, + maybe_lua, + )?, + ); } res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?); diff --git a/cli/bench/http/bun_hono_router.js b/cli/bench/http/bun_hono_router.js deleted file mode 100644 index 395ac35a0e555e..00000000000000 --- a/cli/bench/http/bun_hono_router.js +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -const port = Bun.argv[2] || "4545"; -const { Hono } = require("../testdata/npm/hono/dist/index.js"); - -const app = new Hono(); -app.use("*", async (c, n) => { - c.res.headers.set("Date", (new Date()).toUTCString()); - await n(); -}); -app.get("/", (c) => c.text("Hello, World!")); - -Bun.serve({ - fetch(r) { - return app.fetch(r); - }, - port: Number(port), -}); diff --git a/cli/bench/http/bun_http.js b/cli/bench/http/bun_http.js deleted file mode 100644 index 5eaa328eb081cb..00000000000000 --- a/cli/bench/http/bun_http.js +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-const port = Bun.argv[2] || "4545"; -Bun.serve({ - fetch(_req) { - return new Response("Hello World", { - headers: { "Date": (new Date()).toUTCString() }, - }); - }, - port: Number(port), -}); diff --git a/cli/bench/http/bun_http_send_file.js b/cli/bench/http/bun_http_send_file.js deleted file mode 100644 index 745cf34b8d5a8a..00000000000000 --- a/cli/bench/http/bun_http_send_file.js +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -const port = Bun.argv[2] || "4545"; - -const path = new URL("../testdata/128k.bin", import.meta.url).pathname; - -Bun.serve({ - fetch(_req) { - const file = Bun.file(path); - return new Response(file, { - headers: { "Date": (new Date()).toUTCString() }, - }); - }, - port: Number(port), -}); diff --git a/cli/bench/http/node_http.js b/cli/bench/http/node_http.js deleted file mode 100644 index 3c0d9c41c72182..00000000000000 --- a/cli/bench/http/node_http.js +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -const http = require("http"); -const port = process.argv[2] || "4544"; -console.log("port", port); -http - .Server((req, res) => { - res.end("Hello World"); - }) - .listen(port); diff --git a/cli/bench/http/node_http_read_headers.js b/cli/bench/http/node_http_read_headers.js deleted file mode 100644 index 0ec230a3a64d3c..00000000000000 --- a/cli/bench/http/node_http_read_headers.js +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -const http = require("http"); -const port = process.argv[2] || "4544"; -console.log("port", port); -http - .Server((req, res) => { - const bar = req.headers["foo"]; - res.end(bar); - }) - .listen(port); diff --git a/cli/bench/http/node_http_read_headers.lua b/cli/bench/http/node_http_read_headers.lua deleted file mode 100644 index 64f1923ff3ee15..00000000000000 --- a/cli/bench/http/node_http_read_headers.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.headers["foo"] = "bar" -wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36" -wrk.headers["Viewport-Width"] = "1920" -wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9" -wrk.headers["Accept-Language"] = "en,la;q=0.9" \ No newline at end of file diff --git a/cli/bench/http/node_post_bin.js b/cli/bench/http/node_post_bin.js deleted file mode 100644 index 9026c295877119..00000000000000 --- a/cli/bench/http/node_post_bin.js +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-const http = require("http"); -const port = process.argv[2] || "4544"; -console.log("port", port); -http - .Server((req, res) => { - if (req.method == "POST") { - let chunks = []; - req.on("data", function (data) { - chunks.push(data); - }); - req.on("end", function () { - const buffer = Buffer.concat(chunks); - res.end(buffer.byteLength.toString()); - }); - } - }) - .listen(port); diff --git a/cli/bench/http/node_post_bin.lua b/cli/bench/http/node_post_bin.lua deleted file mode 100644 index c8f5d3e3f7e9df..00000000000000 --- a/cli/bench/http/node_post_bin.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/octet-stream" - -file = io.open("./cli/bench/testdata/128k.bin", "rb") -wrk.body = file:read("*a") \ No newline at end of file diff --git a/cli/bench/http/node_post_json.js b/cli/bench/http/node_post_json.js deleted file mode 100644 index bab64b28d49bc1..00000000000000 --- a/cli/bench/http/node_post_json.js +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -const http = require("http"); -const port = process.argv[2] || "4544"; -console.log("port", port); -http - .Server((req, res) => { - if (req.method == "POST") { - let body = ""; - req.on("data", function (data) { - body += data; - }); - req.on("end", function () { - const { hello } = JSON.parse(body); - res.end(hello); - }); - } - }) - .listen(port); diff --git a/cli/bench/http/node_post_json.lua b/cli/bench/http/node_post_json.lua deleted file mode 100644 index 71697a0682b49e..00000000000000 --- a/cli/bench/http/node_post_json.lua +++ /dev/null @@ -1,3 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/json" -wrk.body = '{"hello":"node"}' \ No newline at end of file diff --git a/cli/bench/http/node_reactdom_ssr.js b/cli/bench/http/node_reactdom_ssr.js deleted file mode 100644 index f82b504d79bb81..00000000000000 --- a/cli/bench/http/node_reactdom_ssr.js +++ /dev/null @@ -1,16201 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -var Gd = Object.create; -var Ac = Object.defineProperty; -var Xd = Object.getOwnPropertyDescriptor; -var Zd = Object.getOwnPropertyNames; -var Jd = Object.getPrototypeOf, - Qd = Object.prototype.hasOwnProperty; -var an = (e, n) => () => (n || e((n = { exports: {} }).exports, n), n.exports); -var Kd = (e, n, i, s) => { - if ((n && typeof n == "object") || typeof n == "function") { - for (let v of Zd(n)) { - !Qd.call(e, v) && - v !== i && - Ac(e, v, { - get: () => n[v], - enumerable: !(s = Xd(n, v)) || s.enumerable, - }); - } - } - return e; -}; -var Dc = (e, n, i) => ( - (i = e != null ? Gd(Jd(e)) : {}), - Kd( - n || !e || !e.__esModule - ? Ac(i, "default", { value: e, enumerable: !0 }) - : i, - e, - ) -); -var Nc = an(($) => { - "use strict"; - var Ai = Symbol.for("react.element"), - qd = Symbol.for("react.portal"), - ep = Symbol.for("react.fragment"), - tp = Symbol.for("react.strict_mode"), - rp = Symbol.for("react.profiler"), - np = Symbol.for("react.provider"), - op = Symbol.for("react.context"), - ap = Symbol.for("react.forward_ref"), - ip = Symbol.for("react.suspense"), - lp = Symbol.for("react.memo"), - sp = Symbol.for("react.lazy"), - Oc = Symbol.iterator; - function up(e) { - return e === null || typeof e != "object" - ? null - : ((e = (Oc && e[Oc]) || e["@@iterator"]), - typeof e == "function" ? 
e : null); - } - var Bc = { - isMounted: function () { - return !1; - }, - enqueueForceUpdate: function () {}, - enqueueReplaceState: function () {}, - enqueueSetState: function () {}, - }, - Uc = Object.assign, - jc = {}; - function sa(e, n, i) { - (this.props = e), - (this.context = n), - (this.refs = jc), - (this.updater = i || Bc); - } - sa.prototype.isReactComponent = {}; - sa.prototype.setState = function (e, n) { - if (typeof e != "object" && typeof e != "function" && e != null) { - throw Error( - "setState(...): takes an object of state variables to update or a function which returns an object of state variables.", - ); - } - this.updater.enqueueSetState(this, e, n, "setState"); - }; - sa.prototype.forceUpdate = function (e) { - this.updater.enqueueForceUpdate(this, e, "forceUpdate"); - }; - function Hc() {} - Hc.prototype = sa.prototype; - function Hu(e, n, i) { - (this.props = e), - (this.context = n), - (this.refs = jc), - (this.updater = i || Bc); - } - var Wu = (Hu.prototype = new Hc()); - Wu.constructor = Hu; - Uc(Wu, sa.prototype); - Wu.isPureReactComponent = !0; - var Mc = Array.isArray, - Wc = Object.prototype.hasOwnProperty, - zu = { current: null }, - zc = { key: !0, ref: !0, __self: !0, __source: !0 }; - function $c(e, n, i) { - var s, - v = {}, - c = null, - m = null; - if (n != null) { - for ( - s - in (n.ref !== void 0 && (m = n.ref), - n.key !== void 0 && (c = "" + n.key), - n) - ) { - Wc.call(n, s) && !zc.hasOwnProperty(s) && (v[s] = n[s]); - } - } - var S = arguments.length - 2; - if (S === 1) v.children = i; - else if (1 < S) { - for (var E = Array(S), x = 0; x < S; x++) E[x] = arguments[x + 2]; - v.children = E; - } - if (e && e.defaultProps) { - for (s in ((S = e.defaultProps), S)) v[s] === void 0 && (v[s] = S[s]); - } - return { - $$typeof: Ai, - type: e, - key: c, - ref: m, - props: v, - _owner: zu.current, - }; - } - function cp(e, n) { - return { - $$typeof: Ai, - type: e.type, - key: n, - ref: e.ref, - props: e.props, - _owner: e._owner, - }; - } - function $u(e) { - return typeof e == "object" && e !== null && e.$$typeof === Ai; - } - function fp(e) { - var n = { "=": "=0", ":": "=2" }; - return ( - "$" + - e.replace(/[=:]/g, function (i) { - return n[i]; - }) - ); - } - var Lc = /\/+/g; - function ju(e, n) { - return typeof e == "object" && e !== null && e.key != null - ? fp("" + e.key) - : n.toString(36); - } - function Ml(e, n, i, s, v) { - var c = typeof e; - (c === "undefined" || c === "boolean") && (e = null); - var m = !1; - if (e === null) m = !0; - else { - switch (c) { - case "string": - case "number": - m = !0; - break; - case "object": - switch (e.$$typeof) { - case Ai: - case qd: - m = !0; - } - } - } - if (m) { - return ( - (m = e), - (v = v(m)), - (e = s === "" ? "." + ju(m, 0) : s), - Mc(v) - ? ((i = ""), - e != null && (i = e.replace(Lc, "$&/") + "/"), - Ml(v, n, i, "", function (x) { - return x; - })) - : v != null && - ($u(v) && - (v = cp( - v, - i + - (!v.key || (m && m.key === v.key) - ? "" - : ("" + v.key).replace(Lc, "$&/") + "/") + - e, - )), - n.push(v)), - 1 - ); - } - if (((m = 0), (s = s === "" ? "." 
: s + ":"), Mc(e))) { - for (var S = 0; S < e.length; S++) { - c = e[S]; - var E = s + ju(c, S); - m += Ml(c, n, i, E, v); - } - } else if (((E = up(e)), typeof E == "function")) { - for (e = E.call(e), S = 0; !(c = e.next()).done;) { - (c = c.value), (E = s + ju(c, S++)), (m += Ml(c, n, i, E, v)); - } - } else if (c === "object") { - throw ( - ((n = String(e)), - Error( - "Objects are not valid as a React child (found: " + - (n === "[object Object]" - ? "object with keys {" + Object.keys(e).join(", ") + "}" - : n) + - "). If you meant to render a collection of children, use an array instead.", - )) - ); - } - return m; - } - function Ol(e, n, i) { - if (e == null) return e; - var s = [], - v = 0; - return ( - Ml(e, s, "", "", function (c) { - return n.call(i, c, v++); - }), s - ); - } - function dp(e) { - if (e._status === -1) { - var n = e._result; - (n = n()), - n.then( - function (i) { - (e._status === 0 || e._status === -1) && - ((e._status = 1), (e._result = i)); - }, - function (i) { - (e._status === 0 || e._status === -1) && - ((e._status = 2), (e._result = i)); - }, - ), - e._status === -1 && ((e._status = 0), (e._result = n)); - } - if (e._status === 1) return e._result.default; - throw e._result; - } - var Ve = { current: null }, - Ll = { transition: null }, - pp = { - ReactCurrentDispatcher: Ve, - ReactCurrentBatchConfig: Ll, - ReactCurrentOwner: zu, - }; - $.Children = { - map: Ol, - forEach: function (e, n, i) { - Ol( - e, - function () { - n.apply(this, arguments); - }, - i, - ); - }, - count: function (e) { - var n = 0; - return ( - Ol(e, function () { - n++; - }), n - ); - }, - toArray: function (e) { - return ( - Ol(e, function (n) { - return n; - }) || [] - ); - }, - only: function (e) { - if (!$u(e)) { - throw Error( - "React.Children.only expected to receive a single React element child.", - ); - } - return e; - }, - }; - $.Component = sa; - $.Fragment = ep; - $.Profiler = rp; - $.PureComponent = Hu; - $.StrictMode = tp; - $.Suspense = ip; - $.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = pp; - $.cloneElement = function (e, n, i) { - if (e == null) { - throw Error( - "React.cloneElement(...): The argument must be a React element, but you passed " + - e + - ".", - ); - } - var s = Uc({}, e.props), - v = e.key, - c = e.ref, - m = e._owner; - if (n != null) { - if ( - (n.ref !== void 0 && ((c = n.ref), (m = zu.current)), - n.key !== void 0 && (v = "" + n.key), - e.type && e.type.defaultProps) - ) { - var S = e.type.defaultProps; - } - for (E in n) { - Wc.call(n, E) && - !zc.hasOwnProperty(E) && - (s[E] = n[E] === void 0 && S !== void 0 ? 
S[E] : n[E]); - } - } - var E = arguments.length - 2; - if (E === 1) s.children = i; - else if (1 < E) { - S = Array(E); - for (var x = 0; x < E; x++) S[x] = arguments[x + 2]; - s.children = S; - } - return { $$typeof: Ai, type: e.type, key: v, ref: c, props: s, _owner: m }; - }; - $.createContext = function (e) { - return ( - (e = { - $$typeof: op, - _currentValue: e, - _currentValue2: e, - _threadCount: 0, - Provider: null, - Consumer: null, - _defaultValue: null, - _globalName: null, - }), - (e.Provider = { $$typeof: np, _context: e }), - (e.Consumer = e) - ); - }; - $.createElement = $c; - $.createFactory = function (e) { - var n = $c.bind(null, e); - return (n.type = e), n; - }; - $.createRef = function () { - return { current: null }; - }; - $.forwardRef = function (e) { - return { $$typeof: ap, render: e }; - }; - $.isValidElement = $u; - $.lazy = function (e) { - return { $$typeof: sp, _payload: { _status: -1, _result: e }, _init: dp }; - }; - $.memo = function (e, n) { - return { $$typeof: lp, type: e, compare: n === void 0 ? null : n }; - }; - $.startTransition = function (e) { - var n = Ll.transition; - Ll.transition = {}; - try { - e(); - } finally { - Ll.transition = n; - } - }; - $.unstable_act = function () { - throw Error("act(...) is not supported in production builds of React."); - }; - $.useCallback = function (e, n) { - return Ve.current.useCallback(e, n); - }; - $.useContext = function (e) { - return Ve.current.useContext(e); - }; - $.useDebugValue = function () {}; - $.useDeferredValue = function (e) { - return Ve.current.useDeferredValue(e); - }; - $.useEffect = function (e, n) { - return Ve.current.useEffect(e, n); - }; - $.useId = function () { - return Ve.current.useId(); - }; - $.useImperativeHandle = function (e, n, i) { - return Ve.current.useImperativeHandle(e, n, i); - }; - $.useInsertionEffect = function (e, n) { - return Ve.current.useInsertionEffect(e, n); - }; - $.useLayoutEffect = function (e, n) { - return Ve.current.useLayoutEffect(e, n); - }; - $.useMemo = function (e, n) { - return Ve.current.useMemo(e, n); - }; - $.useReducer = function (e, n, i) { - return Ve.current.useReducer(e, n, i); - }; - $.useRef = function (e) { - return Ve.current.useRef(e); - }; - $.useState = function (e) { - return Ve.current.useState(e); - }; - $.useSyncExternalStore = function (e, n, i) { - return Ve.current.useSyncExternalStore(e, n, i); - }; - $.useTransition = function () { - return Ve.current.useTransition(); - }; - $.version = "18.2.0"; -}); -var Vc = an((N, Bl) => { - "use strict"; - process.env.NODE_ENV !== "production" && - (function () { - "use strict"; - typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ < "u" && - typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart == - "function" && - __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart(new Error()); - var e = "18.2.0", - n = Symbol.for("react.element"), - i = Symbol.for("react.portal"), - s = Symbol.for("react.fragment"), - v = Symbol.for("react.strict_mode"), - c = Symbol.for("react.profiler"), - m = Symbol.for("react.provider"), - S = Symbol.for("react.context"), - E = Symbol.for("react.forward_ref"), - x = Symbol.for("react.suspense"), - R = Symbol.for("react.suspense_list"), - D = Symbol.for("react.memo"), - V = Symbol.for("react.lazy"), - te = Symbol.for("react.offscreen"), - J = Symbol.iterator, - Oe = "@@iterator"; - function P(h) { - if (h === null || typeof h != "object") return null; - var b = (J && h[J]) || h[Oe]; - return typeof b == "function" ? 
b : null; - } - var ue = { current: null }, - W = { transition: null }, - q = { - current: null, - isBatchingLegacy: !1, - didScheduleLegacyUpdate: !1, - }, - xe = { current: null }, - _e = {}, - he = null; - function F(h) { - he = h; - } - (_e.setExtraStackFrame = function (h) { - he = h; - }), - (_e.getCurrentStack = null), - (_e.getStackAddendum = function () { - var h = ""; - he && (h += he); - var b = _e.getCurrentStack; - return b && (h += b() || ""), h; - }); - var ct = !1, - qt = !1, - yt = !1, - Mt = !1, - Lt = !1, - ft = { - ReactCurrentDispatcher: ue, - ReactCurrentBatchConfig: W, - ReactCurrentOwner: xe, - }; - (ft.ReactDebugCurrentFrame = _e), (ft.ReactCurrentActQueue = q); - function Me(h) { - { - for ( - var b = arguments.length, k = new Array(b > 1 ? b - 1 : 0), T = 1; - T < b; - T++ - ) { - k[T - 1] = arguments[T]; - } - Er("warn", h, k); - } - } - function B(h) { - { - for ( - var b = arguments.length, k = new Array(b > 1 ? b - 1 : 0), T = 1; - T < b; - T++ - ) { - k[T - 1] = arguments[T]; - } - Er("error", h, k); - } - } - function Er(h, b, k) { - { - var T = ft.ReactDebugCurrentFrame, - I = T.getStackAddendum(); - I !== "" && ((b += "%s"), (k = k.concat([I]))); - var U = k.map(function (M) { - return String(M); - }); - U.unshift("Warning: " + b), - Function.prototype.apply.call(console[h], console, U); - } - } - var tt = {}; - function Bt(h, b) { - { - var k = h.constructor, - T = (k && (k.displayName || k.name)) || "ReactClass", - I = T + "." + b; - if (tt[I]) return; - B( - "Can't call %s on a component that is not yet mounted. This is a no-op, but it might indicate a bug in your application. Instead, assign to `this.state` directly or define a `state = {};` class property with the desired state in the %s component.", - b, - T, - ), (tt[I] = !0); - } - } - var bt = { - isMounted: function (h) { - return !1; - }, - enqueueForceUpdate: function (h, b, k) { - Bt(h, "forceUpdate"); - }, - enqueueReplaceState: function (h, b, k, T) { - Bt(h, "replaceState"); - }, - enqueueSetState: function (h, b, k, T) { - Bt(h, "setState"); - }, - }, - Le = Object.assign, - er = {}; - Object.freeze(er); - function re(h, b, k) { - (this.props = h), - (this.context = b), - (this.refs = er), - (this.updater = k || bt); - } - (re.prototype.isReactComponent = {}), - (re.prototype.setState = function (h, b) { - if (typeof h != "object" && typeof h != "function" && h != null) { - throw new Error( - "setState(...): takes an object of state variables to update or a function which returns an object of state variables.", - ); - } - this.updater.enqueueSetState(this, h, b, "setState"); - }), - (re.prototype.forceUpdate = function (h) { - this.updater.enqueueForceUpdate(this, h, "forceUpdate"); - }); - { - var ce = { - isMounted: [ - "isMounted", - "Instead, make sure to clean up subscriptions and pending requests in componentWillUnmount to prevent memory leaks.", - ], - replaceState: [ - "replaceState", - "Refactor your code to use setState instead (see https://github.com/facebook/react/issues/3236).", - ], - }, - tr = function (h, b) { - Object.defineProperty(re.prototype, h, { - get: function () { - Me( - "%s(...) is deprecated in plain JavaScript React classes. 
%s", - b[0], - b[1], - ); - }, - }); - }; - for (var rr in ce) ce.hasOwnProperty(rr) && tr(rr, ce[rr]); - } - function nr() {} - nr.prototype = re.prototype; - function Rr(h, b, k) { - (this.props = h), - (this.context = b), - (this.refs = er), - (this.updater = k || bt); - } - var St = (Rr.prototype = new nr()); - (St.constructor = Rr), - Le(St, re.prototype), - (St.isPureReactComponent = !0); - function so() { - var h = { current: null }; - return Object.seal(h), h; - } - var uo = Array.isArray; - function ye(h) { - return uo(h); - } - function ve(h) { - { - var b = typeof Symbol == "function" && Symbol.toStringTag, - k = (b && h[Symbol.toStringTag]) || h.constructor.name || "Object"; - return k; - } - } - function co(h) { - try { - return or(h), !1; - } catch { - return !0; - } - } - function or(h) { - return "" + h; - } - function je(h) { - if (co(h)) { - return ( - B( - "The provided key is an unsupported type %s. This value must be coerced to a string before before using it here.", - ve(h), - ), or(h) - ); - } - } - function ha(h, b, k) { - var T = h.displayName; - if (T) return T; - var I = b.displayName || b.name || ""; - return I !== "" ? k + "(" + I + ")" : k; - } - function ar(h) { - return h.displayName || "Context"; - } - function dt(h) { - if (h == null) return null; - if ( - (typeof h.tag == "number" && - B( - "Received an unexpected object in getComponentNameFromType(). This is likely a bug in React. Please file an issue.", - ), - typeof h == "function") - ) { - return h.displayName || h.name || null; - } - if (typeof h == "string") return h; - switch (h) { - case s: - return "Fragment"; - case i: - return "Portal"; - case c: - return "Profiler"; - case v: - return "StrictMode"; - case x: - return "Suspense"; - case R: - return "SuspenseList"; - } - if (typeof h == "object") { - switch (h.$$typeof) { - case S: - var b = h; - return ar(b) + ".Consumer"; - case m: - var k = h; - return ar(k._context) + ".Provider"; - case E: - return ha(h, h.render, "ForwardRef"); - case D: - var T = h.displayName || null; - return T !== null ? T : dt(h.type) || "Memo"; - case V: { - var I = h, - U = I._payload, - M = I._init; - try { - return dt(M(U)); - } catch { - return null; - } - } - } - } - return null; - } - var ir = Object.prototype.hasOwnProperty, - fo = { key: !0, ref: !0, __self: !0, __source: !0 }, - lr, - fn, - sr; - sr = {}; - function pt(h) { - if (ir.call(h, "ref")) { - var b = Object.getOwnPropertyDescriptor(h, "ref").get; - if (b && b.isReactWarning) return !1; - } - return h.ref !== void 0; - } - function ke(h) { - if (ir.call(h, "key")) { - var b = Object.getOwnPropertyDescriptor(h, "key").get; - if (b && b.isReactWarning) return !1; - } - return h.key !== void 0; - } - function po(h, b) { - var k = function () { - lr || - ((lr = !0), - B( - "%s: `key` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)", - b, - )); - }; - (k.isReactWarning = !0), - Object.defineProperty(h, "key", { get: k, configurable: !0 }); - } - function va(h, b) { - var k = function () { - fn || - ((fn = !0), - B( - "%s: `ref` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. 
(https://reactjs.org/link/special-props)", - b, - )); - }; - (k.isReactWarning = !0), - Object.defineProperty(h, "ref", { get: k, configurable: !0 }); - } - function ga(h) { - if ( - typeof h.ref == "string" && - xe.current && - h.__self && - xe.current.stateNode !== h.__self - ) { - var b = dt(xe.current.type); - sr[b] || - (B( - 'Component "%s" contains the string ref "%s". Support for string refs will be removed in a future major release. This case cannot be automatically converted to an arrow function. We ask you to manually fix this case by using useRef() or createRef() instead. Learn more about using refs safely here: https://reactjs.org/link/strict-mode-string-ref', - b, - h.ref, - ), - (sr[b] = !0)); - } - } - var dn = function (h, b, k, T, I, U, M) { - var H = { $$typeof: n, type: h, key: b, ref: k, props: M, _owner: U }; - return ( - (H._store = {}), - Object.defineProperty(H._store, "validated", { - configurable: !1, - enumerable: !1, - writable: !0, - value: !1, - }), - Object.defineProperty(H, "_self", { - configurable: !1, - enumerable: !1, - writable: !1, - value: T, - }), - Object.defineProperty(H, "_source", { - configurable: !1, - enumerable: !1, - writable: !1, - value: I, - }), - Object.freeze && (Object.freeze(H.props), Object.freeze(H)), - H - ); - }; - function ho(h, b, k) { - var T, - I = {}, - U = null, - M = null, - H = null, - z = null; - if (b != null) { - pt(b) && ((M = b.ref), ga(b)), - ke(b) && (je(b.key), (U = "" + b.key)), - (H = b.__self === void 0 ? null : b.__self), - (z = b.__source === void 0 ? null : b.__source); - for (T in b) ir.call(b, T) && !fo.hasOwnProperty(T) && (I[T] = b[T]); - } - var Y = arguments.length - 2; - if (Y === 1) I.children = k; - else if (Y > 1) { - for (var Q = Array(Y), K = 0; K < Y; K++) Q[K] = arguments[K + 2]; - Object.freeze && Object.freeze(Q), (I.children = Q); - } - if (h && h.defaultProps) { - var ee = h.defaultProps; - for (T in ee) I[T] === void 0 && (I[T] = ee[T]); - } - if (U || M) { - var fe = typeof h == "function" - ? h.displayName || h.name || "Unknown" - : h; - U && po(I, fe), M && va(I, fe); - } - return dn(h, U, M, H, z, xe.current, I); - } - function ma(h, b) { - var k = dn(h.type, b, h.ref, h._self, h._source, h._owner, h.props); - return k; - } - function pn(h, b, k) { - if (h == null) { - throw new Error( - "React.cloneElement(...): The argument must be a React element, but you passed " + - h + - ".", - ); - } - var T, - I = Le({}, h.props), - U = h.key, - M = h.ref, - H = h._self, - z = h._source, - Y = h._owner; - if (b != null) { - pt(b) && ((M = b.ref), (Y = xe.current)), - ke(b) && (je(b.key), (U = "" + b.key)); - var Q; - h.type && h.type.defaultProps && (Q = h.type.defaultProps); - for (T in b) { - ir.call(b, T) && - !fo.hasOwnProperty(T) && - (b[T] === void 0 && Q !== void 0 ? (I[T] = Q[T]) : (I[T] = b[T])); - } - } - var K = arguments.length - 2; - if (K === 1) I.children = k; - else if (K > 1) { - for (var ee = Array(K), fe = 0; fe < K; fe++) { - ee[fe] = arguments[fe + 2]; - } - I.children = ee; - } - return dn(h.type, U, M, H, z, Y, I); - } - function wt(h) { - return typeof h == "object" && h !== null && h.$$typeof === n; - } - var Te = ".", - vo = ":"; - function go(h) { - var b = /[=:]/g, - k = { "=": "=0", ":": "=2" }, - T = h.replace(b, function (I) { - return k[I]; - }); - return "$" + T; - } - var xt = !1, - hn = /\/+/g; - function vn(h) { - return h.replace(hn, "$&/"); - } - function Ir(h, b) { - return typeof h == "object" && h !== null && h.key != null - ? 
(je(h.key), go("" + h.key)) - : b.toString(36); - } - function ur(h, b, k, T, I) { - var U = typeof h; - (U === "undefined" || U === "boolean") && (h = null); - var M = !1; - if (h === null) M = !0; - else { - switch (U) { - case "string": - case "number": - M = !0; - break; - case "object": - switch (h.$$typeof) { - case n: - case i: - M = !0; - } - } - } - if (M) { - var H = h, - z = I(H), - Y = T === "" ? Te + Ir(H, 0) : T; - if (ye(z)) { - var Q = ""; - Y != null && (Q = vn(Y) + "/"), - ur(z, b, Q, "", function (Ma) { - return Ma; - }); - } else { - z != null && - (wt(z) && - (z.key && (!H || H.key !== z.key) && je(z.key), - (z = ma( - z, - k + - (z.key && (!H || H.key !== z.key) - ? vn("" + z.key) + "/" - : "") + - Y, - ))), - b.push(z)); - } - return 1; - } - var K, - ee, - fe = 0, - Se = T === "" ? Te : T + vo; - if (ye(h)) { - for (var yr = 0; yr < h.length; yr++) { - (K = h[yr]), (ee = Se + Ir(K, yr)), (fe += ur(K, b, k, ee, I)); - } - } else { - var Pn = P(h); - if (typeof Pn == "function") { - var Wr = h; - Pn === Wr.entries && - (xt || - Me( - "Using Maps as children is not supported. Use an array of keyed ReactElements instead.", - ), - (xt = !0)); - for (var Co = Pn.call(Wr), zr, Oa = 0; !(zr = Co.next()).done;) { - (K = zr.value), - (ee = Se + Ir(K, Oa++)), - (fe += ur(K, b, k, ee, I)); - } - } else if (U === "object") { - var Eo = String(h); - throw new Error( - "Objects are not valid as a React child (found: " + - (Eo === "[object Object]" - ? "object with keys {" + Object.keys(h).join(", ") + "}" - : Eo) + - "). If you meant to render a collection of children, use an array instead.", - ); - } - } - return fe; - } - function _r(h, b, k) { - if (h == null) return h; - var T = [], - I = 0; - return ( - ur(h, T, "", "", function (U) { - return b.call(k, U, I++); - }), T - ); - } - function ya(h) { - var b = 0; - return ( - _r(h, function () { - b++; - }), b - ); - } - function ba(h, b, k) { - _r( - h, - function () { - b.apply(this, arguments); - }, - k, - ); - } - function mo(h) { - return ( - _r(h, function (b) { - return b; - }) || [] - ); - } - function Ut(h) { - if (!wt(h)) { - throw new Error( - "React.Children.only expected to receive a single React element child.", - ); - } - return h; - } - function gn(h) { - var b = { - $$typeof: S, - _currentValue: h, - _currentValue2: h, - _threadCount: 0, - Provider: null, - Consumer: null, - _defaultValue: null, - _globalName: null, - }; - b.Provider = { $$typeof: m, _context: b }; - var k = !1, - T = !1, - I = !1; - { - var U = { $$typeof: S, _context: b }; - Object.defineProperties(U, { - Provider: { - get: function () { - return ( - T || - ((T = !0), - B( - "Rendering is not supported and will be removed in a future major release. Did you mean to render instead?", - )), b.Provider - ); - }, - set: function (M) { - b.Provider = M; - }, - }, - _currentValue: { - get: function () { - return b._currentValue; - }, - set: function (M) { - b._currentValue = M; - }, - }, - _currentValue2: { - get: function () { - return b._currentValue2; - }, - set: function (M) { - b._currentValue2 = M; - }, - }, - _threadCount: { - get: function () { - return b._threadCount; - }, - set: function (M) { - b._threadCount = M; - }, - }, - Consumer: { - get: function () { - return ( - k || - ((k = !0), - B( - "Rendering is not supported and will be removed in a future major release. 
Did you mean to render instead?", - )), b.Consumer - ); - }, - }, - displayName: { - get: function () { - return b.displayName; - }, - set: function (M) { - I || - (Me( - "Setting `displayName` on Context.Consumer has no effect. You should set it directly on the context with Context.displayName = '%s'.", - M, - ), - (I = !0)); - }, - }, - }), (b.Consumer = U); - } - return (b._currentRenderer = null), (b._currentRenderer2 = null), b; - } - var jt = -1, - Pr = 0, - ge = 1, - Sa = 2; - function wa(h) { - if (h._status === jt) { - var b = h._result, - k = b(); - if ( - (k.then( - function (U) { - if (h._status === Pr || h._status === jt) { - var M = h; - (M._status = ge), (M._result = U); - } - }, - function (U) { - if (h._status === Pr || h._status === jt) { - var M = h; - (M._status = Sa), (M._result = U); - } - }, - ), - h._status === jt) - ) { - var T = h; - (T._status = Pr), (T._result = k); - } - } - if (h._status === ge) { - var I = h._result; - return ( - I === void 0 && - B( - `lazy: Expected the result of a dynamic import() call. Instead received: %s - -Your code should look like: - const MyComponent = lazy(() => import('./MyComponent')) - -Did you accidentally put curly braces around the import?`, - I, - ), - "default" in I || - B( - `lazy: Expected the result of a dynamic import() call. Instead received: %s - -Your code should look like: - const MyComponent = lazy(() => import('./MyComponent'))`, - I, - ), - I.default - ); - } else throw h._result; - } - function xa(h) { - var b = { _status: jt, _result: h }, - k = { $$typeof: V, _payload: b, _init: wa }; - { - var T, I; - Object.defineProperties(k, { - defaultProps: { - configurable: !0, - get: function () { - return T; - }, - set: function (U) { - B( - "React.lazy(...): It is not supported to assign `defaultProps` to a lazy component import. Either specify them where the component is defined, or create a wrapping component around it.", - ), - (T = U), - Object.defineProperty(k, "defaultProps", { enumerable: !0 }); - }, - }, - propTypes: { - configurable: !0, - get: function () { - return I; - }, - set: function (U) { - B( - "React.lazy(...): It is not supported to assign `propTypes` to a lazy component import. Either specify them where the component is defined, or create a wrapping component around it.", - ), - (I = U), - Object.defineProperty(k, "propTypes", { enumerable: !0 }); - }, - }, - }); - } - return k; - } - function ka(h) { - h != null && h.$$typeof === D - ? B( - "forwardRef requires a render function but received a `memo` component. Instead of forwardRef(memo(...)), use memo(forwardRef(...)).", - ) - : typeof h != "function" - ? B( - "forwardRef requires a render function but was given %s.", - h === null ? "null" : typeof h, - ) - : h.length !== 0 && - h.length !== 2 && - B( - "forwardRef render functions accept exactly two parameters: props and ref. %s", - h.length === 1 - ? "Did you forget to use the ref parameter?" - : "Any additional parameter will be undefined.", - ), - h != null && - (h.defaultProps != null || h.propTypes != null) && - B( - "forwardRef render functions do not support propTypes or defaultProps. 
Did you accidentally pass a React component?", - ); - var b = { $$typeof: E, render: h }; - { - var k; - Object.defineProperty(b, "displayName", { - enumerable: !1, - configurable: !0, - get: function () { - return k; - }, - set: function (T) { - (k = T), !h.name && !h.displayName && (h.displayName = T); - }, - }); - } - return b; - } - var mn; - mn = Symbol.for("react.module.reference"); - function yo(h) { - return !!( - typeof h == "string" || - typeof h == "function" || - h === s || - h === c || - Lt || - h === v || - h === x || - h === R || - Mt || - h === te || - ct || - qt || - yt || - (typeof h == "object" && - h !== null && - (h.$$typeof === V || - h.$$typeof === D || - h.$$typeof === m || - h.$$typeof === S || - h.$$typeof === E || - h.$$typeof === mn || - h.getModuleId !== void 0)) - ); - } - function Ta(h, b) { - yo(h) || - B( - "memo: The first argument must be a component. Instead received: %s", - h === null ? "null" : typeof h, - ); - var k = { $$typeof: D, type: h, compare: b === void 0 ? null : b }; - { - var T; - Object.defineProperty(k, "displayName", { - enumerable: !1, - configurable: !0, - get: function () { - return T; - }, - set: function (I) { - (T = I), !h.name && !h.displayName && (h.displayName = I); - }, - }); - } - return k; - } - function me() { - var h = ue.current; - return ( - h === null && - B(`Invalid hook call. Hooks can only be called inside of the body of a function component. This could happen for one of the following reasons: -1. You might have mismatching versions of React and the renderer (such as React DOM) -2. You might be breaking the Rules of Hooks -3. You might have more than one copy of React in the same app -See https://reactjs.org/link/invalid-hook-call for tips about how to debug and fix this problem.`), - h - ); - } - function be(h) { - var b = me(); - if (h._context !== void 0) { - var k = h._context; - k.Consumer === h - ? B( - "Calling useContext(Context.Consumer) is not supported, may cause bugs, and will be removed in a future major release. Did you mean to call useContext(Context) instead?", - ) - : k.Provider === h && - B( - "Calling useContext(Context.Provider) is not supported. 
Did you mean to call useContext(Context) instead?", - ); - } - return b.useContext(h); - } - function Ca(h) { - var b = me(); - return b.useState(h); - } - function Ea(h, b, k) { - var T = me(); - return T.useReducer(h, b, k); - } - function Ra(h) { - var b = me(); - return b.useRef(h); - } - function bo(h, b) { - var k = me(); - return k.useEffect(h, b); - } - function So(h, b) { - var k = me(); - return k.useInsertionEffect(h, b); - } - function Ia(h, b) { - var k = me(); - return k.useLayoutEffect(h, b); - } - function _a(h, b) { - var k = me(); - return k.useCallback(h, b); - } - function cr(h, b) { - var k = me(); - return k.useMemo(h, b); - } - function Fr(h, b, k) { - var T = me(); - return T.useImperativeHandle(h, b, k); - } - function kt(h, b) { - { - var k = me(); - return k.useDebugValue(h, b); - } - } - function Ar() { - var h = me(); - return h.useTransition(); - } - function Dr(h) { - var b = me(); - return b.useDeferredValue(h); - } - function fr() { - var h = me(); - return h.useId(); - } - function yn(h, b, k) { - var T = me(); - return T.useSyncExternalStore(h, b, k); - } - var Tt = 0, - Or, - He, - Ht, - Ct, - Wt, - zt, - Et; - function dr() {} - dr.__reactDisabledLog = !0; - function bn() { - { - if (Tt === 0) { - (Or = console.log), - (He = console.info), - (Ht = console.warn), - (Ct = console.error), - (Wt = console.group), - (zt = console.groupCollapsed), - (Et = console.groupEnd); - var h = { - configurable: !0, - enumerable: !0, - value: dr, - writable: !0, - }; - Object.defineProperties(console, { - info: h, - log: h, - warn: h, - error: h, - group: h, - groupCollapsed: h, - groupEnd: h, - }); - } - Tt++; - } - } - function Sn() { - { - if ((Tt--, Tt === 0)) { - var h = { configurable: !0, enumerable: !0, writable: !0 }; - Object.defineProperties(console, { - log: Le({}, h, { value: Or }), - info: Le({}, h, { value: He }), - warn: Le({}, h, { value: Ht }), - error: Le({}, h, { value: Ct }), - group: Le({}, h, { value: Wt }), - groupCollapsed: Le({}, h, { value: zt }), - groupEnd: Le({}, h, { value: Et }), - }); - } - Tt < 0 && - B( - "disabledDepth fell below zero. This is a bug in React. Please file an issue.", - ); - } - } - var Ee = ft.ReactCurrentDispatcher, - wn; - function Mr(h, b, k) { - { - if (wn === void 0) { - try { - throw Error(); - } catch (I) { - var T = I.stack.trim().match(/\n( *(at )?)/); - wn = (T && T[1]) || ""; - } - } - return ( - ` -` + - wn + - h - ); - } - } - var Lr = !1, - Br; - { - var wo = typeof WeakMap == "function" ? 
WeakMap : Map; - Br = new wo(); - } - function We(h, b) { - if (!h || Lr) return ""; - { - var k = Br.get(h); - if (k !== void 0) return k; - } - var T; - Lr = !0; - var I = Error.prepareStackTrace; - Error.prepareStackTrace = void 0; - var U; - (U = Ee.current), (Ee.current = null), bn(); - try { - if (b) { - var M = function () { - throw Error(); - }; - if ( - (Object.defineProperty(M.prototype, "props", { - set: function () { - throw Error(); - }, - }), - typeof Reflect == "object" && Reflect.construct) - ) { - try { - Reflect.construct(M, []); - } catch (Se) { - T = Se; - } - Reflect.construct(h, [], M); - } else { - try { - M.call(); - } catch (Se) { - T = Se; - } - h.call(M.prototype); - } - } else { - try { - throw Error(); - } catch (Se) { - T = Se; - } - h(); - } - } catch (Se) { - if (Se && T && typeof Se.stack == "string") { - for ( - var H = Se.stack.split(` -`), - z = T.stack.split(` -`), - Y = H.length - 1, - Q = z.length - 1; - Y >= 1 && Q >= 0 && H[Y] !== z[Q]; - ) { - Q--; - } - for (; Y >= 1 && Q >= 0; Y--, Q--) { - if (H[Y] !== z[Q]) { - if (Y !== 1 || Q !== 1) { - do if ((Y--, Q--, Q < 0 || H[Y] !== z[Q])) { - var K = ` -` + H[Y].replace(" at new ", " at "); - return ( - h.displayName && - K.includes("") && - (K = K.replace("", h.displayName)), - typeof h == "function" && Br.set(h, K), - K - ); - } while (Y >= 1 && Q >= 0); - } - break; - } - } - } - } finally { - (Lr = !1), (Ee.current = U), Sn(), (Error.prepareStackTrace = I); - } - var ee = h ? h.displayName || h.name : "", - fe = ee ? Mr(ee) : ""; - return typeof h == "function" && Br.set(h, fe), fe; - } - function Rt(h, b, k) { - return We(h, !1); - } - function ht(h) { - var b = h.prototype; - return !!(b && b.isReactComponent); - } - function $t(h, b, k) { - if (h == null) return ""; - if (typeof h == "function") return We(h, ht(h)); - if (typeof h == "string") return Mr(h); - switch (h) { - case x: - return Mr("Suspense"); - case R: - return Mr("SuspenseList"); - } - if (typeof h == "object") { - switch (h.$$typeof) { - case E: - return Rt(h.render); - case D: - return $t(h.type, b, k); - case V: { - var T = h, - I = T._payload, - U = T._init; - try { - return $t(U(I), b, k); - } catch {} - } - } - } - return ""; - } - var Ce = {}, - rt = ft.ReactDebugCurrentFrame; - function pr(h) { - if (h) { - var b = h._owner, - k = $t(h.type, h._source, b ? b.type : null); - rt.setExtraStackFrame(k); - } else rt.setExtraStackFrame(null); - } - function hr(h, b, k, T, I) { - { - var U = Function.call.bind(ir); - for (var M in h) { - if (U(h, M)) { - var H = void 0; - try { - if (typeof h[M] != "function") { - var z = Error( - (T || "React class") + - ": " + - k + - " type `" + - M + - "` is invalid; it must be a function, usually from the `prop-types` package, but received `" + - typeof h[M] + - "`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`.", - ); - throw ((z.name = "Invariant Violation"), z); - } - H = h[M]( - b, - M, - T, - k, - null, - "SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED", - ); - } catch (Y) { - H = Y; - } - H && - !(H instanceof Error) && - (pr(I), - B( - "%s: type specification of %s `%s` is invalid; the type checker function must return `null` or an `Error` but returned a %s. 
You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument).", - T || "React class", - k, - M, - typeof H, - ), - pr(null)), - H instanceof Error && - !(H.message in Ce) && - ((Ce[H.message] = !0), - pr(I), - B("Failed %s type: %s", k, H.message), - pr(null)); - } - } - } - } - function It(h) { - if (h) { - var b = h._owner, - k = $t(h.type, h._source, b ? b.type : null); - F(k); - } else F(null); - } - var vr; - vr = !1; - function Ze() { - if (xe.current) { - var h = dt(xe.current.type); - if (h) { - return ( - ` - -Check the render method of \`` + - h + - "`." - ); - } - } - return ""; - } - function _t(h) { - if (h !== void 0) { - var b = h.fileName.replace(/^.*[\\\/]/, ""), - k = h.lineNumber; - return ( - ` - -Check your code at ` + - b + - ":" + - k + - "." - ); - } - return ""; - } - function vt(h) { - return h != null ? _t(h.__source) : ""; - } - var Ur = {}; - function Re(h) { - var b = Ze(); - if (!b) { - var k = typeof h == "string" ? h : h.displayName || h.name; - k && - (b = ` - -Check the top-level render call using <` + - k + - ">."); - } - return b; - } - function Je(h, b) { - if (!(!h._store || h._store.validated || h.key != null)) { - h._store.validated = !0; - var k = Re(b); - if (!Ur[k]) { - Ur[k] = !0; - var T = ""; - h && - h._owner && - h._owner !== xe.current && - (T = " It was passed a child from " + dt(h._owner.type) + "."), - It(h), - B( - 'Each child in a list should have a unique "key" prop.%s%s See https://reactjs.org/link/warning-keys for more information.', - k, - T, - ), - It(null); - } - } - } - function xn(h, b) { - if (typeof h == "object") { - if (ye(h)) { - for (var k = 0; k < h.length; k++) { - var T = h[k]; - wt(T) && Je(T, b); - } - } else if (wt(h)) h._store && (h._store.validated = !0); - else if (h) { - var I = P(h); - if (typeof I == "function" && I !== h.entries) { - for (var U = I.call(h), M; !(M = U.next()).done;) { - wt(M.value) && Je(M.value, b); - } - } - } - } - } - function gr(h) { - { - var b = h.type; - if (b == null || typeof b == "string") return; - var k; - if (typeof b == "function") k = b.propTypes; - else if ( - typeof b == "object" && - (b.$$typeof === E || b.$$typeof === D) - ) { - k = b.propTypes; - } else return; - if (k) { - var T = dt(b); - hr(k, h.props, "prop", T, h); - } else if (b.PropTypes !== void 0 && !vr) { - vr = !0; - var I = dt(b); - B( - "Component %s declared `PropTypes` instead of `propTypes`. Did you misspell the property assignment?", - I || "Unknown", - ); - } - typeof b.getDefaultProps == "function" && - !b.getDefaultProps.isReactClassApproved && - B( - "getDefaultProps is only used on classic React.createClass definitions. Use a static property named `defaultProps` instead.", - ); - } - } - function jr(h) { - { - for (var b = Object.keys(h.props), k = 0; k < b.length; k++) { - var T = b[k]; - if (T !== "children" && T !== "key") { - It(h), - B( - "Invalid prop `%s` supplied to `React.Fragment`. 
React.Fragment can only have `key` and `children` props.", - T, - ), - It(null); - break; - } - } - h.ref !== null && - (It(h), - B("Invalid attribute `ref` supplied to `React.Fragment`."), - It(null)); - } - } - function kn(h, b, k) { - var T = yo(h); - if (!T) { - var I = ""; - (h === void 0 || - (typeof h == "object" && - h !== null && - Object.keys(h).length === 0)) && - (I += - " You likely forgot to export your component from the file it's defined in, or you might have mixed up default and named imports."); - var U = vt(b); - U ? (I += U) : (I += Ze()); - var M; - h === null - ? (M = "null") - : ye(h) - ? (M = "array") - : h !== void 0 && h.$$typeof === n - ? ((M = "<" + (dt(h.type) || "Unknown") + " />"), - (I = - " Did you accidentally export a JSX literal instead of a component?")) - : (M = typeof h), - B( - "React.createElement: type is invalid -- expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s", - M, - I, - ); - } - var H = ho.apply(this, arguments); - if (H == null) return H; - if (T) for (var z = 2; z < arguments.length; z++) xn(arguments[z], h); - return h === s ? jr(H) : gr(H), H; - } - var Tn = !1; - function xo(h) { - var b = kn.bind(null, h); - return ( - (b.type = h), - Tn || - ((Tn = !0), - Me( - "React.createFactory() is deprecated and will be removed in a future major release. Consider using JSX or use React.createElement() directly instead.", - )), - Object.defineProperty(b, "type", { - enumerable: !1, - get: function () { - return ( - Me( - "Factory.type is deprecated. Access the class directly before passing it to createFactory.", - ), - Object.defineProperty(this, "type", { value: h }), - h - ); - }, - }), - b - ); - } - function ko(h, b, k) { - for ( - var T = pn.apply(this, arguments), I = 2; - I < arguments.length; - I++ - ) { - xn(arguments[I], T.type); - } - return gr(T), T; - } - function To(h, b) { - var k = W.transition; - W.transition = {}; - var T = W.transition; - W.transition._updatedFibers = new Set(); - try { - h(); - } finally { - if (((W.transition = k), k === null && T._updatedFibers)) { - var I = T._updatedFibers.size; - I > 10 && - Me( - "Detected a large number of updates inside startTransition. If this is due to a subscription please re-write it to use React provided hooks. Otherwise concurrent mode guarantees are off the table.", - ), T._updatedFibers.clear(); - } - } - } - var Cn = !1, - Pt = null; - function Pa(h) { - if (Pt === null) { - try { - var b = ("require" + Math.random()).slice(0, 7), - k = Bl && Bl[b]; - Pt = k.call(Bl, "timers").setImmediate; - } catch { - Pt = function (I) { - Cn === !1 && - ((Cn = !0), - typeof MessageChannel > "u" && - B( - "This browser does not have a MessageChannel implementation, so enqueuing tasks via await act(async () => ...) will fail. 
Please file an issue at https://github.com/facebook/react/issues if you encounter this warning.", - )); - var U = new MessageChannel(); - (U.port1.onmessage = I), U.port2.postMessage(void 0); - }; - } - } - return Pt(h); - } - var gt = 0, - Hr = !1; - function Fa(h) { - { - var b = gt; - gt++, q.current === null && (q.current = []); - var k = q.isBatchingLegacy, - T; - try { - if ( - ((q.isBatchingLegacy = !0), - (T = h()), - !k && q.didScheduleLegacyUpdate) - ) { - var I = q.current; - I !== null && ((q.didScheduleLegacyUpdate = !1), Rn(I)); - } - } catch (ee) { - throw (mr(b), ee); - } finally { - q.isBatchingLegacy = k; - } - if ( - T !== null && - typeof T == "object" && - typeof T.then == "function" - ) { - var U = T, - M = !1, - H = { - then: function (ee, fe) { - (M = !0), - U.then( - function (Se) { - mr(b), gt === 0 ? Be(Se, ee, fe) : ee(Se); - }, - function (Se) { - mr(b), fe(Se); - }, - ); - }, - }; - return ( - !Hr && - typeof Promise < "u" && - Promise.resolve() - .then(function () {}) - .then(function () { - M || - ((Hr = !0), - B( - "You called act(async () => ...) without await. This could lead to unexpected testing behaviour, interleaving multiple act calls and mixing their scopes. You should - await act(async () => ...);", - )); - }), H - ); - } else { - var z = T; - if ((mr(b), gt === 0)) { - var Y = q.current; - Y !== null && (Rn(Y), (q.current = null)); - var Q = { - then: function (ee, fe) { - q.current === null - ? ((q.current = []), Be(z, ee, fe)) - : ee(z); - }, - }; - return Q; - } else { - var K = { - then: function (ee, fe) { - ee(z); - }, - }; - return K; - } - } - } - } - function mr(h) { - h !== gt - 1 && - B( - "You seem to have overlapping act() calls, this is not supported. Be sure to await previous act() calls before making a new one. ", - ), (gt = h); - } - function Be(h, b, k) { - { - var T = q.current; - if (T !== null) { - try { - Rn(T), - Pa(function () { - T.length === 0 ? 
((q.current = null), b(h)) : Be(h, b, k); - }); - } catch (I) { - k(I); - } - } else b(h); - } - } - var En = !1; - function Rn(h) { - if (!En) { - En = !0; - var b = 0; - try { - for (; b < h.length; b++) { - var k = h[b]; - do k = k(!0); while (k !== null); - } - h.length = 0; - } catch (T) { - throw ((h = h.slice(b + 1)), T); - } finally { - En = !1; - } - } - } - var Aa = kn, - In = ko, - Da = xo, - _n = { map: _r, forEach: ba, count: ya, toArray: mo, only: Ut }; - (N.Children = _n), - (N.Component = re), - (N.Fragment = s), - (N.Profiler = c), - (N.PureComponent = Rr), - (N.StrictMode = v), - (N.Suspense = x), - (N.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = ft), - (N.cloneElement = In), - (N.createContext = gn), - (N.createElement = Aa), - (N.createFactory = Da), - (N.createRef = so), - (N.forwardRef = ka), - (N.isValidElement = wt), - (N.lazy = xa), - (N.memo = Ta), - (N.startTransition = To), - (N.unstable_act = Fa), - (N.useCallback = _a), - (N.useContext = be), - (N.useDebugValue = kt), - (N.useDeferredValue = Dr), - (N.useEffect = bo), - (N.useId = fr), - (N.useImperativeHandle = Fr), - (N.useInsertionEffect = So), - (N.useLayoutEffect = Ia), - (N.useMemo = cr), - (N.useReducer = Ea), - (N.useRef = Ra), - (N.useState = Ca), - (N.useSyncExternalStore = yn), - (N.useTransition = Ar), - (N.version = e), - typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ < "u" && - typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStop == - "function" && - __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStop( - new Error(), - ); - })(); -}); -var ua = an((_v, Nu) => { - "use strict"; - process.env.NODE_ENV === "production" - ? (Nu.exports = Nc()) - : (Nu.exports = Vc()); -}); -var Wf = an((ca) => { - "use strict"; - var pf = ua(), - hp = require("stream"), - qe = Object.prototype.hasOwnProperty, - vp = - /^[:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\-.0-9\u00B7\u0300-\u036F\u203F-\u2040]*$/, - Yc = {}, - Gc = {}; - function hf(e) { - return qe.call(Gc, e) - ? !0 - : qe.call(Yc, e) - ? !1 - : vp.test(e) - ? 
(Gc[e] = !0) - : ((Yc[e] = !0), !1); - } - function Ge(e, n, i, s, v, c, m) { - (this.acceptsBooleans = n === 2 || n === 3 || n === 4), - (this.attributeName = s), - (this.attributeNamespace = v), - (this.mustUseProperty = i), - (this.propertyName = e), - (this.type = n), - (this.sanitizeURL = c), - (this.removeEmptyString = m); - } - var Fe = {}; - "children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style" - .split(" ") - .forEach(function (e) { - Fe[e] = new Ge(e, 0, !1, e, null, !1, !1); - }); - [ - ["acceptCharset", "accept-charset"], - ["className", "class"], - ["htmlFor", "for"], - ["httpEquiv", "http-equiv"], - ].forEach(function (e) { - var n = e[0]; - Fe[n] = new Ge(n, 1, !1, e[1], null, !1, !1); - }); - ["contentEditable", "draggable", "spellCheck", "value"].forEach(function (e) { - Fe[e] = new Ge(e, 2, !1, e.toLowerCase(), null, !1, !1); - }); - [ - "autoReverse", - "externalResourcesRequired", - "focusable", - "preserveAlpha", - ].forEach(function (e) { - Fe[e] = new Ge(e, 2, !1, e, null, !1, !1); - }); - "allowFullScreen async autoFocus autoPlay controls default defer disabled disablePictureInPicture disableRemotePlayback formNoValidate hidden loop noModule noValidate open playsInline readOnly required reversed scoped seamless itemScope" - .split(" ") - .forEach(function (e) { - Fe[e] = new Ge(e, 3, !1, e.toLowerCase(), null, !1, !1); - }); - ["checked", "multiple", "muted", "selected"].forEach(function (e) { - Fe[e] = new Ge(e, 3, !0, e, null, !1, !1); - }); - ["capture", "download"].forEach(function (e) { - Fe[e] = new Ge(e, 4, !1, e, null, !1, !1); - }); - ["cols", "rows", "size", "span"].forEach(function (e) { - Fe[e] = new Ge(e, 6, !1, e, null, !1, !1); - }); - ["rowSpan", "start"].forEach(function (e) { - Fe[e] = new Ge(e, 5, !1, e.toLowerCase(), null, !1, !1); - }); - var qu = /[\-:]([a-z])/g; - function ec(e) { - return e[1].toUpperCase(); - } - "accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height" - .split(" ") - .forEach(function (e) { - var n = e.replace(qu, ec); - Fe[n] = new Ge(n, 1, !1, e, null, !1, !1); - }); - "xlink:actuate xlink:arcrole xlink:role xlink:show xlink:title xlink:type" - .split(" ") - .forEach(function (e) { - var n = e.replace(qu, ec); - Fe[n] = new Ge(n, 1, !1, e, "http://www.w3.org/1999/xlink", !1, !1); - }); - ["xml:base", "xml:lang", "xml:space"].forEach(function (e) { - var n = e.replace(qu, ec); - Fe[n] = new Ge(n, 1, !1, e, 
"http://www.w3.org/XML/1998/namespace", !1, !1); - }); - ["tabIndex", "crossOrigin"].forEach(function (e) { - Fe[e] = new Ge(e, 1, !1, e.toLowerCase(), null, !1, !1); - }); - Fe.xlinkHref = new Ge( - "xlinkHref", - 1, - !1, - "xlink:href", - "http://www.w3.org/1999/xlink", - !0, - !1, - ); - ["src", "href", "action", "formAction"].forEach(function (e) { - Fe[e] = new Ge(e, 1, !1, e.toLowerCase(), null, !0, !0); - }); - var Hl = { - animationIterationCount: !0, - aspectRatio: !0, - borderImageOutset: !0, - borderImageSlice: !0, - borderImageWidth: !0, - boxFlex: !0, - boxFlexGroup: !0, - boxOrdinalGroup: !0, - columnCount: !0, - columns: !0, - flex: !0, - flexGrow: !0, - flexPositive: !0, - flexShrink: !0, - flexNegative: !0, - flexOrder: !0, - gridArea: !0, - gridRow: !0, - gridRowEnd: !0, - gridRowSpan: !0, - gridRowStart: !0, - gridColumn: !0, - gridColumnEnd: !0, - gridColumnSpan: !0, - gridColumnStart: !0, - fontWeight: !0, - lineClamp: !0, - lineHeight: !0, - opacity: !0, - order: !0, - orphans: !0, - tabSize: !0, - widows: !0, - zIndex: !0, - zoom: !0, - fillOpacity: !0, - floodOpacity: !0, - stopOpacity: !0, - strokeDasharray: !0, - strokeDashoffset: !0, - strokeMiterlimit: !0, - strokeOpacity: !0, - strokeWidth: !0, - }, - gp = ["Webkit", "ms", "Moz", "O"]; - Object.keys(Hl).forEach(function (e) { - gp.forEach(function (n) { - (n = n + e.charAt(0).toUpperCase() + e.substring(1)), (Hl[n] = Hl[e]); - }); - }); - var mp = /["'&<>]/; - function Ye(e) { - if (typeof e == "boolean" || typeof e == "number") return "" + e; - e = "" + e; - var n = mp.exec(e); - if (n) { - var i = "", - s, - v = 0; - for (s = n.index; s < e.length; s++) { - switch (e.charCodeAt(s)) { - case 34: - n = """; - break; - case 38: - n = "&"; - break; - case 39: - n = "'"; - break; - case 60: - n = "<"; - break; - case 62: - n = ">"; - break; - default: - continue; - } - v !== s && (i += e.substring(v, s)), (v = s + 1), (i += n); - } - e = v !== s ? i + e.substring(v, s) : i; - } - return e; - } - var yp = /([A-Z])/g, - bp = /^ms-/, - Xu = Array.isArray; - function xr(e, n) { - return { insertionMode: e, selectedValue: n }; - } - function Sp(e, n, i) { - switch (n) { - case "select": - return xr(1, i.value != null ? i.value : i.defaultValue); - case "svg": - return xr(2, null); - case "math": - return xr(3, null); - case "foreignObject": - return xr(1, null); - case "table": - return xr(4, null); - case "thead": - case "tbody": - case "tfoot": - return xr(5, null); - case "colgroup": - return xr(7, null); - case "tr": - return xr(6, null); - } - return 4 <= e.insertionMode || e.insertionMode === 0 ? xr(1, null) : e; - } - var Xc = new Map(); - function vf(e, n, i) { - if (typeof i != "object") { - throw Error( - "The `style` prop expects a mapping from style properties to values, not a string. For example, style={{marginRight: spacing + 'em'}} when using JSX.", - ); - } - n = !0; - for (var s in i) { - if (qe.call(i, s)) { - var v = i[s]; - if (v != null && typeof v != "boolean" && v !== "") { - if (s.indexOf("--") === 0) { - var c = Ye(s); - v = Ye(("" + v).trim()); - } else { - c = s; - var m = Xc.get(c); - m !== void 0 || - ((m = Ye(c.replace(yp, "-$1").toLowerCase().replace(bp, "-ms-"))), - Xc.set(c, m)), - (c = m), - (v = typeof v == "number" - ? v === 0 || qe.call(Hl, s) ? "" + v : v + "px" - : Ye(("" + v).trim())); - } - n - ? 
((n = !1), e.push(' style="', c, ":", v)) - : e.push(";", c, ":", v); - } - } - } - n || e.push('"'); - } - function it(e, n, i, s) { - switch (i) { - case "style": - vf(e, n, s); - return; - case "defaultValue": - case "defaultChecked": - case "innerHTML": - case "suppressContentEditableWarning": - case "suppressHydrationWarning": - return; - } - if ( - !(2 < i.length) || - (i[0] !== "o" && i[0] !== "O") || - (i[1] !== "n" && i[1] !== "N") - ) { - if (((n = Fe.hasOwnProperty(i) ? Fe[i] : null), n !== null)) { - switch (typeof s) { - case "function": - case "symbol": - return; - case "boolean": - if (!n.acceptsBooleans) return; - } - switch (((i = n.attributeName), n.type)) { - case 3: - s && e.push(" ", i, '=""'); - break; - case 4: - s === !0 - ? e.push(" ", i, '=""') - : s !== !1 && e.push(" ", i, '="', Ye(s), '"'); - break; - case 5: - isNaN(s) || e.push(" ", i, '="', Ye(s), '"'); - break; - case 6: - !isNaN(s) && 1 <= s && e.push(" ", i, '="', Ye(s), '"'); - break; - default: - n.sanitizeURL && (s = "" + s), e.push(" ", i, '="', Ye(s), '"'); - } - } else if (hf(i)) { - switch (typeof s) { - case "function": - case "symbol": - return; - case "boolean": - if ( - ((n = i.toLowerCase().slice(0, 5)), - n !== "data-" && n !== "aria-") - ) { - return; - } - } - e.push(" ", i, '="', Ye(s), '"'); - } - } - } - function Wl(e, n, i) { - if (n != null) { - if (i != null) { - throw Error( - "Can only set one of `children` or `props.dangerouslySetInnerHTML`.", - ); - } - if (typeof n != "object" || !("__html" in n)) { - throw Error( - "`props.dangerouslySetInnerHTML` must be in the form `{__html: ...}`. Please visit https://reactjs.org/link/dangerously-set-inner-html for more information.", - ); - } - (n = n.__html), n != null && e.push("" + n); - } - } - function wp(e) { - var n = ""; - return ( - pf.Children.forEach(e, function (i) { - i != null && (n += i); - }), n - ); - } - function Vu(e, n, i, s) { - e.push(Jt(i)); - var v = (i = null), - c; - for (c in n) { - if (qe.call(n, c)) { - var m = n[c]; - if (m != null) { - switch (c) { - case "children": - i = m; - break; - case "dangerouslySetInnerHTML": - v = m; - break; - default: - it(e, s, c, m); - } - } - } - } - return ( - e.push(">"), Wl(e, v, i), typeof i == "string" ? (e.push(Ye(i)), null) : i - ); - } - var xp = /^[a-zA-Z][a-zA-Z:_\.\-\d]*$/, - Zc = new Map(); - function Jt(e) { - var n = Zc.get(e); - if (n === void 0) { - if (!xp.test(e)) throw Error("Invalid tag: " + e); - (n = "<" + e), Zc.set(e, n); - } - return n; - } - function kp(e, n, i, s, v) { - switch (n) { - case "select": - e.push(Jt("select")); - var c = null, - m = null; - for (R in i) { - if (qe.call(i, R)) { - var S = i[R]; - if (S != null) { - switch (R) { - case "children": - c = S; - break; - case "dangerouslySetInnerHTML": - m = S; - break; - case "defaultValue": - case "value": - break; - default: - it(e, s, R, S); - } - } - } - } - return e.push(">"), Wl(e, m, c), c; - case "option": - (m = v.selectedValue), e.push(Jt("option")); - var E = (S = null), - x = null, - R = null; - for (c in i) { - if (qe.call(i, c)) { - var D = i[c]; - if (D != null) { - switch (c) { - case "children": - S = D; - break; - case "selected": - x = D; - break; - case "dangerouslySetInnerHTML": - R = D; - break; - case "value": - E = D; - default: - it(e, s, c, D); - } - } - } - } - if (m != null) { - if (((i = E !== null ? 
"" + E : wp(S)), Xu(m))) { - for (s = 0; s < m.length; s++) { - if ("" + m[s] === i) { - e.push(' selected=""'); - break; - } - } - } else "" + m === i && e.push(' selected=""'); - } else x && e.push(' selected=""'); - return e.push(">"), Wl(e, R, S), S; - case "textarea": - e.push(Jt("textarea")), (R = m = c = null); - for (S in i) { - if (qe.call(i, S) && ((E = i[S]), E != null)) { - switch (S) { - case "children": - R = E; - break; - case "value": - c = E; - break; - case "defaultValue": - m = E; - break; - case "dangerouslySetInnerHTML": - throw Error( - "`dangerouslySetInnerHTML` does not make sense on