diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts
index 4ab1ba0f0f07c0..b6b0d36f8687b4 100755
--- a/.github/workflows/ci.generate.ts
+++ b/.github/workflows/ci.generate.ts
@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 87;
+const cacheVersion = 91;
 
 const ubuntuX86Runner = "ubuntu-22.04";
 const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@@ -86,7 +86,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
 (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
 
 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240207/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev
@@ -95,21 +95,23 @@ sudo mount --rbind /home /sysroot/home
 sudo mount -t proc /proc /sysroot/proc
 cd
 
-if [[ \`uname -m\` == "aarch64" ]]; then
-  echo "Copying libdl.a"
-  sudo cp /sysroot/usr/lib/aarch64-linux-gnu/libdl.a /sysroot/lib/aarch64-linux-gnu/libdl.a
-  echo "Copying libdl.so"
-  sudo cp /sysroot/lib/aarch64-linux-gnu/libdl.so.2 /sysroot/lib/aarch64-linux-gnu/libdl.so
-else
-  echo "Copying libdl.a"
-  sudo cp /sysroot/usr/lib/x86_64-linux-gnu/libdl.a /sysroot/lib/x86_64-linux-gnu/libdl.a
-  echo "Copying libdl.so"
-  sudo cp /sysroot/lib/x86_64-linux-gnu/libdl.so.2 /sysroot/lib/x86_64-linux-gnu/libdl.so
-fi
+echo "Done."
 
 # Configure the build environment. Both Rust and Clang will produce
 # llvm bitcode only, so we can use lld's incremental LTO support.
-cat >> $GITHUB_ENV << __0
+
+# Load the sysroot's env vars
+echo "sysroot env:"
+cat /sysroot/.env
+. /sysroot/.env
+
+# Important notes:
+#   1. -ldl seems to be required to avoid a failure in FFI tests. This flag seems
+#      to be in the Rust default flags in the smoketest, so uncertain why we need
+#      to be explicit here.
+#   2. RUSTFLAGS and RUSTDOCFLAGS must be specified, otherwise the doctests fail
+#      to build because the object formats are not compatible.
+echo "
 CARGO_PROFILE_BENCH_INCREMENTAL=false
 CARGO_PROFILE_BENCH_LTO=false
 CARGO_PROFILE_RELEASE_INCREMENTAL=false
@@ -118,28 +120,27 @@ RUSTFLAGS<<__1
 -C linker-plugin-lto=true
 -C linker=clang-${llvmVersion}
 -C link-arg=-fuse-ld=lld-${llvmVersion}
--C link-arg=--sysroot=/sysroot
 -C link-arg=-ldl
 -C link-arg=-Wl,--allow-shlib-undefined
 -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
 -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
 --cfg tokio_unstable
-\${{ env.RUSTFLAGS }}
+$RUSTFLAGS
 __1
 RUSTDOCFLAGS<<__1
 -C linker-plugin-lto=true
 -C linker=clang-${llvmVersion}
 -C link-arg=-fuse-ld=lld-${llvmVersion}
--C link-arg=--sysroot=/sysroot
 -C link-arg=-ldl
 -C link-arg=-Wl,--allow-shlib-undefined
 -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
 -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
-\${{ env.RUSTFLAGS }}
+--cfg tokio_unstable
+$RUSTFLAGS
 __1
 CC=/usr/bin/clang-${llvmVersion}
-CFLAGS=-flto=thin --sysroot=/sysroot
-__0`,
+CFLAGS=-flto=thin $CFLAGS
+" > $GITHUB_ENV`,
 };
 
 const installBenchTools = "./tools/install_prebuilt.js wrk hyperfine";
@@ -700,6 +701,24 @@ const ci = {
           "df -h",
         ].join("\n"),
       },
+      {
+        // Run a minimal check to ensure that binary is not corrupted, regardless
+        // of our build mode
+        name: "Check deno binary",
+        if: "matrix.job == 'test'",
+        run:
+          'target/${{ matrix.profile }}/deno eval "console.log(1+2)" | grep 3',
+        env: {
+          NO_COLOR: 1,
+        },
+      },
+      {
+        // Verify that the binary actually works in the Ubuntu-16.04 sysroot.
+        name: "Check deno binary (in sysroot)",
+        if: "matrix.job == 'test' && matrix.use_sysroot",
+        run:
+          'sudo chroot /sysroot "$(pwd)/target/${{ matrix.profile }}/deno" --version',
+      },
       {
         name: "Upload PR artifact (linux)",
        if: [
@@ -835,25 +854,6 @@ const ci = {
        ].join("\n"),
        run: "cargo test --release --locked",
      },
-      {
-        // Since all tests are skipped when we're building a tagged commit
-        // this is a minimal check to ensure that binary is not corrupted
-        name: "Check deno binary",
-        if:
-          "matrix.profile == 'release' && startsWith(github.ref, 'refs/tags/')",
-        run: 'target/release/deno eval "console.log(1+2)" | grep 3',
-        env: {
-          NO_COLOR: 1,
-        },
-      },
-      {
-        // Verify that the binary actually works in the Ubuntu-16.04 sysroot.
-        // TODO(mmastrac): make this work for aarch64 as well
-        name: "Check deno binary (in sysroot)",
-        if:
-          "matrix.profile == 'release' && matrix.use_sysroot && matrix.arch != 'aarch64'",
-        run: 'sudo chroot /sysroot "$(pwd)/target/release/deno" --version',
-      },
      {
        name: "Configure hosts file for WPT",
        if: "matrix.wpt",
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8203791e9c0bcd..03f28a696fbb88 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -273,7 +273,7 @@ jobs:
           (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
 
           echo "Decompressing sysroot..."
-          wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240207/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
+          wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
           cd /
           xzcat /tmp/sysroot.tar.xz | sudo tar -x
           sudo mount --rbind /dev /sysroot/dev
@@ -282,21 +282,23 @@ jobs:
           sudo mount -t proc /proc /sysroot/proc
           cd
 
-          if [[ `uname -m` == "aarch64" ]]; then
-            echo "Copying libdl.a"
-            sudo cp /sysroot/usr/lib/aarch64-linux-gnu/libdl.a /sysroot/lib/aarch64-linux-gnu/libdl.a
-            echo "Copying libdl.so"
-            sudo cp /sysroot/lib/aarch64-linux-gnu/libdl.so.2 /sysroot/lib/aarch64-linux-gnu/libdl.so
-          else
-            echo "Copying libdl.a"
-            sudo cp /sysroot/usr/lib/x86_64-linux-gnu/libdl.a /sysroot/lib/x86_64-linux-gnu/libdl.a
-            echo "Copying libdl.so"
-            sudo cp /sysroot/lib/x86_64-linux-gnu/libdl.so.2 /sysroot/lib/x86_64-linux-gnu/libdl.so
-          fi
+          echo "Done."
 
           # Configure the build environment. Both Rust and Clang will produce
           # llvm bitcode only, so we can use lld's incremental LTO support.
-          cat >> $GITHUB_ENV << __0
+
+          # Load the sysroot's env vars
+          echo "sysroot env:"
+          cat /sysroot/.env
+          . /sysroot/.env
+
+          # Important notes:
+          #   1. -ldl seems to be required to avoid a failure in FFI tests. This flag seems
+          #      to be in the Rust default flags in the smoketest, so uncertain why we need
+          #      to be explicit here.
+          #   2. RUSTFLAGS and RUSTDOCFLAGS must be specified, otherwise the doctests fail
+          #      to build because the object formats are not compatible.
+          echo "
           CARGO_PROFILE_BENCH_INCREMENTAL=false
           CARGO_PROFILE_BENCH_LTO=false
           CARGO_PROFILE_RELEASE_INCREMENTAL=false
@@ -305,28 +307,27 @@ jobs:
           -C linker-plugin-lto=true
           -C linker=clang-17
           -C link-arg=-fuse-ld=lld-17
-          -C link-arg=--sysroot=/sysroot
           -C link-arg=-ldl
           -C link-arg=-Wl,--allow-shlib-undefined
           -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
           -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
           --cfg tokio_unstable
-          ${{ env.RUSTFLAGS }}
+          $RUSTFLAGS
           __1
           RUSTDOCFLAGS<<__1
           -C linker-plugin-lto=true
           -C linker=clang-17
           -C link-arg=-fuse-ld=lld-17
-          -C link-arg=--sysroot=/sysroot
           -C link-arg=-ldl
           -C link-arg=-Wl,--allow-shlib-undefined
           -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
           -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m
-          ${{ env.RUSTFLAGS }}
+          --cfg tokio_unstable
+          $RUSTFLAGS
           __1
           CC=/usr/bin/clang-17
-          CFLAGS=-flto=thin --sysroot=/sysroot
-          __0
+          CFLAGS=-flto=thin $CFLAGS
+          " > $GITHUB_ENV
       - name: Remove macOS cURL --ipv4 flag
         run: |-
           curl --version
@@ -366,8 +367,8 @@ jobs:
           path: |-
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
-          key: '87-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '87-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '91-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '91-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
         if: '!(matrix.skip)'
       - name: Restore cache build output (PR)
         uses: actions/cache/restore@v4
@@ -379,7 +380,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '87-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '91-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
         if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
@@ -419,6 +420,14 @@ jobs:
           df -h
           cargo build --release --locked --all-targets
           df -h
+      - name: Check deno binary
+        if: '!(matrix.skip) && (matrix.job == ''test'')'
+        run: 'target/${{ matrix.profile }}/deno eval "console.log(1+2)" | grep 3'
+        env:
+          NO_COLOR: 1
+      - name: Check deno binary (in sysroot)
+        if: '!(matrix.skip) && (matrix.job == ''test'' && matrix.use_sysroot)'
+        run: 'sudo chroot /sysroot "$(pwd)/target/${{ matrix.profile }}/deno" --version'
       - name: Upload PR artifact (linux)
         if: |-
           !(matrix.skip) && (matrix.job == 'test' &&
@@ -512,14 +521,6 @@ jobs:
           github.repository == 'denoland/deno' &&
           !startsWith(github.ref, 'refs/tags/'))))
         run: cargo test --release --locked
-      - name: Check deno binary
-        if: '!(matrix.skip) && (matrix.profile == ''release'' && startsWith(github.ref, ''refs/tags/''))'
-        run: target/release/deno eval "console.log(1+2)" | grep 3
-        env:
-          NO_COLOR: 1
-      - name: Check deno binary (in sysroot)
-        if: '!(matrix.skip) && (matrix.profile == ''release'' && matrix.use_sysroot && matrix.arch != ''aarch64'')'
-        run: sudo chroot /sysroot "$(pwd)/target/release/deno" --version
       - name: Configure hosts file for WPT
         if: '!(matrix.skip) && (matrix.wpt)'
         run: ./wpt make-hosts-file | sudo tee -a /etc/hosts
@@ -668,7 +669,7 @@ jobs:
             !./target/*/gn_out
             !./target/*/*.zip
             !./target/*/*.tar.gz
-          key: '87-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '91-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-22.04
diff --git a/Cargo.lock b/Cargo.lock
index 7540e5ca50cc5e..14ee88169daabc 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -263,9 +263,9 @@ dependencies = [
 
 [[package]]
 name = "ast_node"
-version = "0.9.6"
+version = "0.9.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3e3e06ec6ac7d893a0db7127d91063ad7d9da8988f8a1a256f03729e6eec026"
+checksum = "2ab31376d309dd3bfc9cfb3c11c93ce0e0741bbe0354b20e7f8c60b044730b79"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -706,10 +706,8 @@ dependencies = [
  "os_pipe",
  "pretty_assertions",
  "serde",
- "serde_repr",
  "test_server",
  "tokio",
- "tokio-util",
  "tower-lsp",
  "trust-dns-client",
  "trust-dns-server",
@@ -1068,7 +1066,7 @@ dependencies = [
 
 [[package]]
 name = "deno"
-version = "1.43.1"
+version = "1.43.4"
 dependencies = [
  "async-trait",
  "base32",
@@ -1111,7 +1109,6 @@ dependencies = [
  "faster-hex",
  "flate2",
  "fs3",
- "fwdansi",
  "glibc_version",
  "glob",
  "ignore",
@@ -1131,11 +1128,9 @@ dependencies = [
  "notify",
  "once_cell",
  "open",
- "os_pipe",
  "p256",
  "percent-encoding",
  "phf 0.11.2",
- "pin-project",
  "pretty_assertions",
  "quick-junit",
  "rand",
@@ -1161,7 +1156,6 @@ dependencies = [
  "tower-lsp",
  "twox-hash",
  "typed-arena",
- "unicode-width",
  "uuid",
  "walkdir",
  "winapi",
@@ -1172,9 +1166,9 @@ dependencies = [
 
 [[package]]
 name = "deno_ast"
-version = "0.38.1"
+version = "0.38.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e2417aad5382d10d035e46d35f2f5fbbb93a922816408245ee585e7ca775194"
+checksum = "584547d27786a734536fde7088f8429d355569c39410427be44695c300618408"
 dependencies = [
  "anyhow",
  "base64",
@@ -1216,17 +1210,16 @@ dependencies = [
 
 [[package]]
 name = "deno_bench_util"
-version = "0.143.0"
+version = "0.146.0"
 dependencies = [
  "bencher",
  "deno_core",
- "once_cell",
  "tokio",
 ]
 
 [[package]]
 name = "deno_broadcast_channel"
-version = "0.143.0"
+version = "0.146.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1236,7 +1229,7 @@ dependencies = [
 
 [[package]]
 name = "deno_cache"
-version = "0.81.0"
+version = "0.84.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1267,13 +1260,12 @@ dependencies = [
 
 [[package]]
 name = "deno_canvas"
-version = "0.18.0"
+version = "0.21.0"
 dependencies = [
  "deno_core",
  "deno_webgpu",
  "image",
  "serde",
- "tokio",
 ]
 
 [[package]]
@@ -1296,16 +1288,16 @@ dependencies = [
 
 [[package]]
 name = "deno_console"
-version = "0.149.0"
+version = "0.152.0"
 dependencies = [
  "deno_core",
 ]
 
 [[package]]
 name = "deno_core"
-version = "0.279.0"
+version = "0.280.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6af4398065fca315c07f07c3a7074fc8928e0a0488eeb16b68f390ab9190ed97"
+checksum = "12d26f2d3e243bbbdd0851ab542b20ec48ac1fcf6c64ab06e81133da3113ebdd"
 dependencies = [
  "anyhow",
  "bincode",
@@ -1340,7 +1332,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
 
 [[package]]
 name = "deno_cron"
-version = "0.29.0"
+version = "0.32.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1352,7 +1344,7 @@ dependencies = [
 
 [[package]]
 name = "deno_crypto"
-version = "0.163.0"
+version = "0.166.0"
 dependencies = [
  "aes",
  "aes-gcm",
@@ -1379,16 +1371,15 @@ dependencies = [
  "sha2",
  "signature",
  "spki",
- "tokio",
  "uuid",
  "x25519-dalek",
 ]
 
 [[package]]
 name = "deno_doc"
-version = "0.129.0"
+version = "0.135.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38dc17660505a70f007a71d1550e2357662ca2083f9136eedcd53ccbac7537a6"
+checksum = "c2c39bbe2769f6e8bfba1a99d7656e6994474c48fd2085ed82b59556e9d398dd"
 dependencies = [
  "ammonia",
  "anyhow",
@@ -1411,9 +1402,9 @@ dependencies = [
 
 [[package]]
 name = "deno_emit"
-version = "0.40.2"
+version = "0.40.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efe4e71f7c2b0ddef4a4927a193ce47824488f29c90166700605b3fe560fecb7"
+checksum = "80b80fef2bf1b6e14712633975f7f39a3b29b95a5769cafcb959ffa1a84b7680"
 dependencies = [
  "anyhow",
  "base64",
@@ -1428,7 +1419,7 @@ dependencies = [
 
 [[package]]
 name = "deno_fetch"
-version = "0.173.0"
+version = "0.176.0"
 dependencies = [
  "bytes",
  "data-url",
@@ -1436,7 +1427,6 @@ dependencies = [
  "deno_tls",
  "dyn-clone",
  "http 0.2.12",
- "pin-project",
  "reqwest",
  "serde",
  "serde_json",
@@ -1446,46 +1436,43 @@ dependencies = [
 
 [[package]]
 name = "deno_ffi"
-version = "0.136.0"
+version = "0.139.0"
 dependencies = [
  "deno_core",
  "dlopen2",
  "dynasmrt",
  "libffi",
  "libffi-sys",
+ "log",
  "serde",
  "serde-value",
  "serde_json",
- "tokio",
  "winapi",
 ]
 
 [[package]]
 name = "deno_fs"
-version = "0.59.0"
+version = "0.62.0"
 dependencies = [
  "async-trait",
  "base32",
  "deno_core",
  "deno_io",
  "filetime",
- "fs3",
  "junction",
  "libc",
- "log",
  "nix 0.26.2",
  "rand",
  "rayon",
  "serde",
- "tokio",
  "winapi",
 ]
 
 [[package]]
 name = "deno_graph"
-version = "0.74.5"
+version = "0.75.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b7078d584a4ff53349ec8f7909826dffe2a0c2ccb61b9b6549ea38d996d5a61"
+checksum = "a68ddc87ce88c0a2568277ee6caabf37890128710910416c09cd6f6a8931dba6"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1512,7 +1499,7 @@ dependencies = [
 
 [[package]]
 name = "deno_http"
-version = "0.146.0"
+version = "0.149.0"
 dependencies = [
  "async-compression",
  "async-trait",
@@ -1551,12 +1538,13 @@ dependencies = [
 
 [[package]]
 name = "deno_io"
-version = "0.59.0"
+version = "0.62.0"
 dependencies = [
  "async-trait",
  "deno_core",
  "filetime",
  "fs3",
+ "log",
  "once_cell",
  "os_pipe",
  "rand",
@@ -1566,7 +1554,7 @@ dependencies = [
 
 [[package]]
 name = "deno_kv"
-version = "0.57.0"
+version = "0.60.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1585,14 +1573,9 @@ dependencies = [
  "prost",
  "prost-build",
  "rand",
- "reqwest",
  "rusqlite",
  "serde",
- "serde_json",
- "termcolor",
- "tokio",
  "url",
- "uuid",
 ]
 
 [[package]]
@@ -1637,7 +1620,7 @@ dependencies = [
 
 [[package]]
 name = "deno_napi"
-version = "0.79.0"
+version = "0.82.0"
 dependencies = [
  "deno_core",
  "libloading 0.7.4",
@@ -1658,12 +1641,10 @@ dependencies = [
 
 [[package]]
 name = "deno_net"
-version = "0.141.0"
+version = "0.144.0"
 dependencies = [
  "deno_core",
  "deno_tls",
- "enum-as-inner",
- "log",
  "pin-project",
  "rustls-tokio-stream",
  "serde",
@@ -1675,7 +1656,7 @@ dependencies = [
 
 [[package]]
 name = "deno_node"
-version = "0.86.0"
+version = "0.89.0"
 dependencies = [
  "aead-gcm-stream",
  "aes",
@@ -1699,6 +1680,7 @@ dependencies = [
  "faster-hex",
  "h2 0.3.26",
  "hkdf",
+ "home",
  "http 0.2.12",
  "idna 0.3.0",
  "indexmap",
@@ -1708,7 +1690,6 @@ dependencies = [
  "libz-sys",
  "md-5",
  "md4",
- "nix 0.26.2",
  "num-bigint",
  "num-bigint-dig",
  "num-integer",
@@ -1735,7 +1716,6 @@ dependencies = [
  "simd-json",
  "spki",
  "tokio",
- "typenum",
  "url",
  "winapi",
  "windows-sys 0.48.0",
@@ -1745,9 +1725,9 @@ dependencies = [
 
 [[package]]
 name = "deno_npm"
-version = "0.18.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bcd4f91bb7139c031791f135aa1785e08a828795d5daaefe981b9f9292f66e91"
+checksum = "9db4bc4de6c0d2935662dcd99542b03a0db5f1778bada7ef2afc074e7819068d"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1758,13 +1738,14 @@ dependencies = [
  "monch",
  "serde",
  "thiserror",
+ "url",
 ]
 
 [[package]]
 name = "deno_ops"
-version = "0.155.0"
+version = "0.156.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5acf7e33e0000df00433e50334d42eb325bd1195d81e3d579d4cb09de601303e"
+checksum = "8237b272db1a6cb941b8a5a63ba63539004a8263e8b0230a11136d76eea273f9"
 dependencies = [
  "proc-macro-rules",
  "proc-macro2",
@@ -1777,9 +1758,8 @@ dependencies = [
 
 [[package]]
 name = "deno_permissions"
-version = "0.9.0"
+version = "0.12.0"
 dependencies = [
- "console_static_text",
  "deno_core",
  "deno_terminal",
  "fqdn",
@@ -1787,16 +1767,14 @@ dependencies = [
  "log",
  "once_cell",
  "serde",
- "termcolor",
  "which 4.4.2",
  "winapi",
 ]
 
 [[package]]
 name = "deno_runtime"
-version = "0.157.0"
+version = "0.160.0"
 dependencies = [
- "console_static_text",
  "deno_ast",
  "deno_broadcast_channel",
  "deno_cache",
@@ -1826,10 +1804,7 @@ dependencies = [
  "dlopen2",
  "encoding_rs",
  "fastwebsockets",
- "filetime",
  "flate2",
- "fs3",
- "fwdansi",
  "http 1.1.0",
  "http-body-util",
  "hyper 0.14.28",
@@ -1844,7 +1819,6 @@ dependencies = [
  "once_cell",
  "percent-encoding",
  "regex",
- "ring",
  "rustyline",
  "serde",
  "signal-hook",
@@ -1856,7 +1830,6 @@ dependencies = [
  "which 4.4.2",
  "winapi",
  "windows-sys 0.48.0",
- "winres",
 ]
 
 [[package]]
@@ -1874,9 +1847,9 @@ dependencies = [
 
 [[package]]
 name = "deno_task_shell"
-version = "0.16.0"
+version = "0.16.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b44af10161906e1bccc1fc966f074bec0148997bb7e2221ecd29416dcad90b3"
+checksum = "97e5ff66a1e89edb7ca0c36b73a8fcdc008ba426c4ad7a36e1dfb3f4a166179e"
 dependencies = [
  "anyhow",
  "futures",
@@ -1901,16 +1874,16 @@ dependencies = [
 
 [[package]]
 name = "deno_tls"
-version = "0.136.0"
+version = "0.139.0"
 dependencies = [
  "deno_core",
  "deno_native_certs",
- "once_cell",
  "rustls",
  "rustls-pemfile",
  "rustls-tokio-stream",
  "rustls-webpki",
  "serde",
+ "tokio",
  "webpki-roots",
 ]
 
@@ -1925,19 +1898,18 @@ dependencies = [
 
 [[package]]
 name = "deno_url"
-version = "0.149.0"
+version = "0.152.0"
 dependencies = [
  "deno_bench_util",
  "deno_console",
  "deno_core",
  "deno_webidl",
- "serde",
  "urlpattern",
 ]
 
 [[package]]
 name = "deno_web"
-version = "0.180.0"
+version = "0.183.0"
 dependencies = [
  "async-trait",
  "base64-simd 0.8.0",
@@ -1953,25 +1925,23 @@ dependencies = [
  "serde",
  "tokio",
  "uuid",
- "windows-sys 0.48.0",
 ]
 
 [[package]]
 name = "deno_webgpu"
-version = "0.116.0"
+version = "0.119.0"
 dependencies = [
  "deno_core",
  "raw-window-handle",
  "serde",
  "tokio",
  "wgpu-core",
- "wgpu-hal",
  "wgpu-types",
 ]
 
 [[package]]
 name = "deno_webidl"
-version = "0.149.0"
+version = "0.152.0"
 dependencies = [
  "deno_bench_util",
  "deno_core",
@@ -1979,7 +1949,7 @@ dependencies = [
 
 [[package]]
 name = "deno_websocket"
-version = "0.154.0"
+version = "0.157.0"
 dependencies = [
  "bytes",
  "deno_core",
@@ -1999,12 +1969,11 @@ dependencies = [
 
 [[package]]
 name = "deno_webstorage"
-version = "0.144.0"
+version = "0.147.0"
 dependencies = [
  "deno_core",
  "deno_web",
  "rusqlite",
- "serde",
 ]
 
 [[package]]
@@ -2536,9 +2505,9 @@ dependencies = [
 
 [[package]]
 name = "eszip"
-version = "0.68.2"
+version = "0.69.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e478354842a7ee02c685221da61b7ecee919283e65818167d0f8ce60cca70798"
+checksum = "8f606daca1ce18c69ccdabc59aa1c7e077356b8ffcd74e12c7646f545320a2fd"
 dependencies = [
  "anyhow",
  "base64",
@@ -2649,9 +2618,9 @@ checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f"
 
 [[package]]
 name = "file_test_runner"
-version = "0.5.0"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc644d2903f00e5f0e5d34dca805c7a100b09a1d257e07697101d90eb10d3351"
+checksum = "b8797fcdc5c6b8c06839900c30f5c59b3541ef2bec218579470ce7b1afc17ee9"
 dependencies = [
  "anyhow",
  "crossbeam-channel",
@@ -2760,9 +2729,9 @@ checksum = "1bf664d6b0598fea5600b85cddc79e60d4c1f262f42faf75c2d77dc2164c9a8b"
 
 [[package]]
 name = "from_variant"
-version = "0.1.7"
+version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a0b11eeb173ce52f84ebd943d42e58813a2ebb78a6a3ff0a243b71c5199cd7b"
+checksum = "fdc9cc75639b041067353b9bce2450d6847e547276c6fbe4487d7407980e07db"
 dependencies = [
  "proc-macro2",
  "swc_macros_common",
@@ -2904,16 +2873,6 @@ dependencies = [
  "slab",
 ]
 
-[[package]]
-name = "fwdansi"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08c1f5787fe85505d1f7777268db5103d80a7a374d2316a7ce262e57baf8f208"
-dependencies = [
- "memchr",
- "termcolor",
-]
-
 [[package]]
 name = "generic-array"
 version = "0.14.7"
@@ -3033,19 +2992,6 @@ dependencies = [
  "bitflags 2.5.0",
 ]
 
-[[package]]
-name = "gpu-allocator"
-version = "0.25.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f56f6318968d03c18e1bcf4857ff88c61157e9da8e47c5f29055d60e1228884"
-dependencies = [
- "log",
- "presser",
- "thiserror",
- "winapi",
- "windows",
-]
-
 [[package]]
 name = "gpu-descriptor"
 version = "0.3.0"
@@ -4164,9 +4110,8 @@ dependencies = [
 
 [[package]]
 name = "napi_sym"
-version = "0.79.0"
+version = "0.82.0"
 dependencies = [
- "proc-macro2",
  "quote",
  "serde",
  "serde_json",
@@ -4869,12 +4814,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
-[[package]]
-name = "presser"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8cf8e6a8aa66ce33f63993ffc4ea4271eb5b0530a9002db8455ea6050c77bfa"
-
 [[package]]
 name = "pretty_assertions"
 version = "1.4.0"
@@ -5455,9 +5394,9 @@ dependencies = [
 
 [[package]]
 name = "rustls-tokio-stream"
-version = "0.2.17"
+version = "0.2.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ded7a36e8ac05b8ada77a84c5ceec95361942ee9dedb60a82f93f788a791aae8"
+checksum = "fd707225bb670bcd2876886bb571753d1ce03a9cedfa2e629a79984ca9a93cfb"
 dependencies = [
  "futures",
  "rustls",
@@ -5731,9 +5670,9 @@ dependencies = [
 
 [[package]]
 name = "serde_v8"
-version = "0.188.0"
+version = "0.189.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb49d0c52dba872ec78d826009b0cb0ec3b911753bda8b7e98f104cf88e0b936"
+checksum = "893c995255d6fbf55c33166b651fd037c4e3cc7864bf82213ea18d0ec94ed165"
 dependencies = [
  "num-bigint",
  "serde",
@@ -6011,9 +5950,9 @@ dependencies = [
 
 [[package]]
 name = "string_enum"
-version = "0.4.2"
+version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b650ea2087d32854a0f20b837fc56ec987a1cb4f758c9757e1171ee9812da63"
+checksum = "05e383308aebc257e7d7920224fa055c632478d92744eca77f99be8fa1545b90"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6078,9 +6017,9 @@ dependencies = [
 
 [[package]]
 name = "swc_bundler"
-version = "0.226.0"
+version = "0.227.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60aeba6588ba222a184e7ae34bc349330d5196797fd2b7f921dfac6ef62db7f9"
+checksum = "d1a212bd08b1121c7204a04407ea055779fc00cf80024fc666dd97b00749cf87"
 dependencies = [
  "anyhow",
  "crc",
@@ -6122,9 +6061,9 @@ dependencies = [
 
 [[package]]
 name = "swc_common"
-version = "0.33.25"
+version = "0.33.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a529796c240cd87da18d26d63f9de4c7ad3680cf0a04b95f0c37f4c4f0a0da63"
+checksum = "a2f9706038906e66f3919028f9f7a37f3ed552f1b85578e93f4468742e2da438"
 dependencies = [
  "ast_node",
  "better_scoped_tls",
@@ -6148,9 +6087,9 @@ dependencies = [
 
 [[package]]
 name = "swc_config"
-version = "0.1.12"
+version = "0.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ada712ac5e28a301683c8af957e8a56deca675cbc376473dd207a527b989efb5"
+checksum = "7be1a689e146be1eae53139482cb061dcf0fa01dff296bbe7b96fff92d8e2936"
 dependencies = [
  "anyhow",
  "indexmap",
@@ -6162,9 +6101,9 @@ dependencies = [
 
 [[package]]
 name = "swc_config_macro"
-version = "0.1.3"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b2574f75082322a27d990116cd2a24de52945fc94172b24ca0b3e9e2a6ceb6b"
+checksum = "7c5f56139042c1a95b54f5ca48baa0e0172d369bcc9d3d473dad1de36bae8399"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6174,9 +6113,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_ast"
-version = "0.113.0"
+version = "0.113.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f99fdda741656887f4cf75c1cee249a5f0374d67d30acc2b073182e902546ff2"
+checksum = "dc1690cc0c9ab60b44ac0225ba1e231ac532f7ba1d754df761c6ee607561afae"
 dependencies = [
  "bitflags 2.5.0",
  "is-macro",
@@ -6192,9 +6131,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_codegen"
-version = "0.149.0"
+version = "0.149.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c21b8ae99bc3b95c6f7909915cd1e5994bec4e5b576f2e2a6879e56f2770760"
+checksum = "4fef147127a2926ca26171c7afcbf028ff86dc543ced87d316713f25620a15b9"
 dependencies = [
  "memchr",
  "num-bigint",
@@ -6211,9 +6150,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_codegen_macros"
-version = "0.7.5"
+version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17ab87ba81ae05efd394ab4a8cbdba595ac3554a5e393c76699449d47c43582e"
+checksum = "090e409af49c8d1a3c13b3aab1ed09dd4eda982207eb3e63c2ad342f072b49c8"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6223,9 +6162,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_loader"
-version = "0.45.27"
+version = "0.45.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a923880fc27cf5f3d2a684debb7c5a0ee60100af1bfe424cb5e722d290bf88a"
+checksum = "92c68f934bd2c51f29c4ad0bcae09924e9dc30d7ce0680367d45b42d40338a67"
 dependencies = [
  "anyhow",
  "pathdiff",
@@ -6237,9 +6176,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_parser"
-version = "0.144.0"
+version = "0.144.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3da9f3a58f0a64410f4006eb1fdb64d190ad3cc6cd12a7bf1f0dbb916e4ca4c7"
+checksum = "0499e69683ae5d67a20ff0279b94bc90f29df7922a46331b54d5dd367bf89570"
 dependencies = [
  "either",
  "new_debug_unreachable",
@@ -6259,9 +6198,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_base"
-version = "0.138.0"
+version = "0.138.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91771e358664649cf2cabec86a270bd9ce267f5213f299cacb255951b5edf06b"
+checksum = "eddb95c2bdad1c9c29edf35712e1e0f9b9ddc1cdb5ba2d582fd93468cb075a03"
 dependencies = [
  "better_scoped_tls",
  "bitflags 2.5.0",
@@ -6282,9 +6221,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_classes"
-version = "0.127.0"
+version = "0.127.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfa5cd60314f35a114dc85955c6b645e9bb13fdfda4f732137ed62a482ca8990"
+checksum = "53043d81678f3c693604eeb1d1f0fe6ba10f303104a31b954dbeebed9cadf530"
 dependencies = [
  "swc_atoms",
  "swc_common",
@@ -6296,9 +6235,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_macros"
-version = "0.5.4"
+version = "0.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17e309b88f337da54ef7fe4c5b99c2c522927071f797ee6c9fb8b6bf2d100481"
+checksum = "500a1dadad1e0e41e417d633b3d6d5de677c9e0d3159b94ba3348436cdb15aab"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6308,9 +6247,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_optimization"
-version = "0.199.0"
+version = "0.199.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba0d1c320c74b97e6f79f8ff5bae05b78cc211492b29654994963e4f1fe4a01f"
+checksum = "32ea30b3df748236c619409f222f0ba68ebeebc08dfff109d2195664a15689f9"
 dependencies = [
  "dashmap",
  "indexmap",
@@ -6332,9 +6271,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_proposal"
-version = "0.172.0"
+version = "0.172.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "067a79cba791af32fb0634c0ca08051e47b56ccfe8d07349b7787b22da401084"
+checksum = "7fbc414d6a9c5479cfb4c6e92fcdac504582bd7bc89a0ed7f8808b72dc8bd1f0"
 dependencies = [
  "either",
  "rustc-hash",
@@ -6352,9 +6291,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_react"
-version = "0.184.0"
+version = "0.184.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69c87599f4a10987fe2687967e5448858b458f2924faa62f044acd56f4e3ffda"
+checksum = "565a76c4ca47ce31d78301c0beab878e4c2cb4f624691254d834ec8c0e236755"
 dependencies = [
  "base64",
  "dashmap",
@@ -6376,9 +6315,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_transforms_typescript"
-version = "0.189.0"
+version = "0.189.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08ea0dc9076708448e8ded8e1119717e0e6e095b1ba42b4c8d7fdb1d26fba418"
+checksum = "e209026c1d3c577cafac257d87e7c0d23119282fbdc8ed03d7f56077e95beb90"
 dependencies = [
  "ryu-js",
  "serde",
@@ -6393,9 +6332,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_utils"
-version = "0.128.0"
+version = "0.128.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd533f5751b7a8673bd843151c4e6e64a2dcf6c1f65331401e88f244c0e85de7"
+checksum = "fe5242670bc74e0a0b64b9d4912b37be36944517ce0881314162aeb4381272c3"
 dependencies = [
  "indexmap",
  "num_cpus",
@@ -6411,9 +6350,9 @@ dependencies = [
 
 [[package]]
 name = "swc_ecma_visit"
-version = "0.99.0"
+version = "0.99.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c74008ebc5e0d3d9a1b3df54083ddbff1a375cfadff857da1fdc7837b48c52d"
+checksum = "28a6ce28ad8e591f8d627f1f9cb26b25e5d83052a9bc1b674d95fc28040cfa98"
 dependencies = [
  "num-bigint",
  "swc_atoms",
@@ -6436,9 +6375,9 @@ dependencies = [
 
 [[package]]
 name = "swc_fast_graph"
-version = "0.21.21"
+version = "0.21.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54db83cdbd924cc8b5082ab54ff2a1b4f53ecde8f53c87b9f9c877c9daef4569"
+checksum = "f3fdd64bc3d161d6c1ea9a8ae5779e4ba132afc67e7b8ece5420bfc9c6e1275d"
 dependencies = [
  "indexmap",
  "petgraph",
@@ -6448,9 +6387,9 @@ dependencies = [
 
 [[package]]
 name = "swc_graph_analyzer"
-version = "0.22.22"
+version = "0.22.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b66d0e18899b3a69eca103e5b4af2f0c837427aa07a60be1c4ceb4346ea245"
+checksum = "c728a8f9b82b7160a1ae246e31232177b371f827eb0d01006c0f120a3494871c"
 dependencies = [
  "auto_impl",
  "petgraph",
@@ -6461,9 +6400,9 @@ dependencies = [
 
 [[package]]
 name = "swc_macros_common"
-version = "0.3.10"
+version = "0.3.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a5be7766a95a2840ded618baeaab63809b71230ef19094b34f76c8af4d85aa2"
+checksum = "91745f3561057493d2da768437c427c0e979dff7396507ae02f16c981c4a8466"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6472,9 +6411,9 @@ dependencies = [
 
 [[package]]
 name = "swc_visit"
-version = "0.5.13"
+version = "0.5.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0263be55289abfe9c877ffef83d877b5bdfac036ffe2de793f48f5e47e41dbae"
+checksum = "043d11fe683dcb934583ead49405c0896a5af5face522e4682c16971ef7871b9"
 dependencies = [
  "either",
  "swc_visit_macros",
@@ -6482,9 +6421,9 @@ dependencies = [
 
 [[package]]
 name = "swc_visit_macros"
-version = "0.5.11"
+version = "0.5.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33fc817055fe127b4285dc85058596768bfde7537ae37da82c67815557f03e33"
+checksum = "4ae9ef18ff8daffa999f729db056d2821cd2f790f3a11e46422d19f46bb193e7"
 dependencies = [
  "Inflector",
  "proc-macro2",
@@ -7141,9 +7080,9 @@ checksum = "b1b6def86329695390197b82c1e244a54a131ceb66c996f2088a3876e2ae083f"
 
 [[package]]
 name = "unicode-id-start"
-version = "1.1.2"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8f73150333cb58412db36f2aca8f2875b013049705cc77b94ded70a1ab1f5da"
+checksum = "02aebfa694eccbbbffdd92922c7de136b9fe764396d2f10e21bce1681477cfc1"
 
 [[package]]
 name = "unicode-ident"
@@ -7255,9 +7194,9 @@ dependencies = [
 
 [[package]]
 name = "v8"
-version = "0.91.0"
+version = "0.91.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03bdee44e85d6235cff99e1ed5b1016c53822c70d1cce3d51f421b27a125a1e8"
+checksum = "69026e2e8af55a4d2f20c0c17f690e8b31472bf76ab75b1205d3a0fab60c8f84"
 dependencies = [
  "bitflags 2.5.0",
  "fslock",
@@ -7493,7 +7432,6 @@ dependencies = [
  "glow",
  "glutin_wgl_sys",
  "gpu-alloc",
- "gpu-allocator",
  "gpu-descriptor",
  "js-sys",
  "khronos-egl",
diff --git a/Cargo.toml b/Cargo.toml
index 41fb28d9bf519f..7dee54c25bf64d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -43,16 +43,16 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"
 
 [workspace.dependencies]
-deno_ast = { version = "=0.38.1", features = ["transpiling"] }
-deno_core = { version = "0.279.0" }
+deno_ast = { version = "=0.38.2", features = ["transpiling"] }
+deno_core = { version = "0.280.0" }
 
-deno_bench_util = { version = "0.143.0", path = "./bench_util" }
+deno_bench_util = { version = "0.146.0", path = "./bench_util" }
 deno_lockfile = "0.19.0"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_permissions = { version = "0.9.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.157.0", path = "./runtime" }
+deno_permissions = { version = "0.12.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.160.0", path = "./runtime" }
 deno_terminal = "0.1.1"
-napi_sym = { version = "0.79.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.82.0", path = "./cli/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 
 denokv_proto = "0.5.0"
@@ -61,28 +61,28 @@ denokv_remote = "0.5.0"
 denokv_sqlite = { default-features = false, version = "0.5.0" }
 
 # exts
-deno_broadcast_channel = { version = "0.143.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.81.0", path = "./ext/cache" }
-deno_canvas = { version = "0.18.0", path = "./ext/canvas" }
-deno_console = { version = "0.149.0", path = "./ext/console" }
-deno_cron = { version = "0.29.0", path = "./ext/cron" }
-deno_crypto = { version = "0.163.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.173.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.136.0", path = "./ext/ffi" }
-deno_fs = { version = "0.59.0", path = "./ext/fs" }
-deno_http = { version = "0.146.0", path = "./ext/http" }
-deno_io = { version = "0.59.0", path = "./ext/io" }
-deno_kv = { version = "0.57.0", path = "./ext/kv" }
-deno_napi = { version = "0.79.0", path = "./ext/napi" }
-deno_net = { version = "0.141.0", path = "./ext/net" }
-deno_node = { version = "0.86.0", path = "./ext/node" }
-deno_tls = { version = "0.136.0", path = "./ext/tls" }
-deno_url = { version = "0.149.0", path = "./ext/url" }
-deno_web = { version = "0.180.0", path = "./ext/web" }
-deno_webgpu = { version = "0.116.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.149.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.154.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.144.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.146.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.84.0", path = "./ext/cache" }
+deno_canvas = { version = "0.21.0", path = "./ext/canvas" }
+deno_console = { version = "0.152.0", path = "./ext/console" }
+deno_cron = { version = "0.32.0", path = "./ext/cron" }
version = "0.166.0", path = "./ext/crypto" } +deno_fetch = { version = "0.176.0", path = "./ext/fetch" } +deno_ffi = { version = "0.139.0", path = "./ext/ffi" } +deno_fs = { version = "0.62.0", path = "./ext/fs" } +deno_http = { version = "0.149.0", path = "./ext/http" } +deno_io = { version = "0.62.0", path = "./ext/io" } +deno_kv = { version = "0.60.0", path = "./ext/kv" } +deno_napi = { version = "0.82.0", path = "./ext/napi" } +deno_net = { version = "0.144.0", path = "./ext/net" } +deno_node = { version = "0.89.0", path = "./ext/node" } +deno_tls = { version = "0.139.0", path = "./ext/tls" } +deno_url = { version = "0.152.0", path = "./ext/url" } +deno_web = { version = "0.183.0", path = "./ext/web" } +deno_webgpu = { version = "0.119.0", path = "./ext/webgpu" } +deno_webidl = { version = "0.152.0", path = "./ext/webidl" } +deno_websocket = { version = "0.157.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.147.0", path = "./ext/webstorage" } aes = "=0.8.3" anyhow = "1.0.57" @@ -148,9 +148,10 @@ regex = "^1.7.0" reqwest = { version = "=0.11.20", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955 ring = "^0.17.0" rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] } -rustls = "0.21.11" +# pinned because it was causing issues on cargo publish +rustls = "=0.21.11" rustls-pemfile = "1.0.0" -rustls-tokio-stream = "=0.2.17" +rustls-tokio-stream = "=0.2.24" rustls-webpki = "0.101.4" rustyline = "=13.0.0" saffron = "=0.1.0" @@ -187,11 +188,9 @@ rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", " # webgpu raw-window-handle = "0.6.0" wgpu-core = "0.20" -wgpu-hal = "0.20" wgpu-types = "0.20" # macros -proc-macro2 = "1" quote = "1" syn = { version = "2", features = ["full", "extra-traits"] } @@ -199,7 +198,6 @@ syn = { version = "2", features = ["full", "extra-traits"] } nix = "=0.26.2" # windows deps -fwdansi = "=1.1.0" junction = "=0.2.0" winapi = "=0.3.9" windows-sys = { version = "0.48.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem"] } diff --git a/Releases.md b/Releases.md index 72a5f35cc60e7b..26cfab37660e19 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,81 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.43.4 / 2024.05.16 + +- fix(cli): panic with `deno coverage` (#23353) +- fix(doc): --lint - private ref diagnostic was displaying incorrect information + (#23834) +- fix(doc/publish): support expando properties (#23795) +- fix(emit): regression - keep comments in emit (#23815) +- fix(ext/node): export geteuid from node:process (#23840) +- fix(ext/node): fix grpc error_handling example (#23755) +- fix(ext/node): homedir() `getpwuid`/`SHGetKnownFolderPath` fallback (#23841) +- fix(ext/node): process.uptime works without this (#23786) +- fix(ext/web): update ongoing promise in async iterator `return()` method + (#23642) +- fix(lsp): respect types dependencies for tsc roots (#23825) +- fix(lsp): show reference code lens on methods (#23804) +- fix(node): error when throwing `FS_EISDIR` (#23829) +- fix(node): seperate worker module cache (#23634) +- fix(node): stub `AsyncResource.emitDestroy()` (#23802) +- fix(node): wrong `worker_threads.terminate()` return value (#23803) +- fix(npm): handle null fields in npm registry JSON (#23785) +- fix(npm): make tarball extraction more reliable (#23759) 
+- fix(publish): always include config file when publishing (#23797) +- fix(publish): error for missing version constraints on dry-publish instead of + just publish (#23798) +- fix(runtime): output to stderr with colors if a tty and stdout is piped + (#23813) +- fix: Add missing `"junction"` type for `SymlinkOptions.types` (#23756) +- fix: update swc_ecma_parser to 0.114.1 (#23816) +- fix: widen aarch64 linux minimum GLIBC version by improving sysroot build + (#23791) +- perf(compile): Do not checksum eszip content (#23839) +- perf(jsr): download metadata files as soon as possible and in parallel + (#23836) +- perf(lsp): Cache semantic tokens for open documents (#23799) + +### 1.43.3 / 2024.05.10 + +- fix(ext/webgpu): invalidate GPUAdapter when a device is created (#23752) +- fix(lsp): completions for using decl identifiers (#23748) +- fix(lsp): move sloppy import resolution from loader to resolver (#23751) +- fix(node): better cjs re-export handling (#23760) +- fix(runtime): Allow opening /dev/fd/XXX for unix (#23743) +- fix(task): regression where `npx ` sometimes couldn't find command + (#23730) +- fix: bump deno_core to fix unsoundness (#23768) + +### 1.43.2 / 2024.05.08 + +- feat(runtime): allow adding custom extensions to snapshot (#23569) +- fix(compile): relative permissions should be retained as relative (#23719) +- fix(ext/node): check resource exists before close (#23655) +- fix(ext/node): don't rely on Deno.env to read NODE_DEBUG (#23694) +- fix(ext/node): napi_get_element and napi_set_element work with objects + (#23713) +- fix(ext/node): support delete process.env.var (#23647) +- fix(ext/web): properly handle `Blob` case for `createImageBitmap` (#23518) +- fix(ext/webgpu): correctly validate GPUExtent3D, GPUOrigin3D, GPUOrigin2D & + GPUColor (#23413) +- fix(fmt/js): `else` was moved to wrong `if` sometimes when formatting minified + code (#23706) +- fix(jsr): panic when importing jsr package with deps via https (#23728) +- fix(lsp): Catch cancellation exceptions thrown by TSC, stop waiting for TS + result upon cancellation (#23645) +- fix(lsp): Pass diagnostic codes to TSC as numbers (#23720) +- fix(lsp): always cache all npm packages (#23679) +- fix(lsp): handle multiline semantic tokens (#23691) +- fix(publish): public api - trace parent classes & interfaces when following a + method (#23661) +- fix(runtime): allow r/w access to /etc without --allow-all (#23718) +- fix(test): proper type checking for files with doc tests (#23654) +- fix(workers): `importScripts` concurrently and use a new `reqwest::Client` per + importScripts (#23699) +- fix: DOMException doesn't throw on __callSitesEvals (#23729) +- fix: upgrade TypeScript from 5.4.3 to 5.4.5 (#23740) + ### 1.43.0 / 2024.05.01 - FUTURE(ext/net): remove diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index 5026dd6809e208..3c7a1698584430 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.143.0" +version = "0.146.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -17,7 +17,6 @@ path = "lib.rs" [dependencies] bencher.workspace = true deno_core.workspace = true -once_cell.workspace = true tokio.workspace = true [[bench]] diff --git a/bench_util/profiling.rs b/bench_util/profiling.rs index 23936498861cae..151a29e599bba9 100644 --- a/bench_util/profiling.rs +++ b/bench_util/profiling.rs @@ -39,6 +39,7 @@ macro_rules! 
@@ -39,6 +39,7 @@ macro_rules! bench_or_profile {
   };
 }
 
+#[allow(clippy::print_stdout)]
 pub fn run_profiles(opts: &TestOpts, tests: Vec<TestDescAndFn>) {
   let tests = filter_tests(opts, tests);
   // let decs = tests.iter().map(|t| t.desc.clone()).collect();
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index d1a47e468262d5..626b06f4e53b99 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno"
-version = "1.43.1"
+version = "1.43.4"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -67,17 +67,17 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
 deno_cache_dir = { workspace = true }
 deno_config = "=0.16.3"
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "=0.129.0", features = ["html", "syntect"] }
-deno_emit = "=0.40.2"
-deno_graph = { version = "=0.74.5", features = ["tokio_executor"] }
+deno_doc = { version = "=0.135.0", features = ["html", "syntect"] }
+deno_emit = "=0.40.3"
+deno_graph = { version = "=0.75.2", features = ["tokio_executor"] }
 deno_lint = { version = "=0.58.4", features = ["docs"] }
 deno_lockfile.workspace = true
-deno_npm = "=0.18.0"
+deno_npm = "=0.20.1"
 deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_semver = "=0.5.4"
-deno_task_shell = "=0.16.0"
+deno_task_shell = "=0.16.1"
 deno_terminal.workspace = true
-eszip = "=0.68.2"
+eszip = "=0.69.0"
 napi_sym.workspace = true
 
 async-trait.workspace = true
@@ -121,11 +121,9 @@ monch.workspace = true
 notify.workspace = true
 once_cell.workspace = true
 open = "5.0.1"
-os_pipe.workspace = true
 p256.workspace = true
 percent-encoding.workspace = true
 phf.workspace = true
-pin-project.workspace = true
 quick-junit = "^0.3.5"
 rand = { workspace = true, features = ["small_rng"] }
 regex.workspace = true
@@ -148,14 +146,12 @@ tokio-util.workspace = true
 tower-lsp.workspace = true
 twox-hash = "=1.6.3"
 typed-arena = "=2.0.1"
-unicode-width = "0.1"
 uuid = { workspace = true, features = ["serde"] }
 walkdir = "=2.3.2"
 zeromq.workspace = true
 zstd.workspace = true
 
 [target.'cfg(windows)'.dependencies]
-fwdansi.workspace = true
 junction.workspace = true
 winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
 
diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index 0ef02be3927d59..dca9cfa4920e54 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -3889,6 +3889,7 @@ fn eval_parse(flags: &mut Flags, matches: &mut ArgMatches) {
 
   // TODO(@satyarohith): remove this flag in 2.0.
   let as_typescript = matches.get_flag("ts");
+  #[allow(clippy::print_stderr)]
   if as_typescript {
     eprintln!(
       "⚠️ {}",
@@ -4218,6 +4219,8 @@ fn test_parse(flags: &mut Flags, matches: &mut ArgMatches) {
   let no_run = matches.get_flag("no-run");
   let trace_leaks =
     matches.get_flag("trace-ops") || matches.get_flag("trace-leaks");
+
+  #[allow(clippy::print_stderr)]
   if trace_leaks && matches.get_flag("trace-ops") {
     // We can't change this to use the log crate because its not configured
     // yet at this point since the flags haven't been parsed. This flag is
@@ -4267,10 +4270,17 @@ fn test_parse(flags: &mut Flags, matches: &mut ArgMatches) {
     // yet at this point since the flags haven't been parsed. This flag is
     // deprecated though so it's not worth changing the code to use the log
     // crate here and this is only done for testing anyway.
-    eprintln!(
-      "⚠️ {}",
-      crate::colors::yellow("The `--jobs` flag is deprecated and will be removed in Deno 2.0.\nUse the `--parallel` flag with possibly the `DENO_JOBS` environment variable instead.\nLearn more at: https://docs.deno.com/runtime/manual/basics/env_variables"),
-    );
+    #[allow(clippy::print_stderr)]
+    {
+      eprintln!(
+        "⚠️ {}",
+        crate::colors::yellow(concat!(
+          "The `--jobs` flag is deprecated and will be removed in Deno 2.0.\n",
+          "Use the `--parallel` flag with possibly the `DENO_JOBS` environment variable instead.\n",
+          "Learn more at: https://docs.deno.com/runtime/manual/basics/env_variables"
+        )),
+      );
+    }
     if let Some(value) = matches.remove_one::<NonZeroUsize>("jobs") {
       Some(value)
     } else {
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index 3b5d79ef3cb797..0a0f7d7043dda5 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -12,6 +12,7 @@ use self::package_json::PackageJsonDeps;
 use ::import_map::ImportMap;
 use deno_ast::SourceMapOption;
 use deno_core::resolve_url_or_path;
+use deno_graph::GraphKind;
 use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
 use deno_npm::NpmSystemInfo;
 use deno_runtime::deno_tls::RootCertStoreProvider;
@@ -194,7 +195,7 @@ pub fn ts_config_to_transpile_and_emit_options(
     },
     deno_ast::EmitOptions {
       inline_sources: options.inline_sources,
-      keep_comments: false,
+      keep_comments: true,
       source_map,
       source_map_file: None,
     },
@@ -747,9 +748,12 @@ impl CliOptions {
         format!("for: {}", insecure_allowlist.join(", "))
       };
       let msg =
-        format!("DANGER: TLS certificate validation is disabled {domains}");
-      // use eprintln instead of log::warn so this always gets shown
-      eprintln!("{}", colors::yellow(msg));
+        format!("DANGER: TLS certificate validation is disabled {}", domains);
+      #[allow(clippy::print_stderr)]
+      {
+        // use eprintln instead of log::warn so this always gets shown
+        eprintln!("{}", colors::yellow(msg));
+      }
     }
 
     let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
@@ -870,6 +874,14 @@ impl CliOptions {
     self.maybe_config_file.as_ref().map(|f| f.specifier.clone())
   }
 
+  pub fn graph_kind(&self) -> GraphKind {
+    match self.sub_command() {
+      DenoSubcommand::Cache(_) => GraphKind::All,
+      DenoSubcommand::Check(_) => GraphKind::TypesOnly,
+      _ => self.type_check_mode().as_graph_kind(),
+    }
+  }
+
   pub fn ts_type_lib_window(&self) -> TsTypeLib {
     TsTypeLib::DenoWindow
   }
diff --git a/cli/bench/main.rs b/cli/bench/main.rs
index 9bae6fab60456b..8f0627558bc042 100644
--- a/cli/bench/main.rs
+++ b/cli/bench/main.rs
@@ -1,5 +1,8 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+#![allow(clippy::print_stdout)]
+#![allow(clippy::print_stderr)]
+
 use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::serde_json::Value;
diff --git a/cli/cache/deno_dir.rs b/cli/cache/deno_dir.rs
index b56dfbc8930dfb..9f2911f718c452 100644
--- a/cli/cache/deno_dir.rs
+++ b/cli/cache/deno_dir.rs
@@ -33,11 +33,10 @@ impl DenoDirProvider {
 
 /// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them
 /// in single directory that can be controlled with `$DENO_DIR` env variable.
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub struct DenoDir {
   /// Example: /Users/rld/.deno/
-  /// Note: This is not exposed in order to encourage using re-usable methods.
-  root: PathBuf,
+  pub root: PathBuf,
   /// Used by TsCompiler to cache compiler output.
pub gen_cache: DiskCache, } diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs index cd44dd17a64ffd..3aeebbc6d94d3f 100644 --- a/cli/cache/disk_cache.rs +++ b/cli/cache/disk_cache.rs @@ -14,7 +14,7 @@ use std::path::PathBuf; use std::path::Prefix; use std::str; -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct DiskCache { pub location: PathBuf, } diff --git a/cli/factory.rs b/cli/factory.rs index 074eaa1e4cebae..8a9d20970a1c09 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -21,9 +21,9 @@ use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; use crate::file_fetcher::FileFetcher; +use crate::graph_container::MainModuleGraphContainer; use crate::graph_util::FileWatcherReporter; use crate::graph_util::ModuleGraphBuilder; -use crate::graph_util::ModuleGraphContainer; use crate::graph_util::ModuleGraphCreator; use crate::http_util::HttpClient; use crate::module_loader::CliModuleLoaderFactory; @@ -60,7 +60,6 @@ use deno_core::futures::FutureExt; use deno_core::parking_lot::Mutex; use deno_core::FeatureChecker; -use deno_graph::GraphKind; use deno_lockfile::WorkspaceMemberConfig; use deno_runtime::deno_fs; use deno_runtime::deno_node::analyze::NodeCodeTranslator; @@ -157,7 +156,7 @@ struct CliFactoryServices { emit_cache: Deferred, emitter: Deferred>, fs: Deferred>, - graph_container: Deferred>, + main_graph_container: Deferred>, lockfile: Deferred>>>, maybe_import_map: Deferred>>, maybe_inspector_server: Deferred>>, @@ -673,17 +672,19 @@ impl CliFactory { .await } - pub fn graph_container(&self) -> &Arc { - self.services.graph_container.get_or_init(|| { - let graph_kind = match self.options.sub_command() { - // todo(dsherret): ideally the graph container would not be used - // for deno cache because it doesn't dynamically load modules - DenoSubcommand::Cache(_) => GraphKind::All, - DenoSubcommand::Check(_) => GraphKind::TypesOnly, - _ => self.options.type_check_mode().as_graph_kind(), - }; - Arc::new(ModuleGraphContainer::new(graph_kind)) - }) + pub async fn main_module_graph_container( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .main_graph_container + .get_or_try_init_async(async { + Ok(Arc::new(MainModuleGraphContainer::new( + self.cli_options().clone(), + self.module_load_preparer().await?.clone(), + ))) + }) + .await } pub fn maybe_inspector_server( @@ -706,7 +707,6 @@ impl CliFactory { .get_or_try_init_async(async { Ok(Arc::new(ModuleLoadPreparer::new( self.options.clone(), - self.graph_container().clone(), self.maybe_lockfile().clone(), self.module_graph_builder().await?.clone(), self.text_only_progress_bar().clone(), @@ -791,11 +791,15 @@ impl CliFactory { self.blob_store().clone(), Box::new(CliModuleLoaderFactory::new( &self.options, + if self.options.code_cache_enabled() { + Some(self.code_cache()?.clone()) + } else { + None + }, self.emitter()?.clone(), - self.graph_container().clone(), + self.main_module_graph_container().await?.clone(), + self.module_info_cache()?.clone(), self.module_load_preparer().await?.clone(), - self.parsed_source_cache().clone(), - self.resolver().await?.clone(), cli_node_resolver.clone(), NpmModuleLoader::new( self.cjs_resolutions().clone(), @@ -803,12 +807,8 @@ impl CliFactory { fs.clone(), cli_node_resolver.clone(), ), - if self.options.code_cache_enabled() { - Some(self.code_cache()?.clone()) - } else { - None - }, - self.module_info_cache()?.clone(), + self.parsed_source_cache().clone(), + self.resolver().await?.clone(), )), self.root_cert_store_provider().clone(), 
self.fs().clone(), diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 01e1e31583c69a..eda579b2b4c568 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -739,6 +739,8 @@ async fn fetch_no_follow<'a>( Ok(FetchOnceResult::Code(body, result_headers)) } +#[allow(clippy::print_stdout)] +#[allow(clippy::print_stderr)] #[cfg(test)] mod tests { use crate::cache::GlobalHttpCache; diff --git a/cli/graph_container.rs b/cli/graph_container.rs new file mode 100644 index 00000000000000..ec18ffaab45d4f --- /dev/null +++ b/cli/graph_container.rs @@ -0,0 +1,157 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::sync::Arc; + +use deno_ast::ModuleSpecifier; +use deno_core::error::AnyError; +use deno_core::parking_lot::RwLock; +use deno_core::resolve_url_or_path; +use deno_graph::ModuleGraph; +use deno_runtime::colors; +use deno_runtime::permissions::PermissionsContainer; + +use crate::args::CliOptions; +use crate::module_loader::ModuleLoadPreparer; + +pub trait ModuleGraphContainer: Clone + 'static { + /// Acquires a permit to modify the module graph without other code + /// having the chance to modify it. In the meantime, other code may + /// still read from the existing module graph. + async fn acquire_update_permit(&self) -> impl ModuleGraphUpdatePermit; + /// Gets a copy of the graph. + fn graph(&self) -> Arc; +} + +/// A permit for updating the module graph. When complete and +/// everything looks fine, calling `.commit()` will store the +/// new graph in the ModuleGraphContainer. +pub trait ModuleGraphUpdatePermit { + /// Gets the module graph for mutation. + fn graph_mut(&mut self) -> &mut ModuleGraph; + /// Saves the mutated module graph in the container. + fn commit(self); +} + +/// Holds the `ModuleGraph` for the main worker. +#[derive(Clone)] +pub struct MainModuleGraphContainer { + // Allow only one request to update the graph data at a time, + // but allow other requests to read from it at any time even + // while another request is updating the data. + update_queue: Arc, + inner: Arc>>, + cli_options: Arc, + module_load_preparer: Arc, +} + +impl MainModuleGraphContainer { + pub fn new( + cli_options: Arc, + module_load_preparer: Arc, + ) -> Self { + Self { + update_queue: Default::default(), + inner: Arc::new(RwLock::new(Arc::new(ModuleGraph::new( + cli_options.graph_kind(), + )))), + cli_options, + module_load_preparer, + } + } + + pub async fn check_specifiers( + &self, + specifiers: &[ModuleSpecifier], + ) -> Result<(), AnyError> { + let mut graph_permit = self.acquire_update_permit().await; + let graph = graph_permit.graph_mut(); + self + .module_load_preparer + .prepare_module_load( + graph, + specifiers, + false, + self.cli_options.ts_type_lib_window(), + PermissionsContainer::allow_all(), + ) + .await?; + graph_permit.commit(); + Ok(()) + } + + /// Helper around prepare_module_load that loads and type checks + /// the provided files. 
+ + /// Helper around prepare_module_load that loads and type checks + /// the provided files. + pub async fn load_and_type_check_files( + &self, + files: &[String], + ) -> Result<(), AnyError> { + let specifiers = self.collect_specifiers(files)?; + + if specifiers.is_empty() { + log::warn!("{} No matching files found.", colors::yellow("Warning")); + } + + self.check_specifiers(&specifiers).await + } + + pub fn collect_specifiers( + &self, + files: &[String], + ) -> Result<Vec<ModuleSpecifier>, AnyError> { + let excludes = self.cli_options.resolve_config_excludes()?; + Ok( + files + .iter() + .filter_map(|file| { + let file_url = + resolve_url_or_path(file, self.cli_options.initial_cwd()).ok()?; + if file_url.scheme() != "file" { + return Some(file_url); + } + // ignore local files that match any of files listed in `exclude` option + let file_path = file_url.to_file_path().ok()?; + if excludes.matches_path(&file_path) { + None + } else { + Some(file_url) + } + }) + .collect::<Vec<_>>(), + ) + } +} + +impl ModuleGraphContainer for MainModuleGraphContainer { + async fn acquire_update_permit(&self) -> impl ModuleGraphUpdatePermit { + let permit = self.update_queue.acquire().await; + MainModuleGraphUpdatePermit { + permit, + inner: self.inner.clone(), + graph: (**self.inner.read()).clone(), + } + } + + fn graph(&self) -> Arc<ModuleGraph> { + self.inner.read().clone() + } +} + +/// A permit for updating the module graph. When complete and +/// everything looks fine, calling `.commit()` will store the +/// new graph in the ModuleGraphContainer. +pub struct MainModuleGraphUpdatePermit<'a> { + permit: crate::util::sync::TaskQueuePermit<'a>, + inner: Arc<RwLock<Arc<ModuleGraph>>>, + graph: ModuleGraph, +} + +impl<'a> ModuleGraphUpdatePermit for MainModuleGraphUpdatePermit<'a> { + fn graph_mut(&mut self) -> &mut ModuleGraph { + &mut self.graph + } + + fn commit(self) { + *self.inner.write() = Arc::new(self.graph); + drop(self.permit); // explicit drop for clarity + } +} diff --git a/cli/graph_util.rs b/cli/graph_util.rs index ac7f8a36526503..ed56cf9f7a120b 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -18,8 +18,6 @@ use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; -use crate::util::sync::TaskQueue; -use crate::util::sync::TaskQueuePermit; use deno_runtime::fs_util::specifier_to_file_path; use deno_config::WorkspaceMemberConfig; @@ -27,7 +25,6 @@ use deno_core::anyhow::bail; use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; -use deno_core::parking_lot::RwLock; use deno_core::ModuleSpecifier; use deno_graph::source::Loader; use deno_graph::source::ResolutionMode; @@ -65,7 +62,7 @@ pub struct GraphValidOptions { /// for the CLI. pub fn graph_valid( graph: &ModuleGraph, - fs: &dyn FileSystem, + fs: Arc<dyn FileSystem>, roots: &[ModuleSpecifier], options: GraphValidOptions, ) -> Result<(), AnyError> { @@ -99,7 +96,7 @@ pub fn graph_valid( ) } ModuleGraphError::ModuleError(e) => { - enhanced_module_error_message(fs, e) + enhanced_module_error_message(fs.clone(), e) } }; @@ -661,7 +658,7 @@ impl ModuleGraphBuilder { ) -> Result<(), AnyError> { graph_valid( graph, - self.fs.as_ref(), + self.fs.clone(), roots, GraphValidOptions { is_vendoring: false, @@ -705,14 +702,13 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { } pub fn enhanced_module_error_message( - fs: &dyn FileSystem, + fs: Arc<dyn FileSystem>, error: &ModuleError, ) -> String { let additional_message = match error { ModuleError::LoadingErr(specifier, _, _) // ex.
"Is a directory" error | ModuleError::Missing(specifier, _) => { - SloppyImportsResolver::resolve_with_fs( - fs, + SloppyImportsResolver::new(fs).resolve( specifier, ResolutionMode::Execution, ) @@ -763,40 +759,6 @@ fn get_resolution_error_bare_specifier( } } -/// Holds the `ModuleGraph` and what parts of it are type checked. -pub struct ModuleGraphContainer { - // Allow only one request to update the graph data at a time, - // but allow other requests to read from it at any time even - // while another request is updating the data. - update_queue: Arc, - inner: Arc>>, -} - -impl ModuleGraphContainer { - pub fn new(graph_kind: GraphKind) -> Self { - Self { - update_queue: Default::default(), - inner: Arc::new(RwLock::new(Arc::new(ModuleGraph::new(graph_kind)))), - } - } - - /// Acquires a permit to modify the module graph without other code - /// having the chance to modify it. In the meantime, other code may - /// still read from the existing module graph. - pub async fn acquire_update_permit(&self) -> ModuleGraphUpdatePermit { - let permit = self.update_queue.acquire().await; - ModuleGraphUpdatePermit { - permit, - inner: self.inner.clone(), - graph: (**self.inner.read()).clone(), - } - } - - pub fn graph(&self) -> Arc { - self.inner.read().clone() - } -} - /// Gets if any of the specified root's "file:" dependents are in the /// provided changed set. pub fn has_graph_root_local_dependent_changed( @@ -829,31 +791,6 @@ pub fn has_graph_root_local_dependent_changed( false } -/// A permit for updating the module graph. When complete and -/// everything looks fine, calling `.commit()` will store the -/// new graph in the ModuleGraphContainer. -pub struct ModuleGraphUpdatePermit<'a> { - permit: TaskQueuePermit<'a>, - inner: Arc>>, - graph: ModuleGraph, -} - -impl<'a> ModuleGraphUpdatePermit<'a> { - /// Gets the module graph for mutation. - pub fn graph_mut(&mut self) -> &mut ModuleGraph { - &mut self.graph - } - - /// Saves the mutated module graph in the container - /// and returns an Arc to the new module graph. - pub fn commit(self) -> Arc { - let graph = Arc::new(self.graph); - *self.inner.write() = graph.clone(); - drop(self.permit); // explicit drop for clarity - graph - } -} - #[derive(Clone, Debug)] pub struct FileWatcherReporter { watcher_communicator: Arc, diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index a1048dace72443..d899cd79644d70 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -1,11 +1,16 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use crate::cache::DenoDir; +use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; +use crate::cache::LocalLspHttpCache; +use crate::lsp::config::Config; +use crate::lsp::logging::lsp_log; +use crate::lsp::logging::lsp_warn; use deno_runtime::fs_util::specifier_to_file_path; -use deno_core::parking_lot::Mutex; +use deno_core::url::Url; use deno_core::ModuleSpecifier; -use std::collections::HashMap; use std::fs; use std::path::Path; use std::sync::Arc; @@ -22,7 +27,7 @@ pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy = deno_cache_dir::GlobalToLocalCopy::Disallow; pub fn calculate_fs_version( - cache: &Arc, + cache: &LspCache, specifier: &ModuleSpecifier, ) -> Option { match specifier.scheme() { @@ -49,13 +54,14 @@ pub fn calculate_fs_version_at_path(path: &Path) -> Option { } fn calculate_fs_version_in_cache( - cache: &Arc, + cache: &LspCache, specifier: &ModuleSpecifier, ) -> Option { - let Ok(cache_key) = cache.cache_item_key(specifier) else { + let http_cache = cache.root_vendor_or_global(); + let Ok(cache_key) = http_cache.cache_item_key(specifier) else { return Some("1".to_string()); }; - match cache.read_modified_time(&cache_key) { + match http_cache.read_modified_time(&cache_key) { Ok(Some(modified)) => { match modified.duration_since(SystemTime::UNIX_EPOCH) { Ok(n) => Some(n.as_millis().to_string()), @@ -67,83 +73,71 @@ fn calculate_fs_version_in_cache( } } -/// Populate the metadata map based on the supplied headers -fn parse_metadata( - headers: &HashMap, -) -> HashMap { - let mut metadata = HashMap::new(); - if let Some(warning) = headers.get("x-deno-warning").cloned() { - metadata.insert(MetadataKey::Warning, warning); - } - metadata -} - -#[derive(Debug, PartialEq, Eq, Hash)] -pub enum MetadataKey { - /// Represent the `x-deno-warning` header associated with the document - Warning, -} - #[derive(Debug, Clone)] -struct Metadata { - values: Arc>, - version: Option, +pub struct LspCache { + deno_dir: DenoDir, + global: Arc, + root_vendor: Option>, } -#[derive(Debug, Clone)] -pub struct CacheMetadata { - cache: Arc, - metadata: Arc>>, +impl Default for LspCache { + fn default() -> Self { + Self::new(None) + } } -impl CacheMetadata { - pub fn new(cache: Arc) -> Self { +impl LspCache { + pub fn new(global_cache_url: Option) -> Self { + let global_cache_path = global_cache_url.and_then(|s| { + specifier_to_file_path(&s) + .inspect(|p| { + lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy()); + }) + .inspect_err(|err| { + lsp_warn!("Failed to resolve custom cache path: {err}"); + }) + .ok() + }); + let deno_dir = DenoDir::new(global_cache_path) + .expect("should be infallible with absolute custom root"); + let global = Arc::new(GlobalHttpCache::new( + deno_dir.deps_folder_path(), + crate::cache::RealDenoCacheEnv, + )); Self { - cache, - metadata: Default::default(), + deno_dir, + global, + root_vendor: None, } } - /// Return the meta data associated with the specifier. Unlike the `get()` - /// method, redirects of the supplied specifier will not be followed. 
- pub fn get( - &self, - specifier: &ModuleSpecifier, - ) -> Option>> { - if matches!( - specifier.scheme(), - "file" | "npm" | "node" | "data" | "blob" - ) { - return None; - } - let version = calculate_fs_version_in_cache(&self.cache, specifier); - let metadata = self.metadata.lock().get(specifier).cloned(); - if metadata.as_ref().and_then(|m| m.version.clone()) != version { - self.refresh(specifier).map(|m| m.values) - } else { - metadata.map(|m| m.values) - } + pub fn update_config(&mut self, config: &Config) { + self.root_vendor = config.tree.root_data().and_then(|data| { + let vendor_dir = data.vendor_dir.as_ref()?; + Some(Arc::new(LocalLspHttpCache::new( + vendor_dir.clone(), + self.global.clone(), + ))) + }); } - fn refresh(&self, specifier: &ModuleSpecifier) -> Option { - if matches!( - specifier.scheme(), - "file" | "npm" | "node" | "data" | "blob" - ) { - return None; - } - let cache_key = self.cache.cache_item_key(specifier).ok()?; - let headers = self.cache.read_headers(&cache_key).ok()??; - let values = Arc::new(parse_metadata(&headers)); - let version = calculate_fs_version_in_cache(&self.cache, specifier); - let mut metadata_map = self.metadata.lock(); - let metadata = Metadata { values, version }; - metadata_map.insert(specifier.clone(), metadata.clone()); - Some(metadata) + pub fn deno_dir(&self) -> &DenoDir { + &self.deno_dir + } + + pub fn global(&self) -> &Arc { + &self.global + } + + pub fn root_vendor(&self) -> Option<&Arc> { + self.root_vendor.as_ref() } - pub fn set_cache(&mut self, cache: Arc) { - self.cache = cache; - self.metadata.lock().clear(); + pub fn root_vendor_or_global(&self) -> Arc { + self + .root_vendor + .as_ref() + .map(|v| v.clone() as _) + .unwrap_or(self.global.clone() as _) } } diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index 8300546b431640..d7e9e70527cabe 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -505,6 +505,7 @@ pub fn collect_tsc( )); } tsc::ScriptElementKind::LocalFunctionElement + | tsc::ScriptElementKind::MemberFunctionElement | tsc::ScriptElementKind::MemberGetAccessorElement | tsc::ScriptElementKind::MemberSetAccessorElement | tsc::ScriptElementKind::ConstructorImplementationElement diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index f9d2316ae52cb6..3f63d2857f7546 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -799,41 +799,39 @@ fn get_workspace_completions( #[cfg(test)] mod tests { use super::*; - use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; + use crate::lsp::cache::LspCache; use crate::lsp::documents::Documents; use crate::lsp::documents::LanguageId; use crate::lsp::search::tests::TestPackageSearchApi; use deno_core::resolve_url; use deno_graph::Range; use std::collections::HashMap; - use std::path::Path; - use std::sync::Arc; use test_util::TempDir; - fn mock_documents( - fixtures: &[(&str, &str, i32, LanguageId)], - source_fixtures: &[(&str, &str)], - location: &Path, + fn setup( + open_sources: &[(&str, &str, i32, LanguageId)], + fs_sources: &[(&str, &str)], ) -> Documents { - let cache = Arc::new(GlobalHttpCache::new( - location.to_path_buf(), - crate::cache::RealDenoCacheEnv, - )); - let mut documents = Documents::new(cache); - for (specifier, source, version, language_id) in fixtures { + let temp_dir = TempDir::new(); + let cache = LspCache::new(Some(temp_dir.uri())); + let mut documents = Documents::default(); + documents.update_config( + &Default::default(), + &Default::default(), + &cache, + &Default::default(), + ); + for (specifier, 
source, version, language_id) in open_sources { let specifier = resolve_url(specifier).expect("failed to create specifier"); documents.open(specifier, *version, *language_id, (*source).into()); } - let http_cache = GlobalHttpCache::new( - location.to_path_buf(), - crate::cache::RealDenoCacheEnv, - ); - for (specifier, source) in source_fixtures { + for (specifier, source) in fs_sources { let specifier = resolve_url(specifier).expect("failed to create specifier"); - http_cache + cache + .global() .set(&specifier, HashMap::default(), source.as_bytes()) .expect("could not cache file"); assert!( @@ -844,15 +842,6 @@ mod tests { documents } - fn setup( - temp_dir: &TempDir, - documents: &[(&str, &str, i32, LanguageId)], - sources: &[(&str, &str)], - ) -> Documents { - let location = temp_dir.path().join("deps"); - mock_documents(documents, sources, location.as_path()) - } - #[test] fn test_get_relative_specifiers() { let base = resolve_url("file:///a/b/c.ts").unwrap(); @@ -936,9 +925,7 @@ mod tests { character: 21, }, }; - let temp_dir = TempDir::new(); let documents = setup( - &temp_dir, &[ ( "file:///a/b/c.ts", diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index e1fed5a541bfa4..597f45688cabf0 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -1454,10 +1454,6 @@ impl ConfigTree { .unwrap_or_default() } - pub fn root_vendor_dir(&self) -> Option<&PathBuf> { - self.root_data().and_then(|d| d.vendor_dir.as_ref()) - } - pub fn root_lockfile(&self) -> Option<&Arc>> { self.root_data().and_then(|d| d.lockfile.as_ref()) } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index ebd6338cd0df3c..8472ad185c08bd 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1,7 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use super::analysis; -use super::cache; use super::client::Client; use super::config::Config; use super::documents; @@ -801,7 +800,7 @@ fn generate_lint_diagnostics( break; } // ignore any npm package files - if snapshot.resolver.in_npm_package(specifier) { + if snapshot.resolver.in_node_modules(specifier) { continue; } let version = document.maybe_lsp_version(); @@ -1232,7 +1231,7 @@ impl DenoDiagnostic { Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NoLocal(specifier) => { - let sloppy_resolution = SloppyImportsResolver::resolve_with_fs(&deno_fs::RealFs, specifier, ResolutionMode::Execution); + let sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution); let data = sloppy_resolution.as_lsp_quick_fix_message().map(|message| { json!({ "specifier": specifier, @@ -1328,17 +1327,18 @@ fn diagnose_resolution( match resolution { Resolution::Ok(resolved) => { let specifier = &resolved.specifier; - // If the module is a remote module and has a `X-Deno-Warning` header, we - // want a warning diagnostic with that message. 
- if let Some(metadata) = snapshot.cache_metadata.get(specifier) { - if let Some(message) = - metadata.get(&cache::MetadataKey::Warning).cloned() - { - diagnostics.push(DenoDiagnostic::DenoWarn(message)); + let managed_npm_resolver = snapshot.resolver.maybe_managed_npm_resolver(); + for (_, headers) in snapshot.resolver.redirect_chain_headers(specifier) { + if let Some(message) = headers.get("x-deno-warning") { + diagnostics.push(DenoDiagnostic::DenoWarn(message.clone())); } } - let managed_npm_resolver = snapshot.resolver.maybe_managed_npm_resolver(); if let Some(doc) = snapshot.documents.get(specifier) { + if let Some(headers) = doc.maybe_headers() { + if let Some(message) = headers.get("x-deno-warning") { + diagnostics.push(DenoDiagnostic::DenoWarn(message.clone())); + } + } if let Some(diagnostic) = check_redirect_diagnostic(specifier, &doc) { diagnostics.push(diagnostic); } @@ -1434,7 +1434,7 @@ fn diagnose_dependency( dependency_key: &str, dependency: &deno_graph::Dependency, ) { - if snapshot.resolver.in_npm_package(referrer) { + if snapshot.resolver.in_node_modules(referrer) { return; // ignore, surface typescript errors instead } @@ -1563,9 +1563,9 @@ fn generate_deno_diagnostics( #[cfg(test)] mod tests { + use super::*; - use crate::cache::GlobalHttpCache; - use crate::cache::RealDenoCacheEnv; + use crate::lsp::cache::LspCache; use crate::lsp::config::Config; use crate::lsp::config::Settings; use crate::lsp::config::WorkspaceSettings; @@ -1575,57 +1575,9 @@ mod tests { use crate::lsp::resolver::LspResolver; use deno_config::ConfigFile; use pretty_assertions::assert_eq; - use std::path::Path; - use std::path::PathBuf; use std::sync::Arc; use test_util::TempDir; - async fn mock_state_snapshot( - fixtures: &[(&str, &str, i32, LanguageId)], - location: &Path, - maybe_import_map: Option<(&str, &str)>, - ) -> StateSnapshot { - let cache = Arc::new(GlobalHttpCache::new( - location.to_path_buf(), - RealDenoCacheEnv, - )); - let mut documents = Documents::new(cache.clone()); - for (specifier, source, version, language_id) in fixtures { - let specifier = - resolve_url(specifier).expect("failed to create specifier"); - documents.open( - specifier.clone(), - *version, - *language_id, - (*source).into(), - ); - } - let mut config = Config::new_with_roots([resolve_url("file:///").unwrap()]); - if let Some((base_url, json_string)) = maybe_import_map { - let base_url = resolve_url(base_url).unwrap(); - let config_file = ConfigFile::new( - json_string, - base_url, - &deno_config::ParseOptions::default(), - ) - .unwrap(); - config.tree.inject_config_file(config_file).await; - } - let resolver = LspResolver::default() - .with_new_config(&config, cache, None, None) - .await; - StateSnapshot { - project_version: 0, - documents, - assets: Default::default(), - cache_metadata: cache::CacheMetadata::new(Arc::new( - GlobalHttpCache::new(location.to_path_buf(), RealDenoCacheEnv), - )), - config: Arc::new(config), - resolver, - } - } - fn mock_config() -> Config { let root_uri = resolve_url("file:///").unwrap(); Config { @@ -1649,21 +1601,49 @@ mod tests { } async fn setup( - temp_dir: &TempDir, sources: &[(&str, &str, i32, LanguageId)], maybe_import_map: Option<(&str, &str)>, - ) -> (StateSnapshot, PathBuf) { - let location = temp_dir.path().join("deps").to_path_buf(); - let state_snapshot = - mock_state_snapshot(sources, &location, maybe_import_map).await; - (state_snapshot, location) + ) -> StateSnapshot { + let temp_dir = TempDir::new(); + let cache = LspCache::new(Some(temp_dir.uri())); + let mut 
config = Config::new_with_roots([resolve_url("file:///").unwrap()]); + if let Some((base_url, json_string)) = maybe_import_map { + let base_url = resolve_url(base_url).unwrap(); + let config_file = ConfigFile::new( + json_string, + base_url, + &deno_config::ParseOptions::default(), + ) + .unwrap(); + config.tree.inject_config_file(config_file).await; + } + let resolver = LspResolver::default() + .with_new_config(&config, &cache, None) + .await; + let mut documents = Documents::default(); + documents.update_config(&config, &resolver, &cache, &Default::default()); + for (specifier, source, version, language_id) in sources { + let specifier = + resolve_url(specifier).expect("failed to create specifier"); + documents.open( + specifier.clone(), + *version, + *language_id, + (*source).into(), + ); + } + StateSnapshot { + project_version: 0, + documents, + assets: Default::default(), + config: Arc::new(config), + resolver, + } } #[tokio::test] async fn test_enabled_then_disabled_specifier() { - let temp_dir = TempDir::new(); - let (snapshot, cache_location) = setup( - &temp_dir, + let snapshot = setup( &[( "file:///a.ts", r#"import * as b from "./b.ts"; @@ -1677,9 +1657,7 @@ let c: number = "a"; ) .await; let snapshot = Arc::new(snapshot); - let cache = - Arc::new(GlobalHttpCache::new(cache_location, RealDenoCacheEnv)); - let ts_server = TsServer::new(Default::default(), cache); + let ts_server = TsServer::new(Default::default()); ts_server.start(None).unwrap(); // test enabled @@ -1757,9 +1735,7 @@ let c: number = "a"; #[tokio::test] async fn test_deno_diagnostics_with_import_map() { - let temp_dir = TempDir::new(); - let (snapshot, _) = setup( - &temp_dir, + let snapshot = setup( &[ ( "file:///std/assert/mod.ts", @@ -1895,9 +1871,7 @@ let c: number = "a"; #[tokio::test] async fn duplicate_diagnostics_for_duplicate_imports() { - let temp_dir = TempDir::new(); - let (snapshot, _) = setup( - &temp_dir, + let snapshot = setup( &[( "file:///a.ts", r#" @@ -1973,9 +1947,7 @@ let c: number = "a"; #[tokio::test] async fn unable_to_load_a_local_module() { - let temp_dir = TempDir::new(); - let (snapshot, _) = setup( - &temp_dir, + let snapshot = setup( &[( "file:///a.ts", r#" diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index d008dbb74bdf4d..8a6e8b65b568ee 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -1,6 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
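// Aside: the diagnostics change above replaces the old CacheMetadata lookup
// with scanning `x-deno-warning` headers along the cached redirect chain
// (plus the document's own headers). A minimal sketch of that shape, with
// stand-in types rather than the real snapshot API:
use std::collections::HashMap;
use std::sync::Arc;

type Headers = Arc<HashMap<String, String>>;

fn collect_deno_warnings(redirect_chain: &[(String, Headers)]) -> Vec<String> {
  redirect_chain
    .iter()
    .filter_map(|(_specifier, headers)| headers.get("x-deno-warning").cloned())
    .collect()
}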
use super::cache::calculate_fs_version; +use super::cache::LspCache; use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY; use super::config::Config; use super::resolver::LspResolver; @@ -10,12 +11,7 @@ use super::text::LineIndex; use super::tsc; use super::tsc::AssetDocument; -use crate::cache::HttpCache; use crate::graph_util::CliJsrUrlProvider; -use crate::lsp::logging::lsp_warn; -use crate::resolver::SloppyImportsFsEntry; -use crate::resolver::SloppyImportsResolution; -use crate::resolver::SloppyImportsResolver; use deno_runtime::fs_util::specifier_to_file_path; use dashmap::DashMap; @@ -33,7 +29,6 @@ use deno_core::ModuleSpecifier; use deno_graph::source::ResolutionMode; use deno_graph::Resolution; use deno_runtime::deno_node; -use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; @@ -148,6 +143,16 @@ impl AssetOrDocument { } } + pub fn maybe_semantic_tokens(&self) -> Option { + match self { + AssetOrDocument::Asset(_) => None, + AssetOrDocument::Document(d) => d + .open_data + .as_ref() + .and_then(|d| d.maybe_semantic_tokens.lock().clone()), + } + } + pub fn text(&self) -> Arc { match self { AssetOrDocument::Asset(a) => a.text(), @@ -254,6 +259,7 @@ fn get_maybe_test_module_fut( pub struct DocumentOpenData { lsp_version: i32, maybe_parsed_source: Option, + maybe_semantic_tokens: Arc>>, } #[derive(Debug)] @@ -289,7 +295,7 @@ impl Document { maybe_headers: Option>, resolver: Arc, config: Arc, - cache: &Arc, + cache: &Arc, ) -> Arc { let text_info = SourceTextInfo::new(content); let media_type = resolve_media_type( @@ -335,6 +341,7 @@ impl Document { open_data: maybe_lsp_version.map(|v| DocumentOpenData { lsp_version: v, maybe_parsed_source, + maybe_semantic_tokens: Default::default(), }), resolver, specifier, @@ -426,6 +433,8 @@ impl Document { open_data: self.open_data.as_ref().map(|d| DocumentOpenData { lsp_version: d.lsp_version, maybe_parsed_source, + // reset semantic tokens + maybe_semantic_tokens: Default::default(), }), resolver, specifier: self.specifier.clone(), @@ -504,12 +513,13 @@ impl Document { open_data: self.open_data.is_some().then_some(DocumentOpenData { lsp_version: version, maybe_parsed_source, + maybe_semantic_tokens: Default::default(), }), resolver: self.resolver.clone(), })) } - pub fn closed(&self, cache: &Arc) -> Arc { + pub fn closed(&self, cache: &Arc) -> Arc { Arc::new(Self { config: self.config.clone(), specifier: self.specifier.clone(), @@ -530,7 +540,7 @@ impl Document { }) } - pub fn saved(&self, cache: &Arc) -> Arc { + pub fn saved(&self, cache: &Arc) -> Arc { Arc::new(Self { config: self.config.clone(), specifier: self.specifier.clone(), @@ -567,6 +577,10 @@ impl Document { self.line_index.clone() } + pub fn maybe_headers(&self) -> Option<&HashMap> { + self.maybe_headers.as_ref() + } + fn maybe_fs_version(&self) -> Option<&str> { self.maybe_fs_version.as_deref() } @@ -653,6 +667,15 @@ impl Document { ) { *self.maybe_navigation_tree.lock() = Some(navigation_tree); } + + pub fn cache_semantic_tokens_full( + &self, + semantic_tokens: lsp::SemanticTokens, + ) { + if let Some(open_data) = self.open_data.as_ref() { + *open_data.maybe_semantic_tokens.lock() = Some(semantic_tokens); + } + } } fn resolve_media_type( @@ -661,17 +684,9 @@ fn resolve_media_type( maybe_language_id: Option, resolver: &LspResolver, ) -> MediaType { - if resolver.in_npm_package(specifier) { - match resolver.url_to_node_resolution(specifier.clone()) { - 
Ok(Some(resolution)) => { - let (_, media_type) = - NodeResolution::into_specifier_and_media_type(Some(resolution)); - return media_type; - } - Err(err) => { - lsp_warn!("Node resolution failed for '{}': {}", specifier, err); - } - _ => {} + if resolver.in_node_modules(specifier) { + if let Some(media_type) = resolver.node_media_type(specifier) { + return media_type; } } @@ -714,7 +729,7 @@ impl FileSystemDocuments { specifier: &ModuleSpecifier, resolver: &Arc, config: &Arc, - cache: &Arc, + cache: &Arc, ) -> Option> { let new_fs_version = calculate_fs_version(cache, specifier); let old_doc = self.docs.get(specifier).map(|v| v.value().clone()); @@ -744,7 +759,7 @@ impl FileSystemDocuments { specifier: &ModuleSpecifier, resolver: &Arc, config: &Arc, - cache: &Arc, + cache: &Arc, ) -> Option> { let doc = if specifier.scheme() == "file" { let path = specifier_to_file_path(specifier).ok()?; @@ -777,11 +792,12 @@ impl FileSystemDocuments { cache, ) } else { - let cache_key = cache.cache_item_key(specifier).ok()?; - let bytes = cache + let http_cache = cache.root_vendor_or_global(); + let cache_key = http_cache.cache_item_key(specifier).ok()?; + let bytes = http_cache .read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY) .ok()??; - let specifier_headers = cache.read_headers(&cache_key).ok()??; + let specifier_headers = http_cache.read_headers(&cache_key).ok()??; let (_, maybe_charset) = deno_graph::source::resolve_media_type_and_charset_from_headers( specifier, @@ -834,10 +850,10 @@ pub enum DocumentsFilter { OpenDiagnosable, } -#[derive(Debug, Clone)] +#[derive(Debug, Default, Clone)] pub struct Documents { /// The DENO_DIR that the documents looks for non-file based modules. - cache: Arc, + cache: Arc, config: Arc, /// A flag that indicates that stated data is potentially invalid and needs to /// be recalculated before being considered valid. @@ -854,25 +870,9 @@ pub struct Documents { /// Gets if any document had a node: specifier such that a @types/node package /// should be injected. has_injected_types_node_package: bool, - /// If --unstable-sloppy-imports is enabled. - unstable_sloppy_imports: bool, } impl Documents { - pub fn new(cache: Arc) -> Self { - Self { - cache: cache.clone(), - config: Default::default(), - dirty: true, - open_docs: HashMap::default(), - file_system_docs: Default::default(), - resolver: Default::default(), - npm_specifier_reqs: Default::default(), - has_injected_types_node_package: false, - unstable_sloppy_imports: false, - } - } - /// "Open" a document from the perspective of the editor, meaning that /// requests for information from the document will come from the in-memory /// representation received from the language server client, versus reading @@ -992,63 +992,26 @@ impl Documents { } } - pub fn resolve_specifier( + pub fn resolve_document_specifier( &self, specifier: &ModuleSpecifier, ) -> Option { - if self.unstable_sloppy_imports && specifier.scheme() == "file" { - Some( - self - .resolve_unstable_sloppy_import(specifier) - .into_specifier() - .into_owned(), - ) + let specifier = if let Ok(jsr_req_ref) = + JsrPackageReqReference::from_specifier(specifier) + { + Cow::Owned(self.resolver.jsr_to_registry_url(&jsr_req_ref)?) } else { - let specifier = if let Ok(jsr_req_ref) = - JsrPackageReqReference::from_specifier(specifier) - { - Cow::Owned(self.resolver.jsr_to_registry_url(&jsr_req_ref)?) 
- } else { - Cow::Borrowed(specifier) - }; - if !DOCUMENT_SCHEMES.contains(&specifier.scheme()) { - return None; - } - self.resolver.resolve_redirects(&specifier) + Cow::Borrowed(specifier) + }; + if !DOCUMENT_SCHEMES.contains(&specifier.scheme()) { + return None; } - } - - fn resolve_unstable_sloppy_import<'a>( - &self, - specifier: &'a ModuleSpecifier, - ) -> SloppyImportsResolution<'a> { - SloppyImportsResolver::resolve_with_stat_sync( - specifier, - ResolutionMode::Types, - |path| { - if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { - if self.open_docs.contains_key(&specifier) - || self.cache.contains(&specifier) - { - return Some(SloppyImportsFsEntry::File); - } - } - path.metadata().ok().and_then(|m| { - if m.is_file() { - Some(SloppyImportsFsEntry::File) - } else if m.is_dir() { - Some(SloppyImportsFsEntry::Dir) - } else { - None - } - }) - }, - ) + self.resolver.resolve_redirects(&specifier) } /// Return `true` if the specifier can be resolved to a document. pub fn exists(&self, specifier: &ModuleSpecifier) -> bool { - let specifier = self.resolve_specifier(specifier); + let specifier = self.resolve_document_specifier(specifier); if let Some(specifier) = specifier { if self.open_docs.contains_key(&specifier) { return true; @@ -1061,7 +1024,7 @@ impl Documents { .map(|p| p.is_file()) .unwrap_or(false); } - if self.cache.contains(&specifier) { + if self.cache.root_vendor_or_global().contains(&specifier) { return true; } } @@ -1085,7 +1048,7 @@ impl Documents { &self, original_specifier: &ModuleSpecifier, ) -> Option> { - let specifier = self.resolve_specifier(original_specifier)?; + let specifier = self.resolve_document_specifier(original_specifier)?; if let Some(document) = self.open_docs.get(&specifier) { Some(document.clone()) } else { @@ -1098,13 +1061,6 @@ impl Documents { } } - pub fn is_open(&self, specifier: &ModuleSpecifier) -> bool { - let Some(specifier) = self.resolve_specifier(specifier) else { - return false; - }; - self.open_docs.contains_key(&specifier) - } - /// Return a collection of documents that are contained in the document store /// based on the provided filter. 
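// Aside: resolve_document_specifier above keeps the common case
// allocation-free with the borrow-or-own Cow pattern: only a jsr: req
// forces an owned, remapped URL. A toy illustration (the jsr mapping below
// is a made-up stand-in for the resolver's registry lookup):
use std::borrow::Cow;

fn normalize(specifier: &str) -> Cow<'_, str> {
  if let Some(rest) = specifier.strip_prefix("jsr:") {
    Cow::Owned(format!("https://jsr.io/{rest}")) // hypothetical mapping
  } else {
    Cow::Borrowed(specifier) // no allocation for already-usable specifiers
  }
}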
pub fn documents(&self, filter: DocumentsFilter) -> Vec> { @@ -1158,17 +1114,6 @@ impl Documents { let dependencies = document.as_ref().map(|d| d.dependencies()); let mut results = Vec::new(); for specifier in specifiers { - if self.resolver.in_npm_package(referrer) { - // we're in an npm package, so use node resolution - results.push(Some(NodeResolution::into_specifier_and_media_type( - self - .resolver - .node_resolve(specifier, referrer, NodeResolutionMode::Types) - .ok() - .flatten(), - ))); - continue; - } if specifier.starts_with("asset:") { if let Ok(specifier) = ModuleSpecifier::parse(specifier) { let media_type = MediaType::from_specifier(&specifier); @@ -1186,20 +1131,6 @@ impl Documents { } else { results.push(None); } - } else if let Some(specifier) = self - .resolver - .resolve_graph_import(specifier) - .and_then(|r| r.maybe_specifier()) - { - results.push(self.resolve_dependency(specifier, referrer)); - } else if let Ok(npm_req_ref) = - NpmPackageReqReference::from_str(specifier) - { - results.push(node_resolve_npm_req_ref( - &npm_req_ref, - referrer, - &self.resolver, - )); } else if let Ok(specifier) = self.resolver.as_graph_resolver().resolve( specifier, &deno_graph::Range { @@ -1221,17 +1152,12 @@ impl Documents { &mut self, config: &Config, resolver: &Arc, - cache: Arc, + cache: &LspCache, workspace_files: &BTreeSet, ) { self.config = Arc::new(config.clone()); - self.cache = cache; - let config_data = config.tree.root_data(); - let config_file = config_data.and_then(|d| d.config_file.as_deref()); + self.cache = Arc::new(cache.clone()); self.resolver = resolver.clone(); - self.unstable_sloppy_imports = config_file - .map(|c| c.has_unstable("sloppy-imports")) - .unwrap_or(false); { let fs_docs = &self.file_system_docs; // Clean up non-existent documents. @@ -1354,7 +1280,7 @@ impl Documents { self.dirty = false; } - fn resolve_dependency( + pub fn resolve_dependency( &self, specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, @@ -1369,7 +1295,11 @@ impl Documents { } if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) { - return node_resolve_npm_req_ref(&npm_ref, referrer, &self.resolver); + return self.resolver.npm_to_file_url( + &npm_ref, + referrer, + NodeResolutionMode::Types, + ); } let Some(doc) = self.get(specifier) else { return Some((specifier.clone(), MediaType::from_specifier(specifier))); @@ -1383,28 +1313,10 @@ impl Documents { } } -fn node_resolve_npm_req_ref( - npm_req_ref: &NpmPackageReqReference, - referrer: &ModuleSpecifier, - resolver: &LspResolver, -) -> Option<(ModuleSpecifier, MediaType)> { - Some(NodeResolution::into_specifier_and_media_type( - resolver - .resolve_npm_req_reference( - npm_req_ref, - referrer, - NodeResolutionMode::Types, - ) - .ok() - .flatten(), - )) -} - /// Loader that will look at the open documents. 
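// Aside: OpenDocumentsGraphLoader below implements "serve from the editor's
// open buffers first, else delegate to the regular loader". A self-contained
// sketch of that fallback shape with simplified types (the real code returns
// deno_graph LoadFutures):
use std::collections::HashMap;

struct FallbackLoader<'a> {
  open_docs: &'a HashMap<String, String>,
}

impl<'a> FallbackLoader<'a> {
  fn load(
    &self,
    specifier: &str,
    inner: impl Fn(&str) -> Option<String>,
  ) -> Option<String> {
    if let Some(text) = self.open_docs.get(specifier) {
      return Some(text.clone()); // unsaved editor state wins
    }
    inner(specifier) // fall through to disk / http cache
  }
}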
pub struct OpenDocumentsGraphLoader<'a> { pub inner_loader: &'a mut dyn deno_graph::source::Loader, pub open_docs: &'a HashMap>, - pub unstable_sloppy_imports: bool, } impl<'a> OpenDocumentsGraphLoader<'a> { @@ -1426,32 +1338,6 @@ impl<'a> OpenDocumentsGraphLoader<'a> { } None } - - fn resolve_unstable_sloppy_import<'b>( - &self, - specifier: &'b ModuleSpecifier, - ) -> SloppyImportsResolution<'b> { - SloppyImportsResolver::resolve_with_stat_sync( - specifier, - ResolutionMode::Types, - |path| { - if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { - if self.open_docs.contains_key(&specifier) { - return Some(SloppyImportsFsEntry::File); - } - } - path.metadata().ok().and_then(|m| { - if m.is_file() { - Some(SloppyImportsFsEntry::File) - } else if m.is_dir() { - Some(SloppyImportsFsEntry::Dir) - } else { - None - } - }) - }, - ) - } } impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> { @@ -1460,17 +1346,9 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> { specifier: &ModuleSpecifier, options: deno_graph::source::LoadOptions, ) -> deno_graph::source::LoadFuture { - let specifier = if self.unstable_sloppy_imports { - self - .resolve_unstable_sloppy_import(specifier) - .into_specifier() - } else { - Cow::Borrowed(specifier) - }; - - match self.load_from_docs(&specifier) { + match self.load_from_docs(specifier) { Some(fut) => fut, - None => self.inner_loader.load(&specifier, options), + None => self.inner_loader.load(specifier, options), } } @@ -1543,31 +1421,29 @@ fn analyze_module( #[cfg(test)] mod tests { - use crate::cache::GlobalHttpCache; - use crate::cache::RealDenoCacheEnv; - use super::*; + use crate::lsp::cache::LspCache; use deno_config::ConfigFile; use deno_core::serde_json; use deno_core::serde_json::json; use pretty_assertions::assert_eq; - use test_util::PathRef; use test_util::TempDir; - fn setup(temp_dir: &TempDir) -> (Documents, PathRef, Arc) { - let location = temp_dir.path().join("deps"); - let cache = Arc::new(GlobalHttpCache::new( - location.to_path_buf(), - RealDenoCacheEnv, - )); - let documents = Documents::new(cache.clone()); - (documents, location, cache) + async fn setup() -> (Documents, LspCache, TempDir) { + let temp_dir = TempDir::new(); + let cache = LspCache::new(Some(temp_dir.uri())); + let config = Config::default(); + let resolver = LspResolver::default() + .with_new_config(&config, &cache, None) + .await; + let mut documents = Documents::default(); + documents.update_config(&config, &resolver, &cache, &Default::default()); + (documents, cache, temp_dir) } - #[test] - fn test_documents_open_close() { - let temp_dir = TempDir::new(); - let (mut documents, _, _) = setup(&temp_dir); + #[tokio::test] + async fn test_documents_open_close() { + let (mut documents, _, _) = setup().await; let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap(); let content = r#"import * as b from "./b.ts"; console.log(b); @@ -1590,10 +1466,9 @@ console.log(b); assert!(document.maybe_lsp_version().is_none()); } - #[test] - fn test_documents_change() { - let temp_dir = TempDir::new(); - let (mut documents, _, _) = setup(&temp_dir); + #[tokio::test] + async fn test_documents_change() { + let (mut documents, _, _) = setup().await; let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap(); let content = r#"import * as b from "./b.ts"; console.log(b); @@ -1632,15 +1507,13 @@ console.log(b, "hello deno"); ); } - #[test] - fn test_documents_ensure_no_duplicates() { + #[tokio::test] + async fn 
test_documents_ensure_no_duplicates() { // it should never happen that a user of this API causes this to happen, // but we'll guard against it anyway - let temp_dir = TempDir::new(); - let (mut documents, documents_path, _) = setup(&temp_dir); - let file_path = documents_path.join("file.ts"); - let file_specifier = ModuleSpecifier::from_file_path(&file_path).unwrap(); - documents_path.create_dir_all(); + let (mut documents, _, temp_dir) = setup().await; + let file_path = temp_dir.path().join("file.ts"); + let file_specifier = temp_dir.uri().join("file.ts").unwrap(); file_path.write(""); // open the document @@ -1664,27 +1537,21 @@ console.log(b, "hello deno"); async fn test_documents_refresh_dependencies_config_change() { // it should never happen that a user of this API causes this to happen, // but we'll guard against it anyway - let temp_dir = TempDir::new(); - let (mut documents, documents_path, cache) = setup(&temp_dir); - fs::create_dir_all(&documents_path).unwrap(); + let (mut documents, cache, temp_dir) = setup().await; - let file1_path = documents_path.join("file1.ts"); - let file1_specifier = ModuleSpecifier::from_file_path(&file1_path).unwrap(); + let file1_path = temp_dir.path().join("file1.ts"); + let file1_specifier = temp_dir.uri().join("file1.ts").unwrap(); fs::write(&file1_path, "").unwrap(); - let file2_path = documents_path.join("file2.ts"); - let file2_specifier = ModuleSpecifier::from_file_path(&file2_path).unwrap(); + let file2_path = temp_dir.path().join("file2.ts"); + let file2_specifier = temp_dir.uri().join("file2.ts").unwrap(); fs::write(&file2_path, "").unwrap(); - let file3_path = documents_path.join("file3.ts"); - let file3_specifier = ModuleSpecifier::from_file_path(&file3_path).unwrap(); + let file3_path = temp_dir.path().join("file3.ts"); + let file3_specifier = temp_dir.uri().join("file3.ts").unwrap(); fs::write(&file3_path, "").unwrap(); - let mut config = - Config::new_with_roots(vec![ModuleSpecifier::from_directory_path( - &documents_path, - ) - .unwrap()]); + let mut config = Config::new_with_roots([temp_dir.uri()]); let workspace_settings = serde_json::from_str(r#"{ "enable": true }"#).unwrap(); config.set_workspace_settings(workspace_settings, vec![]); @@ -1714,14 +1581,9 @@ console.log(b, "hello deno"); .await; let resolver = LspResolver::default() - .with_new_config(&config, cache.clone(), None, None) + .with_new_config(&config, &cache, None) .await; - documents.update_config( - &config, - &resolver, - cache.clone(), - &workspace_files, - ); + documents.update_config(&config, &resolver, &cache, &workspace_files); // open the document let document = documents.open( @@ -1763,9 +1625,9 @@ console.log(b, "hello deno"); .await; let resolver = LspResolver::default() - .with_new_config(&config, cache.clone(), None, None) + .with_new_config(&config, &cache, None) .await; - documents.update_config(&config, &resolver, cache, &workspace_files); + documents.update_config(&config, &resolver, &cache, &workspace_files); // check the document's dependencies let document = documents.get(&file1_specifier).unwrap(); diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 67eaca74ed9843..009b466548344f 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -42,7 +42,7 @@ use super::analysis::ts_changes_to_edit; use super::analysis::CodeActionCollection; use super::analysis::CodeActionData; use super::analysis::TsResponseImportMapper; -use super::cache; +use super::cache::LspCache; use super::capabilities; use 
super::client::Client; use super::code_lens; @@ -88,10 +88,6 @@ use crate::args::CaData; use crate::args::CacheSetting; use crate::args::CliOptions; use crate::args::Flags; -use crate::cache::DenoDir; -use crate::cache::GlobalHttpCache; -use crate::cache::HttpCache; -use crate::cache::LocalLspHttpCache; use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; use crate::graph_util; @@ -121,11 +117,10 @@ impl RootCertStoreProvider for LspRootCertStoreProvider { pub struct LanguageServer(Arc>, CancellationToken); /// Snapshot of the state used by TSC. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] pub struct StateSnapshot { pub project_version: usize, pub assets: AssetsSnapshot, - pub cache_metadata: cache::CacheMetadata, pub config: Arc, pub documents: Documents, pub resolver: Arc, @@ -174,12 +169,7 @@ pub struct Inner { /// Cached versions of "fixed" assets that can either be inlined in Rust or /// are part of the TypeScript snapshot and have to be fetched out. assets: Assets, - /// This may be a copy of `self.global_cache`, or a vendor dir if one is - /// configured. - cache: Arc, - /// A representation of metadata associated with specifiers in the DENO_DIR - /// or vendor dir which is used by the language server. - cache_metadata: cache::CacheMetadata, + cache: LspCache, /// The LSP client that this LSP server is connected to. pub client: Client, /// Configuration information. @@ -189,16 +179,11 @@ pub struct Inner { /// The collection of documents that the server is currently handling, either /// on disk or "open" within the client. pub documents: Documents, - global_cache: Arc, + http_client: Arc, initial_cwd: PathBuf, jsr_search_api: CliJsrSearchApi, - http_client: Arc, - task_queue: LanguageServerTaskQueue, /// Handles module registries, which allow discovery of modules module_registry: ModuleRegistry, - /// An optional path to the DENO_DIR which has been specified in the client - /// options. - maybe_global_cache_path: Option, /// A lazily create "server" for handling test run requests. maybe_testing_server: Option, npm_search_api: CliNpmSearchApi, @@ -206,6 +191,7 @@ pub struct Inner { /// A collection of measurements which instrument that performance of the LSP. performance: Arc, resolver: Arc, + task_queue: LanguageServerTaskQueue, /// A memoized version of fixable diagnostic codes retrieved from TypeScript. ts_fixable_diagnostics: Vec, /// An abstraction that handles interactions with TypeScript. 
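// Aside: further down in this file, semantic_tokens_full/_range gain a
// per-document cache: compute once, store on the open document (reset on
// every change), and serve both full and range requests from it. A sketch
// of that memoization with stand-in types:
use std::sync::Mutex;

#[derive(Clone)]
struct Tokens(Vec<u32>);

#[derive(Default)]
struct OpenDoc {
  // Cleared on every edit so stale tokens are never served.
  cached_tokens: Mutex<Option<Tokens>>,
}

impl OpenDoc {
  fn tokens_full(&self, compute: impl FnOnce() -> Tokens) -> Tokens {
    let mut slot = self.cached_tokens.lock().unwrap();
    if let Some(tokens) = slot.as_ref() {
      return tokens.clone(); // cache hit: skip the TypeScript round trip
    }
    let tokens = compute();
    *slot = Some(tokens.clone());
    tokens
  }
}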
@@ -251,14 +237,13 @@ impl LanguageServer { let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader { inner_loader: &mut inner_loader, open_docs: &open_docs, - unstable_sloppy_imports: cli_options.unstable_sloppy_imports(), }; let graph = module_graph_creator .create_graph_with_loader(GraphKind::All, roots.clone(), &mut loader) .await?; graph_util::graph_valid( &graph, - factory.fs().as_ref(), + factory.fs().clone(), &roots, graph_util::GraphValidOptions { is_vendoring: false, @@ -451,24 +436,20 @@ impl LanguageServer { impl Inner { fn new(client: Client) -> Self { - let dir = DenoDir::new(None).expect("could not access DENO_DIR"); + let cache = LspCache::default(); let http_client = Arc::new(HttpClient::new(None, None)); - let module_registry = - ModuleRegistry::new(dir.registries_folder_path(), http_client.clone()); + let module_registry = ModuleRegistry::new( + cache.deno_dir().registries_folder_path(), + http_client.clone(), + ); let jsr_search_api = CliJsrSearchApi::new(module_registry.file_fetcher.clone()); let npm_search_api = CliNpmSearchApi::new(module_registry.file_fetcher.clone()); - let global_cache = Arc::new(GlobalHttpCache::new( - dir.deps_folder_path(), - crate::cache::RealDenoCacheEnv, - )); - let cache = global_cache.clone(); - let documents = Documents::new(cache.clone()); - let cache_metadata = cache::CacheMetadata::new(cache.clone()); + let documents = Documents::default(); let performance = Arc::new(Performance::default()); let config = Config::default(); - let ts_server = Arc::new(TsServer::new(performance.clone(), cache.clone())); + let ts_server = Arc::new(TsServer::new(performance.clone())); let diagnostics_state = Arc::new(DiagnosticsState::default()); let diagnostics_server = DiagnosticsServer::new( client.clone(), @@ -484,17 +465,14 @@ impl Inner { Self { assets, cache, - cache_metadata, client, config, diagnostics_state, diagnostics_server, documents, - global_cache, http_client, initial_cwd: initial_cwd.clone(), jsr_search_api, - maybe_global_cache_path: None, project_version: 0, task_queue: Default::default(), maybe_testing_server: None, @@ -599,7 +577,6 @@ impl Inner { Arc::new(StateSnapshot { project_version: self.project_version, assets: self.assets.snapshot(), - cache_metadata: self.cache_metadata.clone(), config: Arc::new(self.config.clone()), documents: self.documents.clone(), resolver: self.resolver.snapshot(), @@ -608,36 +585,21 @@ impl Inner { pub async fn update_global_cache(&mut self) { let mark = self.performance.mark("lsp.update_global_cache"); - let maybe_cache = &self.config.workspace_settings().cache; - self.maybe_global_cache_path = if let Some(cache_str) = maybe_cache { - let cache_url = if let Ok(url) = Url::from_file_path(cache_str) { - Ok(url) + let maybe_cache = self.config.workspace_settings().cache.as_ref(); + let global_cache_url = maybe_cache.and_then(|cache_str| { + if let Ok(url) = Url::from_file_path(cache_str) { + Some(url) } else if let Some(root_uri) = self.config.root_uri() { - root_uri.join(cache_str).map_err(|e| e.into()) + root_uri.join(cache_str).inspect_err(|err| lsp_warn!("Failed to resolve custom cache path: {err}")).ok() } else { - Err(anyhow!( + lsp_warn!( "The configured cache path \"{cache_str}\" is not resolvable outside of a workspace.", - )) - }; - cache_url - .and_then(|s| specifier_to_file_path(&s)) - .inspect(|p| { - lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy()); - }) - .inspect_err(|err| { - lsp_warn!("Failed to resolve custom cache path: {err}"); - }) - .ok() - } else { - None 
- }; - let deno_dir = match DenoDir::new(self.maybe_global_cache_path.clone()) { - Ok(d) => d, - Err(err) => { - lsp_warn!("Couldn't access DENO_DIR: {err}"); - return; + ); + None } - }; + }); + self.cache = LspCache::new(global_cache_url); + let deno_dir = self.cache.deno_dir(); let workspace_settings = self.config.workspace_settings(); let maybe_root_path = self .config @@ -675,28 +637,13 @@ impl Inner { CliJsrSearchApi::new(self.module_registry.file_fetcher.clone()); self.npm_search_api = CliNpmSearchApi::new(self.module_registry.file_fetcher.clone()); - self.global_cache = Arc::new(GlobalHttpCache::new( - deno_dir.deps_folder_path(), - crate::cache::RealDenoCacheEnv, - )); self.performance.measure(mark); } pub fn update_cache(&mut self) { let mark = self.performance.mark("lsp.update_cache"); - let maybe_local_cache = - self.config.tree.root_vendor_dir().map(|local_path| { - Arc::new(LocalLspHttpCache::new( - local_path.clone(), - self.global_cache.clone(), - )) - }); - self.url_map.set_cache(maybe_local_cache.clone()); - self.cache = maybe_local_cache - .clone() - .map(|c| c as Arc) - .unwrap_or(self.global_cache.clone()); - self.cache_metadata.set_cache(self.cache.clone()); + self.cache.update_config(&self.config); + self.url_map.set_cache(self.cache.root_vendor().cloned()); self.performance.measure(mark); } @@ -951,7 +898,7 @@ impl Inner { async fn refresh_config_tree(&mut self) { let mut file_fetcher = FileFetcher::new( - self.global_cache.clone(), + self.cache.global().clone(), CacheSetting::RespectHeaders, true, self.http_client.clone(), @@ -996,12 +943,7 @@ impl Inner { async fn refresh_resolver(&mut self) { self.resolver = self .resolver - .with_new_config( - &self.config, - self.cache.clone(), - self.maybe_global_cache_path.as_deref(), - Some(&self.http_client), - ) + .with_new_config(&self.config, &self.cache, Some(&self.http_client)) .await; } @@ -1009,7 +951,7 @@ impl Inner { self.documents.update_config( &self.config, &self.resolver, - self.cache.clone(), + &self.cache, &self.workspace_files, ); @@ -2587,6 +2529,16 @@ impl Inner { .performance .mark_with_args("lsp.semantic_tokens_full", ¶ms); let asset_or_doc = self.get_asset_or_document(&specifier)?; + if let Some(tokens) = asset_or_doc.maybe_semantic_tokens() { + let response = if !tokens.data.is_empty() { + Some(SemanticTokensResult::Tokens(tokens.clone())) + } else { + None + }; + self.performance.measure(mark); + return Ok(response); + } + let line_index = asset_or_doc.line_index(); let semantic_classification = self @@ -2600,6 +2552,11 @@ impl Inner { let semantic_tokens = semantic_classification.to_semantic_tokens(line_index)?; + + if let Some(doc) = asset_or_doc.document() { + doc.cache_semantic_tokens_full(semantic_tokens.clone()); + } + let response = if !semantic_tokens.data.is_empty() { Some(SemanticTokensResult::Tokens(semantic_tokens)) } else { @@ -2624,6 +2581,18 @@ impl Inner { .performance .mark_with_args("lsp.semantic_tokens_range", ¶ms); let asset_or_doc = self.get_asset_or_document(&specifier)?; + if let Some(tokens) = asset_or_doc.maybe_semantic_tokens() { + let tokens = + super::semantic_tokens::tokens_within_range(&tokens, params.range); + let response = if !tokens.data.is_empty() { + Some(SemanticTokensRangeResult::Tokens(tokens)) + } else { + None + }; + self.performance.measure(mark); + return Ok(response); + } + let line_index = asset_or_doc.line_index(); let semantic_classification = self @@ -3305,7 +3274,7 @@ impl Inner { let workspace_settings = self.config.workspace_settings(); let 
cli_options = CliOptions::new( Flags { - cache_path: self.maybe_global_cache_path.clone(), + cache_path: Some(self.cache.deno_dir().root.clone()), ca_stores: workspace_settings.certificate_stores.clone(), ca_data: workspace_settings.tls_certificate.clone().map(CaData::File), unsafely_ignore_certificate_errors: workspace_settings diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index ebe254b9dee1de..0f515060a045d3 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,14 +2,12 @@ use crate::args::package_json; use crate::args::CacheSetting; -use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::graph_util::CliJsrUrlProvider; use crate::http_util::HttpClient; use crate::jsr::JsrCacheResolver; use crate::lsp::config::Config; use crate::lsp::config::ConfigData; -use crate::lsp::logging::lsp_warn; use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverByonmCreateOptions; @@ -21,16 +19,18 @@ use crate::npm::ManagedCliNpmResolver; use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliNodeResolver; +use crate::resolver::SloppyImportsResolver; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +use dashmap::DashMap; +use deno_ast::MediaType; use deno_cache_dir::HttpCache; use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; +use deno_core::url::Url; use deno_graph::source::NpmResolver; use deno_graph::source::Resolver; use deno_graph::GraphImport; use deno_graph::ModuleSpecifier; -use deno_graph::Resolution; use deno_npm::NpmSystemInfo; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolution; @@ -45,11 +45,14 @@ use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; use package_json::PackageJsonDepsProvider; +use std::borrow::Cow; use std::collections::HashMap; -use std::path::Path; +use std::collections::HashSet; use std::rc::Rc; use std::sync::Arc; +use super::cache::LspCache; + #[derive(Debug, Clone)] pub struct LspResolver { graph_resolver: Arc, @@ -81,11 +84,10 @@ impl LspResolver { pub async fn with_new_config( &self, config: &Config, - cache: Arc, - global_cache_path: Option<&Path>, + cache: &LspCache, http_client: Option<&Arc>, ) -> Arc { - let npm_config_hash = LspNpmConfigHash::new(config, global_cache_path); + let npm_config_hash = LspNpmConfigHash::new(config, cache); let config_data = config.tree.root_data(); let mut npm_resolver = None; let mut node_resolver = None; @@ -93,8 +95,7 @@ impl LspResolver { if let (Some(http_client), Some(config_data)) = (http_client, config_data) { npm_resolver = - create_npm_resolver(config_data, global_cache_path, http_client) - .await; + create_npm_resolver(config_data, cache, http_client).await; node_resolver = create_node_resolver(npm_resolver.as_ref()); } } else { @@ -107,10 +108,12 @@ impl LspResolver { node_resolver.as_ref(), ); let jsr_resolver = Some(Arc::new(JsrCacheResolver::new( - cache.clone(), + cache.root_vendor_or_global(), config_data.and_then(|d| d.lockfile.clone()), ))); - let redirect_resolver = Some(Arc::new(RedirectResolver::new(cache))); + let redirect_resolver = Some(Arc::new(RedirectResolver::new( + cache.root_vendor_or_global(), + ))); let graph_imports = config_data .and_then(|d| d.config_file.as_ref()) .and_then(|cf| cf.to_maybe_imports().ok()) @@ -189,6 +192,20 @@ impl LspResolver { self.graph_resolver.as_ref() } + pub fn maybe_managed_npm_resolver(&self) -> 
Option<&ManagedCliNpmResolver> { + self.npm_resolver.as_ref().and_then(|r| r.as_managed()) + } + + pub fn graph_import_specifiers( + &self, + ) -> impl Iterator { + self + .graph_imports + .values() + .flat_map(|i| i.dependencies.values()) + .flat_map(|value| value.get_type().or_else(|| value.get_code())) + } + pub fn jsr_to_registry_url( &self, req_ref: &JsrPackageReqReference, @@ -208,81 +225,41 @@ impl LspResolver { self.jsr_resolver.as_ref()?.lookup_req_for_nv(nv) } - pub fn maybe_managed_npm_resolver(&self) -> Option<&ManagedCliNpmResolver> { - self.npm_resolver.as_ref().and_then(|r| r.as_managed()) - } - - pub fn graph_import_specifiers( + pub fn npm_to_file_url( &self, - ) -> impl Iterator { - self - .graph_imports - .values() - .flat_map(|i| i.dependencies.values()) - .flat_map(|value| value.get_type().or_else(|| value.get_code())) - } - - pub fn resolve_graph_import(&self, specifier: &str) -> Option<&Resolution> { - for graph_imports in self.graph_imports.values() { - let maybe_dep = graph_imports.dependencies.get(specifier); - if maybe_dep.is_some() { - return maybe_dep.map(|d| &d.maybe_type); - } - } - None + req_ref: &NpmPackageReqReference, + referrer: &ModuleSpecifier, + mode: NodeResolutionMode, + ) -> Option<(ModuleSpecifier, MediaType)> { + let node_resolver = self.node_resolver.as_ref()?; + Some(NodeResolution::into_specifier_and_media_type( + node_resolver + .resolve_req_reference( + req_ref, + &PermissionsContainer::allow_all(), + referrer, + mode, + ) + .ok(), + )) } - pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { if let Some(npm_resolver) = &self.npm_resolver { return npm_resolver.in_npm_package(specifier); } false } - pub fn node_resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result, AnyError> { - let Some(node_resolver) = self.node_resolver.as_ref() else { - return Ok(None); - }; - node_resolver.resolve( - specifier, - referrer, - mode, - &PermissionsContainer::allow_all(), - ) - } - - pub fn resolve_npm_req_reference( - &self, - req_ref: &NpmPackageReqReference, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result, AnyError> { - let Some(node_resolver) = self.node_resolver.as_ref() else { - return Ok(None); - }; - node_resolver - .resolve_req_reference( - req_ref, - &PermissionsContainer::allow_all(), - referrer, - mode, - ) - .map(Some) - } - - pub fn url_to_node_resolution( + pub fn node_media_type( &self, - specifier: ModuleSpecifier, - ) -> Result, AnyError> { - let Some(node_resolver) = self.node_resolver.as_ref() else { - return Ok(None); - }; - node_resolver.url_to_node_resolution(specifier).map(Some) + specifier: &ModuleSpecifier, + ) -> Option { + let node_resolver = self.node_resolver.as_ref()?; + let resolution = node_resolver + .url_to_node_resolution(specifier.clone()) + .ok()?; + Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) } pub fn get_closest_package_json( @@ -305,18 +282,27 @@ impl LspResolver { }; redirect_resolver.resolve(specifier) } + + pub fn redirect_chain_headers( + &self, + specifier: &ModuleSpecifier, + ) -> Vec<(ModuleSpecifier, Arc>)> { + let Some(redirect_resolver) = self.redirect_resolver.as_ref() else { + return vec![]; + }; + redirect_resolver + .chain(specifier) + .into_iter() + .map(|(s, e)| (s, e.headers.clone())) + .collect() + } } async fn create_npm_resolver( config_data: &ConfigData, - global_cache_path: Option<&Path>, + cache: 
&LspCache, http_client: &Arc<HttpClient>, ) -> Option<Arc<dyn CliNpmResolver>> { - let deno_dir = DenoDir::new(global_cache_path.map(|p| p.to_owned())) - .inspect_err(|err| { - lsp_warn!("Error getting deno dir: {:#}", err); - }) - .ok()?; let node_modules_dir = config_data .node_modules_dir .clone() @@ -341,7 +327,7 @@ async fn create_npm_resolver( // updating it. Only the cache request should update the lockfile. maybe_lockfile: None, fs: Arc::new(deno_fs::RealFs), - npm_global_cache_dir: deno_dir.npm_folder_path(), + npm_global_cache_dir: cache.deno_dir().npm_folder_path(), // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent // the cache from being filled with lots of packages while @@ -382,6 +368,8 @@ fn create_graph_resolver( node_resolver: Option<&Arc<CliNodeResolver>>, ) -> Arc<CliGraphResolver> { let config_file = config_data.and_then(|d| d.config_file.as_deref()); + let unstable_sloppy_imports = + config_file.is_some_and(|cf| cf.has_unstable("sloppy-imports")); Arc::new(CliGraphResolver::new(CliGraphResolverOptions { node_resolver: node_resolver.cloned(), npm_resolver: npm_resolver.cloned(), @@ -399,10 +387,9 @@ bare_node_builtins_enabled: config_file .map(|cf| cf.has_unstable("bare-node-builtins")) .unwrap_or(false), - // Don't set this for the LSP because instead we'll use the OpenDocumentsLoader - // because it's much easier and we get diagnostics/quick fixes about a redirected - // specifier for free. - sloppy_imports_resolver: None, + sloppy_imports_resolver: unstable_sloppy_imports.then(|| { + SloppyImportsResolver::new_without_stat_cache(Arc::new(deno_fs::RealFs)) + }), })) } @@ -410,7 +397,7 @@ struct LspNpmConfigHash(u64); impl LspNpmConfigHash { - pub fn new(config: &Config, global_cache_path: Option<&Path>) -> Self { + pub fn new(config: &Config, cache: &LspCache) -> Self { let config_data = config.tree.root_data(); let scope = config_data.map(|d| &d.scope); let node_modules_dir = @@ -419,64 +406,195 @@ let mut hasher = FastInsecureHasher::new(); hasher.write_hashable(scope); hasher.write_hashable(node_modules_dir); - hasher.write_hashable(global_cache_path); if let Some(lockfile) = lockfile { hasher.write_hashable(&*lockfile.lock()); } - hasher.write_hashable(global_cache_path); + hasher.write_hashable(cache.deno_dir().npm_folder_path()); Self(hasher.finish()) } } -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq)] +struct RedirectEntry { + headers: Arc<HashMap<String, String>>, + target: Url, + destination: Option<Url>, +} + +type GetHeadersFn = + Box<dyn Fn(&Url) -> Option<HashMap<String, String>> + Send + Sync>; + struct RedirectResolver { - cache: Arc<dyn HttpCache>, - redirects: Mutex<HashMap<ModuleSpecifier, ModuleSpecifier>>, + get_headers: GetHeadersFn, + entries: DashMap<Url, Option<Arc<RedirectEntry>>>, +} + +impl std::fmt::Debug for RedirectResolver { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RedirectResolver") + .field("get_headers", &"Box(|_| { ... 
})") + .field("entries", &self.entries) + .finish() + } } impl RedirectResolver { - pub fn new(cache: Arc) -> Self { + fn new(cache: Arc) -> Self { Self { - cache, - redirects: Mutex::new(HashMap::new()), + get_headers: Box::new(move |specifier| { + let cache_key = cache.cache_item_key(specifier).ok()?; + cache.read_headers(&cache_key).ok().flatten() + }), + entries: Default::default(), } } - pub fn resolve( - &self, - specifier: &ModuleSpecifier, - ) -> Option { - if matches!(specifier.scheme(), "http" | "https") { - let mut redirects = self.redirects.lock(); - if let Some(specifier) = redirects.get(specifier) { - Some(specifier.clone()) - } else { - let redirect = self.resolve_remote(specifier, 10)?; - redirects.insert(specifier.clone(), redirect.clone()); - Some(redirect) - } - } else { - Some(specifier.clone()) + #[cfg(test)] + fn mock(get_headers: GetHeadersFn) -> Self { + Self { + get_headers, + entries: Default::default(), } } - fn resolve_remote( - &self, - specifier: &ModuleSpecifier, - redirect_limit: usize, - ) -> Option { - if redirect_limit > 0 { - let cache_key = self.cache.cache_item_key(specifier).ok()?; - let headers = self.cache.read_headers(&cache_key).ok().flatten()?; + fn resolve(&self, specifier: &Url) -> Option { + if !matches!(specifier.scheme(), "http" | "https") { + return Some(specifier.clone()); + } + let mut current = specifier.clone(); + let mut chain = vec![]; + let destination = loop { + if let Some(maybe_entry) = self.entries.get(¤t) { + break match maybe_entry.as_ref() { + Some(entry) => entry.destination.clone(), + None => Some(current), + }; + } + let Some(headers) = (self.get_headers)(¤t) else { + break None; + }; + let headers = Arc::new(headers); if let Some(location) = headers.get("location") { - let redirect = - deno_core::resolve_import(location, specifier.as_str()).ok()?; - self.resolve_remote(&redirect, redirect_limit - 1) + if chain.len() > 10 { + break None; + } + let Ok(target) = + deno_core::resolve_import(location, specifier.as_str()) + else { + break None; + }; + chain.push(( + current.clone(), + RedirectEntry { + headers, + target: target.clone(), + destination: None, + }, + )); + current = target; } else { - Some(specifier.clone()) + self.entries.insert(current.clone(), None); + break Some(current); } - } else { - None + }; + for (specifier, mut entry) in chain { + entry.destination = destination.clone(); + self.entries.insert(specifier, Some(Arc::new(entry))); + } + destination + } + + fn chain(&self, specifier: &Url) -> Vec<(Url, Arc)> { + self.resolve(specifier); + let mut result = vec![]; + let mut seen = HashSet::new(); + let mut current = Cow::Borrowed(specifier); + loop { + let Some(maybe_entry) = self.entries.get(¤t) else { + break; + }; + let Some(entry) = maybe_entry.as_ref() else { + break; + }; + result.push((current.as_ref().clone(), entry.clone())); + seen.insert(current.as_ref().clone()); + if seen.contains(&entry.target) { + break; + } + current = Cow::Owned(entry.target.clone()) } + result + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_redirect_resolver() { + let redirect_resolver = + RedirectResolver::mock(Box::new(|specifier| match specifier.as_str() { + "https://foo/redirect_2.js" => Some( + [("location".to_string(), "./redirect_1.js".to_string())] + .into_iter() + .collect(), + ), + "https://foo/redirect_1.js" => Some( + [("location".to_string(), "./file.js".to_string())] + .into_iter() + .collect(), + ), + "https://foo/file.js" => Some([].into_iter().collect()), + _ => None, + })); + 
assert_eq!( + redirect_resolver.resolve(&Url::parse("https://foo/file.js").unwrap()), + Some(Url::parse("https://foo/file.js").unwrap()) + ); + assert_eq!( + redirect_resolver + .resolve(&Url::parse("https://foo/redirect_1.js").unwrap()), + Some(Url::parse("https://foo/file.js").unwrap()) + ); + assert_eq!( + redirect_resolver + .resolve(&Url::parse("https://foo/redirect_2.js").unwrap()), + Some(Url::parse("https://foo/file.js").unwrap()) + ); + assert_eq!( + redirect_resolver.resolve(&Url::parse("https://foo/unknown").unwrap()), + None + ); + assert_eq!( + redirect_resolver + .chain(&Url::parse("https://foo/redirect_2.js").unwrap()), + vec![ + ( + Url::parse("https://foo/redirect_2.js").unwrap(), + Arc::new(RedirectEntry { + headers: Arc::new( + [("location".to_string(), "./redirect_1.js".to_string())] + .into_iter() + .collect() + ), + target: Url::parse("https://foo/redirect_1.js").unwrap(), + destination: Some(Url::parse("https://foo/file.js").unwrap()), + }) + ), + ( + Url::parse("https://foo/redirect_1.js").unwrap(), + Arc::new(RedirectEntry { + headers: Arc::new( + [("location".to_string(), "./file.js".to_string())] + .into_iter() + .collect() + ), + target: Url::parse("https://foo/file.js").unwrap(), + destination: Some(Url::parse("https://foo/file.js").unwrap()), + }) + ), + ] + ); } } diff --git a/cli/lsp/semantic_tokens.rs b/cli/lsp/semantic_tokens.rs index a2a56cce0d8af9..0cf154d0ff71d4 100644 --- a/cli/lsp/semantic_tokens.rs +++ b/cli/lsp/semantic_tokens.rs @@ -7,6 +7,7 @@ use std::ops::Index; use std::ops::IndexMut; +use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types::SemanticToken; use tower_lsp::lsp_types::SemanticTokenModifier; use tower_lsp::lsp_types::SemanticTokenType; @@ -247,6 +248,54 @@ impl SemanticTokensBuilder { } } +pub fn tokens_within_range( + tokens: &SemanticTokens, + range: lsp::Range, +) -> SemanticTokens { + let mut line = 0; + let mut character = 0; + + let mut first_token_line = 0; + let mut first_token_char = 0; + let mut keep_start_idx = tokens.data.len(); + let mut keep_end_idx = keep_start_idx; + for (i, token) in tokens.data.iter().enumerate() { + if token.delta_line != 0 { + character = 0; + } + line += token.delta_line; + character += token.delta_start; + let token_start = lsp::Position::new(line, character); + if i < keep_start_idx && token_start >= range.start { + keep_start_idx = i; + first_token_line = line; + first_token_char = character; + } + if token_start > range.end { + keep_end_idx = i; + break; + } + } + if keep_end_idx == keep_start_idx { + return SemanticTokens { + result_id: None, + data: Vec::new(), + }; + } + + let mut data = tokens.data[keep_start_idx..keep_end_idx].to_vec(); + // we need to adjust the delta_line and delta_start on the first token + // as it is relative to 0 now, not the previous token + let first_token = &mut data[0]; + first_token.delta_line = first_token_line; + first_token.delta_start = first_token_char; + + SemanticTokens { + result_id: None, + data, + } +} + #[cfg(test)] mod tests { use super::*; @@ -352,4 +401,129 @@ mod tests { ] ); } + + #[test] + fn test_tokens_within_range() { + let mut builder = SemanticTokensBuilder::new(); + builder.push(1, 0, 5, 0, 0); + builder.push(2, 1, 1, 1, 0); + builder.push(2, 2, 3, 2, 0); + builder.push(2, 5, 5, 3, 0); + builder.push(3, 0, 4, 4, 0); + builder.push(5, 2, 3, 5, 0); + let tokens = builder.build(None); + let range = lsp::Range { + start: lsp::Position { + line: 2, + character: 2, + }, + end: lsp::Position { + line: 4, + character: 0, + }, + }; + + let 
result = tokens_within_range(&tokens, range); + + assert_eq!( + result.data, + vec![ + // line 2 char 2 + SemanticToken { + delta_line: 2, + delta_start: 2, + length: 3, + token_type: 2, + token_modifiers_bitset: 0 + }, + // line 2 char 5 + SemanticToken { + delta_line: 0, + delta_start: 3, + length: 5, + token_type: 3, + token_modifiers_bitset: 0 + }, + // line 3 char 0 + SemanticToken { + delta_line: 1, + delta_start: 0, + length: 4, + token_type: 4, + token_modifiers_bitset: 0 + } + ] + ); + } + + #[test] + fn test_tokens_within_range_include_end() { + let mut builder = SemanticTokensBuilder::new(); + builder.push(1, 0, 1, 0, 0); + builder.push(2, 1, 2, 1, 0); + builder.push(2, 3, 3, 2, 0); + builder.push(3, 0, 4, 3, 0); + let tokens = builder.build(None); + let range = lsp::Range { + start: lsp::Position { + line: 2, + character: 2, + }, + end: lsp::Position { + line: 3, + character: 4, + }, + }; + let result = tokens_within_range(&tokens, range); + + assert_eq!( + result.data, + vec![ + // line 2 char 3 + SemanticToken { + delta_line: 2, + delta_start: 3, + length: 3, + token_type: 2, + token_modifiers_bitset: 0 + }, + // line 3 char 0 + SemanticToken { + delta_line: 1, + delta_start: 0, + length: 4, + token_type: 3, + token_modifiers_bitset: 0 + } + ] + ); + } + + #[test] + fn test_tokens_within_range_empty() { + let mut builder = SemanticTokensBuilder::new(); + builder.push(1, 0, 1, 0, 0); + builder.push(2, 1, 2, 1, 0); + builder.push(2, 3, 3, 2, 0); + builder.push(3, 0, 4, 3, 0); + let tokens = builder.build(None); + let range = lsp::Range { + start: lsp::Position { + line: 3, + character: 2, + }, + end: lsp::Position { + line: 3, + character: 4, + }, + }; + let result = tokens_within_range(&tokens, range); + + assert_eq!(result.data, vec![]); + + assert_eq!( + tokens_within_range(&SemanticTokens::default(), range).data, + vec![] + ); + } } diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index ae4b62ea8833cc..29b6a4f190ebf9 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -219,10 +219,10 @@ impl TestRun { // file would have impact on other files, which is undesirable. 
let permissions = Permissions::from_options(&factory.cli_options().permissions_options()?)?; + let main_graph_container = factory.main_module_graph_container().await?; test::check_specifiers( - factory.cli_options(), factory.file_fetcher()?, - factory.module_load_preparer().await?, + main_graph_container, self .queue .iter() diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index bed71f6d9226b7..b41a8fb1930fc9 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -8,6 +8,7 @@ use super::documents::DocumentsFilter; use super::language_server; use super::language_server::StateSnapshot; use super::performance::Performance; +use super::performance::PerformanceMark; use super::refactor::RefactorCodeActionData; use super::refactor::ALL_KNOWN_REFACTOR_ACTION_KINDS; use super::refactor::EXTRACT_CONSTANT; @@ -22,25 +23,25 @@ use super::urls::INVALID_SPECIFIER; use crate::args::jsr_url; use crate::args::FmtOptionsConfig; -use crate::cache::HttpCache; -use crate::lsp::cache::CacheMetadata; -use crate::lsp::documents::Documents; use crate::lsp::logging::lsp_warn; use crate::tsc; use crate::tsc::ResolveArgs; use crate::tsc::MISSING_DEPENDENCY_SPECIFIER; use crate::util::path::relative_specifier; use crate::util::path::to_percent_decoded_str; +use crate::util::result::InfallibleResultExt; +use crate::util::v8::convert; +use deno_core::convert::Smi; +use deno_core::convert::ToV8; +use deno_core::error::StdAnyError; use deno_runtime::fs_util::specifier_to_file_path; use dashmap::DashMap; use deno_ast::MediaType; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context as _; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; -use deno_core::located_script_name; use deno_core::op2; use deno_core::parking_lot::Mutex; use deno_core::resolve_url; @@ -66,9 +67,12 @@ use regex::Captures; use regex::Regex; use serde_repr::Deserialize_repr; use serde_repr::Serialize_repr; +use std::borrow::Cow; +use std::cell::RefCell; use std::cmp; use std::collections::HashMap; use std::collections::HashSet; +use std::convert::Infallible; use std::net::SocketAddr; use std::ops::Range; use std::path::Path; @@ -220,7 +224,6 @@ fn normalize_diagnostic( pub struct TsServer { performance: Arc<Performance>, - cache: Arc<dyn HttpCache>, sender: mpsc::UnboundedSender<Request>, receiver: Mutex<Option<UnboundedReceiver<Request>>>, specifier_map: Arc<TscSpecifierMap>, @@ -232,7 +235,6 @@ impl std::fmt::Debug for TsServer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("TsServer") .field("performance", &self.performance) - .field("cache", &self.cache) .field("sender", &self.sender) .field("receiver", &self.receiver) .field("specifier_map", &self.specifier_map) @@ -249,6 +251,16 @@ pub enum ChangeKind { Closed = 2, } +impl<'a> ToV8<'a> for ChangeKind { + type Error = Infallible; + fn to_v8( + self, + scope: &mut v8::HandleScope<'a>, + ) -> Result<v8::Local<'a, v8::Value>, Self::Error> { + Smi(self as u8).to_v8(scope) + } +} + impl Serialize for ChangeKind { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where @@ -265,15 +277,28 @@ pub struct PendingChange { pub config_changed: bool, } -impl PendingChange { - fn to_v8<'s>( - &self, - scope: &mut v8::HandleScope<'s>, - ) -> Result<v8::Local<'s, v8::Value>, AnyError> { - let modified_scripts = serde_v8::to_v8(scope, &self.modified_scripts)?; +impl<'a> ToV8<'a> for PendingChange { + type Error = Infallible; + fn to_v8( + self, + scope: &mut v8::HandleScope<'a>, + ) -> Result<v8::Local<'a, v8::Value>, Self::Error> { + let modified_scripts = { + let mut modified_scripts_v8 = + Vec::with_capacity(self.modified_scripts.len()); + for (specifier, kind) in
&self.modified_scripts { + let specifier = v8::String::new(scope, specifier).unwrap().into(); + let kind = kind.to_v8(scope).unwrap_infallible(); + let pair = + v8::Array::new_with_elements(scope, &[specifier, kind]).into(); + modified_scripts_v8.push(pair); + } + v8::Array::new_with_elements(scope, &modified_scripts_v8).into() + }; let project_version = v8::Integer::new_from_unsigned(scope, self.project_version as u32).into(); let config_changed = v8::Boolean::new(scope, self.config_changed).into(); + Ok( v8::Array::new_with_elements( scope, @@ -282,7 +307,9 @@ impl PendingChange { .into(), ) } +} +impl PendingChange { fn coalesce( &mut self, new_version: usize, @@ -331,11 +358,10 @@ } impl TsServer { - pub fn new(performance: Arc<Performance>, cache: Arc<dyn HttpCache>) -> Self { + pub fn new(performance: Arc<Performance>) -> Self { let (tx, request_rx) = mpsc::unbounded_channel::<Request>(); Self { performance, - cache, sender: tx, receiver: Mutex::new(Some(request_rx)), specifier_map: Arc::new(TscSpecifierMap::new()), @@ -363,13 +389,11 @@ // on the `TsServer` struct. let receiver = self.receiver.lock().take().unwrap(); let performance = self.performance.clone(); - let cache = self.cache.clone(); let specifier_map = self.specifier_map.clone(); let _join_handle = thread::spawn(move || { run_tsc_thread( receiver, performance.clone(), - cache.clone(), specifier_map.clone(), maybe_inspector_server, ) @@ -1076,6 +1100,7 @@ let droppable_token = DroppableToken(token.clone()); let (tx, mut rx) = oneshot::channel::<Result<String, AnyError>>(); let change = self.pending_change.lock().take(); + if self .sender .send((req, snapshot, tx, token.clone(), change)) @@ -1372,6 +1397,7 @@ pub enum OneOrMany<T> { Many(Vec<T>), } +/// Aligns with ts.ScriptElementKind #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] pub enum ScriptElementKind { #[serde(rename = "")] @@ -1400,6 +1426,10 @@ pub enum ScriptElementKind { VariableElement, #[serde(rename = "local var")] LocalVariableElement, + #[serde(rename = "using")] + VariableUsingElement, + #[serde(rename = "await using")] + VariableAwaitUsingElement, #[serde(rename = "function")] FunctionElement, #[serde(rename = "local function")] @@ -1412,6 +1442,8 @@ MemberSetAccessorElement, #[serde(rename = "property")] MemberVariableElement, + #[serde(rename = "accessor")] + MemberAccessorVariableElement, #[serde(rename = "constructor")] ConstructorImplementationElement, #[serde(rename = "call")] @@ -1456,7 +1488,8 @@ impl Default for ScriptElementKind { } } -/// This mirrors the method `convertKind` in `completions.ts` in vscode +/// This mirrors the method `convertKind` in `completions.ts` in vscode (extensions/typescript-language-features) +/// https://github.com/microsoft/vscode/blob/bd2df940d74b51105aefb11304e028d2fb56a9dc/extensions/typescript-language-features/src/languageFeatures/completions.ts#L440 impl From<ScriptElementKind> for lsp::CompletionItemKind { fn from(kind: ScriptElementKind) -> Self { match kind { @@ -1502,7 +1535,18 @@ impl From<ScriptElementKind> for lsp::CompletionItemKind { ScriptElementKind::ScriptElement => lsp::CompletionItemKind::FILE, ScriptElementKind::Directory => lsp::CompletionItemKind::FOLDER, ScriptElementKind::String => lsp::CompletionItemKind::CONSTANT, - _ => lsp::CompletionItemKind::PROPERTY, + ScriptElementKind::LocalClassElement + | ScriptElementKind::ConstructorImplementationElement + | ScriptElementKind::TypeParameterElement + | ScriptElementKind::Label + | ScriptElementKind::JsxAttribute + | ScriptElementKind::Link + | 
ScriptElementKind::LinkName + | ScriptElementKind::LinkText + | ScriptElementKind::VariableUsingElement + | ScriptElementKind::VariableAwaitUsingElement + | ScriptElementKind::MemberAccessorVariableElement + | ScriptElementKind::Unknown => lsp::CompletionItemKind::PROPERTY, } } } @@ -3940,10 +3984,12 @@ struct State { last_id: usize, performance: Arc<Performance>, // the response from JS, as a JSON string - response: Option<String>, + response_tx: Option<oneshot::Sender<Result<String, AnyError>>>, state_snapshot: Arc<StateSnapshot>, specifier_map: Arc<TscSpecifierMap>, token: CancellationToken, + pending_requests: Option<UnboundedReceiver<Request>>, + mark: Option<PerformanceMark>, } impl State { @@ -3951,14 +3997,17 @@ fn new( state_snapshot: Arc<StateSnapshot>, specifier_map: Arc<TscSpecifierMap>, performance: Arc<Performance>, + pending_requests: UnboundedReceiver<Request>, ) -> Self { Self { last_id: 1, performance, - response: None, + response_tx: None, state_snapshot, specifier_map, token: Default::default(), + mark: None, + pending_requests: Some(pending_requests), } } @@ -4005,7 +4054,7 @@ fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { let state = state.borrow::<State>(); let mark = state.performance.mark("tsc.op.op_is_node_file"); let r = match ModuleSpecifier::parse(&path) { - Ok(specifier) => state.state_snapshot.resolver.in_npm_package(&specifier), + Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Err(_) => false, }; state.performance.measure(mark); @@ -4079,6 +4128,75 @@ fn op_resolve( op_resolve_inner(state, ResolveArgs { base, specifiers }) } +struct TscRequestArray { + request: TscRequest, + id: Smi<usize>, + change: convert::OptionNull<PendingChange>, +} + +impl<'a> ToV8<'a> for TscRequestArray { + type Error = StdAnyError; + + fn to_v8( + self, + scope: &mut v8::HandleScope<'a>, + ) -> Result<v8::Local<'a, v8::Value>, Self::Error> { + let id = self.id.to_v8(scope).unwrap_infallible(); + + let (method_name, args) = self.request.to_server_request(scope)?; + + let method_name = deno_core::FastString::from_static(method_name) + .v8_string(scope) + .into(); + let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into()); + + let change = self.change.to_v8(scope).unwrap_infallible(); + + Ok( + v8::Array::new_with_elements(scope, &[id, method_name, args, change]) + .into(), + ) + } +} + +#[op2(async)] +#[to_v8] +async fn op_poll_requests( + state: Rc<RefCell<OpState>>, +) -> convert::OptionNull<TscRequestArray> { + let mut pending_requests = { + let mut state = state.borrow_mut(); + let state = state.try_borrow_mut::<State>().unwrap(); + state.pending_requests.take().unwrap() + }; + + let Some((request, snapshot, response_tx, token, change)) = + pending_requests.recv().await + else { + return None.into(); + }; + + let mut state = state.borrow_mut(); + let state = state.try_borrow_mut::<State>().unwrap(); + state.pending_requests = Some(pending_requests); + state.state_snapshot = snapshot; + state.token = token; + state.response_tx = Some(response_tx); + let id = state.last_id; + state.last_id += 1; + let mark = state + .performance + .mark_with_args(format!("tsc.host.{}", request.method()), &request); + state.mark = Some(mark); + + Some(TscRequestArray { + request, + id: Smi(id), + change: change.into(), + }) + .into() +} + #[inline] fn op_resolve_inner( state: &mut OpState, @@ -4107,9 +4225,25 @@ } #[op2(fast)] -fn op_respond(state: &mut OpState, #[string] response: String) { +fn op_respond( + state: &mut OpState, + #[string] response: String, + #[string] error: String, +) { let state = state.borrow_mut::<State>(); - state.response = Some(response); + state.performance.measure(state.mark.take().unwrap()); + let response = if !error.is_empty() { + Err(anyhow!("tsc error: {error}")) + }
else { + Ok(response) + }; + + let was_sent = state.response_tx.take().unwrap().send(response).is_ok(); + // Don't print the send error if the token is cancelled, it's expected + // to fail in that case and this commonly occurs. + if !was_sent && !state.token.is_cancelled() { + lsp_warn!("Unable to send result to client."); + } } #[op2] @@ -4117,44 +4251,55 @@ fn op_respond(state: &mut OpState, #[string] response: String) { fn op_script_names(state: &mut OpState) -> Vec { let state = state.borrow_mut::(); let mark = state.performance.mark("tsc.op.op_script_names"); - let documents = &state.state_snapshot.documents; - let all_docs = documents.documents(DocumentsFilter::AllDiagnosable); let mut seen = HashSet::new(); let mut result = Vec::new(); - if documents.has_injected_types_node_package() { + if state + .state_snapshot + .documents + .has_injected_types_node_package() + { // ensure this is first so it resolves the node types first let specifier = "asset:///node_types.d.ts"; result.push(specifier.to_string()); - seen.insert(specifier); + seen.insert(Cow::Borrowed(specifier)); } // inject these next because they're global for specifier in state.state_snapshot.resolver.graph_import_specifiers() { - if seen.insert(specifier.as_str()) { + if seen.insert(Cow::Borrowed(specifier.as_str())) { result.push(specifier.to_string()); } } - // finally include the documents and all their dependencies - for doc in &all_docs { - let specifiers = std::iter::once(doc.specifier()).chain( - doc - .dependencies() - .values() - .filter_map(|dep| dep.get_type().or_else(|| dep.get_code())), - ); - for specifier in specifiers { - if seen.insert(specifier.as_str()) { - if let Some(specifier) = documents.resolve_specifier(specifier) { - // only include dependencies we know to exist otherwise typescript will error - if documents.exists(&specifier) - && (specifier.scheme() == "file" || documents.is_open(&specifier)) - { - result.push(specifier.to_string()); - } + // finally include the documents + let docs = state + .state_snapshot + .documents + .documents(DocumentsFilter::AllDiagnosable); + for doc in &docs { + let specifier = doc.specifier(); + let is_open = doc.is_open(); + if seen.insert(Cow::Borrowed(specifier.as_str())) + && (is_open || specifier.scheme() == "file") + { + let types_specifier = (|| { + let documents = &state.state_snapshot.documents; + let types = doc.maybe_types_dependency().maybe_specifier()?; + let (types, _) = documents.resolve_dependency(types, specifier)?; + let types_doc = documents.get(&types)?; + Some(types_doc.specifier().clone()) + })(); + // If there is a types dep, use that as the root instead. But if the doc + // is open, include both as roots. 
+ if let Some(types_specifier) = &types_specifier { + if seen.insert(Cow::Owned(types_specifier.to_string())) { + result.push(types_specifier.to_string()); } } + if types_specifier.is_none() || is_open { + result.push(specifier.to_string()); + } } } @@ -4205,123 +4350,37 @@ fn op_project_version(state: &mut OpState) -> usize { struct TscRuntime { js_runtime: JsRuntime, - server_request_fn_global: v8::Global, + server_main_loop_fn_global: v8::Global, } impl TscRuntime { fn new(mut js_runtime: JsRuntime) -> Self { - let server_request_fn_global = { + let server_main_loop_fn_global = { let context = js_runtime.main_context(); let scope = &mut js_runtime.handle_scope(); let context_local = v8::Local::new(scope, context); let global_obj = context_local.global(scope); - let server_request_fn_str = - v8::String::new_external_onebyte_static(scope, b"serverRequest") + let server_main_loop_fn_str = + v8::String::new_external_onebyte_static(scope, b"serverMainLoop") .unwrap(); - let server_request_fn = v8::Local::try_from( - global_obj.get(scope, server_request_fn_str.into()).unwrap(), + let server_main_loop_fn = v8::Local::try_from( + global_obj + .get(scope, server_main_loop_fn_str.into()) + .unwrap(), ) .unwrap(); - v8::Global::new(scope, server_request_fn) + v8::Global::new(scope, server_main_loop_fn) }; Self { - server_request_fn_global, + server_main_loop_fn_global, js_runtime, } } - - /// Send a request into the runtime and return the JSON string containing the response. - fn request( - &mut self, - state_snapshot: Arc, - request: TscRequest, - change: Option, - token: CancellationToken, - ) -> Result { - if token.is_cancelled() { - return Err(anyhow!("Operation was cancelled.")); - } - let (performance, id) = { - let op_state = self.js_runtime.op_state(); - let mut op_state = op_state.borrow_mut(); - let state = op_state.borrow_mut::(); - state.state_snapshot = state_snapshot; - state.token = token; - state.last_id += 1; - let id = state.last_id; - (state.performance.clone(), id) - }; - let mark = performance - .mark_with_args(format!("tsc.host.{}", request.method()), &request); - - { - let scope = &mut self.js_runtime.handle_scope(); - let tc_scope = &mut v8::TryCatch::new(scope); - let server_request_fn = - v8::Local::new(tc_scope, &self.server_request_fn_global); - let undefined = v8::undefined(tc_scope).into(); - - let change = if let Some(change) = change { - change.to_v8(tc_scope)? 
- } else { - v8::null(tc_scope).into() - }; - - let (method, req_args) = request.to_server_request(tc_scope)?; - let args = vec![ - v8::Integer::new(tc_scope, id as i32).into(), - v8::String::new(tc_scope, method).unwrap().into(), - req_args.unwrap_or_else(|| v8::Array::new(tc_scope, 0).into()), - change, - ]; - - server_request_fn.call(tc_scope, undefined, &args); - if tc_scope.has_caught() && !tc_scope.has_terminated() { - if let Some(stack_trace) = tc_scope.stack_trace() { - lsp_warn!( - "Error during TS request \"{method}\":\n {}", - stack_trace.to_rust_string_lossy(tc_scope), - ); - } else if let Some(message) = tc_scope.message() { - lsp_warn!( - "Error during TS request \"{method}\":\n {}\n {}", - message.get(tc_scope).to_rust_string_lossy(tc_scope), - tc_scope - .exception() - .map(|exc| exc.to_rust_string_lossy(tc_scope)) - .unwrap_or_default() - ); - } else { - lsp_warn!( - "Error during TS request \"{method}\":\n {}", - tc_scope - .exception() - .map(|exc| exc.to_rust_string_lossy(tc_scope)) - .unwrap_or_default(), - ); - } - tc_scope.rethrow(); - } - } - - let op_state = self.js_runtime.op_state(); - let mut op_state = op_state.borrow_mut(); - let state = op_state.borrow_mut::(); - - performance.measure(mark); - state.response.take().ok_or_else(|| { - custom_error( - "RequestError", - "The response was not received for the request.", - ) - }) - } } fn run_tsc_thread( - mut request_rx: UnboundedReceiver, + request_rx: UnboundedReceiver, performance: Arc, - cache: Arc, specifier_map: Arc, maybe_inspector_server: Option>, ) { @@ -4330,9 +4389,13 @@ fn run_tsc_thread( // supplied snapshot is an isolate that contains the TypeScript language // server. let mut tsc_runtime = JsRuntime::new(RuntimeOptions { - extensions: vec![deno_tsc::init_ops(performance, cache, specifier_map)], + extensions: vec![deno_tsc::init_ops( + performance, + specifier_map, + request_rx, + )], startup_snapshot: Some(tsc::compiler_snapshot()), - inspector: maybe_inspector_server.is_some(), + inspector: has_inspector_server, ..Default::default() }); @@ -4345,40 +4408,53 @@ fn run_tsc_thread( } let tsc_future = async { - start_tsc(&mut tsc_runtime, false).unwrap(); - let (request_signal_tx, mut request_signal_rx) = mpsc::unbounded_channel::<()>(); - let tsc_runtime = Rc::new(tokio::sync::Mutex::new(TscRuntime::new(tsc_runtime))); + // start_tsc(&mut tsc_runtime, false).unwrap(); + let tsc_runtime = + Rc::new(tokio::sync::Mutex::new(TscRuntime::new(tsc_runtime))); let tsc_runtime_ = tsc_runtime.clone(); + let event_loop_fut = async { loop { - if has_inspector_server { - tsc_runtime_.lock().await.js_runtime.run_event_loop(PollEventLoopOptions { + if let Err(e) = tsc_runtime_ + .lock() + .await + .js_runtime + .run_event_loop(PollEventLoopOptions { wait_for_inspector: false, pump_v8_message_loop: true, - }).await.ok(); + }) + .await + { + log::error!("Error in TSC event loop: {e}"); } - request_signal_rx.recv_many(&mut vec![], 1000).await; } }; - tokio::pin!(event_loop_fut); - loop { - tokio::select! { - biased; - (maybe_request, mut tsc_runtime) = async { (request_rx.recv().await, tsc_runtime.lock().await) } => { - if let Some((req, state_snapshot, tx, token, pending_change)) = maybe_request { - let value = tsc_runtime.request(state_snapshot, req, pending_change, token.clone()); - request_signal_tx.send(()).unwrap(); - let was_sent = tx.send(value).is_ok(); - // Don't print the send error if the token is cancelled, it's expected - // to fail in that case and this commonly occurs. 
- if !was_sent && !token.is_cancelled() { - lsp_warn!("Unable to send result to client."); - } - } else { - break; - } - }, - _ = &mut event_loop_fut => {} + let main_loop_fut = { + let enable_debug = std::env::var("DENO_TSC_DEBUG") + .map(|s| { + let s = s.trim(); + s == "1" || s.eq_ignore_ascii_case("true") + }) + .unwrap_or(false); + let mut runtime = tsc_runtime.lock().await; + let main_loop = runtime.server_main_loop_fn_global.clone(); + let args = { + let scope = &mut runtime.js_runtime.handle_scope(); + let enable_debug_local = + v8::Local::::from(v8::Boolean::new(scope, enable_debug)); + [v8::Global::new(scope, enable_debug_local)] + }; + + runtime.js_runtime.call_with_args(&main_loop, &args) + }; + + tokio::select! { + biased; + _ = event_loop_fut => {}, + res = main_loop_fut => { + if let Err(err) = res { + log::error!("Error in TSC main loop: {err}"); + } } } } @@ -4400,38 +4476,23 @@ deno_core::extension!(deno_tsc, op_script_version, op_ts_config, op_project_version, + op_poll_requests, ], options = { performance: Arc, - cache: Arc, specifier_map: Arc, + request_rx: UnboundedReceiver, }, state = |state, options| { state.put(State::new( - Arc::new(StateSnapshot { - project_version: 0, - assets: Default::default(), - cache_metadata: CacheMetadata::new(options.cache.clone()), - config: Default::default(), - documents: Documents::new(options.cache.clone()), - resolver: Default::default(), - }), + Default::default(), options.specifier_map, options.performance, + options.request_rx, )); }, ); -/// Instruct a language server runtime to start the language server and provide -/// it with a minimal bootstrap configuration. -fn start_tsc(runtime: &mut JsRuntime, debug: bool) -> Result<(), AnyError> { - let init_config = json!({ "debug": debug }); - let init_src = format!("globalThis.serverInit({init_config});"); - - runtime.execute_script(located_script_name!(), init_src)?; - Ok(()) -} - #[derive(Debug, Deserialize_repr, Serialize_repr)] #[repr(u32)] pub enum CompletionTriggerKind { @@ -5059,11 +5120,9 @@ impl TscRequest { #[cfg(test)] mod tests { use super::*; - use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; - use crate::cache::RealDenoCacheEnv; use crate::http_util::HeadersMap; - use crate::lsp::cache::CacheMetadata; + use crate::lsp::cache::LspCache; use crate::lsp::config::Config; use crate::lsp::config::WorkspaceSettings; use crate::lsp::documents::Documents; @@ -5071,29 +5130,14 @@ mod tests { use crate::lsp::resolver::LspResolver; use crate::lsp::text::LineIndex; use pretty_assertions::assert_eq; - use std::path::Path; use test_util::TempDir; - async fn mock_state_snapshot( - fixtures: &[(&str, &str, i32, LanguageId)], - location: &Path, + async fn setup( ts_config: Value, - ) -> StateSnapshot { - let cache = Arc::new(GlobalHttpCache::new( - location.to_path_buf(), - RealDenoCacheEnv, - )); - let mut documents = Documents::new(cache.clone()); - for (specifier, source, version, language_id) in fixtures { - let specifier = - resolve_url(specifier).expect("failed to create specifier"); - documents.open( - specifier.clone(), - *version, - *language_id, - (*source).into(), - ); - } + sources: &[(&str, &str, i32, LanguageId)], + ) -> (TsServer, Arc, LspCache) { + let temp_dir = TempDir::new(); + let cache = LspCache::new(Some(temp_dir.uri())); let mut config = Config::default(); config .tree @@ -5110,37 +5154,37 @@ mod tests { ) .await; let resolver = LspResolver::default() - .with_new_config(&config, cache.clone(), None, None) + .with_new_config(&config, &cache, None) 
.await; - StateSnapshot { + let mut documents = Documents::default(); + documents.update_config(&config, &resolver, &cache, &Default::default()); + for (specifier, source, version, language_id) in sources { + let specifier = + resolve_url(specifier).expect("failed to create specifier"); + documents.open( + specifier.clone(), + *version, + *language_id, + (*source).into(), + ); + } + let snapshot = Arc::new(StateSnapshot { project_version: 0, documents, assets: Default::default(), - cache_metadata: CacheMetadata::new(cache), config: Arc::new(config), resolver, - } - } - - async fn setup( - temp_dir: &TempDir, - config: Value, - sources: &[(&str, &str, i32, LanguageId)], - ) -> (TsServer, Arc, Arc) { - let location = temp_dir.path().join("deps").to_path_buf(); - let cache = - Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)); - let snapshot = - Arc::new(mock_state_snapshot(sources, &location, config).await); + }); let performance = Arc::new(Performance::default()); - let ts_server = TsServer::new(performance, cache.clone()); + let ts_server = TsServer::new(performance); ts_server.start(None).unwrap(); (ts_server, snapshot, cache) } fn setup_op_state(state_snapshot: Arc) -> OpState { + let (_tx, rx) = mpsc::unbounded_channel(); let state = - State::new(state_snapshot, Default::default(), Default::default()); + State::new(state_snapshot, Default::default(), Default::default(), rx); let mut op_state = OpState::new(None); op_state.put(state); op_state @@ -5163,9 +5207,7 @@ mod tests { #[tokio::test] async fn test_get_diagnostics() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5211,9 +5253,7 @@ mod tests { #[tokio::test] async fn test_get_diagnostics_lib() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5239,9 +5279,7 @@ mod tests { #[tokio::test] async fn test_module_resolution() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5272,9 +5310,7 @@ mod tests { #[tokio::test] async fn test_bad_module_specifiers() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5320,9 +5356,7 @@ mod tests { #[tokio::test] async fn test_remote_modules() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5353,9 +5387,7 @@ mod tests { #[tokio::test] async fn test_partial_modules() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5422,9 +5454,7 @@ mod tests { #[tokio::test] async fn test_no_debug_failure() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5470,8 +5500,7 @@ mod tests { #[tokio::test] async fn test_request_assets() { - let temp_dir = TempDir::new(); - let (ts_server, snapshot, _) = setup(&temp_dir, json!({}), &[]).await; + let (ts_server, snapshot, _) = setup(json!({}), &[]).await; let assets = get_isolate_assets(&ts_server, snapshot).await; let mut asset_names = assets .iter() @@ -5503,9 +5532,7 @@ mod tests { #[tokio::test] async fn test_modify_sources() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, cache) = setup( - &temp_dir, json!({ "target": "esnext", 
"module": "esnext", @@ -5528,6 +5555,7 @@ mod tests { let specifier_dep = resolve_url("https://deno.land/x/example/a.ts").unwrap(); cache + .global() .set( &specifier_dep, HeadersMap::default(), @@ -5562,6 +5590,7 @@ mod tests { }) ); cache + .global() .set( &specifier_dep, HeadersMap::default(), @@ -5637,9 +5666,7 @@ mod tests { character: 16, }) .unwrap(); - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5788,9 +5815,7 @@ mod tests { character: 33, }) .unwrap(); - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5897,9 +5922,7 @@ mod tests { #[tokio::test] async fn test_get_edits_for_file_rename() { - let temp_dir = TempDir::new(); let (ts_server, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", @@ -5975,9 +5998,7 @@ mod tests { #[tokio::test] async fn resolve_unknown_dependency() { - let temp_dir = TempDir::new(); let (_, snapshot, _) = setup( - &temp_dir, json!({ "target": "esnext", "module": "esnext", diff --git a/cli/main.rs b/cli/main.rs index 3b103e7807b56e..099bf060cc8319 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -8,6 +8,7 @@ mod emit; mod errors; mod factory; mod file_fetcher; +mod graph_container; mod graph_util; mod http_util; mod js; @@ -30,6 +31,7 @@ use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use crate::args::Flags; use crate::args::DENO_FUTURE; +use crate::graph_container::ModuleGraphContainer; use crate::util::display; use crate::util::v8::get_v8_flags_from_env; use crate::util::v8::init_v8_flags; @@ -112,18 +114,19 @@ async fn run_subcommand(flags: Flags) -> Result { }), DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move { let factory = CliFactory::from_flags(flags)?; - let module_load_preparer = factory.module_load_preparer().await?; let emitter = factory.emitter()?; - let graph_container = factory.graph_container(); - module_load_preparer + let main_graph_container = + factory.main_module_graph_container().await?; + main_graph_container .load_and_type_check_files(&cache_flags.files) .await?; - emitter.cache_module_emits(&graph_container.graph()) + emitter.cache_module_emits(&main_graph_container.graph()) }), DenoSubcommand::Check(check_flags) => spawn_subcommand(async move { let factory = CliFactory::from_flags(flags)?; - let module_load_preparer = factory.module_load_preparer().await?; - module_load_preparer + let main_graph_container = + factory.main_module_graph_container().await?; + main_graph_container .load_and_type_check_files(&check_flags.files) .await }), @@ -235,6 +238,7 @@ async fn run_subcommand(flags: Flags) -> Result { handle.await? } +#[allow(clippy::print_stderr)] fn setup_panic_hook() { // This function does two things inside of the panic hook: // - Tokio does not exit the process when a task panics, so we define a custom @@ -259,6 +263,7 @@ fn setup_panic_hook() { })); } +#[allow(clippy::print_stderr)] fn exit_with_message(message: &str, code: i32) -> ! { eprintln!( "{}: {}", @@ -289,6 +294,7 @@ fn exit_for_error(error: AnyError) -> ! { exit_with_message(&error_string, error_code); } +#[allow(clippy::print_stderr)] pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { eprintln!( "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", @@ -298,6 +304,7 @@ pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { } // TODO(bartlomieju): remove when `--unstable` flag is removed. 
+#[allow(clippy::print_stderr)] pub(crate) fn unstable_warn_cb(feature: &str, api_name: &str) { eprintln!( "⚠️ {}", @@ -369,7 +376,9 @@ fn resolve_flags_and_init( // TODO(bartlomieju): remove when `--unstable` flag is removed. if flags.unstable_config.legacy_flag_enabled { + #[allow(clippy::print_stderr)] if matches!(flags.subcommand, DenoSubcommand::Check(_)) { + // can't use log crate because that's not setup yet eprintln!( "⚠️ {}", colors::yellow( diff --git a/cli/mainrt.rs b/cli/mainrt.rs index 59efa026c588a3..6a363c04b5d9be 100644 --- a/cli/mainrt.rs +++ b/cli/mainrt.rs @@ -36,6 +36,7 @@ use std::env::current_exe; use crate::args::Flags; +#[allow(clippy::print_stderr)] pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { eprintln!( "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", @@ -44,6 +45,7 @@ pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { std::process::exit(70); } +#[allow(clippy::print_stderr)] fn exit_with_message(message: &str, code: i32) -> ! { eprintln!( "{}: {}", @@ -57,7 +59,7 @@ fn unwrap_or_exit(result: Result) -> T { match result { Ok(value) => value, Err(error) => { - let mut error_string = format!("{error:?}"); + let mut error_string = format!("{:?}", error); if let Some(e) = error.downcast_ref::() { error_string = format_js_error(e); diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 7d8cb130b296c8..9a8441ccd99795 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -1,5 +1,12 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; +use std::cell::RefCell; +use std::pin::Pin; +use std::rc::Rc; +use std::str; +use std::sync::Arc; + use crate::args::jsr_url; use crate::args::CliOptions; use crate::args::DenoSubcommand; @@ -9,10 +16,12 @@ use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; use crate::factory::CliFactory; +use crate::graph_container::MainModuleGraphContainer; +use crate::graph_container::ModuleGraphContainer; +use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_util::graph_lock_or_exit; use crate::graph_util::CreateGraphOptions; use crate::graph_util::ModuleGraphBuilder; -use crate::graph_util::ModuleGraphContainer; use crate::node; use crate::resolver::CliGraphResolver; use crate::resolver::CliNodeResolver; @@ -23,6 +32,7 @@ use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; +use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; @@ -36,7 +46,6 @@ use deno_core::futures::future::FutureExt; use deno_core::futures::Future; use deno_core::parking_lot::Mutex; use deno_core::resolve_url; -use deno_core::resolve_url_or_path; use deno_core::ModuleCodeString; use deno_core::ModuleLoader; use deno_core::ModuleSource; @@ -48,9 +57,11 @@ use deno_core::ResolutionKind; use deno_core::SourceMapGetter; use deno_graph::source::ResolutionMode; use deno_graph::source::Resolver; +use deno_graph::GraphKind; use deno_graph::JsModule; use deno_graph::JsonModule; use deno_graph::Module; +use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_lockfile::Lockfile; use deno_runtime::code_cache; @@ -58,12 +69,6 @@ use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::fs_util::code_timestamp; use deno_runtime::permissions::PermissionsContainer; use 
deno_semver::npm::NpmPackageReqReference; -use deno_terminal::colors; -use std::borrow::Cow; -use std::pin::Pin; -use std::rc::Rc; -use std::str; -use std::sync::Arc; pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> { let npm_resolver = factory.npm_resolver().await?; @@ -83,12 +88,19 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> { entry.value.cloned() } }) - .collect(); + .collect::>(); + let mut graph_permit = factory + .main_module_graph_container() + .await? + .acquire_update_permit() + .await; + let graph = graph_permit.graph_mut(); factory .module_load_preparer() .await? .prepare_module_load( - roots, + graph, + &roots, false, factory.cli_options().ts_type_lib_window(), deno_runtime::permissions::PermissionsContainer::allow_all(), @@ -101,7 +113,6 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> { pub struct ModuleLoadPreparer { options: Arc, - graph_container: Arc, lockfile: Option>>, module_graph_builder: Arc, progress_bar: ProgressBar, @@ -112,7 +123,6 @@ impl ModuleLoadPreparer { #[allow(clippy::too_many_arguments)] pub fn new( options: Arc, - graph_container: Arc, lockfile: Option>>, module_graph_builder: Arc, progress_bar: ProgressBar, @@ -120,7 +130,6 @@ impl ModuleLoadPreparer { ) -> Self { Self { options, - graph_container, lockfile, module_graph_builder, progress_bar, @@ -135,7 +144,8 @@ impl ModuleLoadPreparer { #[allow(clippy::too_many_arguments)] pub async fn prepare_module_load( &self, - roots: Vec, + graph: &mut ModuleGraph, + roots: &[ModuleSpecifier], is_dynamic: bool, lib: TsTypeLib, permissions: PermissionsContainer, @@ -144,10 +154,7 @@ impl ModuleLoadPreparer { let _pb_clear_guard = self.progress_bar.clear_guard(); let mut cache = self.module_graph_builder.create_fetch_cacher(permissions); - log::debug!("Creating module graph."); - let mut graph_update_permit = - self.graph_container.acquire_update_permit().await; - let graph = graph_update_permit.graph_mut(); + log::debug!("Building module graph."); let has_type_checked = !graph.roots.is_empty(); self @@ -157,13 +164,13 @@ impl ModuleLoadPreparer { CreateGraphOptions { is_dynamic, graph_kind: graph.graph_kind(), - roots: roots.clone(), + roots: roots.to_vec(), loader: Some(&mut cache), }, ) .await?; - self.module_graph_builder.graph_roots_valid(graph, &roots)?; + self.module_graph_builder.graph_roots_valid(graph, roots)?; // If there is a lockfile... if let Some(lockfile) = &self.lockfile { @@ -174,9 +181,6 @@ impl ModuleLoadPreparer { lockfile.write().context("Failed writing lockfile.")?; } - // save the graph and get a reference to the new graph - let graph = graph_update_permit.commit(); - drop(_pb_clear_guard); // type check if necessary @@ -188,7 +192,7 @@ impl ModuleLoadPreparer { // created, we could avoid the clone of the graph here by providing // the actual graph on the first run and then getting the Arc // back from the return value. - (*graph).clone(), + graph.clone(), check::CheckOptions { build_fast_check_graph: true, lib, @@ -204,154 +208,23 @@ impl ModuleLoadPreparer { Ok(()) } - - /// Helper around prepare_module_load that loads and type checks - /// the provided files. 
- pub async fn load_and_type_check_files( - &self, - files: &[String], - ) -> Result<(), AnyError> { - let lib = self.options.ts_type_lib_window(); - - let specifiers = self.collect_specifiers(files)?; - - if specifiers.is_empty() { - log::warn!("{} No matching files found.", colors::yellow("Warning")); - } - - self - .prepare_module_load( - specifiers, - false, - lib, - PermissionsContainer::allow_all(), - ) - .await - } - - fn collect_specifiers( - &self, - files: &[String], - ) -> Result, AnyError> { - let excludes = self.options.resolve_config_excludes()?; - Ok( - files - .iter() - .filter_map(|file| { - let file_url = - resolve_url_or_path(file, self.options.initial_cwd()).ok()?; - if file_url.scheme() != "file" { - return Some(file_url); - } - // ignore local files that match any of files listed in `exclude` option - let file_path = file_url.to_file_path().ok()?; - if excludes.matches_path(&file_path) { - None - } else { - Some(file_url) - } - }) - .collect::>(), - ) - } -} - -struct PreparedModuleLoader { - emitter: Arc, - graph_container: Arc, - parsed_source_cache: Arc, -} - -impl PreparedModuleLoader { - pub fn load_prepared_module( - &self, - specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { - if specifier.scheme() == "node" { - unreachable!(); // Node built-in modules should be handled internally. - } - - let graph = self.graph_container.graph(); - match graph.get(specifier) { - Some(deno_graph::Module::Json(JsonModule { - source, - media_type, - specifier, - .. - })) => Ok(ModuleCodeStringSource { - code: source.clone().into(), - found_url: specifier.clone(), - media_type: *media_type, - }), - Some(deno_graph::Module::Js(JsModule { - source, - media_type, - specifier, - .. - })) => { - let code: ModuleCodeString = match media_type { - MediaType::JavaScript - | MediaType::Unknown - | MediaType::Cjs - | MediaType::Mjs - | MediaType::Json => source.clone().into(), - MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { - Default::default() - } - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Jsx - | MediaType::Tsx => { - // get emit text - self - .emitter - .emit_parsed_source(specifier, *media_type, source)? 
- } - MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { - panic!("Unexpected media type {media_type} for {specifier}") - } - }; - - // at this point, we no longer need the parsed source in memory, so free it - self.parsed_source_cache.free(specifier); - - Ok(ModuleCodeStringSource { - code, - found_url: specifier.clone(), - media_type: *media_type, - }) - } - Some( - deno_graph::Module::External(_) - | deno_graph::Module::Node(_) - | deno_graph::Module::Npm(_), - ) - | None => { - let mut msg = format!("Loading unprepared module: {specifier}"); - if let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - Err(anyhow!(msg)) - } - } - } } struct SharedCliModuleLoaderState { + graph_kind: GraphKind, lib_window: TsTypeLib, lib_worker: TsTypeLib, is_inspecting: bool, is_repl: bool, - graph_container: Arc, + code_cache: Option>, + emitter: Arc, + main_module_graph_container: Arc, + module_info_cache: Arc, module_load_preparer: Arc, - prepared_module_loader: PreparedModuleLoader, - resolver: Arc, node_resolver: Arc, npm_module_loader: NpmModuleLoader, - code_cache: Option>, - module_info_cache: Arc, + parsed_source_cache: Arc, + resolver: Arc, } pub struct CliModuleLoaderFactory { @@ -362,18 +235,19 @@ impl CliModuleLoaderFactory { #[allow(clippy::too_many_arguments)] pub fn new( options: &CliOptions, + code_cache: Option>, emitter: Arc, - graph_container: Arc, + main_module_graph_container: Arc, + module_info_cache: Arc, module_load_preparer: Arc, - parsed_source_cache: Arc, - resolver: Arc, node_resolver: Arc, npm_module_loader: NpmModuleLoader, - code_cache: Option>, - module_info_cache: Arc, + parsed_source_cache: Arc, + resolver: Arc, ) -> Self { Self { shared: Arc::new(SharedCliModuleLoaderState { + graph_kind: options.graph_kind(), lib_window: options.ts_type_lib_window(), lib_worker: options.ts_type_lib_worker(), is_inspecting: options.is_inspecting(), @@ -381,34 +255,39 @@ impl CliModuleLoaderFactory { options.sub_command(), DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) ), - prepared_module_loader: PreparedModuleLoader { - emitter, - graph_container: graph_container.clone(), - parsed_source_cache, - }, - graph_container, + code_cache, + emitter, + main_module_graph_container, + module_info_cache, module_load_preparer, - resolver, node_resolver, npm_module_loader, - code_cache, - module_info_cache, + parsed_source_cache, + resolver, }), } } - fn create_with_lib( + fn create_with_lib( &self, + graph_container: TGraphContainer, lib: TsTypeLib, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc { - Rc::new(CliModuleLoader { + ) -> ModuleLoaderAndSourceMapGetter { + let loader = Rc::new(CliModuleLoader { lib, root_permissions, dynamic_permissions, + graph_container, + emitter: self.shared.emitter.clone(), + parsed_source_cache: self.shared.parsed_source_cache.clone(), shared: self.shared.clone(), - }) + }); + ModuleLoaderAndSourceMapGetter { + module_loader: loader.clone(), + source_map_getter: Some(loader), + } } } @@ -417,8 +296,9 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { &self, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc { + ) -> ModuleLoaderAndSourceMapGetter { self.create_with_lib( + (*self.shared.main_module_graph_container).clone(), self.shared.lib_window, root_permissions, dynamic_permissions, @@ -429,22 +309,20 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { &self, root_permissions: 
PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc { + ) -> ModuleLoaderAndSourceMapGetter { self.create_with_lib( + // create a fresh module graph for the worker + WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( + self.shared.graph_kind, + ))), self.shared.lib_worker, root_permissions, dynamic_permissions, ) } - - fn create_source_map_getter(&self) -> Option> { - Some(Rc::new(CliSourceMapGetter { - shared: self.shared.clone(), - })) - } } -struct CliModuleLoader { +struct CliModuleLoader { lib: TsTypeLib, /// The initial set of permissions used to resolve the static imports in the /// worker. These are "allow all" for main worker, and parent thread @@ -454,9 +332,12 @@ struct CliModuleLoader { /// "root permissions" for Web Worker. dynamic_permissions: PermissionsContainer, shared: Arc, + emitter: Arc, + parsed_source_cache: Arc, + graph_container: TGraphContainer, } -impl CliModuleLoader { +impl CliModuleLoader { fn load_sync( &self, specifier: &ModuleSpecifier, @@ -476,10 +357,7 @@ impl CliModuleLoader { { result? } else { - self - .shared - .prepared_module_loader - .load_prepared_module(specifier, maybe_referrer)? + self.load_prepared_module(specifier, maybe_referrer)? }; let code = if self.shared.is_inspecting { // we need the code with the source map in order for @@ -581,7 +459,7 @@ impl CliModuleLoader { }; } - let graph = self.shared.graph_container.graph(); + let graph = self.graph_container.graph(); let maybe_resolved = match graph.get(referrer) { Some(Module::Js(module)) => { module.dependencies.get(specifier).map(|d| &d.maybe_code) @@ -695,9 +573,86 @@ impl CliModuleLoader { .map(|timestamp| timestamp.to_string())?; Ok(Some(timestamp)) } + + fn load_prepared_module( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + ) -> Result { + if specifier.scheme() == "node" { + unreachable!(); // Node built-in modules should be handled internally. + } + + let graph = self.graph_container.graph(); + match graph.get(specifier) { + Some(deno_graph::Module::Json(JsonModule { + source, + media_type, + specifier, + .. + })) => Ok(ModuleCodeStringSource { + code: source.clone().into(), + found_url: specifier.clone(), + media_type: *media_type, + }), + Some(deno_graph::Module::Js(JsModule { + source, + media_type, + specifier, + .. + })) => { + let code: ModuleCodeString = match media_type { + MediaType::JavaScript + | MediaType::Unknown + | MediaType::Cjs + | MediaType::Mjs + | MediaType::Json => source.clone().into(), + MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { + Default::default() + } + MediaType::TypeScript + | MediaType::Mts + | MediaType::Cts + | MediaType::Jsx + | MediaType::Tsx => { + // get emit text + self + .emitter + .emit_parsed_source(specifier, *media_type, source)? 
+ } + MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { + panic!("Unexpected media type {media_type} for {specifier}") + } + }; + + // at this point, we no longer need the parsed source in memory, so free it + self.parsed_source_cache.free(specifier); + + Ok(ModuleCodeStringSource { + code, + found_url: specifier.clone(), + media_type: *media_type, + }) + } + Some( + deno_graph::Module::External(_) + | deno_graph::Module::Node(_) + | deno_graph::Module::Npm(_), + ) + | None => { + let mut msg = format!("Loading unprepared module: {specifier}"); + if let Some(referrer) = maybe_referrer { + msg = format!("{}, imported from: {}", msg, referrer.as_str()); + } + Err(anyhow!(msg)) + } + } + } } -impl ModuleLoader for CliModuleLoader { +impl ModuleLoader + for CliModuleLoader +{ fn resolve( &self, specifier: &str, @@ -747,13 +702,12 @@ impl ModuleLoader for CliModuleLoader { _maybe_referrer: Option, is_dynamic: bool, ) -> Pin>>> { - if let Some(result) = - self.shared.npm_module_loader.maybe_prepare_load(specifier) - { - return Box::pin(deno_core::futures::future::ready(result)); + if self.shared.node_resolver.in_npm_package(specifier) { + return Box::pin(deno_core::futures::future::ready(Ok(()))); } let specifier = specifier.clone(); + let graph_container = self.graph_container.clone(); let module_load_preparer = self.shared.module_load_preparer.clone(); let root_permissions = if is_dynamic { @@ -764,9 +718,19 @@ impl ModuleLoader for CliModuleLoader { let lib = self.lib; async move { + let mut update_permit = graph_container.acquire_update_permit().await; + let graph = update_permit.graph_mut(); module_load_preparer - .prepare_module_load(vec![specifier], is_dynamic, lib, root_permissions) - .await + .prepare_module_load( + graph, + &[specifier], + is_dynamic, + lib, + root_permissions, + ) + .await?; + update_permit.commit(); + Ok(()) } .boxed_local() } @@ -795,15 +759,13 @@ impl ModuleLoader for CliModuleLoader { ); } } - async {}.boxed_local() + std::future::ready(()).boxed_local() } } -struct CliSourceMapGetter { - shared: Arc, -} - -impl SourceMapGetter for CliSourceMapGetter { +impl SourceMapGetter + for CliModuleLoader +{ fn get_source_map(&self, file_name: &str) -> Option> { let specifier = resolve_url(file_name).ok()?; match specifier.scheme() { @@ -812,11 +774,7 @@ impl SourceMapGetter for CliSourceMapGetter { "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self - .shared - .prepared_module_loader - .load_prepared_module(&specifier, None) - .ok()?; + let source = self.load_prepared_module(&specifier, None).ok()?; source_map_from_code(&source.code) } @@ -825,7 +783,7 @@ impl SourceMapGetter for CliSourceMapGetter { file_name: &str, line_number: usize, ) -> Option { - let graph = self.shared.graph_container.graph(); + let graph = self.graph_container.graph(); let code = match graph.get(&resolve_url(file_name).ok()?) { Some(deno_graph::Module::Js(module)) => &module.source, Some(deno_graph::Module::Json(module)) => &module.source, @@ -844,3 +802,54 @@ impl SourceMapGetter for CliSourceMapGetter { } } } + +/// Holds the `ModuleGraph` in workers. +#[derive(Clone)] +struct WorkerModuleGraphContainer { + // Allow only one request to update the graph data at a time, + // but allow other requests to read from it at any time even + // while another request is updating the data. 
+ update_queue: Rc<deno_core::unsync::TaskQueue>, + inner: Rc<RefCell<Arc<ModuleGraph>>>, +} + +impl WorkerModuleGraphContainer { + pub fn new(module_graph: Arc<ModuleGraph>) -> Self { + Self { + update_queue: Default::default(), + inner: Rc::new(RefCell::new(module_graph)), + } + } +} + +impl ModuleGraphContainer for WorkerModuleGraphContainer { + async fn acquire_update_permit(&self) -> impl ModuleGraphUpdatePermit { + let permit = self.update_queue.acquire().await; + WorkerModuleGraphUpdatePermit { + permit, + inner: self.inner.clone(), + graph: (**self.inner.borrow()).clone(), + } + } + + fn graph(&self) -> Arc<ModuleGraph> { + self.inner.borrow().clone() + } +} + +struct WorkerModuleGraphUpdatePermit { + permit: deno_core::unsync::TaskQueuePermit, + inner: Rc<RefCell<Arc<ModuleGraph>>>, + graph: ModuleGraph, +} + +impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { + fn graph_mut(&mut self) -> &mut ModuleGraph { + &mut self.graph + } + + fn commit(self) { + *self.inner.borrow_mut() = Arc::new(self.graph); + drop(self.permit); // explicit drop for clarity + } +} diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index 1301c28f8c7bea..c88b845857524c 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.79.0" +version = "0.82.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -15,7 +15,6 @@ path = "./lib.rs" proc-macro = true [dependencies] -proc-macro2.workspace = true quote.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/cli/npm/managed/cache.rs b/cli/npm/managed/cache.rs index 9ba5c1c9963082..44b98fcee0b0d8 100644 --- a/cli/npm/managed/cache.rs +++ b/cli/npm/managed/cache.rs @@ -25,6 +25,7 @@ use crate::util::fs::hard_link_dir_recursive; use crate::util::progress_bar::ProgressBar; use super::tarball::verify_and_extract_tarball; +use super::tarball::TarballExtractionMode; /// Stores a single copy of npm packages in a cache. #[derive(Debug)] pub struct NpmCache { @@ -69,7 +70,7 @@ impl NpmCache { /// to ensure a package is only downloaded once per run of the CLI. This /// prevents downloads from re-occurring when someone has `--reload` and /// and imports a dynamic import that imports the same package again for example. 
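The `WorkerModuleGraphContainer` added above is a small copy-on-write cell: readers cheaply clone the inner `Arc<ModuleGraph>` and keep a stable snapshot, while a writer takes a queue permit, mutates a private clone of the graph, and publishes it by swapping the `Arc` on commit. A stripped-down, dependency-free sketch of the same pattern, with a `Vec<String>` standing in for `ModuleGraph` and the `TaskQueue` serialization omitted:

```rust
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;

// `Graph` stands in for deno_graph::ModuleGraph in this sketch.
type Graph = Vec<String>;

#[derive(Clone, Default)]
struct GraphContainer {
    inner: Rc<RefCell<Arc<Graph>>>,
}

struct UpdatePermit {
    inner: Rc<RefCell<Arc<Graph>>>,
    graph: Graph, // private working copy
}

impl GraphContainer {
    // Readers get a cheap Arc clone; they keep seeing the old snapshot
    // even while an update is in progress.
    fn graph(&self) -> Arc<Graph> {
        self.inner.borrow().clone()
    }

    // The real container serializes writers with a TaskQueue permit;
    // this sketch omits that and just hands out a working copy.
    fn acquire_update_permit(&self) -> UpdatePermit {
        UpdatePermit {
            inner: self.inner.clone(),
            graph: (**self.inner.borrow()).clone(),
        }
    }
}

impl UpdatePermit {
    fn graph_mut(&mut self) -> &mut Graph {
        &mut self.graph
    }

    // Publish the working copy by swapping the Arc in one step.
    fn commit(self) {
        *self.inner.borrow_mut() = Arc::new(self.graph);
    }
}

fn main() {
    let container = GraphContainer::default();
    let snapshot = container.graph(); // taken before the update

    let mut permit = container.acquire_update_permit();
    permit.graph_mut().push("file:///main.ts".to_string());
    permit.commit();

    assert!(snapshot.is_empty()); // old readers are unaffected
    assert_eq!(container.graph().len(), 1); // new readers see the update
}
```

Because readers only ever hold an `Arc` snapshot, a failed or abandoned update (permit dropped without `commit`) leaves the published graph untouched.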
- fn should_use_global_cache_for_package(&self, package: &PackageNv) -> bool { + fn should_use_cache_for_package(&self, package: &PackageNv) -> bool { self.cache_setting.should_use_for_npm_package(&package.name) || !self .previously_reloaded_packages @@ -91,26 +92,23 @@ impl NpmCache { async fn ensure_package_inner( &self, - package: &PackageNv, + package_nv: &PackageNv, dist: &NpmPackageVersionDistInfo, registry_url: &Url, ) -> Result<(), AnyError> { let package_folder = self .cache_dir - .package_folder_for_name_and_version(package, registry_url); - if self.should_use_global_cache_for_package(package) - && self.fs.exists_sync(&package_folder) - // if this file exists, then the package didn't successfully extract - // the first time, or another process is currently extracting the zip file - && !self.fs.exists_sync(&package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME)) - { + .package_folder_for_name_and_version(package_nv, registry_url); + let should_use_cache = self.should_use_cache_for_package(package_nv); + let package_folder_exists = self.fs.exists_sync(&package_folder); + if should_use_cache && package_folder_exists { return Ok(()); } else if self.cache_setting == CacheSetting::Only { return Err(custom_error( "NotCached", format!( "An npm specifier not found in cache: \"{}\", --cached-only is specified.", - &package.name + &package_nv.name ) ) ); @@ -127,7 +125,31 @@ impl NpmCache { .await?; match maybe_bytes { Some(bytes) => { - verify_and_extract_tarball(package, &bytes, dist, &package_folder) + let extraction_mode = if should_use_cache || !package_folder_exists { + TarballExtractionMode::SiblingTempDir + } else { + // The user ran with `--reload`, so overwrite the package instead of + // deleting it since the package might get corrupted if a user kills + // their deno process while it's deleting a package directory + // + // We can't rename this folder and delete it because the folder + // may be in use by another process or may now contain hardlinks, + // which will cause windows to throw an "AccessDenied" error when + // renaming. So we settle for overwriting. + TarballExtractionMode::Overwrite + }; + let dist = dist.clone(); + let package_nv = package_nv.clone(); + deno_core::unsync::spawn_blocking(move || { + verify_and_extract_tarball( + &package_nv, + &bytes, + &dist, + &package_folder, + extraction_mode, + ) + }) + .await? 
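The `extraction_mode` selection above is the heart of this cache change: a fresh extraction goes through a sibling temp directory that is renamed into place, while `--reload` over an existing folder overwrites in place, since deleting first could leave a corrupted package if the process is killed mid-delete, and renaming a directory that contains hard links fails on Windows with "AccessDenied". A condensed sketch of the decision; the flag names here are simplified stand-ins for the locals in `ensure_package_inner`:

```rust
// Stand-in for the TarballExtractionMode enum in tarball.rs.
#[derive(Debug, PartialEq)]
enum ExtractionMode {
    Overwrite,
    SiblingTempDir,
}

// `should_use_cache` is false when the user passed --reload for this
// package; `folder_exists` is whether the destination is already on disk.
fn choose_mode(should_use_cache: bool, folder_exists: bool) -> ExtractionMode {
    if should_use_cache || !folder_exists {
        // nothing valuable is at the destination yet (or the cached copy
        // is trusted), so extract to a sibling temp dir and move it into
        // place in one step
        ExtractionMode::SiblingTempDir
    } else {
        // --reload over an existing folder: overwrite in place rather than
        // delete-then-extract, so a killed process cannot leave a
        // half-deleted package, and no directory rename is needed that
        // Windows might reject for folders with hard links
        ExtractionMode::Overwrite
    }
}

fn main() {
    assert_eq!(choose_mode(true, true), ExtractionMode::SiblingTempDir);
    assert_eq!(choose_mode(false, false), ExtractionMode::SiblingTempDir);
    assert_eq!(choose_mode(false, true), ExtractionMode::Overwrite);
}
```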
} None => { bail!("Could not find npm package tarball at: {}", dist.tarball); @@ -150,7 +172,7 @@ impl NpmCache { .package_folder_for_id(folder_id, registry_url); if package_folder.exists() - // if this file exists, then the package didn't successfully extract + // if this file exists, then the package didn't successfully initialize // the first time, or another process is currently extracting the zip file && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists() && self.cache_setting.should_use_for_npm_package(&folder_id.nv.name) @@ -161,6 +183,9 @@ impl NpmCache { let original_package_folder = self .cache_dir .package_folder_for_name_and_version(&folder_id.nv, registry_url); + + // it seems Windows does an "AccessDenied" error when moving a + // directory with hard links, so that's why this solution is done with_folder_sync_lock(&folder_id.nv, &package_folder, || { hard_link_dir_recursive(&original_package_folder, &package_folder) })?; @@ -206,7 +231,7 @@ impl NpmCache { const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock"; -pub fn with_folder_sync_lock( +fn with_folder_sync_lock( package: &PackageNv, output_folder: &Path, action: impl FnOnce() -> Result<(), AnyError>, diff --git a/cli/npm/managed/tarball.rs b/cli/npm/managed/tarball.rs index 1267b13d8cbddb..e2d242e6623e0a 100644 --- a/cli/npm/managed/tarball.rs +++ b/cli/npm/managed/tarball.rs @@ -2,12 +2,14 @@ use std::collections::HashSet; use std::fs; +use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; use base64::prelude::BASE64_STANDARD; use base64::Engine; use deno_core::anyhow::bail; +use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_npm::registry::NpmPackageVersionDistInfo; use deno_npm::registry::NpmPackageVersionDistInfoIntegrity; @@ -16,19 +18,76 @@ use flate2::read::GzDecoder; use tar::Archive; use tar::EntryType; -use super::cache::with_folder_sync_lock; +use crate::util::path::get_atomic_dir_path; + +#[derive(Debug, Copy, Clone)] +pub enum TarballExtractionMode { + /// Overwrites the destination directory without deleting any files. + Overwrite, + /// Creates and writes to a sibling temporary directory. When done, moves + /// it to the final destination. + /// + /// This is more robust than `Overwrite` as it better handles multiple + /// processes writing to the directory at the same time. 
+ SiblingTempDir, +} pub fn verify_and_extract_tarball( - package: &PackageNv, + package_nv: &PackageNv, data: &[u8], dist_info: &NpmPackageVersionDistInfo, output_folder: &Path, + extraction_mode: TarballExtractionMode, ) -> Result<(), AnyError> { - verify_tarball_integrity(package, data, &dist_info.integrity())?; + verify_tarball_integrity(package_nv, data, &dist_info.integrity())?; + + match extraction_mode { + TarballExtractionMode::Overwrite => extract_tarball(data, output_folder), + TarballExtractionMode::SiblingTempDir => { + let temp_dir = get_atomic_dir_path(output_folder); + extract_tarball(data, &temp_dir)?; + rename_with_retries(&temp_dir, output_folder) + .map_err(AnyError::from) + .context("Failed moving extracted tarball to final destination.") + } + } +} + +fn rename_with_retries( + temp_dir: &Path, + output_folder: &Path, +) -> Result<(), std::io::Error> { + fn already_exists(err: &std::io::Error, output_folder: &Path) -> bool { + // Windows will do an "Access is denied" error + err.kind() == ErrorKind::AlreadyExists || output_folder.exists() + } + + let mut count = 0; + // renaming might be flaky if a lot of processes are trying + // to do this, so retry a few times + loop { + match fs::rename(temp_dir, output_folder) { + Ok(_) => return Ok(()), + Err(err) if already_exists(&err, output_folder) => { + // another process copied here, just cleanup + let _ = fs::remove_dir_all(temp_dir); + return Ok(()); + } + Err(err) => { + count += 1; + if count > 5 { + // too many retries, cleanup and return the error + let _ = fs::remove_dir_all(temp_dir); + return Err(err); + } - with_folder_sync_lock(package, output_folder, || { - extract_tarball(data, output_folder) - }) + // wait a bit before retrying... this should be very rare or only + // in error cases, so ok to sleep a bit + let sleep_ms = std::cmp::min(100, 20 * count); + std::thread::sleep(std::time::Duration::from_millis(sleep_ms)); + } + } + } } fn verify_tarball_integrity( @@ -150,6 +209,7 @@ fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> { #[cfg(test)] mod test { use deno_semver::Version; + use test_util::TempDir; use super::*; @@ -240,4 +300,25 @@ mod test { ) .is_ok()); } + + #[test] + fn rename_with_retries_succeeds_exists() { + let temp_dir = TempDir::new(); + let folder_1 = temp_dir.path().join("folder_1"); + let folder_2 = temp_dir.path().join("folder_2"); + + folder_1.create_dir_all(); + folder_1.join("a.txt").write("test"); + folder_2.create_dir_all(); + // this will not end up in the output as rename_with_retries assumes + // the folders ending up at the destination are the same + folder_2.join("b.txt").write("test2"); + + let dest_folder = temp_dir.path().join("dest_folder"); + + rename_with_retries(folder_1.as_path(), dest_folder.as_path()).unwrap(); + rename_with_retries(folder_2.as_path(), dest_folder.as_path()).unwrap(); + assert!(dest_folder.join("a.txt").exists()); + assert!(!dest_folder.join("b.txt").exists()); + } } diff --git a/cli/ops/jupyter.rs b/cli/ops/jupyter.rs index e7e206de5819b2..1c60bc2bc333ba 100644 --- a/cli/ops/jupyter.rs +++ b/cli/ops/jupyter.rs @@ -76,13 +76,13 @@ pub fn op_print( if is_err { if let Err(err) = sender.send(StdioMsg::Stderr(msg.into())) { - eprintln!("Failed to send stderr message: {}", err); + log::error!("Failed to send stderr message: {}", err); } return Ok(()); } if let Err(err) = sender.send(StdioMsg::Stdout(msg.into())) { - eprintln!("Failed to send stdout message: {}", err); + log::error!("Failed to send stdout message: {}", err); } 
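`rename_with_retries` above treats losing the race as success: if another process already renamed its extraction into place, the temp directory is deleted and the call returns `Ok`, on the assumption that both copies are identical. The same last-writer-wins idea applies to any cache of interchangeable entries; here is a minimal sketch for a single cache file (the helper name and layout are illustrative, not part of the Deno codebase):

```rust
use std::fs;
use std::io::{self, ErrorKind, Write};
use std::path::Path;

// Atomically publish `contents` at `dest` by writing a sibling temp file
// and renaming it into place. Assumes, like the npm cache, that every
// process writing `dest` writes the same bytes, so losing the race is fine.
fn publish_cache_file(dest: &Path, contents: &[u8]) -> io::Result<()> {
    // a real implementation would randomize the temp name, as
    // get_atomic_dir_path does for directories
    let temp = dest.with_extension("tmp");
    {
        let mut f = fs::File::create(&temp)?;
        f.write_all(contents)?;
        f.sync_all()?; // flush before the rename makes it visible
    }
    match fs::rename(&temp, dest) {
        Ok(()) => Ok(()),
        // another process beat us to it: discard our copy and succeed
        Err(err) if err.kind() == ErrorKind::AlreadyExists || dest.exists() => {
            let _ = fs::remove_file(&temp);
            Ok(())
        }
        Err(err) => {
            let _ = fs::remove_file(&temp);
            Err(err)
        }
    }
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("atomic_publish_demo");
    fs::create_dir_all(&dir)?;
    let dest = dir.join("entry.bin");
    publish_cache_file(&dest, b"cached bytes")?;
    publish_cache_file(&dest, b"cached bytes")?; // a second writer with equal contents is harmless
    println!("published {} bytes", fs::read(&dest)?.len());
    Ok(())
}
```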
Ok(()) } diff --git a/cli/resolver.rs b/cli/resolver.rs index 32233e961f82a9..4b5c99292e6cb9 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -1,5 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use dashmap::DashMap; use deno_ast::MediaType; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; @@ -33,7 +34,6 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use import_map::ImportMap; use std::borrow::Cow; -use std::collections::HashMap; use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; @@ -91,8 +91,8 @@ impl CliNodeResolver { } } - pub fn in_npm_package(&self, referrer: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(referrer) + pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + self.npm_resolver.in_npm_package(specifier) } pub fn get_closest_package_json( @@ -249,6 +249,7 @@ impl CliNodeResolver { } } +#[derive(Clone)] pub struct NpmModuleLoader { cjs_resolutions: Arc, node_code_translator: Arc, @@ -271,18 +272,6 @@ impl NpmModuleLoader { } } - pub fn maybe_prepare_load( - &self, - specifier: &ModuleSpecifier, - ) -> Option> { - if self.node_resolver.in_npm_package(specifier) { - // nothing to prepare - Some(Ok(())) - } else { - None - } - } - pub fn load_sync_if_in_npm_package( &self, specifier: &ModuleSpecifier, @@ -858,38 +847,6 @@ impl NpmResolver for CliGraphResolver { } } -#[derive(Debug)] -struct SloppyImportsStatCache { - fs: Arc, - cache: Mutex>>, -} - -impl SloppyImportsStatCache { - pub fn new(fs: Arc) -> Self { - Self { - fs, - cache: Default::default(), - } - } - - pub fn stat_sync(&self, path: &Path) -> Option { - // there will only ever be one thread in here at a - // time, so it's ok to hold the lock for so long - let mut cache = self.cache.lock(); - if let Some(entry) = cache.get(path) { - return *entry; - } - - let entry = self - .fs - .stat_sync(path) - .ok() - .and_then(|stat| SloppyImportsFsEntry::from_fs_stat(&stat)); - cache.insert(path.to_owned(), entry); - entry - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum SloppyImportsFsEntry { File, @@ -989,33 +946,27 @@ impl<'a> SloppyImportsResolution<'a> { #[derive(Debug)] pub struct SloppyImportsResolver { - stat_cache: SloppyImportsStatCache, + fs: Arc, + cache: Option>>, } impl SloppyImportsResolver { pub fn new(fs: Arc) -> Self { Self { - stat_cache: SloppyImportsStatCache::new(fs), + fs, + cache: Some(Default::default()), } } - pub fn resolve_with_fs<'a>( - fs: &dyn FileSystem, - specifier: &'a ModuleSpecifier, - mode: ResolutionMode, - ) -> SloppyImportsResolution<'a> { - Self::resolve_with_stat_sync(specifier, mode, |path| { - fs.stat_sync(path) - .ok() - .and_then(|stat| SloppyImportsFsEntry::from_fs_stat(&stat)) - }) + pub fn new_without_stat_cache(fs: Arc) -> Self { + Self { fs, cache: None } } - pub fn resolve_with_stat_sync( - specifier: &ModuleSpecifier, + pub fn resolve<'a>( + &self, + specifier: &'a ModuleSpecifier, mode: ResolutionMode, - stat_sync: impl Fn(&Path) -> Option, - ) -> SloppyImportsResolution { + ) -> SloppyImportsResolution<'a> { fn path_without_ext( path: &Path, media_type: MediaType, @@ -1065,7 +1016,7 @@ impl SloppyImportsResolver { } let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> = - match (stat_sync)(&path) { + match self.stat_sync(&path) { Some(SloppyImportsFsEntry::File) => { if mode.is_types() { let media_type = MediaType::from_specifier(specifier); @@ -1243,7 +1194,7 @@ impl SloppyImportsResolver { }; for 
(probe_path, reason) in probe_paths { - if (stat_sync)(&probe_path) == Some(SloppyImportsFsEntry::File) { + if self.stat_sync(&probe_path) == Some(SloppyImportsFsEntry::File) { if let Ok(specifier) = ModuleSpecifier::from_file_path(probe_path) { match reason { SloppyImportsResolutionReason::JsToTs => { @@ -1263,14 +1214,22 @@ impl SloppyImportsResolver { SloppyImportsResolution::None(specifier) } - pub fn resolve<'a>( - &self, - specifier: &'a ModuleSpecifier, - mode: ResolutionMode, - ) -> SloppyImportsResolution<'a> { - Self::resolve_with_stat_sync(specifier, mode, |path| { - self.stat_cache.stat_sync(path) - }) + fn stat_sync(&self, path: &Path) -> Option { + if let Some(cache) = &self.cache { + if let Some(entry) = cache.get(path) { + return *entry; + } + } + + let entry = self + .fs + .stat_sync(path) + .ok() + .and_then(|stat| SloppyImportsFsEntry::from_fs_stat(&stat)); + if let Some(cache) = &self.cache { + cache.insert(path.to_owned(), entry); + } + entry } } @@ -1278,7 +1237,6 @@ impl SloppyImportsResolver { mod test { use std::collections::BTreeMap; - use deno_runtime::deno_fs::RealFs; use test_util::TestContext; use super::*; @@ -1346,21 +1304,8 @@ mod test { #[test] fn test_unstable_sloppy_imports() { fn resolve(specifier: &ModuleSpecifier) -> SloppyImportsResolution { - SloppyImportsResolver::resolve_with_stat_sync( - specifier, - ResolutionMode::Execution, - |path| { - RealFs.stat_sync(path).ok().and_then(|stat| { - if stat.is_file { - Some(SloppyImportsFsEntry::File) - } else if stat.is_directory { - Some(SloppyImportsFsEntry::Dir) - } else { - None - } - }) - }, - ) + SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)) + .resolve(specifier, ResolutionMode::Execution) } let context = TestContext::default(); diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 4b7962a5f0c0cf..37720bd541b33f 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -27,6 +27,7 @@ use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; +use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; use deno_core::anyhow::Context; @@ -282,30 +283,30 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { &self, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc { - Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - root_permissions, - dynamic_permissions, - }) + ) -> ModuleLoaderAndSourceMapGetter { + ModuleLoaderAndSourceMapGetter { + module_loader: Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + root_permissions, + dynamic_permissions, + }), + source_map_getter: None, + } } fn create_for_worker( &self, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc { - Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - root_permissions, - dynamic_permissions, - }) - } - - fn create_source_map_getter( - &self, - ) -> Option> { - None + ) -> ModuleLoaderAndSourceMapGetter { + ModuleLoaderAndSourceMapGetter { + module_loader: Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + root_permissions, + dynamic_permissions, + }), + source_map_getter: None, + } } } diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index a6fb9177639c8d..dd94205cb8001f 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -8,7 +8,6 @@ use crate::display::write_json_to_stdout; use 
crate::factory::CliFactory; use crate::factory::CliFactoryBuilder; use crate::graph_util::has_graph_root_local_dependent_changed; -use crate::module_loader::ModuleLoadPreparer; use crate::ops; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; @@ -145,24 +144,6 @@ fn create_reporter( Box::new(ConsoleReporter::new(show_output)) } -/// Type check a collection of module and document specifiers. -async fn check_specifiers( - cli_options: &CliOptions, - module_load_preparer: &ModuleLoadPreparer, - specifiers: Vec, -) -> Result<(), AnyError> { - let lib = cli_options.ts_type_lib_window(); - module_load_preparer - .prepare_module_load( - specifiers, - false, - lib, - PermissionsContainer::allow_all(), - ) - .await?; - Ok(()) -} - /// Run a single specifier as an executable bench module. async fn bench_specifier( worker_factory: Arc, @@ -445,12 +426,8 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers( - cli_options, - factory.module_load_preparer().await?, - specifiers.clone(), - ) - .await?; + let main_graph_container = factory.main_module_graph_container().await?; + main_graph_container.check_specifiers(&specifiers).await?; if bench_options.no_run { return Ok(()); @@ -507,7 +484,6 @@ pub async fn run_benchmarks_with_watch( let graph_kind = cli_options.type_check_mode().as_graph_kind(); let module_graph_creator = factory.module_graph_creator().await?; - let module_load_preparer = factory.module_load_preparer().await?; let bench_modules = collect_specifiers( bench_options.files.clone(), @@ -559,7 +535,10 @@ pub async fn run_benchmarks_with_watch( .filter(|specifier| bench_modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(cli_options, module_load_preparer, specifiers.clone()) + factory + .main_module_graph_container() + .await? 
+ .check_specifiers(&specifiers) .await?; if bench_options.no_run { diff --git a/cli/tools/bench/reporters.rs b/cli/tools/bench/reporters.rs index 9cc035f8f15922..b5229cf0adf832 100644 --- a/cli/tools/bench/reporters.rs +++ b/cli/tools/bench/reporters.rs @@ -50,6 +50,7 @@ impl JsonReporter { } } +#[allow(clippy::print_stdout)] impl BenchReporter for JsonReporter { fn report_group_summary(&mut self) {} #[cold] @@ -58,7 +59,7 @@ impl BenchReporter for JsonReporter { fn report_end(&mut self, _report: &BenchReport) { match write_json_to_stdout(self) { Ok(_) => (), - Err(e) => println!("{e}"), + Err(e) => println!("{}", e), } } @@ -118,6 +119,7 @@ impl ConsoleReporter { } } +#[allow(clippy::print_stdout)] impl BenchReporter for ConsoleReporter { #[cold] fn report_plan(&mut self, plan: &BenchPlan) { diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index 7701b60248ac5e..ef058f0d078756 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -125,7 +125,10 @@ async fn bundle_action( ); } } else { - println!("{}", bundle_output.code); + #[allow(clippy::print_stdout)] + { + println!("{}", bundle_output.code); + } } Ok(()) } diff --git a/cli/tools/coverage/reporter.rs b/cli/tools/coverage/reporter.rs index f86fd186fede7d..6547f2036dc2cd 100644 --- a/cli/tools/coverage/reporter.rs +++ b/cli/tools/coverage/reporter.rs @@ -52,7 +52,12 @@ pub trait CoverageReporter { file_reports: &'a Vec<(CoverageReport, String)>, ) -> CoverageSummary { let urls = file_reports.iter().map(|rep| &rep.0.url).collect(); - let root = util::find_root(urls).unwrap().to_file_path().unwrap(); + let root = match util::find_root(urls) + .and_then(|root_path| root_path.to_file_path().ok()) + { + Some(path) => path, + None => return HashMap::new(), + }; // summary by file or directory // tuple of (line hit, line miss, branch hit, branch miss, parent) let mut summary = HashMap::new(); @@ -103,6 +108,7 @@ struct SummaryCoverageReporter { file_reports: Vec<(CoverageReport, String)>, } +#[allow(clippy::print_stdout)] impl SummaryCoverageReporter { pub fn new() -> SummaryCoverageReporter { SummaryCoverageReporter { @@ -166,6 +172,7 @@ impl SummaryCoverageReporter { } } +#[allow(clippy::print_stdout)] impl CoverageReporter for SummaryCoverageReporter { fn report( &mut self, @@ -312,6 +319,7 @@ impl DetailedCoverageReporter { } } +#[allow(clippy::print_stdout)] impl CoverageReporter for DetailedCoverageReporter { fn report( &mut self, @@ -416,7 +424,7 @@ impl CoverageReporter for HtmlCoverageReporter { ) .unwrap(); - println!("HTML coverage report has been generated at {}", root_report); + log::info!("HTML coverage report has been generated at {}", root_report); } } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index b16639f99185e3..ef3d48cb5138fa 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -396,8 +396,8 @@ async fn format_source_files( } Err(e) => { let _g = output_lock.lock(); - eprintln!("Error formatting: {}", file_path.to_string_lossy()); - eprintln!(" {e}"); + log::error!("Error formatting: {}", file_path.to_string_lossy()); + log::error!(" {e}"); } } Ok(()) @@ -495,6 +495,7 @@ fn format_stdin(fmt_options: FmtOptions, ext: &str) -> Result<(), AnyError> { let file_path = PathBuf::from(format!("_stdin.{ext}")); let formatted_text = format_file(&file_path, &source, &fmt_options.options)?; if fmt_options.check { + #[allow(clippy::print_stdout)] if formatted_text.is_some() { println!("Not formatted stdin"); } diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 7c1d6761a2907f..19975571b7ea73 100644 --- 
a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -97,6 +97,7 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { Ok(()) } +#[allow(clippy::print_stdout)] fn print_cache_info( factory: &CliFactory, json: bool, diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 9ff17ccc304740..b13dea6fd90db4 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -284,8 +284,9 @@ pub async fn install_command( }; // ensure the module is cached - CliFactory::from_flags(flags.clone())? - .module_load_preparer() + let factory = CliFactory::from_flags(flags.clone())?; + factory + .main_module_graph_container() .await? .load_and_type_check_files(&[install_flags_global.module_url.clone()]) .await?; diff --git a/cli/tools/jupyter/install.rs b/cli/tools/jupyter/install.rs index 0c9b8b3e637558..69a75837e19db6 100644 --- a/cli/tools/jupyter/install.rs +++ b/cli/tools/jupyter/install.rs @@ -27,13 +27,13 @@ pub fn status() -> Result<(), AnyError> { if let Some(specs) = json_output.get("kernelspecs") { if let Some(specs_obj) = specs.as_object() { if specs_obj.contains_key("deno") { - println!("✅ Deno kernel already installed"); + log::info!("✅ Deno kernel already installed"); return Ok(()); } } } - println!("ℹ️ Deno kernel is not yet installed, run `deno jupyter --install` to set it up"); + log::warn!("ℹ️ Deno kernel is not yet installed, run `deno jupyter --install` to set it up"); Ok(()) } @@ -108,6 +108,6 @@ pub fn install() -> Result<(), AnyError> { } let _ = std::fs::remove_dir(temp_dir); - println!("✅ Deno kernelspec installed successfully."); + log::info!("✅ Deno kernelspec installed successfully."); Ok(()) } diff --git a/cli/tools/jupyter/server.rs b/cli/tools/jupyter/server.rs index 2107dcfbfcb26e..4021cf6a31289b 100644 --- a/cli/tools/jupyter/server.rs +++ b/cli/tools/jupyter/server.rs @@ -75,7 +75,7 @@ impl JupyterServer { let handle1 = deno_core::unsync::spawn(async move { if let Err(err) = Self::handle_heartbeat(&mut heartbeat).await { - eprintln!("Heartbeat error: {}", err); + log::error!("Heartbeat error: {}", err); } }); @@ -85,14 +85,14 @@ impl JupyterServer { if let Err(err) = Self::handle_control(control_socket, cancel_handle).await { - eprintln!("Control error: {}", err); + log::error!("Control error: {}", err); } } }); let handle3 = deno_core::unsync::spawn(async move { if let Err(err) = server.handle_shell(shell_socket).await { - eprintln!("Shell error: {}", err); + log::error!("Shell error: {}", err); } }); @@ -137,7 +137,7 @@ impl JupyterServer { .await; if let Err(err) = result { - eprintln!("Output {} error: {}", name, err); + log::error!("Output {} error: {}", name, err); } } } @@ -166,10 +166,10 @@ impl JupyterServer { cancel_handle.cancel(); } "interrupt_request" => { - eprintln!("Interrupt request currently not supported"); + log::error!("Interrupt request currently not supported"); } _ => { - eprintln!( + log::error!( "Unrecognized control message type: {}", msg.message_type() ); @@ -307,7 +307,7 @@ impl JupyterServer { // We don't handle these messages } _ => { - eprintln!("Unrecognized shell message type: {}", msg.message_type()); + log::error!("Unrecognized shell message type: {}", msg.message_type()); } } @@ -386,12 +386,13 @@ impl JupyterServer { tokio::time::sleep(std::time::Duration::from_millis(5)).await; } else if let Some(exception_details) = exception_details { // Determine the exception value and name - let (name, message, stack) = - if let Some(exception) = exception_details.exception { - let result = self - .repl_session 
- .call_function_on_args( - r#" + let (name, message, stack) = if let Some(exception) = + exception_details.exception + { + let result = self + .repl_session + .call_function_on_args( + r#" function(object) { if (object instanceof Error) { const name = "name" in object ? String(object.name) : ""; @@ -404,32 +405,32 @@ impl JupyterServer { } } "# - .into(), - &[exception], - ) - .await?; + .into(), + &[exception], + ) + .await?; - match result.result.value { - Some(serde_json::Value::String(str)) => { - if let Ok(object) = - serde_json::from_str::>(&str) - { - let get = |k| object.get(k).cloned().unwrap_or_default(); - (get("name"), get("message"), get("stack")) - } else { - eprintln!("Unexpected result while parsing JSON {str}"); - ("".into(), "".into(), "".into()) - } - } - _ => { - eprintln!("Unexpected result while parsing exception {result:?}"); + match result.result.value { + Some(serde_json::Value::String(str)) => { + if let Ok(object) = + serde_json::from_str::>(&str) + { + let get = |k| object.get(k).cloned().unwrap_or_default(); + (get("name"), get("message"), get("stack")) + } else { + log::error!("Unexpected result while parsing JSON {str}"); ("".into(), "".into(), "".into()) } } - } else { - eprintln!("Unexpectedly missing exception {exception_details:?}"); - ("".into(), "".into(), "".into()) - }; + _ => { + log::error!("Unexpected result while parsing exception {result:?}"); + ("".into(), "".into(), "".into()) + } + } + } else { + log::error!("Unexpectedly missing exception {exception_details:?}"); + ("".into(), "".into(), "".into()) + }; let stack = if stack.is_empty() { format!( @@ -546,7 +547,7 @@ async fn publish_result( if let Some(exception_details) = &response.exception_details { // If the object doesn't have a Jupyter.display method or it throws an // exception, we just ignore it and let the caller handle it. 
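The reindented block above round-trips a V8 exception through a JS helper that serializes `{ name, message, stack }` to a JSON string, then parses it on the Rust side into a `HashMap`. A small sketch of just that Rust half, assuming only `serde_json`; the hardcoded payload stands in for the inspector response:

```rust
use std::collections::HashMap;

// Parse the JSON string produced by the JS helper into
// (name, message, stack), defaulting each field to "" when absent,
// mirroring the `get` closure used in the server code above.
fn parse_exception(json: &str) -> (String, String, String) {
    match serde_json::from_str::<HashMap<String, String>>(json) {
        Ok(object) => {
            let get = |k: &str| object.get(k).cloned().unwrap_or_default();
            (get("name"), get("message"), get("stack"))
        }
        Err(_) => {
            // the real code logs "Unexpected result while parsing JSON" here
            (String::new(), String::new(), String::new())
        }
    }
}

fn main() {
    let payload =
        r#"{"name":"TypeError","message":"x is not a function","stack":"TypeError: x"}"#;
    let (name, message, stack) = parse_exception(payload);
    assert_eq!(name, "TypeError");
    assert_eq!(message, "x is not a function");
    assert!(!stack.is_empty());
}
```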
- eprintln!("Exception encountered: {}", exception_details.text); + log::error!("Exception encountered: {}", exception_details.text); return Ok(None); } diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index 03f5b8676eeba6..aeb9199765319c 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -279,6 +279,7 @@ fn collect_lint_files( .collect_file_patterns(files) } +#[allow(clippy::print_stdout)] pub fn print_rules_list(json: bool, maybe_rules_tags: Option>) { let lint_rules = if maybe_rules_tags.is_none() { rules::get_all_rules() @@ -646,12 +647,12 @@ impl LintReporter for PrettyLintReporter { } } - eprintln!("{}", d.display()); + log::error!("{}", d.display()); } fn visit_error(&mut self, file_path: &str, err: &AnyError) { - eprintln!("Error linting: {file_path}"); - eprintln!(" {err}"); + log::error!("Error linting: {file_path}"); + log::error!(" {err}"); } fn close(&mut self, check_count: usize) { @@ -694,7 +695,7 @@ impl LintReporter for CompactLintReporter { match d.range() { Some((text_info, range)) => { let line_and_column = text_info.line_and_column_display(range.start); - eprintln!( + log::error!( "{}: line {}, col {} - {} ({})", d.specifier(), line_and_column.line_number, @@ -704,14 +705,14 @@ impl LintReporter for CompactLintReporter { ) } None => { - eprintln!("{}: {} ({})", d.specifier(), d.message(), d.code()) + log::error!("{}: {} ({})", d.specifier(), d.message(), d.code()) } } } fn visit_error(&mut self, file_path: &str, err: &AnyError) { - eprintln!("Error linting: {file_path}"); - eprintln!(" {err}"); + log::error!("Error linting: {file_path}"); + log::error!(" {err}"); } fn close(&mut self, check_count: usize) { @@ -812,7 +813,10 @@ impl LintReporter for JsonLintReporter { fn close(&mut self, _check_count: usize) { sort_diagnostics(&mut self.diagnostics); let json = serde_json::to_string_pretty(&self); - println!("{}", json.unwrap()); + #[allow(clippy::print_stdout)] + { + println!("{}", json.unwrap()); + } } } diff --git a/cli/tools/registry/diagnostics.rs b/cli/tools/registry/diagnostics.rs index 38366ed7eadda7..49f8de045282be 100644 --- a/cli/tools/registry/diagnostics.rs +++ b/cli/tools/registry/diagnostics.rs @@ -20,6 +20,7 @@ use deno_ast::SourceTextInfo; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_graph::FastCheckDiagnostic; +use deno_semver::Version; use lsp_types::Url; use super::unfurl::SpecifierUnfurlerDiagnostic; @@ -38,7 +39,11 @@ impl PublishDiagnosticsCollector { diagnostics.sort_by_cached_key(|d| d.sorting_key()); for diagnostic in diagnostics { - eprint!("{}", diagnostic.display()); + // todo(https://github.com/denoland/deno_ast/issues/245): use log crate here + #[allow(clippy::print_stderr)] + { + eprint!("{}", diagnostic.display()); + } if matches!(diagnostic.level(), DiagnosticLevel::Error) { errors += 1; } @@ -48,18 +53,18 @@ impl PublishDiagnosticsCollector { } if errors > 0 { if has_slow_types_errors { - eprintln!( + log::error!( "This package contains errors for slow types. Fixing these errors will:\n" ); - eprintln!( + log::error!( " 1. Significantly improve your package users' type checking performance." ); - eprintln!(" 2. Improve the automatic documentation generation."); - eprintln!(" 3. Enable automatic .d.ts generation for Node.js."); - eprintln!( + log::error!(" 2. Improve the automatic documentation generation."); + log::error!(" 3. Enable automatic .d.ts generation for Node.js."); + log::error!( "\nDon't want to bother? 
You can choose to skip this step by" ); - eprintln!("providing the --allow-slow-types flag.\n"); + log::error!("providing the --allow-slow-types flag.\n"); } Err(anyhow!( @@ -103,6 +108,13 @@ pub enum PublishDiagnostic { ExcludedModule { specifier: Url, }, + MissingConstraint { + specifier: Url, + specifier_text: String, + resolved_version: Option, + text_info: SourceTextInfo, + referrer: deno_graph::Range, + }, } impl PublishDiagnostic { @@ -149,6 +161,7 @@ impl Diagnostic for PublishDiagnostic { InvalidExternalImport { .. } => DiagnosticLevel::Error, UnsupportedJsxTsx { .. } => DiagnosticLevel::Warning, ExcludedModule { .. } => DiagnosticLevel::Error, + MissingConstraint { .. } => DiagnosticLevel::Error, } } @@ -163,6 +176,7 @@ impl Diagnostic for PublishDiagnostic { InvalidExternalImport { .. } => Cow::Borrowed("invalid-external-import"), UnsupportedJsxTsx { .. } => Cow::Borrowed("unsupported-jsx-tsx"), ExcludedModule { .. } => Cow::Borrowed("excluded-module"), + MissingConstraint { .. } => Cow::Borrowed("missing-constraint"), } } @@ -181,10 +195,25 @@ impl Diagnostic for PublishDiagnostic { InvalidExternalImport { kind, .. } => Cow::Owned(format!("invalid import to a {kind} specifier")), UnsupportedJsxTsx { .. } => Cow::Borrowed("JSX and TSX files are currently not supported"), ExcludedModule { .. } => Cow::Borrowed("module in package's module graph was excluded from publishing"), + MissingConstraint { specifier, .. } => Cow::Owned(format!("specifier '{}' is missing a version constraint", specifier)), } } fn location(&self) -> DiagnosticLocation { + fn from_referrer_range<'a>( + referrer: &'a deno_graph::Range, + text_info: &'a SourceTextInfo, + ) -> DiagnosticLocation<'a> { + DiagnosticLocation::ModulePosition { + specifier: Cow::Borrowed(&referrer.specifier), + text_info: Cow::Borrowed(text_info), + source_pos: DiagnosticSourcePos::LineAndCol { + line: referrer.start.line, + column: referrer.start.character, + }, + } + } + use PublishDiagnostic::*; match &self { FastCheck(diagnostic) => diagnostic.location(), @@ -212,24 +241,49 @@ impl Diagnostic for PublishDiagnostic { referrer, text_info, .. - } => DiagnosticLocation::ModulePosition { - specifier: Cow::Borrowed(&referrer.specifier), - text_info: Cow::Borrowed(text_info), - source_pos: DiagnosticSourcePos::LineAndCol { - line: referrer.start.line, - column: referrer.start.character, - }, - }, + } => from_referrer_range(referrer, text_info), UnsupportedJsxTsx { specifier } => DiagnosticLocation::Module { specifier: Cow::Borrowed(specifier), }, ExcludedModule { specifier } => DiagnosticLocation::Module { specifier: Cow::Borrowed(specifier), }, + MissingConstraint { + referrer, + text_info, + .. 
+ } => from_referrer_range(referrer, text_info), } } fn snippet(&self) -> Option> { + fn from_range<'a>( + text_info: &'a SourceTextInfo, + referrer: &'a deno_graph::Range, + ) -> Option> { + if referrer.start.line == 0 && referrer.start.character == 0 { + return None; // no range, probably a jsxImportSource import + } + + Some(DiagnosticSnippet { + source: Cow::Borrowed(text_info), + highlight: DiagnosticSnippetHighlight { + style: DiagnosticSnippetHighlightStyle::Error, + range: DiagnosticSourceRange { + start: DiagnosticSourcePos::LineAndCol { + line: referrer.start.line, + column: referrer.start.character, + }, + end: DiagnosticSourcePos::LineAndCol { + line: referrer.end.line, + column: referrer.end.character, + }, + }, + description: Some("the specifier".into()), + }, + }) + } + use PublishDiagnostic::*; match &self { FastCheck(diagnostic) => diagnostic.snippet(), @@ -257,25 +311,14 @@ impl Diagnostic for PublishDiagnostic { referrer, text_info, .. - } => Some(DiagnosticSnippet { - source: Cow::Borrowed(text_info), - highlight: DiagnosticSnippetHighlight { - style: DiagnosticSnippetHighlightStyle::Error, - range: DiagnosticSourceRange { - start: DiagnosticSourcePos::LineAndCol { - line: referrer.start.line, - column: referrer.start.character, - }, - end: DiagnosticSourcePos::LineAndCol { - line: referrer.end.line, - column: referrer.end.character, - }, - }, - description: Some("the specifier".into()), - }, - }), + } => from_range(text_info, referrer), UnsupportedJsxTsx { .. } => None, ExcludedModule { .. } => None, + MissingConstraint { + referrer, + text_info, + .. + } => from_range(text_info, referrer), } } @@ -298,6 +341,13 @@ impl Diagnostic for PublishDiagnostic { ExcludedModule { .. } => Some( Cow::Borrowed("remove the module from 'exclude' and/or 'publish.exclude' in the config file or use 'publish.exclude' with a negative glob to unexclude from gitignore"), ), + MissingConstraint { specifier_text, .. } => { + Some(Cow::Borrowed(if specifier_text.starts_with("jsr:") || specifier_text.starts_with("npm:") { + "specify a version constraint for the specifier" + } else { + "specify a version constraint for the specifier in the import map" + })) + }, } } @@ -362,7 +412,14 @@ impl Diagnostic for PublishDiagnostic { ]), ExcludedModule { .. } => Cow::Owned(vec![ Cow::Borrowed("excluded modules referenced via a package export will error at runtime due to not existing in the package"), - ]) + ]), + MissingConstraint { resolved_version, .. } => Cow::Owned(vec![ + Cow::Owned(format!( + "the specifier resolved to version {} today, but will resolve to a different", + resolved_version.as_ref().map(|v| v.to_string()).unwrap_or_else(|| "".to_string())), + ), + Cow::Borrowed("major version if one is published in the future and potentially break"), + ]), } } @@ -389,6 +446,9 @@ impl Diagnostic for PublishDiagnostic { ExcludedModule { .. } => { Some(Cow::Borrowed("https://jsr.io/go/excluded-module")) } + MissingConstraint { .. 
} => { + Some(Cow::Borrowed("https://jsr.io/go/missing-constraint")) + } } } } diff --git a/cli/tools/registry/graph.rs b/cli/tools/registry/graph.rs index 001f85e7657aaf..7e3239cedd070c 100644 --- a/cli/tools/registry/graph.rs +++ b/cli/tools/registry/graph.rs @@ -8,6 +8,8 @@ use deno_graph::ModuleEntryRef; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; use deno_graph::WalkOptions; +use deno_semver::jsr::JsrPackageReqReference; +use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; use super::diagnostics::PublishDiagnostic; @@ -22,20 +24,67 @@ pub fn collect_invalid_external_imports( let mut collect_if_invalid = |skip_specifiers: &mut HashSet, - text: &Arc, + source_text: &Arc, + specifier_text: &str, resolution: &ResolutionResolved| { if visited.insert(resolution.specifier.clone()) { match resolution.specifier.scheme() { "file" | "data" | "node" => {} - "jsr" | "npm" => { + "jsr" => { skip_specifiers.insert(resolution.specifier.clone()); + + // check for a missing version constraint + if let Ok(jsr_req_ref) = + JsrPackageReqReference::from_specifier(&resolution.specifier) + { + if jsr_req_ref.req().version_req.version_text() == "*" { + let maybe_version = graph + .packages + .mappings() + .find(|(req, _)| *req == jsr_req_ref.req()) + .map(|(_, nv)| nv.version.clone()); + diagnostics_collector.push( + PublishDiagnostic::MissingConstraint { + specifier: resolution.specifier.clone(), + specifier_text: specifier_text.to_string(), + resolved_version: maybe_version, + text_info: SourceTextInfo::new(source_text.clone()), + referrer: resolution.range.clone(), + }, + ); + } + } + } + "npm" => { + skip_specifiers.insert(resolution.specifier.clone()); + + // check for a missing version constraint + if let Ok(jsr_req_ref) = + NpmPackageReqReference::from_specifier(&resolution.specifier) + { + if jsr_req_ref.req().version_req.version_text() == "*" { + let maybe_version = graph + .get(&resolution.specifier) + .and_then(|m| m.npm()) + .map(|n| n.nv_reference.nv().version.clone()); + diagnostics_collector.push( + PublishDiagnostic::MissingConstraint { + specifier: resolution.specifier.clone(), + specifier_text: specifier_text.to_string(), + resolved_version: maybe_version, + text_info: SourceTextInfo::new(source_text.clone()), + referrer: resolution.range.clone(), + }, + ); + } + } } "http" | "https" => { skip_specifiers.insert(resolution.specifier.clone()); diagnostics_collector.push( PublishDiagnostic::InvalidExternalImport { kind: format!("non-JSR '{}'", resolution.specifier.scheme()), - text_info: SourceTextInfo::new(text.clone()), + text_info: SourceTextInfo::new(source_text.clone()), imported: resolution.specifier.clone(), referrer: resolution.range.clone(), }, @@ -46,7 +95,7 @@ pub fn collect_invalid_external_imports( diagnostics_collector.push( PublishDiagnostic::InvalidExternalImport { kind: format!("'{}'", resolution.specifier.scheme()), - text_info: SourceTextInfo::new(text.clone()), + text_info: SourceTextInfo::new(source_text.clone()), imported: resolution.specifier.clone(), referrer: resolution.range.clone(), }, @@ -77,12 +126,22 @@ pub fn collect_invalid_external_imports( continue; }; - for (_, dep) in &module.dependencies { + for (specifier_text, dep) in &module.dependencies { if let Some(resolved) = dep.maybe_code.ok() { - collect_if_invalid(&mut skip_specifiers, &module.source, resolved); + collect_if_invalid( + &mut skip_specifiers, + &module.source, + specifier_text, + resolved, + ); } if let Some(resolved) = dep.maybe_type.ok() { - 
collect_if_invalid(&mut skip_specifiers, &module.source, resolved); + collect_if_invalid( + &mut skip_specifiers, + &module.source, + specifier_text, + resolved, + ); } } } diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs index b8d31853b58e67..495f2458803840 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -72,9 +72,10 @@ use super::check::TypeChecker; use self::paths::CollectedPublishPath; use self::tar::PublishableTarball; +#[allow(clippy::print_stderr)] fn ring_bell() { // ASCII code for the bell character. - print!("\x07"); + eprint!("\x07"); } struct PreparedPublishPackage { @@ -150,35 +151,40 @@ async fn prepare_publish( .map(|c| c.files) .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf())); - let diagnostics_collector = diagnostics_collector.clone(); - let tarball = deno_core::unsync::spawn_blocking(move || { - let bare_node_builtins = cli_options.unstable_bare_node_builtins(); - let unfurler = SpecifierUnfurler::new( - &mapped_resolver, - sloppy_imports_resolver.as_ref(), - bare_node_builtins, - ); - let root_specifier = - ModuleSpecifier::from_directory_path(&root_dir).unwrap(); - let publish_paths = paths::collect_publish_paths( - &root_dir, - &cli_options, - &diagnostics_collector, - file_patterns, - )?; - collect_excluded_module_diagnostics( - &root_specifier, - &graph, - &publish_paths, - &diagnostics_collector, - ); - tar::create_gzipped_tarball( - &publish_paths, - LazyGraphSourceParser::new(&source_cache, &graph), - &diagnostics_collector, - &unfurler, - ) - .context("Failed to create a tarball") + let tarball = deno_core::unsync::spawn_blocking({ + let diagnostics_collector = diagnostics_collector.clone(); + let config_path = config_path.clone(); + move || { + let bare_node_builtins = cli_options.unstable_bare_node_builtins(); + let unfurler = SpecifierUnfurler::new( + &mapped_resolver, + sloppy_imports_resolver.as_ref(), + bare_node_builtins, + ); + let root_specifier = + ModuleSpecifier::from_directory_path(&root_dir).unwrap(); + let publish_paths = + paths::collect_publish_paths(paths::CollectPublishPathsOptions { + root_dir: &root_dir, + cli_options: &cli_options, + diagnostics_collector: &diagnostics_collector, + file_patterns, + force_include_paths: vec![config_path], + })?; + collect_excluded_module_diagnostics( + &root_specifier, + &graph, + &publish_paths, + &diagnostics_collector, + ); + tar::create_gzipped_tarball( + &publish_paths, + LazyGraphSourceParser::new(&source_cache, &graph), + &diagnostics_collector, + &unfurler, + ) + .context("Failed to create a tarball") + } }) .await??; @@ -291,18 +297,19 @@ async fn get_auth_headers( .context("Failed to create interactive authorization")?; let auth_url = format!("{}?code={}", auth.verification_url, auth.code); - print!( - "Visit {} to authorize publishing of", - colors::cyan(&auth_url) - ); - if packages.len() > 1 { - println!(" {} packages", packages.len()); + let pkgs_text = if packages.len() > 1 { + format!("{} packages", packages.len()) } else { - println!(" @{}/{}", packages[0].scope, packages[0].package); - } + format!("@{}/{}", packages[0].scope, packages[0].package) + }; + log::warn!( + "Visit {} to authorize publishing of {}", + colors::cyan(&auth_url), + pkgs_text, + ); ring_bell(); - println!("{}", colors::gray("Waiting...")); + log::info!("{}", colors::gray("Waiting...")); let _ = open::that_detached(&auth_url); let interval = std::time::Duration::from_secs(auth.poll_interval); @@ -323,7 +330,7 @@ async fn get_auth_headers( .await; match res 
{ Ok(res) => { - println!( + log::info!( "{} {} {}", colors::green("Authorization successful."), colors::gray("Authenticated as"), @@ -490,13 +497,13 @@ async fn ensure_scopes_and_packages_exist( }; ring_bell(); - println!( + log::warn!( "'@{}/{}' doesn't exist yet. Visit {} to create the package", &package.scope, &package.package, colors::cyan_with_underline(&create_package_url) ); - println!("{}", colors::gray("Waiting...")); + log::warn!("{}", colors::gray("Waiting...")); let _ = open::that_detached(&create_package_url); let package_api_url = api::get_package_api_url( @@ -510,7 +517,7 @@ async fn ensure_scopes_and_packages_exist( let response = client.get(&package_api_url).send().await?; if response.status() == 200 { let name = format!("@{}/{}", package.scope, package.package); - println!("Package {} created", colors::green(name)); + log::info!("Package {} created", colors::green(name)); break; } } @@ -615,7 +622,7 @@ async fn publish_package( provenance: bool, ) -> Result<(), AnyError> { let client = http_client.client()?; - println!( + log::info!( "{} @{}/{}@{} ...", colors::intense_blue("Publishing"), package.scope, @@ -649,7 +656,7 @@ async fn publish_package( ) .unwrap(); if task.status == "success" { - println!( + log::info!( "{} @{}/{}@{}", colors::yellow("Warning: Skipping, already published"), package.scope, @@ -658,7 +665,7 @@ async fn publish_package( ); return Ok(()); } - println!( + log::info!( "{} @{}/{}@{}", colors::yellow("Already uploaded, waiting for publishing"), package.scope, @@ -711,7 +718,7 @@ async fn publish_package( ); } - println!( + log::info!( "{} @{}/{}@{}", colors::green("Successfully published"), package.scope, @@ -748,7 +755,7 @@ async fn publish_package( let bundle = provenance::generate_provenance(subject).await?; let tlog_entry = &bundle.verification_material.tlog_entries[0]; - println!("{}", + log::info!("{}", colors::green(format!( "Provenance transparency log available at https://search.sigstore.dev/?logIndex={}", tlog_entry.log_index @@ -768,7 +775,7 @@ async fn publish_package( .await?; } - println!( + log::info!( "{}", colors::gray(format!( "Visit {}@{}/{}@{} for details", @@ -798,7 +805,7 @@ async fn prepare_packages_for_publishing( let cli_options = cli_factory.cli_options(); if members.len() > 1 { - println!("Publishing a workspace..."); + log::info!("Publishing a workspace..."); } // create the module graph diff --git a/cli/tools/registry/paths.rs b/cli/tools/registry/paths.rs index 12282b77a1cf98..721ef6ecea2081 100644 --- a/cli/tools/registry/paths.rs +++ b/cli/tools/registry/paths.rs @@ -217,17 +217,29 @@ pub struct CollectedPublishPath { pub relative_path: String, } +pub struct CollectPublishPathsOptions<'a> { + pub root_dir: &'a Path, + pub cli_options: &'a CliOptions, + pub file_patterns: FilePatterns, + pub force_include_paths: Vec, + pub diagnostics_collector: &'a PublishDiagnosticsCollector, +} + pub fn collect_publish_paths( - root_dir: &Path, - cli_options: &CliOptions, - diagnostics_collector: &PublishDiagnosticsCollector, - file_patterns: FilePatterns, + opts: CollectPublishPathsOptions, ) -> Result, AnyError> { + let diagnostics_collector = opts.diagnostics_collector; let publish_paths = - collect_paths(cli_options, diagnostics_collector, file_patterns)?; - let mut paths = HashSet::with_capacity(publish_paths.len()); - let mut result = Vec::with_capacity(publish_paths.len()); - for path in publish_paths { + collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns)?; + let publish_paths_set = 
publish_paths.iter().cloned().collect::>(); + let capacity = publish_paths.len() + opts.force_include_paths.len(); + let mut paths = HashSet::with_capacity(capacity); + let mut result = Vec::with_capacity(capacity); + let force_include_paths = opts + .force_include_paths + .into_iter() + .filter(|path| !publish_paths_set.contains(path)); + for path in publish_paths.into_iter().chain(force_include_paths) { let Ok(specifier) = ModuleSpecifier::from_file_path(&path) else { diagnostics_collector .to_owned() @@ -238,7 +250,7 @@ pub fn collect_publish_paths( continue; }; - let Ok(relative_path) = path.strip_prefix(root_dir) else { + let Ok(relative_path) = path.strip_prefix(opts.root_dir) else { diagnostics_collector .to_owned() .push(PublishDiagnostic::InvalidPath { diff --git a/cli/tools/repl/editor.rs b/cli/tools/repl/editor.rs index 9cb3cd1c2670e5..dbc9bce7038449 100644 --- a/cli/tools/repl/editor.rs +++ b/cli/tools/repl/editor.rs @@ -490,7 +490,7 @@ impl ReplEditor { } self.errored_on_history_save.store(true, Relaxed); - eprintln!("Unable to save history file: {e}"); + log::warn!("Unable to save history file: {}", e); } } } diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index 8847bee52a7cdc..c29e29e71f3e23 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -40,6 +40,7 @@ struct Repl { message_handler: RustylineSyncMessageHandler, } +#[allow(clippy::print_stdout)] impl Repl { async fn run(&mut self) -> Result<(), AnyError> { loop { @@ -61,7 +62,7 @@ impl Repl { break; } - println!("{output}"); + println!("{}", output); } Err(ReadlineError::Interrupted) => { if self.editor.should_exit_on_interrupt() { @@ -75,7 +76,7 @@ impl Repl { break; } Err(err) => { - println!("Error: {err:?}"); + println!("Error: {:?}", err); break; } } @@ -85,6 +86,7 @@ impl Repl { } } +#[allow(clippy::print_stdout)] async fn read_line_and_poll( repl_session: &mut ReplSession, message_handler: &mut RustylineSyncMessageHandler, @@ -152,6 +154,7 @@ async fn read_eval_file( Ok(file.into_text_decoded()?.source) } +#[allow(clippy::print_stdout)] pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { let factory = CliFactory::from_flags(flags)?; let cli_options = factory.cli_options(); diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 4d14117d845078..60867b768fda96 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
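The `force_include_paths` plumbing just above (used to always publish the config file) appends forced entries after the normally collected ones, using a `HashSet` so nothing is added twice. A minimal sketch of that merge with plain `PathBuf`s:

```rust
use std::collections::HashSet;
use std::path::PathBuf;

// Merge collected paths with forced ones, skipping duplicates while
// preserving order: collected paths first, then any forced path that
// was not already collected (e.g. deno.json when it falls outside the
// publish patterns).
fn merge_publish_paths(
    collected: Vec<PathBuf>,
    force_include: Vec<PathBuf>,
) -> Vec<PathBuf> {
    let collected_set: HashSet<PathBuf> = collected.iter().cloned().collect();
    let forced = force_include
        .into_iter()
        .filter(|p| !collected_set.contains(p));
    collected.into_iter().chain(forced).collect()
}

fn main() {
    let collected = vec![PathBuf::from("mod.ts"), PathBuf::from("deno.json")];
    let forced = vec![PathBuf::from("deno.json")]; // already collected
    let merged = merge_publish_paths(collected, forced);
    assert_eq!(merged.len(), 2); // no duplicate deno.json
}
```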
-use crate::args::CliOptions; use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; @@ -16,6 +15,7 @@ use deno_core::futures; use deno_core::futures::future::LocalBoxFuture; use deno_runtime::deno_node::NodeResolver; use deno_semver::package::PackageNv; +use deno_task_shell::ExecutableCommand; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; use deno_task_shell::ShellCommandContext; @@ -44,12 +44,17 @@ pub async fn execute_script( let task_name = match &task_flags.task { Some(task) => task, None => { - print_available_tasks(&tasks_config, &package_json_scripts); + print_available_tasks( + &mut std::io::stdout(), + &tasks_config, + &package_json_scripts, + )?; return Ok(1); } }; let npm_resolver = factory.npm_resolver().await?; let node_resolver = factory.node_resolver().await?; + let env_vars = real_env_vars(); if let Some( deno_config::Task::Definition(script) @@ -69,15 +74,17 @@ pub async fn execute_script( None => config_file_path.parent().unwrap().to_owned(), }; - let npm_commands = - resolve_npm_commands(npm_resolver.as_ref(), node_resolver)?; + let custom_commands = + resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?; run_task( task_name, script, &cwd, - cli_options, - npm_commands, - npm_resolver.as_ref(), + cli_options.initial_cwd(), + env_vars, + cli_options.argv(), + custom_commands, + npm_resolver.root_node_modules_path().map(|p| p.as_path()), ) .await } else if package_json_scripts.contains_key(task_name) { @@ -124,17 +131,19 @@ pub async fn execute_script( task_name.clone(), format!("post{}", task_name), ]; - let npm_commands = - resolve_npm_commands(npm_resolver.as_ref(), node_resolver)?; + let custom_commands = + resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?; for task_name in task_names { if let Some(script) = package_json_scripts.get(&task_name) { let exit_code = run_task( &task_name, script, &cwd, - cli_options, - npm_commands.clone(), - npm_resolver.as_ref(), + cli_options.initial_cwd(), + env_vars.clone(), + cli_options.argv(), + custom_commands.clone(), + npm_resolver.root_node_modules_path().map(|p| p.as_path()), ) .await?; if exit_code > 0 { @@ -145,36 +154,42 @@ pub async fn execute_script( Ok(0) } else { - eprintln!("Task not found: {task_name}"); - print_available_tasks(&tasks_config, &package_json_scripts); + log::error!("Task not found: {task_name}"); + if log::log_enabled!(log::Level::Error) { + print_available_tasks( + &mut std::io::stderr(), + &tasks_config, + &package_json_scripts, + )?; + } Ok(1) } } +#[allow(clippy::too_many_arguments)] async fn run_task( task_name: &str, script: &str, cwd: &Path, - cli_options: &CliOptions, - npm_commands: HashMap>, - npm_resolver: &dyn CliNpmResolver, + init_cwd: &Path, + env_vars: HashMap, + argv: &[String], + custom_commands: HashMap>, + root_node_modules_dir: Option<&Path>, ) -> Result { - let script = get_script_with_args(script, cli_options); + let script = get_script_with_args(script, argv); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{}'.", task_name))?; - let env_vars = match npm_resolver.root_node_modules_path() { - Some(dir_path) => collect_env_vars_with_node_modules_dir(dir_path), - None => collect_env_vars(), - }; + let env_vars = prepare_env_vars(env_vars, init_cwd, root_node_modules_dir); let local = LocalSet::new(); - let future = deno_task_shell::execute(seq_list, env_vars, cwd, npm_commands); + let future = + deno_task_shell::execute(seq_list, 
env_vars, cwd, custom_commands); Ok(local.run_until(future).await) } -fn get_script_with_args(script: &str, options: &CliOptions) -> String { - let additional_args = options - .argv() +fn get_script_with_args(script: &str, argv: &[String]) -> String { + let additional_args = argv .iter() // surround all the additional arguments in double quotes // and sanitize any command substitution @@ -189,22 +204,30 @@ fn output_task(task_name: &str, script: &str) { log::info!( "{} {} {}", colors::green("Task"), - colors::cyan(&task_name), + colors::cyan(task_name), script, ); } -fn collect_env_vars_with_node_modules_dir( - node_modules_dir_path: &Path, +fn prepare_env_vars( + mut env_vars: HashMap, + initial_cwd: &Path, + node_modules_dir: Option<&Path>, ) -> HashMap { - let mut env_vars = collect_env_vars(); - prepend_to_path( - &mut env_vars, - node_modules_dir_path - .join(".bin") - .to_string_lossy() - .to_string(), - ); + const INIT_CWD_NAME: &str = "INIT_CWD"; + if !env_vars.contains_key(INIT_CWD_NAME) { + // if not set, set an INIT_CWD env var that has the cwd + env_vars.insert( + INIT_CWD_NAME.to_string(), + initial_cwd.to_string_lossy().to_string(), + ); + } + if let Some(node_modules_dir) = node_modules_dir { + prepend_to_path( + &mut env_vars, + node_modules_dir.join(".bin").to_string_lossy().to_string(), + ); + } env_vars } @@ -224,63 +247,71 @@ fn prepend_to_path(env_vars: &mut HashMap, value: String) { } } -fn collect_env_vars() -> HashMap { - // get the starting env vars (the PWD env var will be set by deno_task_shell) - let mut env_vars = std::env::vars().collect::>(); - const INIT_CWD_NAME: &str = "INIT_CWD"; - if !env_vars.contains_key(INIT_CWD_NAME) { - if let Ok(cwd) = std::env::current_dir() { - // if not set, set an INIT_CWD env var that has the cwd - env_vars - .insert(INIT_CWD_NAME.to_string(), cwd.to_string_lossy().to_string()); - } - } - env_vars +fn real_env_vars() -> HashMap { + std::env::vars() + .map(|(k, v)| { + if cfg!(windows) { + (k.to_uppercase(), v) + } else { + (k, v) + } + }) + .collect::>() } fn print_available_tasks( - // order can be important, so these use an index map + writer: &mut dyn std::io::Write, tasks_config: &IndexMap, package_json_scripts: &IndexMap, -) { - eprintln!("{}", colors::green("Available tasks:")); - - let mut had_task = false; - for (is_deno, (key, task)) in tasks_config - .iter() - .map(|(k, t)| (true, (k, t.clone()))) - .chain( - package_json_scripts - .iter() - .filter(|(key, _)| !tasks_config.contains_key(*key)) - .map(|(k, v)| (false, (k, deno_config::Task::Definition(v.clone())))), - ) - { - eprintln!( - "- {}{}", - colors::cyan(key), - if is_deno { - "".to_string() - } else { - format!(" {}", colors::italic_gray("(package.json)")) - } - ); - let definition = match &task { - deno_config::Task::Definition(definition) => definition, - deno_config::Task::Commented { definition, .. } => definition, - }; - if let deno_config::Task::Commented { comments, .. 
} = &task { - let slash_slash = colors::italic_gray("//"); - for comment in comments { - eprintln!(" {slash_slash} {}", colors::italic_gray(comment)); +) -> Result<(), std::io::Error> { + writeln!(writer, "{}", colors::green("Available tasks:"))?; + + if tasks_config.is_empty() && package_json_scripts.is_empty() { + writeln!( + writer, + " {}", + colors::red("No tasks found in configuration file") + )?; + } else { + for (is_deno, (key, task)) in tasks_config + .iter() + .map(|(k, t)| (true, (k, t.clone()))) + .chain( + package_json_scripts + .iter() + .filter(|(key, _)| !tasks_config.contains_key(*key)) + .map(|(k, v)| (false, (k, deno_config::Task::Definition(v.clone())))), + ) + { + writeln!( + writer, + "- {}{}", + colors::cyan(key), + if is_deno { + "".to_string() + } else { + format!(" {}", colors::italic_gray("(package.json)")) + } + )?; + let definition = match &task { + deno_config::Task::Definition(definition) => definition, + deno_config::Task::Commented { definition, .. } => definition, + }; + if let deno_config::Task::Commented { comments, .. } = &task { + let slash_slash = colors::italic_gray("//"); + for comment in comments { + writeln!( + writer, + " {slash_slash} {}", + colors::italic_gray(comment) + )?; + } } + writeln!(writer, " {definition}")?; } - eprintln!(" {definition}"); - had_task = true; - } - if !had_task { - eprintln!(" {}", colors::red("No tasks found in configuration file")); } + + Ok(()) } struct NpxCommand; @@ -298,10 +329,17 @@ impl ShellCommand for NpxCommand { }; command.execute(context) } else { - let _ = context - .stderr - .write_line(&format!("npx: could not resolve command '{first_arg}'")); - Box::pin(futures::future::ready(ExecuteResult::from_exit_code(1))) + // can't find the command, so fallback to running the real npx command + let npx_path = match context.resolve_command_path("npx") { + Ok(npx) => npx, + Err(err) => { + let _ = context.stderr.write_line(&format!("{}", err)); + return Box::pin(futures::future::ready( + ExecuteResult::from_exit_code(err.exit_code()), + )); + } + }; + ExecutableCommand::new("npx".to_string(), npx_path).execute(context) } } else { let _ = context.stderr.write_line("npx: missing command"); @@ -370,7 +408,7 @@ impl ShellCommand for NodeModulesFileRunCommand { } } -fn resolve_npm_commands( +fn resolve_custom_commands( npm_resolver: &dyn CliNpmResolver, node_resolver: &NodeResolver, ) -> Result>, AnyError> { diff --git a/cli/tools/test/channel.rs b/cli/tools/test/channel.rs index 780a17de606247..a8ce7a95568201 100644 --- a/cli/tools/test/channel.rs +++ b/cli/tools/test/channel.rs @@ -442,6 +442,8 @@ impl TestEventSender { } } +#[allow(clippy::print_stdout)] +#[allow(clippy::print_stderr)] #[cfg(test)] mod tests { use super::*; diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index 94541cf063cf23..2ff7203b769f98 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -10,8 +10,8 @@ use crate::factory::CliFactory; use crate::factory::CliFactoryBuilder; use crate::file_fetcher::File; use crate::file_fetcher::FileFetcher; +use crate::graph_container::MainModuleGraphContainer; use crate::graph_util::has_graph_root_local_dependent_changed; -use crate::module_loader::ModuleLoadPreparer; use crate::ops; use crate::util::file_watcher; use crate::util::fs::collect_specifiers; @@ -1305,12 +1305,10 @@ async fn fetch_inline_files( /// Type check a collection of module and document specifiers. 
pub async fn check_specifiers( - cli_options: &CliOptions, file_fetcher: &FileFetcher, - module_load_preparer: &ModuleLoadPreparer, + main_graph_container: &Arc, specifiers: Vec<(ModuleSpecifier, TestMode)>, ) -> Result<(), AnyError> { - let lib = cli_options.ts_type_lib_window(); let inline_files = fetch_inline_files( file_fetcher, specifiers @@ -1346,13 +1344,8 @@ pub async fn check_specifiers( } } - module_load_preparer - .prepare_module_load( - module_specifiers, - false, - lib, - PermissionsContainer::allow_all(), - ) + main_graph_container + .check_specifiers(&module_specifiers) .await?; Ok(()) @@ -1518,6 +1511,8 @@ pub async fn report_tests( &tests, &test_steps, ); + + #[allow(clippy::print_stderr)] if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) { eprint!("Test reporter failed to flush: {}", err) } @@ -1699,7 +1694,6 @@ pub async fn run_tests( let cli_options = factory.cli_options(); let test_options = cli_options.resolve_test_options(test_flags)?; let file_fetcher = factory.file_fetcher()?; - let module_load_preparer = factory.module_load_preparer().await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. @@ -1719,10 +1713,11 @@ pub async fn run_tests( return Err(generic_error("No test modules found")); } + let main_graph_container = factory.main_module_graph_container().await?; + check_specifiers( - cli_options, file_fetcher, - module_load_preparer, + main_graph_container, specifiers_with_mode.clone(), ) .await?; @@ -1861,7 +1856,6 @@ pub async fn run_tests_with_watch( let worker_factory = Arc::new(factory.create_cli_main_worker_factory().await?); - let module_load_preparer = factory.module_load_preparer().await?; let specifiers_with_mode = fetch_specifiers_with_test_mode( &cli_options, file_fetcher, @@ -1873,10 +1867,11 @@ pub async fn run_tests_with_watch( .filter(|(specifier, _)| test_modules_to_reload.contains(specifier)) .collect::>(); + let main_graph_container = + factory.main_module_graph_container().await?; check_specifiers( - &cli_options, file_fetcher, - module_load_preparer, + main_graph_container, specifiers_with_mode.clone(), ) .await?; diff --git a/cli/tools/test/reporters/dot.rs b/cli/tools/test/reporters/dot.rs index d2e529a9cd76e3..854ef96660e120 100644 --- a/cli/tools/test/reporters/dot.rs +++ b/cli/tools/test/reporters/dot.rs @@ -11,6 +11,7 @@ pub struct DotTestReporter { summary: TestSummary, } +#[allow(clippy::print_stdout)] impl DotTestReporter { pub fn new(cwd: Url) -> DotTestReporter { let console_width = if let Some(size) = crate::util::console::console_size() @@ -80,6 +81,7 @@ fn fmt_cancelled() -> String { colors::gray("!").to_string() } +#[allow(clippy::print_stdout)] impl TestReporter for DotTestReporter { fn report_register(&mut self, _description: &TestDescription) {} diff --git a/cli/tools/test/reporters/tap.rs b/cli/tools/test/reporters/tap.rs index 0758686f03dbb2..6dc690e6bde3d1 100644 --- a/cli/tools/test/reporters/tap.rs +++ b/cli/tools/test/reporters/tap.rs @@ -22,6 +22,7 @@ pub struct TapTestReporter { step_results: HashMap>, } +#[allow(clippy::print_stdout)] impl TapTestReporter { pub fn new(cwd: Url, is_concurrent: bool) -> TapTestReporter { TapTestReporter { @@ -113,6 +114,7 @@ impl TapTestReporter { } } +#[allow(clippy::print_stdout)] impl TestReporter for TapTestReporter { fn report_register(&mut self, _description: &TestDescription) {} diff --git 
a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index 6bb4606d3cf0b7..073ebdc1c932db 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -274,23 +274,17 @@ pub fn check_for_upgrades( if let Some(upgrade_version) = update_checker.should_prompt() { if log::log_enabled!(log::Level::Info) && std::io::stderr().is_terminal() { if version::is_canary() { - eprint!( - "{} ", - colors::green("A new canary release of Deno is available.") - ); - eprintln!( - "{}", + log::info!( + "{} {}", + colors::green("A new canary release of Deno is available."), colors::italic_gray("Run `deno upgrade --canary` to install it.") ); } else { - eprint!( - "{} {} → {} ", + log::info!( + "{} {} → {} {}", colors::green("A new release of Deno is available:"), colors::cyan(version::deno()), - colors::cyan(&upgrade_version) - ); - eprintln!( - "{}", + colors::cyan(&upgrade_version), colors::italic_gray("Run `deno upgrade` to install it.") ); } diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs index 5ff986f0cb76c6..5435a0035ef638 100644 --- a/cli/tools/vendor/build.rs +++ b/cli/tools/vendor/build.rs @@ -126,7 +126,7 @@ pub async fn build< // surface any errors graph_util::graph_valid( &graph, - &deno_fs::RealFs, + Arc::new(deno_fs::RealFs), &graph.roots, graph_util::GraphValidOptions { is_vendoring: true, diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index ddbd77ae0ebfa8..16e8f1ee975cdd 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -1079,19 +1079,69 @@ delete Object.prototype.__proto__; /** * @param {number} _id * @param {any} data + * @param {any | null} error */ // TODO(bartlomieju): this feels needlessly generic, both type chcking // and language server use it with inefficient serialization. Id is not used // anyway... - function respond(_id, data = null) { - ops.op_respond(JSON.stringify(data)); + function respond(_id, data = null, error = null) { + if (error) { + ops.op_respond( + "error", + "stack" in error ? error.stack.toString() : error.toString(), + ); + } else { + ops.op_respond(JSON.stringify(data), ""); + } + } + + /** @typedef {[[string, number][], number, boolean] } PendingChange */ + /** + * @template T + * @typedef {T | null} Option */ + + /** @returns {Promise<[number, string, any[], Option] | null>} */ + async function pollRequests() { + return await ops.op_poll_requests(); + } + + let hasStarted = false; + + /** @param {boolean} enableDebugLogging */ + async function serverMainLoop(enableDebugLogging) { + if (hasStarted) { + throw new Error("The language server has already been initialized."); + } + hasStarted = true; + languageService = ts.createLanguageService(host, documentRegistry); + setLogDebug(enableDebugLogging, "TSLS"); + debug("serverInit()"); + + while (true) { + const request = await pollRequests(); + if (request === null) { + break; + } + try { + serverRequest(request[0], request[1], request[2], request[3]); + } catch (err) { + const reqString = "[" + request.map((v) => + JSON.stringify(v) + ).join(", ") + "]"; + error( + `Error occurred processing request ${reqString} : ${ + "stack" in err ? 
err.stack : err + }`, + ); + } + } } /** * @param {number} id * @param {string} method * @param {any[]} args - * @param {[[string, number][], number, boolean] | null} maybeChange + * @param {PendingChange | null} maybeChange */ function serverRequest(id, method, args, maybeChange) { if (logDebug) { @@ -1160,11 +1210,7 @@ delete Object.prototype.__proto__; if ( !isCancellationError(e) ) { - if ("stack" in e) { - error(e.stack); - } else { - error(e); - } + respond(id, {}, e); throw e; } return respond(id, {}); @@ -1181,11 +1227,7 @@ delete Object.prototype.__proto__; return respond(id, languageService[method](...args)); } catch (e) { if (!isCancellationError(e)) { - if ("stack" in e) { - error(e.stack); - } else { - error(e); - } + respond(id, null, e); throw e; } return respond(id); @@ -1198,18 +1240,6 @@ delete Object.prototype.__proto__; } } - let hasStarted = false; - /** @param {{ debug: boolean; }} init */ - function serverInit({ debug: debugFlag }) { - if (hasStarted) { - throw new Error("The language server has already been initialized."); - } - hasStarted = true; - languageService = ts.createLanguageService(host, documentRegistry); - setLogDebug(debugFlag, "TSLS"); - debug("serverInit()"); - } - // A build time only op that provides some setup information that is used to // ensure the snapshot is setup properly. /** @type {{ buildSpecifier: string; libs: string[]; nodeBuiltInModuleNames: string[] }} */ @@ -1300,6 +1330,5 @@ delete Object.prototype.__proto__; // exposes the functions that are called when the compiler is used as a // language service. - global.serverInit = serverInit; - global.serverRequest = serverRequest; + global.serverMainLoop = serverMainLoop; })(this); diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index c940a6e61589df..09c669c2827a44 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -5344,16 +5344,16 @@ declare namespace Deno { * * @category File System */ export interface SymlinkOptions { - /** If the symbolic link should be either a file or directory. This option - * only applies to Windows and is ignored on other operating systems. */ - type: "file" | "dir"; + /** Specify the symbolic link type as file, directory or NTFS junction. This + * option only applies to Windows and is ignored on other operating systems. */ + type: "file" | "dir" | "junction"; } /** * Creates `newpath` as a symbolic link to `oldpath`. * - * The `options.type` parameter can be set to `"file"` or `"dir"`. This - * argument is only available on Windows and ignored on other platforms. + * The `options.type` parameter can be set to `"file"`, `"dir"` or `"junction"`. + * This argument is only available on Windows and ignored on other platforms. * * ```ts * await Deno.symlink("old/name", "new/name"); @@ -5373,8 +5373,8 @@ declare namespace Deno { /** * Creates `newpath` as a symbolic link to `oldpath`. * - * The `options.type` parameter can be set to `"file"` or `"dir"`. This - * argument is only available on Windows and ignored on other platforms. + * The `options.type` parameter can be set to `"file"`, `"dir"` or `"junction"`. + * This argument is only available on Windows and ignored on other platforms. 
* * ```ts * Deno.symlinkSync("old/name", "new/name"); diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index 50ae7c2339362b..247ae49d8b85f5 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -73,6 +73,7 @@ impl DebouncedReceiver { } } +#[allow(clippy::print_stderr)] async fn error_handler(watch_future: F) -> bool where F: Future>, @@ -132,8 +133,9 @@ fn create_print_after_restart_fn( clear_screen: bool, ) -> impl Fn() { move || { + #[allow(clippy::print_stderr)] if clear_screen && std::io::stderr().is_terminal() { - eprint!("{CLEAR_SCREEN}"); + eprint!("{}", CLEAR_SCREEN); } info!( "{} File change detected! Restarting!", diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 92820ebe87cd14..fdc7855e6230b7 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -2,7 +2,6 @@ use std::collections::HashSet; use std::env::current_dir; -use std::fmt::Write as FmtWrite; use std::fs::FileType; use std::fs::OpenOptions; use std::io::Error; @@ -23,12 +22,12 @@ use deno_core::error::AnyError; pub use deno_core::normalize_path; use deno_core::unsync::spawn_blocking; use deno_core::ModuleSpecifier; -use deno_runtime::deno_crypto::rand; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::PathClean; use crate::util::gitignore::DirGitIgnores; use crate::util::gitignore::GitIgnoreTree; +use crate::util::path::get_atomic_file_path; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; @@ -56,14 +55,7 @@ pub fn atomic_write_file>( } fn inner(file_path: &Path, data: &[u8], mode: u32) -> std::io::Result<()> { - let temp_file_path = { - let rand: String = (0..4).fold(String::new(), |mut output, _| { - let _ = write!(output, "{:02x}", rand::random::()); - output - }); - let extension = format!("{rand}.tmp"); - file_path.with_extension(extension) - }; + let temp_file_path = get_atomic_file_path(file_path); if let Err(write_err) = atomic_write_file_raw(&temp_file_path, file_path, data, mode) diff --git a/cli/util/mod.rs b/cli/util/mod.rs index 7e0e1bd3700d51..c8155dc512bcaa 100644 --- a/cli/util/mod.rs +++ b/cli/util/mod.rs @@ -12,6 +12,7 @@ pub mod gitignore; pub mod logger; pub mod path; pub mod progress_bar; +pub mod result; pub mod sync; pub mod text_encoding; pub mod time; diff --git a/cli/util/path.rs b/cli/util/path.rs index a3109ad04ad9a5..3c848edea708b0 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -42,6 +42,32 @@ pub fn get_extension(file_path: &Path) -> Option { .map(|e| e.to_lowercase()); } +pub fn get_atomic_dir_path(file_path: &Path) -> PathBuf { + let rand = gen_rand_path_component(); + let new_file_name = format!( + ".{}_{}", + file_path + .file_name() + .map(|f| f.to_string_lossy()) + .unwrap_or(Cow::Borrowed("")), + rand + ); + file_path.with_file_name(new_file_name) +} + +pub fn get_atomic_file_path(file_path: &Path) -> PathBuf { + let rand = gen_rand_path_component(); + let extension = format!("{rand}.tmp"); + file_path.with_extension(extension) +} + +fn gen_rand_path_component() -> String { + (0..4).fold(String::new(), |mut output, _| { + output.push_str(&format!("{:02x}", rand::random::())); + output + }) +} + /// TypeScript figures out the type of file based on the extension, but we take /// other factors into account like the file headers. The hack here is to map the /// specifier passed to TypeScript to a new specifier with the file extension. 
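For context on how the path helpers above compose with the `cli/util/fs.rs` change: below is a minimal sketch of the write-then-rename pattern that `get_atomic_file_path` supports, assuming a same-filesystem rename. The function name `atomic_write_sketch` and the direct `std::fs` calls are illustrative only; the real `atomic_write_file_raw` in this diff additionally handles file modes and error cleanup.

```rust
use std::io::Write;
use std::path::Path;

// Illustrative sketch only: write to a randomized sibling temp path, then
// rename over the target. A rename within one filesystem replaces the
// destination atomically, so readers never observe a partially written file.
fn atomic_write_sketch(file_path: &Path, data: &[u8]) -> std::io::Result<()> {
  // e.g. "deps.json" -> "deps.a1b2c3d4.tmp", next to the target
  let temp_file_path = get_atomic_file_path(file_path);
  std::fs::File::create(&temp_file_path)?.write_all(data)?;
  std::fs::rename(&temp_file_path, file_path)
}
```

This is also why the temp file is created as a sibling of the target rather than in a shared temp directory: a cross-filesystem rename would fall back to copy-and-delete and lose atomicity.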
diff --git a/cli/util/result.rs b/cli/util/result.rs new file mode 100644 index 00000000000000..3203d04eb7d298 --- /dev/null +++ b/cli/util/result.rs @@ -0,0 +1,16 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::convert::Infallible; + +pub trait InfallibleResultExt { + fn unwrap_infallible(self) -> T; +} + +impl InfallibleResultExt for Result { + fn unwrap_infallible(self) -> T { + match self { + Ok(value) => value, + Err(never) => match never {}, + } + } +} diff --git a/cli/util/v8.rs b/cli/util/v8.rs index 63bc495d1c707f..fb16e67b77c203 100644 --- a/cli/util/v8.rs +++ b/cli/util/v8.rs @@ -1,5 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +pub mod convert; + #[inline(always)] pub fn get_v8_flags_from_env() -> Vec { std::env::var("DENO_V8_FLAGS") @@ -43,6 +45,8 @@ pub fn init_v8_flags( .into_iter() .skip(1) .collect::>(); + + #[allow(clippy::print_stderr)] if !unrecognized_v8_flags.is_empty() { for f in unrecognized_v8_flags { eprintln!("error: V8 did not recognize flag '{f}'"); diff --git a/cli/util/v8/convert.rs b/cli/util/v8/convert.rs new file mode 100644 index 00000000000000..28107d90100b70 --- /dev/null +++ b/cli/util/v8/convert.rs @@ -0,0 +1,57 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use deno_core::v8; +use deno_core::FromV8; +use deno_core::ToV8; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +/// A wrapper type for `Option` that (de)serializes `None` as `null` +#[repr(transparent)] +pub struct OptionNull(pub Option); + +impl From> for OptionNull { + fn from(option: Option) -> Self { + Self(option) + } +} + +impl From> for Option { + fn from(value: OptionNull) -> Self { + value.0 + } +} + +impl<'a, T> ToV8<'a> for OptionNull +where + T: ToV8<'a>, +{ + type Error = T::Error; + + fn to_v8( + self, + scope: &mut v8::HandleScope<'a>, + ) -> Result, Self::Error> { + match self.0 { + Some(value) => value.to_v8(scope), + None => Ok(v8::null(scope).into()), + } + } +} + +impl<'a, T> FromV8<'a> for OptionNull +where + T: FromV8<'a>, +{ + type Error = T::Error; + + fn from_v8( + scope: &mut v8::HandleScope<'a>, + value: v8::Local<'a, v8::Value>, + ) -> Result { + if value.is_null() { + Ok(OptionNull(None)) + } else { + T::from_v8(scope, value).map(|v| OptionNull(Some(v))) + } + } +} diff --git a/cli/worker.rs b/cli/worker.rs index 302c00e1053d34..151a4ec4f9937c 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -58,20 +58,23 @@ use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; use crate::version; +pub struct ModuleLoaderAndSourceMapGetter { + pub module_loader: Rc, + pub source_map_getter: Option>, +} + pub trait ModuleLoaderFactory: Send + Sync { fn create_for_main( &self, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc; + ) -> ModuleLoaderAndSourceMapGetter; fn create_for_worker( &self, root_permissions: PermissionsContainer, dynamic_permissions: PermissionsContainer, - ) -> Rc; - - fn create_source_map_getter(&self) -> Option>; + ) -> ModuleLoaderAndSourceMapGetter; } #[async_trait::async_trait(?Send)] @@ -549,11 +552,12 @@ impl CliMainWorkerFactory { (main_module, false) }; - let module_loader = shared + let ModuleLoaderAndSourceMapGetter { + module_loader, + source_map_getter, + } = shared .module_loader_factory .create_for_main(PermissionsContainer::allow_all(), permissions.clone()); - let maybe_source_map_getter = - 
shared.module_loader_factory.create_source_map_getter(); let maybe_inspector_server = shared.maybe_inspector_server.clone(); let create_web_worker_cb = @@ -601,7 +605,8 @@ impl CliMainWorkerFactory { locale: deno_core::v8::icu::get_language_tag(), location: shared.options.location.clone(), no_color: !colors::use_color(), - is_tty: deno_terminal::is_stdout_tty(), + is_stdout_tty: deno_terminal::is_stdout_tty(), + is_stderr_tty: deno_terminal::is_stderr_tty(), unstable: shared.options.unstable, unstable_features, user_agent: version::get_user_agent().to_string(), @@ -626,7 +631,7 @@ impl CliMainWorkerFactory { .clone(), root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), seed: shared.options.seed, - source_map_getter: maybe_source_map_getter, + source_map_getter, format_js_error_fn: Some(Arc::new(format_js_error)), create_web_worker_cb, maybe_inspector_server, @@ -768,12 +773,13 @@ fn create_web_worker_callback( Arc::new(move |args| { let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let module_loader = shared.module_loader_factory.create_for_worker( + let ModuleLoaderAndSourceMapGetter { + module_loader, + source_map_getter, + } = shared.module_loader_factory.create_for_worker( args.parent_permissions.clone(), args.permissions.clone(), ); - let maybe_source_map_getter = - shared.module_loader_factory.create_source_map_getter(); let create_web_worker_cb = create_web_worker_callback(mode, shared.clone(), stdio.clone()); @@ -811,7 +817,8 @@ fn create_web_worker_callback( locale: deno_core::v8::icu::get_language_tag(), location: Some(args.main_module.clone()), no_color: !colors::use_color(), - is_tty: deno_terminal::is_stdout_tty(), + is_stdout_tty: deno_terminal::is_stdout_tty(), + is_stderr_tty: deno_terminal::is_stderr_tty(), unstable: shared.options.unstable, unstable_features, user_agent: version::get_user_agent().to_string(), @@ -837,7 +844,7 @@ fn create_web_worker_callback( seed: shared.options.seed, create_web_worker_cb, format_js_error_fn: Some(Arc::new(format_js_error)), - source_map_getter: maybe_source_map_getter, + source_map_getter, module_loader, fs: shared.fs.clone(), npm_resolver: Some(shared.npm_resolver.clone().into_npm_resolver()), @@ -868,6 +875,8 @@ fn create_web_worker_callback( }) } +#[allow(clippy::print_stdout)] +#[allow(clippy::print_stderr)] #[cfg(test)] mod tests { use super::*; diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 058a24da0a872c..086478be323fbd 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.143.0" +version = "0.146.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 3d62b50fdde3b0..7e209799992897 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.81.0" +version = "0.84.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 1fffe075e141de..ae53774faff9f7 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_canvas" -version = "0.18.0" +version = "0.21.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -18,4 +18,3 @@ deno_core.workspace = true deno_webgpu.workspace = true image = { version = "0.24.7", default-features = false, features = ["png"] } 
serde = { workspace = true, features = ["derive"] } -tokio = { workspace = true, features = ["full"] } diff --git a/ext/console/01_console.js b/ext/console/01_console.js index b851b403522e62..2b807566883b1d 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -156,14 +156,20 @@ const { Uint8Array, } = primordials; -let noColor = () => false; +let noColorStdout = () => false; +let noColorStderr = () => false; -function setNoColorFn(fn) { - noColor = fn; +function setNoColorFns(stdoutFn, stderrFn) { + noColorStdout = stdoutFn; + noColorStderr = stderrFn; } -function getNoColor() { - return noColor(); +function getStdoutNoColor() { + return noColorStdout(); +} + +function getStderrNoColor() { + return noColorStderr(); } function assert(cond, msg = "Assertion failed.") { @@ -2927,6 +2933,7 @@ function cssToAnsi(css, prevCss = null) { function inspectArgs(args, inspectOptions = {}) { const ctx = { ...getDefaultInspectOptions(), + colors: inspectOptions.colors ?? !noColorStdout(), ...inspectOptions, }; if (inspectOptions.iterableLimit !== undefined) { @@ -2939,7 +2946,7 @@ function inspectArgs(args, inspectOptions = {}) { if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - const noColor = getNoColor(); + const noColor = !ctx.colors; const first = args[0]; let a = 0; let string = ""; @@ -3053,12 +3060,12 @@ const countMap = new SafeMap(); const timerMap = new SafeMap(); const isConsoleInstance = Symbol("isConsoleInstance"); -function getConsoleInspectOptions() { - const color = !getNoColor(); +/** @param noColor {boolean} */ +function getConsoleInspectOptions(noColor) { return { ...getDefaultInspectOptions(), - colors: color, - stylize: color ? createStylizeWithColor(styles, colors) : stylizeNoColor, + colors: !noColor, + stylize: noColor ? 
stylizeNoColor : createStylizeWithColor(styles, colors), }; } @@ -3090,7 +3097,7 @@ class Console { log = (...args) => { this.#printFunc( inspectArgs(args, { - ...getConsoleInspectOptions(), + ...getConsoleInspectOptions(noColorStdout()), indentLevel: this.indentLevel, }) + "\n", 1, @@ -3100,7 +3107,7 @@ class Console { debug = (...args) => { this.#printFunc( inspectArgs(args, { - ...getConsoleInspectOptions(), + ...getConsoleInspectOptions(noColorStdout()), indentLevel: this.indentLevel, }) + "\n", 0, @@ -3110,7 +3117,7 @@ class Console { info = (...args) => { this.#printFunc( inspectArgs(args, { - ...getConsoleInspectOptions(), + ...getConsoleInspectOptions(noColorStdout()), indentLevel: this.indentLevel, }) + "\n", 1, @@ -3119,8 +3126,10 @@ class Console { dir = (obj = undefined, options = {}) => { this.#printFunc( - inspectArgs([obj], { ...getConsoleInspectOptions(), ...options }) + - "\n", + inspectArgs([obj], { + ...getConsoleInspectOptions(noColorStdout()), + ...options, + }) + "\n", 1, ); }; @@ -3130,7 +3139,7 @@ class Console { warn = (...args) => { this.#printFunc( inspectArgs(args, { - ...getConsoleInspectOptions(), + ...getConsoleInspectOptions(noColorStderr()), indentLevel: this.indentLevel, }) + "\n", 2, @@ -3140,7 +3149,7 @@ class Console { error = (...args) => { this.#printFunc( inspectArgs(args, { - ...getConsoleInspectOptions(), + ...getConsoleInspectOptions(noColorStderr()), indentLevel: this.indentLevel, }) + "\n", 3, @@ -3353,7 +3362,10 @@ class Console { trace = (...args) => { const message = inspectArgs( args, - { ...getConsoleInspectOptions(), indentLevel: 0 }, + { + ...getConsoleInspectOptions(noColorStderr()), + indentLevel: 0, + }, ); const err = { name: "Trace", @@ -3473,10 +3485,11 @@ export { formatNumber, formatValue, getDefaultInspectOptions, - getNoColor, + getStderrNoColor, + getStdoutNoColor, inspect, inspectArgs, quoteString, - setNoColorFn, + setNoColorFns, styles, }; diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index a66e961034a0b8..9077fae1563c3d 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.149.0" +version = "0.152.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index 593c74ef7f41f7..304daa64c0aa61 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cron" -version = "0.29.0" +version = "0.32.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 760a45d1221db9..435142c7aaf28c 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.163.0" +version = "0.166.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -40,6 +40,5 @@ sha1 = { version = "0.10.6", features = ["oid"] } sha2.workspace = true signature.workspace = true spki.workspace = true -tokio.workspace = true uuid.workspace = true x25519-dalek = "2.0.0" diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index a8f1dd96c69946..e5feca9cbd513c 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.173.0" +version = "0.176.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -20,7 +20,6 @@ deno_core.workspace = true deno_tls.workspace = true dyn-clone = "1" http_v02.workspace = true 
-pin-project.workspace = true reqwest.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 3e43370d3b7523..21ca040277e33f 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -46,6 +46,7 @@ use deno_tls::RootCertStoreProvider; use data_url::DataUrl; use deno_tls::TlsKey; use deno_tls::TlsKeys; +use deno_tls::TlsKeysHolder; use http_v02::header::CONTENT_LENGTH; use http_v02::Uri; use reqwest::header::HeaderMap; @@ -80,7 +81,7 @@ pub struct Options { pub request_builder_hook: Option Result>, pub unsafely_ignore_certificate_errors: Option>, - pub client_cert_chain_and_key: Option, + pub client_cert_chain_and_key: TlsKeys, pub file_fetch_handler: Rc, } @@ -101,7 +102,7 @@ impl Default for Options { proxy: None, request_builder_hook: None, unsafely_ignore_certificate_errors: None, - client_cert_chain_and_key: None, + client_cert_chain_and_key: TlsKeys::Null, file_fetch_handler: Rc::new(DefaultFileFetchHandler), } } @@ -205,7 +206,11 @@ pub fn create_client_from_options( unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), - client_cert_chain_and_key: options.client_cert_chain_and_key.clone(), + client_cert_chain_and_key: options + .client_cert_chain_and_key + .clone() + .try_into() + .unwrap_or_default(), pool_max_idle_per_host: None, pool_idle_timeout: None, http1: true, @@ -821,7 +826,7 @@ fn default_true() -> bool { pub fn op_fetch_custom_client( state: &mut OpState, #[serde] args: CreateHttpClientArgs, - #[cppgc] tls_keys: &deno_tls::TlsKeys, + #[cppgc] tls_keys: &TlsKeysHolder, ) -> Result where FP: FetchPermissions + 'static, @@ -832,11 +837,6 @@ where permissions.check_net_url(&url, "Deno.createHttpClient()")?; } - let client_cert_chain_and_key = match tls_keys { - TlsKeys::Null => None, - TlsKeys::Static(key) => Some(key.clone()), - }; - let options = state.borrow::(); let ca_certs = args .ca_certs @@ -853,7 +853,7 @@ where unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), - client_cert_chain_and_key, + client_cert_chain_and_key: tls_keys.take().try_into().unwrap(), pool_max_idle_per_host: args.pool_max_idle_per_host, pool_idle_timeout: args.pool_idle_timeout.and_then( |timeout| match timeout { @@ -915,7 +915,7 @@ pub fn create_http_client( options.root_cert_store, options.ca_certs, options.unsafely_ignore_certificate_errors, - options.client_cert_chain_and_key, + options.client_cert_chain_and_key.into(), deno_tls::SocketUse::Http, )?; diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index cd7a76abb73325..7fbcdf0398e79f 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.136.0" +version = "0.139.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -19,10 +19,10 @@ dlopen2.workspace = true dynasmrt = "1.2.3" libffi = "=3.2.0" libffi-sys = "=2.3.0" +log.workspace = true serde.workspace = true serde-value = "0.7" serde_json = "1.0" -tokio.workspace = true [target.'cfg(windows)'.dependencies] winapi = { workspace = true, features = ["errhandlingapi", "minwindef", "ntdef", "winbase", "winnt"] } diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index eeea49c2399d61..55f38c60218a44 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -176,7 +176,7 @@ unsafe extern "C" fn deno_ffi_callback( let tc_scope = &mut TryCatch::new(scope); args.run(tc_scope); if tc_scope.exception().is_some() { - eprintln!("Illegal unhandled exception in 
nonblocking callback."); + log::error!("Illegal unhandled exception in nonblocking callback."); } }); } diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index d8483f3da09e9a..4c3cbb7a0db2eb 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.59.0" +version = "0.62.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -22,13 +22,10 @@ base32.workspace = true deno_core.workspace = true deno_io.workspace = true filetime.workspace = true -fs3.workspace = true libc.workspace = true -log.workspace = true rand.workspace = true rayon = "1.8.0" serde.workspace = true -tokio.workspace = true [target.'cfg(unix)'.dependencies] nix.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 6589dd6f2022a3..c3cda3c113b826 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.146.0" +version = "0.149.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs index 6b033ffe07b6aa..fd1203f53b61e8 100644 --- a/ext/http/response_body.rs +++ b/ext/http/response_body.rs @@ -625,6 +625,7 @@ impl PollFrame for BrotliResponseStream { } } +#[allow(clippy::print_stderr)] #[cfg(test)] mod tests { use super::*; diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index b7ce3ea5d7c967..fd9b624fe7bf55 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.59.0" +version = "0.62.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -18,6 +18,7 @@ async-trait.workspace = true deno_core.workspace = true filetime.workspace = true fs3.workspace = true +log.workspace = true once_cell.workspace = true tokio.workspace = true diff --git a/ext/io/winpipe.rs b/ext/io/winpipe.rs index f66dec6b6b6834..01d018008d797f 100644 --- a/ext/io/winpipe.rs +++ b/ext/io/winpipe.rs @@ -74,7 +74,7 @@ fn create_named_pipe_inner() -> io::Result<(RawHandle, RawHandle)> { // This should not happen, so we would like to get some better diagnostics here. // SAFETY: Printing last error for diagnostics unsafe { - eprintln!( + log::error!( "*** Unexpected server pipe failure '{pipe_name:?}': {:x}", GetLastError() ); @@ -99,7 +99,7 @@ fn create_named_pipe_inner() -> io::Result<(RawHandle, RawHandle)> { // SAFETY: Getting last error for diagnostics let error = unsafe { GetLastError() }; // This should not happen, so we would like to get some better diagnostics here. 
- eprintln!( + log::error!( "*** Unexpected client pipe failure '{pipe_name:?}': {:x}", error ); diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 56640dfb3e0e4f..ae7301aaf1d95e 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.57.0" +version = "0.60.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -30,14 +30,9 @@ log.workspace = true num-bigint.workspace = true prost.workspace = true rand.workspace = true -reqwest.workspace = true rusqlite.workspace = true serde.workspace = true -serde_json.workspace = true -termcolor.workspace = true -tokio.workspace = true url.workspace = true -uuid = { workspace = true, features = ["serde"] } [build-dependencies] prost-build.workspace = true diff --git a/ext/kv/remote.rs b/ext/kv/remote.rs index 88127fc8fa20a5..9d5e099c73b9d8 100644 --- a/ext/kv/remote.rs +++ b/ext/kv/remote.rs @@ -16,7 +16,7 @@ use deno_fetch::CreateHttpClientOptions; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; use deno_tls::RootCertStoreProvider; -use deno_tls::TlsKey; +use deno_tls::TlsKeys; use denokv_remote::MetadataEndpoint; use denokv_remote::Remote; use url::Url; @@ -27,7 +27,7 @@ pub struct HttpOptions { pub root_cert_store_provider: Option>, pub proxy: Option, pub unsafely_ignore_certificate_errors: Option>, - pub client_cert_chain_and_key: Option, + pub client_cert_chain_and_key: TlsKeys, } impl HttpOptions { @@ -135,7 +135,11 @@ impl DatabaseHandler unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), - client_cert_chain_and_key: options.client_cert_chain_and_key.clone(), + client_cert_chain_and_key: options + .client_cert_chain_and_key + .clone() + .try_into() + .unwrap(), pool_max_idle_per_host: None, pool_idle_timeout: None, http1: false, diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index be4a3c0c393c54..3546b401fe6649 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.79.0" +version = "0.82.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/02_tls.js b/ext/net/02_tls.js index 0b775047f622b9..e51df7424a8e6d 100644 --- a/ext/net/02_tls.js +++ b/ext/net/02_tls.js @@ -6,6 +6,10 @@ import { op_net_accept_tls, op_net_connect_tls, op_net_listen_tls, + op_tls_cert_resolver_create, + op_tls_cert_resolver_poll, + op_tls_cert_resolver_resolve, + op_tls_cert_resolver_resolve_error, op_tls_handshake, op_tls_key_null, op_tls_key_static, @@ -16,6 +20,7 @@ const { Number, ObjectDefineProperty, TypeError, + SymbolFor, } = primordials; import { Conn, Listener } from "ext:deno_net/01_net.js"; @@ -87,9 +92,12 @@ async function connectTls({ keyFile, privateKey, }); + // TODO(mmastrac): We only expose this feature via symbol for now. This should actually be a feature + // in Deno.connectTls, however. + const serverName = arguments[0][serverNameSymbol] ?? null; const { 0: rid, 1: localAddr, 2: remoteAddr } = await op_net_connect_tls( { hostname, port }, - { certFile: deprecatedCertFile, caCerts, alpnProtocols }, + { certFile: deprecatedCertFile, caCerts, alpnProtocols, serverName }, keyPair, ); localAddr.transport = "tcp"; @@ -133,6 +141,10 @@ class TlsListener extends Listener { * interfaces. 
*/ function hasTlsKeyPairOptions(options) { + // TODO(mmastrac): remove this temporary symbol when the API lands + if (options[resolverSymbol] !== undefined) { + return true; + } return (options.cert !== undefined || options.key !== undefined || options.certFile !== undefined || options.keyFile !== undefined || options.privateKey !== undefined || @@ -159,6 +171,11 @@ function loadTlsKeyPair(api, { privateKey = undefined; } + // TODO(mmastrac): remove this temporary symbol when the API lands + if (arguments[1][resolverSymbol] !== undefined) { + return createTlsKeyResolver(arguments[1][resolverSymbol]); + } + // Check for "pem" format if (keyFormat !== undefined && keyFormat !== "pem") { throw new TypeError('If `keyFormat` is specified, it must be "pem"'); @@ -275,6 +292,37 @@ async function startTls( return new TlsConn(rid, remoteAddr, localAddr); } +const resolverSymbol = SymbolFor("unstableSniResolver"); +const serverNameSymbol = SymbolFor("unstableServerName"); + +function createTlsKeyResolver(callback) { + const { 0: resolver, 1: lookup } = op_tls_cert_resolver_create(); + (async () => { + while (true) { + const sni = await op_tls_cert_resolver_poll(lookup); + if (typeof sni !== "string") { + break; + } + try { + const key = await callback(sni); + if (!hasTlsKeyPairOptions(key)) { + op_tls_cert_resolver_resolve_error(lookup, sni, "Invalid key"); + } else { + const resolved = loadTlsKeyPair("Deno.listenTls", key); + op_tls_cert_resolver_resolve(lookup, sni, resolved); + } + } catch (e) { + op_tls_cert_resolver_resolve_error(lookup, sni, e.message); + } + } + })(); + return resolver; +} + +internals.resolverSymbol = resolverSymbol; +internals.serverNameSymbol = serverNameSymbol; +internals.createTlsKeyResolver = createTlsKeyResolver; + export { connectTls, hasTlsKeyPairOptions, diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index f8d9a31df70ec6..c2d130eb346785 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.141.0" +version = "0.144.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -16,10 +16,6 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_tls.workspace = true -# Pinning to 0.5.1, because 0.5.2 breaks "cargo publish" -# https://github.com/bluejekyll/enum-as-inner/pull/91 -enum-as-inner = "=0.5.1" -log.workspace = true pin-project.workspace = true rustls-tokio-stream.workspace = true serde.workspace = true diff --git a/ext/net/lib.rs b/ext/net/lib.rs index d137aa315a47a4..fa8074b345c27f 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -87,6 +87,10 @@ deno_core::extension!(deno_net, ops_tls::op_tls_key_null, ops_tls::op_tls_key_static, ops_tls::op_tls_key_static_from_file
<P>, + ops_tls::op_tls_cert_resolver_create, + ops_tls::op_tls_cert_resolver_poll, + ops_tls::op_tls_cert_resolver_resolve, + ops_tls::op_tls_cert_resolver_resolve_error, ops_tls::op_tls_start<P>, ops_tls::op_net_connect_tls<P>, ops_tls::op_net_listen_tls<P>
, diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index 487adf3bc732ce..c529859087c578 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -11,6 +11,7 @@ use crate::DefaultTlsOptions; use crate::NetPermissions; use crate::UnsafelyIgnoreCertificateErrors; use deno_core::anyhow::anyhow; +use deno_core::anyhow::bail; use deno_core::error::bad_resource; use deno_core::error::custom_error; use deno_core::error::generic_error; @@ -29,13 +30,18 @@ use deno_core::ResourceId; use deno_tls::create_client_config; use deno_tls::load_certs; use deno_tls::load_private_keys; +use deno_tls::new_resolver; use deno_tls::rustls::Certificate; +use deno_tls::rustls::ClientConnection; use deno_tls::rustls::PrivateKey; use deno_tls::rustls::ServerConfig; use deno_tls::rustls::ServerName; +use deno_tls::ServerConfigProvider; use deno_tls::SocketUse; use deno_tls::TlsKey; +use deno_tls::TlsKeyLookup; use deno_tls::TlsKeys; +use deno_tls::TlsKeysHolder; use rustls_tokio_stream::TlsStreamRead; use rustls_tokio_stream::TlsStreamWrite; use serde::Deserialize; @@ -63,14 +69,26 @@ pub(crate) const TLS_BUFFER_SIZE: Option = pub struct TlsListener { pub(crate) tcp_listener: TcpListener, - pub(crate) tls_config: Arc, + pub(crate) tls_config: Option>, + pub(crate) server_config_provider: Option, } impl TlsListener { pub async fn accept(&self) -> std::io::Result<(TlsStream, SocketAddr)> { let (tcp, addr) = self.tcp_listener.accept().await?; - let tls = - TlsStream::new_server_side(tcp, self.tls_config.clone(), TLS_BUFFER_SIZE); + let tls = if let Some(provider) = &self.server_config_provider { + TlsStream::new_server_side_acceptor( + tcp, + provider.clone(), + TLS_BUFFER_SIZE, + ) + } else { + TlsStream::new_server_side( + tcp, + self.tls_config.clone().unwrap(), + TLS_BUFFER_SIZE, + ) + }; Ok((tls, addr)) } pub fn local_addr(&self) -> std::io::Result { @@ -164,6 +182,7 @@ pub struct ConnectTlsArgs { cert_file: Option, ca_certs: Vec, alpn_protocols: Option>, + server_name: Option, } #[derive(Deserialize)] @@ -179,7 +198,10 @@ pub struct StartTlsArgs { pub fn op_tls_key_null<'s>( scope: &mut v8::HandleScope<'s>, ) -> Result, AnyError> { - Ok(deno_core::cppgc::make_cppgc_object(scope, TlsKeys::Null)) + Ok(deno_core::cppgc::make_cppgc_object( + scope, + TlsKeysHolder::from(TlsKeys::Null), + )) } #[op2] @@ -195,7 +217,7 @@ pub fn op_tls_key_static<'s>( .unwrap(); Ok(deno_core::cppgc::make_cppgc_object( scope, - TlsKeys::Static(TlsKey(cert, key)), + TlsKeysHolder::from(TlsKeys::Static(TlsKey(cert, key))), )) } @@ -224,10 +246,53 @@ where .unwrap(); Ok(deno_core::cppgc::make_cppgc_object( scope, - TlsKeys::Static(TlsKey(cert, key)), + TlsKeysHolder::from(TlsKeys::Static(TlsKey(cert, key))), )) } +#[op2] +pub fn op_tls_cert_resolver_create<'s>( + scope: &mut v8::HandleScope<'s>, +) -> v8::Local<'s, v8::Array> { + let (resolver, lookup) = new_resolver(); + let resolver = deno_core::cppgc::make_cppgc_object( + scope, + TlsKeysHolder::from(TlsKeys::Resolver(resolver)), + ); + let lookup = deno_core::cppgc::make_cppgc_object(scope, lookup); + v8::Array::new_with_elements(scope, &[resolver.into(), lookup.into()]) +} + +#[op2(async)] +#[string] +pub async fn op_tls_cert_resolver_poll( + #[cppgc] lookup: &TlsKeyLookup, +) -> Option { + lookup.poll().await +} + +#[op2(fast)] +pub fn op_tls_cert_resolver_resolve( + #[cppgc] lookup: &TlsKeyLookup, + #[string] sni: String, + #[cppgc] key: &TlsKeysHolder, +) -> Result<(), AnyError> { + let TlsKeys::Static(key) = key.take() else { + bail!("unexpected key type"); + }; + 
lookup.resolve(sni, Ok(key)); + Ok(()) +} + +#[op2(fast)] +pub fn op_tls_cert_resolver_resolve_error( + #[cppgc] lookup: &TlsKeyLookup, + #[string] sni: String, + #[string] error: String, +) { + lookup.resolve(sni, Err(anyhow!(error))) +} + #[op2] #[serde] pub fn op_tls_start( @@ -287,7 +352,7 @@ where root_cert_store, ca_certs, unsafely_ignore_certificate_errors, - None, + TlsKeys::Null, SocketUse::GeneralSsl, )?; @@ -299,8 +364,7 @@ where let tls_config = Arc::new(tls_config); let tls_stream = TlsStream::new_client_side( tcp_stream, - tls_config, - hostname_dns, + ClientConnection::new(tls_config, hostname_dns)?, TLS_BUFFER_SIZE, ); @@ -320,7 +384,7 @@ pub async fn op_net_connect_tls( state: Rc>, #[serde] addr: IpAddr, #[serde] args: ConnectTlsArgs, - #[cppgc] key_pair: &TlsKeys, + #[cppgc] key_pair: &TlsKeysHolder, ) -> Result<(ResourceId, IpAddr, IpAddr), AnyError> where NP: NetPermissions + 'static, @@ -357,8 +421,12 @@ where .borrow() .borrow::() .root_cert_store()?; - let hostname_dns = ServerName::try_from(&*addr.hostname) - .map_err(|_| invalid_hostname(&addr.hostname))?; + let hostname_dns = if let Some(server_name) = args.server_name { + ServerName::try_from(server_name.as_str()) + } else { + ServerName::try_from(&*addr.hostname) + } + .map_err(|_| invalid_hostname(&addr.hostname))?; let connect_addr = resolve_addr(&addr.hostname, addr.port) .await? .next() @@ -367,15 +435,11 @@ where let local_addr = tcp_stream.local_addr()?; let remote_addr = tcp_stream.peer_addr()?; - let cert_and_key = match key_pair { - TlsKeys::Null => None, - TlsKeys::Static(key) => Some(key.clone()), - }; let mut tls_config = create_client_config( root_cert_store, ca_certs, unsafely_ignore_certificate_errors, - cert_and_key, + key_pair.take(), SocketUse::GeneralSsl, )?; @@ -388,8 +452,7 @@ where let tls_stream = TlsStream::new_client_side( tcp_stream, - tls_config, - hostname_dns, + ClientConnection::new(tls_config, hostname_dns)?, TLS_BUFFER_SIZE, ); @@ -429,7 +492,7 @@ pub fn op_net_listen_tls( state: &mut OpState, #[serde] addr: IpAddr, #[serde] args: ListenTlsArgs, - #[cppgc] keys: &TlsKeys, + #[cppgc] keys: &TlsKeysHolder, ) -> Result<(ResourceId, IpAddr), AnyError> where NP: NetPermissions + 'static, @@ -444,36 +507,44 @@ where .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenTls()")?; } - let tls_config = ServerConfig::builder() - .with_safe_defaults() - .with_no_client_auth(); - - let mut tls_config = match keys { - TlsKeys::Null => Err(anyhow!("Deno.listenTls requires a key")), - TlsKeys::Static(TlsKey(cert, key)) => tls_config - .with_single_cert(cert.clone(), key.clone()) - .map_err(|e| anyhow!(e)), - } - .map_err(|e| { - custom_error("InvalidData", "Error creating TLS certificate").context(e) - })?; - - if let Some(alpn_protocols) = args.alpn_protocols { - tls_config.alpn_protocols = - alpn_protocols.into_iter().map(|s| s.into_bytes()).collect(); - } - let bind_addr = resolve_addr_sync(&addr.hostname, addr.port)? 
.next() .ok_or_else(|| generic_error("No resolved address found"))?; let tcp_listener = TcpListener::bind_direct(bind_addr, args.reuse_port)?; let local_addr = tcp_listener.local_addr()?; + let alpn = args + .alpn_protocols + .unwrap_or_default() + .into_iter() + .map(|s| s.into_bytes()) + .collect(); + let listener = match keys.take() { + TlsKeys::Null => Err(anyhow!("Deno.listenTls requires a key")), + TlsKeys::Static(TlsKey(cert, key)) => { + let mut tls_config = ServerConfig::builder() + .with_safe_defaults() + .with_no_client_auth() + .with_single_cert(cert, key) + .map_err(|e| anyhow!(e))?; + tls_config.alpn_protocols = alpn; + Ok(TlsListener { + tcp_listener, + tls_config: Some(tls_config.into()), + server_config_provider: None, + }) + } + TlsKeys::Resolver(resolver) => Ok(TlsListener { + tcp_listener, + tls_config: None, + server_config_provider: Some(resolver.into_server_config_provider(alpn)), + }), + } + .map_err(|e| { + custom_error("InvalidData", "Error creating TLS certificate").context(e) + })?; - let tls_listener_resource = NetworkListenerResource::new(TlsListener { - tcp_listener, - tls_config: tls_config.into(), - }); + let tls_listener_resource = NetworkListenerResource::new(listener); let rid = state.resource_table.add(tls_listener_resource); diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index e6b9a4a89cc4df..efa565cc1717c4 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.86.0" +version = "0.89.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -36,6 +36,7 @@ errno = "0.2.8" faster-hex.workspace = true h2 = { version = "0.3.26", features = ["unstable"] } hkdf.workspace = true +home = "0.5.9" http_v02.workspace = true idna = "0.3.0" indexmap.workspace = true @@ -45,7 +46,6 @@ libc.workspace = true libz-sys.workspace = true md-5 = "0.10.5" md4 = "0.10.2" -nix.workspace = true num-bigint.workspace = true num-bigint-dig = "0.8.2" num-integer = "0.1.45" @@ -72,7 +72,6 @@ signature.workspace = true simd-json = "0.13.4" spki.workspace = true tokio.workspace = true -typenum = "1.15.0" url.workspace = true winapi.workspace = true x25519-dalek = "2.0.0" diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index 0a0226625470ff..ad38a511bbb7c1 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -86,7 +86,7 @@ impl NodeCodeTranslator { permissions: &dyn NodePermissions, ) -> Result { let mut temp_var_count = 0; - let mut handled_reexports: HashSet = HashSet::default(); + let mut handled_reexports: HashSet = HashSet::default(); let analysis = self.cjs_code_analyzer.analyze_cjs(specifier, source)?; @@ -114,12 +114,6 @@ impl NodeCodeTranslator { } while let Some((reexport, referrer)) = reexports_to_handle.pop_front() { - if handled_reexports.contains(&reexport) { - continue; - } - - handled_reexports.insert(reexport.to_string()); - // First, resolve the reexport specifier let reexport_specifier = self.resolve( &reexport, @@ -131,6 +125,10 @@ impl NodeCodeTranslator { permissions, )?; + if !handled_reexports.insert(reexport_specifier.clone()) { + continue; + } + // Second, resolve its exports and re-exports let analysis = self .cjs_code_analyzer diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 43a5b158e30e41..b4eeb71c257ac9 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -309,6 +309,7 @@ deno_core::extension!(deno_node, ops::os::op_node_os_username
<P>, ops::os::op_geteuid<P>, ops::os::op_cpus<P>, + ops::os::op_homedir<P>
, op_node_build_os, op_node_is_promise_rejected, op_npm_process_state, diff --git a/ext/node/ops/http2.rs b/ext/node/ops/http2.rs index b89990e0ba2695..e0de8e474fa084 100644 --- a/ext/node/ops/http2.rs +++ b/ext/node/ops/http2.rs @@ -423,7 +423,7 @@ pub struct Http2ClientResponse { pub async fn op_http2_client_get_response( state: Rc>, #[smi] stream_rid: ResourceId, -) -> Result { +) -> Result<(Http2ClientResponse, bool), AnyError> { let resource = state .borrow() .resource_table @@ -439,6 +439,7 @@ pub async fn op_http2_client_get_response( for (key, val) in parts.headers.iter() { res_headers.push((key.as_str().into(), val.as_bytes().into())); } + let end_stream = body.is_end_stream(); let (trailers_tx, trailers_rx) = tokio::sync::oneshot::channel(); let body_rid = @@ -450,11 +451,14 @@ pub async fn op_http2_client_get_response( trailers_rx: AsyncRefCell::new(Some(trailers_rx)), trailers_tx: AsyncRefCell::new(Some(trailers_tx)), }); - Ok(Http2ClientResponse { - headers: res_headers, - body_rid, - status_code: status.into(), - }) + Ok(( + Http2ClientResponse { + headers: res_headers, + body_rid, + status_code: status.into(), + }, + end_stream, + )) } enum DataOrTrailers { diff --git a/ext/node/ops/os/mod.rs b/ext/node/ops/os/mod.rs index 603f678e0b9100..5b32113e5bd67e 100644 --- a/ext/node/ops/os/mod.rs +++ b/ext/node/ops/os/mod.rs @@ -88,3 +88,17 @@ where cpus::cpu_info().ok_or_else(|| type_error("Failed to get cpu info")) } + +#[op2] +#[string] +pub fn op_homedir
<P>(state: &mut OpState) -> Result<Option<String>, AnyError> +where + P: NodePermissions + 'static, +{ + { + let permissions = state.borrow_mut::<P>
(); + permissions.check_sys("homedir", "node:os.homedir()")?; + } + + Ok(home::home_dir().map(|path| path.to_string_lossy().to_string())) +} diff --git a/ext/node/polyfills/async_hooks.ts b/ext/node/polyfills/async_hooks.ts index e8960c4dc0268c..f94b8d2c649c76 100644 --- a/ext/node/polyfills/async_hooks.ts +++ b/ext/node/polyfills/async_hooks.ts @@ -207,6 +207,8 @@ export class AsyncResource { } } + emitDestroy() {} + bind(fn: (...args: unknown[]) => unknown, thisArg = this) { validateFunction(fn, "fn"); const frame = AsyncContextFrame.current(); diff --git a/ext/node/polyfills/http2.ts b/ext/node/polyfills/http2.ts index b9d9f4b0672005..59756dd0f26cef 100644 --- a/ext/node/polyfills/http2.ts +++ b/ext/node/polyfills/http2.ts @@ -822,7 +822,7 @@ export class ClientHttp2Stream extends Duplex { session[kDenoClientRid], this.#rid, ); - const response = await op_http2_client_get_response( + const [response, endStream] = await op_http2_client_get_response( this.#rid, ); debugHttp2(">>> after get response", response); @@ -831,7 +831,13 @@ export class ClientHttp2Stream extends Duplex { ...Object.fromEntries(response.headers), }; debugHttp2(">>> emitting response", headers); - this.emit("response", headers, 0); + this.emit( + "response", + headers, + endStream + ? constants.NGHTTP2_FLAG_END_STREAM + : constants.NGHTTP2_FLAG_NONE, + ); this[kDenoResponse] = response; this.emit("ready"); })(); diff --git a/ext/node/polyfills/internal/fs/utils.mjs b/ext/node/polyfills/internal/fs/utils.mjs index 09169127d23471..a1823bb32808b3 100644 --- a/ext/node/polyfills/internal/fs/utils.mjs +++ b/ext/node/polyfills/internal/fs/utils.mjs @@ -891,7 +891,7 @@ export const validateRmOptionsSync = hideStackFrames( message: "is a directory", path, syscall: "rm", - errno: EISDIR, + errno: osConstants.errno.EISDIR, }); } } diff --git a/ext/node/polyfills/os.ts b/ext/node/polyfills/os.ts index bc88b06015e5ff..753e393198b553 100644 --- a/ext/node/polyfills/os.ts +++ b/ext/node/polyfills/os.ts @@ -25,6 +25,7 @@ import { op_cpus, + op_homedir, op_node_os_get_priority, op_node_os_set_priority, op_node_os_username, @@ -32,7 +33,7 @@ import { import { validateIntegerRange } from "ext:deno_node/_utils.ts"; import process from "node:process"; -import { isWindows, osType } from "ext:deno_node/_util/os.ts"; +import { isWindows } from "ext:deno_node/_util/os.ts"; import { ERR_OS_NO_HOMEDIR } from "ext:deno_node/internal/errors.ts"; import { os } from "ext:deno_node/internal_binding/constants.ts"; import { osUptime } from "ext:runtime/30_os.js"; @@ -173,21 +174,7 @@ export function getPriority(pid = 0): number { /** Returns the string path of the current user's home directory. */ export function homedir(): string | null { - // Note: Node/libuv calls getpwuid() / GetUserProfileDirectory() when the - // environment variable isn't set but that's the (very uncommon) fallback - // path. IMO, it's okay to punt on that for now. - switch (osType) { - case "windows": - return Deno.env.get("USERPROFILE") || null; - case "linux": - case "android": - case "darwin": - case "freebsd": - case "openbsd": - return Deno.env.get("HOME") || null; - default: - throw Error("unreachable"); - } + return op_homedir(); } /** Returns the host name of the operating system as a string. 
*/ diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index b24bf6a8e4b9eb..c281cab449d460 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -301,6 +301,16 @@ export function kill(pid: number, sig: string | number = "SIGTERM") { return true; } +let getgid, getuid, geteuid; + +if (!isWindows) { + getgid = () => Deno.gid(); + getuid = () => Deno.uid(); + geteuid = () => op_geteuid(); +} + +export { geteuid, getgid, getuid }; + // deno-lint-ignore no-explicit-any function uncaughtExceptionHandler(err: any, origin: string) { // The origin parameter can be 'unhandledRejection' or 'uncaughtException' @@ -638,19 +648,13 @@ class Process extends EventEmitter { } /** This method is removed on Windows */ - getgid?(): number { - return Deno.gid()!; - } + getgid = getgid; /** This method is removed on Windows */ - getuid?(): number { - return Deno.uid()!; - } + getuid = getuid; /** This method is removed on Windows */ - geteuid?(): number { - return op_geteuid(); - } + geteuid = geteuid; // TODO(kt3k): Implement this when we added -e option to node compat mode _eval: string | undefined = undefined; @@ -668,14 +672,9 @@ class Process extends EventEmitter { execPath = path; } - setStartTime(t: number) { - this.#startTime = t; - } - - #startTime = 0; /** https://nodejs.org/api/process.html#processuptime */ uptime() { - return (Date.now() - this.#startTime) / 1000; + return Number((performance.now() / 1000).toFixed(9)); } #allowedFlags = buildAllowedFlags(); @@ -887,16 +886,12 @@ internals.__bootstrapNodeProcess = function ( ); } - process.setStartTime(Date.now()); - arch = arch_(); platform = isWindows ? "win32" : Deno.build.os; pid = Deno.pid; initializeDebugEnv(nodeDebug); - // @ts-ignore Remove setStartTime and #startTime is not modifiable - delete process.setStartTime; delete internals.__bootstrapNodeProcess; } else { // Warmup, assuming stdin/stdout/stderr are all terminals diff --git a/ext/node/polyfills/worker_threads.ts b/ext/node/polyfills/worker_threads.ts index 71999dd6244307..36314675ac859c 100644 --- a/ext/node/polyfills/worker_threads.ts +++ b/ext/node/polyfills/worker_threads.ts @@ -32,6 +32,7 @@ import process from "node:process"; const { JSONParse, JSONStringify, ObjectPrototypeIsPrototypeOf } = primordials; const { Error, + PromiseResolve, Symbol, SymbolFor, SymbolIterator, @@ -280,7 +281,8 @@ class NodeWorker extends EventEmitter { this.#status = "TERMINATED"; op_host_terminate_worker(this.#id); } - this.emit("exit", 1); + this.emit("exit", 0); + return PromiseResolve(0); } ref() { diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index f72bc7262e9077..3c182c76d7b9ef 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.136.0" +version = "0.139.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -16,10 +16,10 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_native_certs = "0.2.0" -once_cell.workspace = true rustls = { workspace = true, features = ["dangerous_configuration"] } rustls-pemfile.workspace = true rustls-tokio-stream.workspace = true rustls-webpki.workspace = true serde.workspace = true +tokio.workspace = true webpki-roots.workspace = true diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index 7e68971e2ef028..5122264bf179ec 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -30,6 +30,9 @@ use std::io::Cursor; use std::sync::Arc; use std::time::SystemTime; +mod tls_key; +pub use tls_key::*; + pub type Certificate = 
rustls::Certificate; pub type PrivateKey = rustls::PrivateKey; pub type RootCertStore = rustls::RootCertStore; @@ -175,7 +178,7 @@ pub fn create_client_config( root_cert_store: Option<RootCertStore>, ca_certs: Vec<Vec<u8>>, unsafely_ignore_certificate_errors: Option<Vec<String>>, - maybe_cert_chain_and_key: Option<TlsKey>, + maybe_cert_chain_and_key: TlsKeys, socket_use: SocketUse, ) -> Result<ClientConfig, AnyError> { if let Some(ic_allowlist) = unsafely_ignore_certificate_errors { @@ -189,14 +192,13 @@ pub fn create_client_config( // However it's not really feasible to deduplicate it as the `client_config` instances // are not type-compatible - one wants "client cert", the other wants "transparency policy // or client cert". - let mut client = - if let Some(TlsKey(cert_chain, private_key)) = maybe_cert_chain_and_key { - client_config - .with_client_auth_cert(cert_chain, private_key) - .expect("invalid client key or certificate") - } else { - client_config.with_no_client_auth() - }; + let mut client = match maybe_cert_chain_and_key { + TlsKeys::Static(TlsKey(cert_chain, private_key)) => client_config + .with_client_auth_cert(cert_chain, private_key) + .expect("invalid client key or certificate"), + TlsKeys::Null => client_config.with_no_client_auth(), + TlsKeys::Resolver(_) => unimplemented!(), + }; add_alpn(&mut client, socket_use); return Ok(client); @@ -226,14 +228,13 @@ pub fn create_client_config( root_cert_store }); - let mut client = - if let Some(TlsKey(cert_chain, private_key)) = maybe_cert_chain_and_key { - client_config - .with_client_auth_cert(cert_chain, private_key) - .expect("invalid client key or certificate") - } else { - client_config.with_no_client_auth() - }; + let mut client = match maybe_cert_chain_and_key { + TlsKeys::Static(TlsKey(cert_chain, private_key)) => client_config + .with_client_auth_cert(cert_chain, private_key) + .expect("invalid client key or certificate"), + TlsKeys::Null => client_config.with_no_client_auth(), + TlsKeys::Resolver(_) => unimplemented!(), + }; add_alpn(&mut client, socket_use); Ok(client) @@ -325,15 +326,3 @@ pub fn load_private_keys(bytes: &[u8]) -> Result<Vec<PrivateKey>, AnyError> { Ok(keys) } - -/// A loaded key. -// FUTURE(mmastrac): add resolver enum value to support dynamic SNI -pub enum TlsKeys { - // TODO(mmastrac): We need Option<&T> for cppgc -- this is a workaround - Null, - Static(TlsKey), -} - -/// A TLS certificate/private key pair. -#[derive(Clone, Debug)] -pub struct TlsKey(pub Vec<Certificate>, pub PrivateKey); diff --git a/ext/tls/tls_key.rs b/ext/tls/tls_key.rs new file mode 100644 index 00000000000000..18064a91a05155 --- /dev/null +++ b/ext/tls/tls_key.rs @@ -0,0 +1,321 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +//! These represent the various types of TLS keys we support for both client and server +//! connections. +//! +//! A TLS key will most often be static, and will be loaded from a certificate and key file +//! or string. These are represented by `TlsKey`, which is stored in `TlsKeys::Static`. +//! +//! In more complex cases, you may need a `TlsKeyResolver`/`TlsKeyLookup` pair, which +//! requires polling of the `TlsKeyLookup` lookup queue. The underlying channels that are used for +//! key lookup can handle closing one end of the pair, in which case they will just +//! attempt to clean up the associated resources.
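+//!
+//! A minimal usage sketch (hedged: `new_resolver`, `poll`, and `resolve` are the APIs
+//! defined in this module; `load_key_for` is a hypothetical callback standing in for
+//! however the embedder actually produces a `TlsKey`):
+//!
+//! ```ignore
+//! let (resolver, lookup) = new_resolver();
+//! // One task drives the lookup queue and answers each SNI request.
+//! deno_core::unsync::spawn(async move {
+//!   while let Some(sni) = lookup.poll().await {
+//!     lookup.resolve(sni.clone(), load_key_for(&sni)); // returns Result<TlsKey, AnyError>
+//!   }
+//! });
+//! // Concurrent resolutions of the same SNI share one broadcast result.
+//! let key = resolver.resolve("example.com".to_owned()).await?;
+//! ```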
+ +use crate::Certificate; +use crate::PrivateKey; +use deno_core::anyhow::anyhow; +use deno_core::error::AnyError; +use deno_core::futures::future::poll_fn; +use deno_core::futures::future::Either; +use deno_core::futures::FutureExt; +use deno_core::unsync::spawn; +use rustls::ServerConfig; +use rustls_tokio_stream::ServerConfigProvider; +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt::Debug; +use std::future::ready; +use std::future::Future; +use std::io::ErrorKind; +use std::rc::Rc; +use std::sync::Arc; +use tokio::sync::broadcast; +use tokio::sync::mpsc; +use tokio::sync::oneshot; + +type ErrorType = Rc<AnyError>; + +/// A TLS certificate/private key pair. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TlsKey(pub Vec<Certificate>, pub PrivateKey); + +#[derive(Clone, Debug, Default)] +pub enum TlsKeys { + // TODO(mmastrac): We need Option<&T> for cppgc -- this is a workaround + #[default] + Null, + Static(TlsKey), + Resolver(TlsKeyResolver), +} + +pub struct TlsKeysHolder(RefCell<TlsKeys>); + +impl TlsKeysHolder { + pub fn take(&self) -> TlsKeys { + std::mem::take(&mut *self.0.borrow_mut()) + } +} + +impl From<TlsKeys> for TlsKeysHolder { + fn from(value: TlsKeys) -> Self { + TlsKeysHolder(RefCell::new(value)) + } +} + +impl TryInto<Option<TlsKey>> for TlsKeys { + type Error = Self; + fn try_into(self) -> Result<Option<TlsKey>, Self::Error> { + match self { + Self::Null => Ok(None), + Self::Static(key) => Ok(Some(key)), + Self::Resolver(_) => Err(self), + } + } +} + +impl From<Option<TlsKey>> for TlsKeys { + fn from(value: Option<TlsKey>) -> Self { + match value { + None => TlsKeys::Null, + Some(key) => TlsKeys::Static(key), + } + } +} + +enum TlsKeyState { + Resolving(broadcast::Receiver<Result<TlsKey, ErrorType>>), + Resolved(Result<TlsKey, ErrorType>), +} + +struct TlsKeyResolverInner { + resolution_tx: mpsc::UnboundedSender<( + String, + broadcast::Sender<Result<TlsKey, ErrorType>>, + )>, + cache: RefCell<HashMap<String, TlsKeyState>>, +} + +#[derive(Clone)] +pub struct TlsKeyResolver { + inner: Rc<TlsKeyResolverInner>, +} + +impl TlsKeyResolver { + async fn resolve_internal( + &self, + sni: String, + alpn: Vec<Vec<u8>>, + ) -> Result<Arc<ServerConfig>, AnyError> { + let key = self.resolve(sni).await?; + + let mut tls_config = ServerConfig::builder() + .with_safe_defaults() + .with_no_client_auth() + .with_single_cert(key.0, key.1)?; + tls_config.alpn_protocols = alpn; + Ok(tls_config.into()) + } + + pub fn into_server_config_provider( + self, + alpn: Vec<Vec<u8>>, + ) -> ServerConfigProvider { + let (tx, mut rx) = mpsc::unbounded_channel::<(_, oneshot::Sender<_>)>(); + + // We don't want to make the resolver multi-threaded, but the `ServerConfigProvider` is + // required to be wrapped in an Arc. To fix this, we spawn a task in our current runtime + // to respond to the requests.
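+ //
+ // Rough shape of the code that follows (a summary, not new behavior): each
+ // incoming ClientHello is reduced to its SNI string, paired with a oneshot
+ // reply sender, pushed over the channel, and answered by the task spawned
+ // below via `resolve_internal`:
+ //
+ //   hello -> (sni, oneshot::Sender) -> spawned task -> io::Result<Arc<ServerConfig>>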
+ spawn(async move { + while let Some((sni, txr)) = rx.recv().await { + _ = txr.send(self.resolve_internal(sni, alpn.clone()).await); + } + }); + + Arc::new(move |hello| { + // Take ownership of the SNI information + let sni = hello.server_name().unwrap_or_default().to_owned(); + let (txr, rxr) = tokio::sync::oneshot::channel::<_>(); + _ = tx.send((sni, txr)); + rxr + .map(|res| match res { + Err(e) => Err(std::io::Error::new(ErrorKind::InvalidData, e)), + Ok(Err(e)) => Err(std::io::Error::new(ErrorKind::InvalidData, e)), + Ok(Ok(res)) => Ok(res), + }) + .boxed() + }) + } +} + +impl Debug for TlsKeyResolver { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TlsKeyResolver").finish() + } +} + +pub fn new_resolver() -> (TlsKeyResolver, TlsKeyLookup) { + let (resolution_tx, resolution_rx) = mpsc::unbounded_channel(); + ( + TlsKeyResolver { + inner: Rc::new(TlsKeyResolverInner { + resolution_tx, + cache: Default::default(), + }), + }, + TlsKeyLookup { + resolution_rx: RefCell::new(resolution_rx), + pending: Default::default(), + }, + ) +} + +impl TlsKeyResolver { + /// Resolve the certificate and key for a given host. This immediately spawns a task in the + /// background and is therefore cancellation-safe. + pub fn resolve( + &self, + sni: String, + ) -> impl Future<Output = Result<TlsKey, AnyError>> { + let mut cache = self.inner.cache.borrow_mut(); + let mut recv = match cache.get(&sni) { + None => { + let (tx, rx) = broadcast::channel(1); + cache.insert(sni.clone(), TlsKeyState::Resolving(rx.resubscribe())); + _ = self.inner.resolution_tx.send((sni.clone(), tx)); + rx + } + Some(TlsKeyState::Resolving(recv)) => recv.resubscribe(), + Some(TlsKeyState::Resolved(res)) => { + return Either::Left(ready(res.clone().map_err(|_| anyhow!("Failed")))); + } + }; + drop(cache); + + // Make this cancellation safe + let inner = self.inner.clone(); + let handle = spawn(async move { + let res = recv.recv().await?; + let mut cache = inner.cache.borrow_mut(); + match cache.get(&sni) { + None | Some(TlsKeyState::Resolving(..)) => { + cache.insert(sni, TlsKeyState::Resolved(res.clone())); + } + Some(TlsKeyState::Resolved(..)) => { + // Someone beat us to it + } + } + res.map_err(|_| anyhow!("Failed")) + }); + Either::Right(async move { handle.await? }) + } +} + +pub struct TlsKeyLookup { + #[allow(clippy::type_complexity)] + resolution_rx: RefCell< + mpsc::UnboundedReceiver<( + String, + broadcast::Sender<Result<TlsKey, ErrorType>>, + )>, + >, + pending: + RefCell<HashMap<String, broadcast::Sender<Result<TlsKey, ErrorType>>>>, +} + +impl TlsKeyLookup { + /// Multiple `poll` calls are safe, but this method is not starvation-safe. Generally + /// only one `poll`er should be active at any time. + pub async fn poll(&self) -> Option<String> { + if let Some((sni, sender)) = + poll_fn(|cx| self.resolution_rx.borrow_mut().poll_recv(cx)).await + { + self.pending.borrow_mut().insert(sni.clone(), sender); + Some(sni) + } else { + None + } + } + + /// Resolve a previously polled item.
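+ /// A hedged sketch of the expected pairing with `poll` (it mirrors the tests
+ /// below; `make_key` is illustrative, standing in for real key loading):
+ ///
+ /// ```ignore
+ /// while let Some(sni) = lookup.poll().await {
+ ///   lookup.resolve(sni.clone(), Ok(make_key(&sni)));
+ /// }
+ /// ```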
+ pub fn resolve(&self, sni: String, res: Result<TlsKey, AnyError>) { + _ = self + .pending + .borrow_mut() + .remove(&sni) + .unwrap() + .send(res.map_err(Rc::new)); + } +} + +#[cfg(test)] +pub mod tests { + use super::*; + use deno_core::unsync::spawn; + use rustls::Certificate; + use rustls::PrivateKey; + + fn tls_key_for_test(sni: &str) -> TlsKey { + TlsKey( + vec![Certificate(format!("{sni}-cert").into_bytes())], + PrivateKey(format!("{sni}-key").into_bytes()), + ) + } + + #[tokio::test] + async fn test_resolve_once() { + let (resolver, lookup) = new_resolver(); + let task = spawn(async move { + while let Some(sni) = lookup.poll().await { + lookup.resolve(sni.clone(), Ok(tls_key_for_test(&sni))); + } + }); + + let key = resolver.resolve("example.com".to_owned()).await.unwrap(); + assert_eq!(tls_key_for_test("example.com"), key); + drop(resolver); + + task.await.unwrap(); + } + + #[tokio::test] + async fn test_resolve_concurrent() { + let (resolver, lookup) = new_resolver(); + let task = spawn(async move { + while let Some(sni) = lookup.poll().await { + lookup.resolve(sni.clone(), Ok(tls_key_for_test(&sni))); + } + }); + + let f1 = resolver.resolve("example.com".to_owned()); + let f2 = resolver.resolve("example.com".to_owned()); + + let key = f1.await.unwrap(); + assert_eq!(tls_key_for_test("example.com"), key); + let key = f2.await.unwrap(); + assert_eq!(tls_key_for_test("example.com"), key); + drop(resolver); + + task.await.unwrap(); + } + + #[tokio::test] + async fn test_resolve_multiple_concurrent() { + let (resolver, lookup) = new_resolver(); + let task = spawn(async move { + while let Some(sni) = lookup.poll().await { + lookup.resolve(sni.clone(), Ok(tls_key_for_test(&sni))); + } + }); + + let f1 = resolver.resolve("example1.com".to_owned()); + let f2 = resolver.resolve("example2.com".to_owned()); + + let key = f1.await.unwrap(); + assert_eq!(tls_key_for_test("example1.com"), key); + let key = f2.await.unwrap(); + assert_eq!(tls_key_for_test("example2.com"), key); + drop(resolver); + + task.await.unwrap(); + } +} diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 6914e48cda3ed6..14563bc6e086e6 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.149.0" +version = "0.152.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -15,7 +15,6 @@ path = "lib.rs" [dependencies] deno_core.workspace = true -serde.workspace = true urlpattern = "0.2.0" [dev-dependencies] diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index e0e3ccbbe17c59..9c2a0598053061 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -4964,11 +4964,11 @@ const readableStreamAsyncIteratorPrototype = ObjectSetPrototypeOf({ return PromiseResolve({ value: undefined, done: true }); }; - const returnPromise = reader[_iteratorNext] + reader[_iteratorNext] = reader[_iteratorNext] ?
PromisePrototypeThen(reader[_iteratorNext], returnSteps, returnSteps) : returnSteps(); return PromisePrototypeThen( - returnPromise, + reader[_iteratorNext], () => ({ value: arg, done: true }), ); }, diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index d582839c096787..b2a570496d9d04 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.180.0" +version = "0.183.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -24,7 +24,6 @@ futures.workspace = true serde = "1.0.149" tokio.workspace = true uuid = { workspace = true, features = ["serde"] } -windows-sys.workspace = true [dev-dependencies] deno_bench_util.workspace = true diff --git a/ext/webgpu/01_webgpu.js b/ext/webgpu/01_webgpu.js index be0b87cdf020be..502de212484868 100644 --- a/ext/webgpu/01_webgpu.js +++ b/ext/webgpu/01_webgpu.js @@ -417,9 +417,12 @@ function createGPUAdapter(inner) { return adapter; } +const _invalid = Symbol("[[invalid]]"); class GPUAdapter { /** @type {InnerGPUAdapter} */ [_adapter]; + /** @type {bool} */ + [_invalid]; /** @returns {GPUSupportedFeatures} */ get features() { @@ -466,6 +469,12 @@ class GPUAdapter { } } + if (this[_invalid]) { + throw new TypeError( + "The adapter cannot be reused, as it has been invalidated by a device creation", + ); + } + const { rid, queueRid, features, limits } = op_webgpu_request_device( this[_adapter].rid, descriptor.label, @@ -473,6 +482,8 @@ class GPUAdapter { descriptor.requiredLimits, ); + this[_invalid] = true; + const inner = new InnerGPUDevice({ rid, adapter: this, @@ -496,6 +507,12 @@ class GPUAdapter { requestAdapterInfo() { webidl.assertBranded(this, GPUAdapterPrototype); + if (this[_invalid]) { + throw new TypeError( + "The adapter cannot be reused, as it has been invalidated by a device creation", + ); + } + const { vendor, architecture, diff --git a/ext/webgpu/Cargo.toml b/ext/webgpu/Cargo.toml index 61a8095a7a9a32..314ba85e2f419c 100644 --- a/ext/webgpu/Cargo.toml +++ b/ext/webgpu/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webgpu" -version = "0.116.0" +version = "0.119.0" authors = ["the Deno authors"] edition.workspace = true license = "MIT" @@ -44,10 +44,6 @@ features = ["metal"] workspace = true features = ["dx12"] -[target.'cfg(windows)'.dependencies.wgpu-hal] -workspace = true -features = ["windows_rs"] - # We want the wgpu-core Vulkan backend on Unix (but not Emscripten) and Windows. 
[target.'cfg(any(windows, all(unix, not(target_os = "emscripten"))))'.dependencies.wgpu-core] workspace = true diff --git a/ext/webgpu/lib.rs b/ext/webgpu/lib.rs index eeaae2dd6c7475..8a423fd00cdda9 100644 --- a/ext/webgpu/lib.rs +++ b/ext/webgpu/lib.rs @@ -673,7 +673,7 @@ pub fn op_webgpu_request_device( ) -> Result<GpuAdapterDevice, AnyError> { let mut state = state.borrow_mut(); let adapter_resource = - state.resource_table.get::<WebGpuAdapter>(adapter_rid)?; + state.resource_table.take::<WebGpuAdapter>(adapter_rid)?; let adapter = adapter_resource.1; let instance = state.borrow::<Instance>(); @@ -690,6 +690,7 @@ pub fn op_webgpu_request_device( None, None )); + adapter_resource.close(); if let Some(err) = maybe_err { return Err(DomExceptionOperationError::new(&err.to_string()).into()); } @@ -731,13 +732,14 @@ pub fn op_webgpu_request_adapter_info( state: Rc<RefCell<OpState>>, #[smi] adapter_rid: ResourceId, ) -> Result<GPUAdapterInfo, AnyError> { - let state = state.borrow_mut(); + let mut state = state.borrow_mut(); let adapter_resource = - state.resource_table.get::<WebGpuAdapter>(adapter_rid)?; + state.resource_table.take::<WebGpuAdapter>(adapter_rid)?; let adapter = adapter_resource.1; let instance = state.borrow::<Instance>(); let info = gfx_select!(adapter => instance.adapter_get_info(adapter))?; + adapter_resource.close(); Ok(GPUAdapterInfo { vendor: info.vendor.to_string(), diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 09ff9a7c59b3c8..3cb3fc726d9041 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.149.0" +version = "0.152.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 0166d8aba0d21d..4efee6f2efdbeb 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.154.0" +version = "0.157.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index e4df9d3d35b80b..06a75faabd9b64 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -23,8 +23,10 @@ use deno_core::ToJsBuffer; use deno_net::raw::NetworkStream; use deno_tls::create_client_config; use deno_tls::rustls::ClientConfig; +use deno_tls::rustls::ClientConnection; use deno_tls::RootCertStoreProvider; use deno_tls::SocketUse; +use deno_tls::TlsKeys; use http::header::CONNECTION; use http::header::UPGRADE; use http::HeaderName; @@ -236,8 +238,7 @@ async fn handshake_http1_wss( ServerName::try_from(domain).map_err(|_| invalid_hostname(domain))?; let mut tls_connector = TlsStream::new_client_side( tcp_socket, - tls_config.into(), - dnsname, + ClientConnection::new(tls_config.into(), dnsname)?, NonZeroUsize::new(65536), ); // If we can bail on an http/1.1 ALPN mismatch here, we can avoid doing extra work @@ -261,8 +262,11 @@ async fn handshake_http2_wss( let dnsname = ServerName::try_from(domain).map_err(|_| invalid_hostname(domain))?; // We need to better expose the underlying errors here - let mut tls_connector = - TlsStream::new_client_side(tcp_socket, tls_config.into(), dnsname, None); + let mut tls_connector = TlsStream::new_client_side( + tcp_socket, + ClientConnection::new(tls_config.into(), dnsname)?, + None, + ); let handshake = tls_connector.handshake().await?; if handshake.alpn.is_none() { bail!("Didn't receive h2 alpn, aborting connection"); @@ -332,7 +336,7 @@ pub fn create_ws_client_config( root_cert_store, vec![], unsafely_ignore_certificate_errors, - None, + TlsKeys::Null, socket_use, ) } diff --git 
a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index ac5cdcd3d11eba..c83cb2028c3859 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.144.0" +version = "0.147.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -17,4 +17,3 @@ path = "lib.rs" deno_core.workspace = true deno_web.workspace = true rusqlite.workspace = true -serde.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 0a1fdbe2e6d764..f2126a13e05b91 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.157.0" +version = "0.160.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -65,7 +65,6 @@ flate2 = { workspace = true, features = ["default"] } serde.workspace = true [target.'cfg(windows)'.build-dependencies] -winres.workspace = true winapi.workspace = true [dependencies] @@ -96,12 +95,9 @@ deno_webidl.workspace = true deno_websocket.workspace = true deno_webstorage.workspace = true -console_static_text.workspace = true dlopen2.workspace = true encoding_rs.workspace = true fastwebsockets.workspace = true -filetime = "0.2.16" -fs3.workspace = true http.workspace = true http-body-util.workspace = true hyper.workspace = true @@ -114,7 +110,6 @@ notify.workspace = true once_cell.workspace = true percent-encoding.workspace = true regex.workspace = true -ring.workspace = true rustyline = { workspace = true, features = ["custom-bindings"] } serde.workspace = true signal-hook = "0.3.17" @@ -125,7 +120,6 @@ uuid.workspace = true which = "4.2.5" [target.'cfg(windows)'.dependencies] -fwdansi.workspace = true winapi = { workspace = true, features = ["commapi", "knownfolders", "mswsock", "objbase", "psapi", "shlobj", "tlhelp32", "winbase", "winerror", "winuser", "winsock2"] } ntapi = "0.4.0" windows-sys.workspace = true diff --git a/runtime/examples/extension/main.rs b/runtime/examples/extension/main.rs index 0026d0de04c404..0d7c4efb007d49 100644 --- a/runtime/examples/extension/main.rs +++ b/runtime/examples/extension/main.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] + use std::path::Path; use std::rc::Rc; diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs index 8d93819e9f7d9f..48d8e0a8fead0b 100644 --- a/runtime/inspector_server.rs +++ b/runtime/inspector_server.rs @@ -176,7 +176,7 @@ fn handle_ws_request( let websocket = match fut.await { Ok(w) => w, Err(err) => { - eprintln!( + log::error!( "Inspector server failed to upgrade to WS connection: {:?}", err ); @@ -194,7 +194,7 @@ fn handle_ws_request( rx: inbound_rx, }; - eprintln!("Debugger session started."); + log::info!("Debugger session started."); let _ = new_session_tx.unbounded_send(inspector_session_proxy); pump_websocket_messages(websocket, inbound_tx, outbound_rx).await; }); @@ -244,13 +244,13 @@ async fn server( let inspector_map = Rc::clone(&inspector_map_); let mut register_inspector_handler = pin!(register_inspector_rx .map(|info| { - eprintln!( + log::info!( "Debugger listening on {}", info.get_websocket_debugger_url(&info.host.to_string()) ); - eprintln!("Visit chrome://inspect to connect to the debugger."); + log::info!("Visit chrome://inspect to connect to the debugger."); if info.wait_for_session { - eprintln!("Deno is waiting for debugger to connect."); + log::info!("Deno is waiting for debugger to connect."); } if inspector_map.borrow_mut().insert(info.uuid, info).is_some() { panic!("Inspector UUID already in map"); @@ -277,7 +277,7 @@ async fn server( let listener = match TcpListener::from_std(listener) { Ok(l) => l, Err(err) => { - eprintln!("Cannot start inspector server: {:?}", err); + log::error!("Cannot start inspector server: {:?}", err); return; } }; @@ -293,7 +293,7 @@ async fn server( match accept_result { Ok((s, _)) => s, Err(err) => { - eprintln!("Failed to accept inspector connection: {:?}", err); + log::error!("Failed to accept inspector connection: {:?}", err); continue; } } @@ -356,7 +356,7 @@ async fn server( tokio::select! { result = conn.as_mut() => { if let Err(err) = result { - eprintln!("Failed to serve connection: {:?}", err); + log::error!("Failed to serve connection: {:?}", err); } }, _ = &mut shutdown_rx => { @@ -409,7 +409,7 @@ async fn pump_websocket_messages( OpCode::Close => { // Users don't care if there was an error coming from debugger, // just about the fact that debugger did disconnect. 
- eprintln!("Debugger session ended"); + log::info!("Debugger session ended"); break 'pump; } _ => { diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 2ea122e341ab2b..fcec6b91a5436d 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -8,7 +8,8 @@ import { core, internals, primordials } from "ext:core/mod.js"; const ops = core.ops; import { op_bootstrap_args, - op_bootstrap_is_tty, + op_bootstrap_is_stderr_tty, + op_bootstrap_is_stdout_tty, op_bootstrap_no_color, op_bootstrap_pid, op_main_module, @@ -62,10 +63,10 @@ import * as timers from "ext:deno_web/02_timers.js"; import { customInspect, getDefaultInspectOptions, - getNoColor, + getStderrNoColor, inspectArgs, quoteString, - setNoColorFn, + setNoColorFns, } from "ext:deno_console/01_console.js"; import * as performance from "ext:deno_web/15_performance.js"; import * as url from "ext:deno_url/00_url.js"; @@ -379,7 +380,10 @@ function importScripts(...urls) { const opArgs = memoizeLazy(() => op_bootstrap_args()); const opPid = memoizeLazy(() => op_bootstrap_pid()); -setNoColorFn(() => op_bootstrap_no_color() || !op_bootstrap_is_tty()); +setNoColorFns( + () => op_bootstrap_no_color() || !op_bootstrap_is_stdout_tty(), + () => op_bootstrap_no_color() || !op_bootstrap_is_stderr_tty(), +); function formatException(error) { if ( @@ -390,11 +394,11 @@ function formatException(error) { } else if (typeof error == "string") { return `Uncaught ${ inspectArgs([quoteString(error, getDefaultInspectOptions())], { - colors: !getNoColor(), + colors: !getStderrNoColor(), }) }`; } else { - return `Uncaught ${inspectArgs([error], { colors: !getNoColor() })}`; + return `Uncaught ${inspectArgs([error], { colors: !getStderrNoColor() })}`; } } diff --git a/runtime/ops/bootstrap.rs b/runtime/ops/bootstrap.rs index cbb87db8865fde..eb9dbc6e84a655 100644 --- a/runtime/ops/bootstrap.rs +++ b/runtime/ops/bootstrap.rs @@ -16,7 +16,8 @@ deno_core::extension!( op_bootstrap_language, op_bootstrap_log_level, op_bootstrap_no_color, - op_bootstrap_is_tty, + op_bootstrap_is_stdout_tty, + op_bootstrap_is_stderr_tty, op_bootstrap_unstable_args, op_snapshot_options, ], @@ -117,7 +118,13 @@ pub fn op_bootstrap_no_color(state: &mut OpState) -> bool { } #[op2(fast)] -pub fn op_bootstrap_is_tty(state: &mut OpState) -> bool { +pub fn op_bootstrap_is_stdout_tty(state: &mut OpState) -> bool { let options = state.borrow::<BootstrapOptions>(); - options.is_tty + options.is_stdout_tty +} + +#[op2(fast)] +pub fn op_bootstrap_is_stderr_tty(state: &mut OpState) -> bool { + let options = state.borrow::<BootstrapOptions>(); + options.is_stderr_tty } diff --git a/runtime/permissions/Cargo.toml b/runtime/permissions/Cargo.toml index 444835e1f34c55..628c478c047ea1 100644 --- a/runtime/permissions/Cargo.toml +++ b/runtime/permissions/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_permissions" -version = "0.9.0" +version = "0.12.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -14,7 +14,6 @@ name = "deno_permissions" path = "lib.rs" [dependencies] -console_static_text.workspace = true deno_core.workspace = true deno_terminal.workspace = true fqdn = "0.3.4" @@ -22,7 +21,6 @@ libc.workspace = true log.workspace = true once_cell.workspace = true serde.workspace = true -termcolor.workspace = true which = "4.2.5" [target.'cfg(windows)'.dependencies] diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index 70d1abae7cd1b2..2e94e3aec3de6b 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -913,7 +913,7 @@ impl Descriptor for 
SysDescriptor { pub fn parse_sys_kind(kind: &str) -> Result<&str, AnyError> { match kind { "hostname" | "osRelease" | "osUptime" | "loadavg" | "networkInterfaces" - | "systemMemoryInfo" | "uid" | "gid" | "cpus" => Ok(kind), + | "systemMemoryInfo" | "uid" | "gid" | "cpus" | "homedir" => Ok(kind), _ => Err(type_error(format!("unknown system info kind \"{kind}\""))), } } @@ -1680,7 +1680,46 @@ impl PermissionsContainer { return Ok(()); } + /// We'll allow opening /proc/self/fd/{n} without additional permissions under the following conditions: + /// + /// 1. n > 2. This allows for opening bash-style redirections, but not stdio + /// 2. the fd referred to by n is a pipe + #[cfg(unix)] + fn is_fd_file_is_pipe(path: &Path) -> bool { + if let Some(fd) = path.file_name() { + if let Ok(s) = std::str::from_utf8(fd.as_encoded_bytes()) { + if let Ok(n) = s.parse::<i32>() { + if n > 2 { + // SAFETY: This is proper use of the stat syscall + unsafe { + let mut stat = std::mem::zeroed::<libc::stat>(); + if libc::fstat(n, &mut stat as _) == 0 + && ((stat.st_mode & libc::S_IFMT) & libc::S_IFIFO) != 0 + { + return true; + } + }; + } + } + } + } + false + } + + // On unixy systems, we allow opening /dev/fd/XXX for valid FDs that + // are pipes. + #[cfg(unix)] + if path.starts_with("/dev/fd") && is_fd_file_is_pipe(path) { + return Ok(()); + } + if cfg!(target_os = "linux") { + // On Linux, we also allow opening /proc/self/fd/XXX for valid FDs that + // are pipes. + #[cfg(unix)] + if path.starts_with("/proc/self/fd") && is_fd_file_is_pipe(path) { + return Ok(()); + } if path.starts_with("/dev") || path.starts_with("/proc") || path.starts_with("/sys") diff --git a/runtime/permissions/prompter.rs b/runtime/permissions/prompter.rs index 42567b1e95af78..59a3a2f7b09319 100644 --- a/runtime/permissions/prompter.rs +++ b/runtime/permissions/prompter.rs @@ -280,6 +280,7 @@ impl PermissionPrompter for TtyPrompter { return PromptResponse::Deny; }; + #[allow(clippy::print_stderr)] if message.len() > MAX_PERMISSION_PROMPT_LENGTH { eprintln!("❌ Permission prompt length ({} bytes) was larger than the configured maximum length ({} bytes): denying request.", message.len(), MAX_PERMISSION_PROMPT_LENGTH); eprintln!("❌ WARNING: This may indicate that code is trying to bypass or hide permission check requests."); @@ -298,6 +299,7 @@ impl PermissionPrompter for TtyPrompter { // For security reasons we must consume everything in stdin so that previously // buffered data cannot affect the prompt. + #[allow(clippy::print_stderr)] if let Err(err) = clear_stdin(&mut stdin_lock, &mut stderr_lock) { eprintln!("Error clearing stdin for permission prompt. {err:#}"); return PromptResponse::Deny; // don't grant permission if this fails @@ -336,6 +338,7 @@ impl PermissionPrompter for TtyPrompter { // Clear stdin each time we loop around in case the user accidentally pasted // multiple lines or otherwise did something silly to generate a torrent of // input. This doesn't work on Windows because `clear_stdin` has other side-effects. + #[allow(clippy::print_stderr)] #[cfg(unix)] if let Err(err) = clear_stdin(&mut stdin_lock, &mut stderr_lock) { eprintln!("Error clearing stdin for permission prompt. 
{err:#}"); diff --git a/runtime/snapshot.rs b/runtime/snapshot.rs index 2a7d97641e8b6b..923ea0b7596d4f 100644 --- a/runtime/snapshot.rs +++ b/runtime/snapshot.rs @@ -296,6 +296,7 @@ pub fn create_runtime_snapshot( let mut snapshot = std::fs::File::create(snapshot_path).unwrap(); snapshot.write_all(&output.output).unwrap(); + #[allow(clippy::print_stdout)] for path in output.files_loaded_during_snapshot { println!("cargo:rerun-if-changed={}", path.display()); } diff --git a/runtime/tokio_util.rs b/runtime/tokio_util.rs index da6e8b221ac423..0d81f6e235ee55 100644 --- a/runtime/tokio_util.rs +++ b/runtime/tokio_util.rs @@ -81,8 +81,9 @@ where let handle = tokio::runtime::Handle::current(); let runtime_monitor = RuntimeMonitor::new(&handle); tokio::spawn(async move { + #[allow(clippy::print_stderr)] for interval in runtime_monitor.intervals() { - println!("{:#?}", interval); + eprintln!("{:#?}", interval); // wait 500ms tokio::time::sleep(std::time::Duration::from_millis( metrics_interval, diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 27fe633ad441c0..83603569407141 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -47,6 +47,7 @@ use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; use deno_terminal::colors; use deno_tls::RootCertStoreProvider; +use deno_tls::TlsKeys; use deno_web::create_entangled_message_port; use deno_web::serialize_transferables; use deno_web::BlobStore; @@ -477,7 +478,7 @@ impl WebWorker { unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), - client_cert_chain_and_key: None, + client_cert_chain_and_key: TlsKeys::Null, proxy: None, }, ), @@ -805,6 +806,7 @@ impl WebWorker { // TODO(mmastrac): we don't want to test this w/classic workers because // WPT triggers a failure here. This is only exposed via --enable-testing-features-do-not-use. + #[allow(clippy::print_stderr)] if self.worker_type == WebWorkerType::Module { panic!( "coding error: either js is polling or the worker is terminated" @@ -878,6 +880,7 @@ impl WebWorker { } } +#[allow(clippy::print_stderr)] fn print_worker_error( error: &AnyError, name: &str, diff --git a/runtime/worker.rs b/runtime/worker.rs index ee6b256ff66a3b..1c291c64130487 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -39,6 +39,7 @@ use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; use deno_tls::RootCertStoreProvider; +use deno_tls::TlsKeys; use deno_web::BlobStore; use log::debug; @@ -279,6 +280,7 @@ pub fn create_op_metrics( max_len.set(max_len.get().max(decl.name.len())); let max_len = max_len.clone(); Some(Rc::new( + #[allow(clippy::print_stderr)] move |op: &deno_core::_ops::OpCtx, event, source| { eprintln!( "[{: >10.3}] {name:max_len$}: {event:?} {source:?}", @@ -449,7 +451,7 @@ impl MainWorker { unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), - client_cert_chain_and_key: None, + client_cert_chain_and_key: TlsKeys::Null, proxy: None, }, ), @@ -518,7 +520,7 @@ impl MainWorker { if !has_notified_of_inspector_disconnect .swap(true, std::sync::atomic::Ordering::SeqCst) { - println!("Program finished. Waiting for inspector to disconnect to exit the process..."); + log::info!("Program finished. 
Waiting for inspector to disconnect to exit the process..."); } }); diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 31cb883dbe5c86..e1abf87fcef281 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -76,7 +76,8 @@ pub struct BootstrapOptions { pub location: Option<ModuleSpecifier>, /// Sets `Deno.noColor` in JS runtime. pub no_color: bool, - pub is_tty: bool, + pub is_stdout_tty: bool, + pub is_stderr_tty: bool, // --unstable flag, deprecated pub unstable: bool, // --unstable-* flags @@ -109,7 +110,8 @@ impl Default for BootstrapOptions { user_agent, cpu_count, no_color: !colors::use_color(), - is_tty: deno_terminal::is_stdout_tty(), + is_stdout_tty: deno_terminal::is_stdout_tty(), + is_stderr_tty: deno_terminal::is_stderr_tty(), enable_op_summary_metrics: Default::default(), enable_testing_features: Default::default(), log_level: Default::default(), diff --git a/tests/Cargo.toml b/tests/Cargo.toml index c7de0b561c6e71..fa633f60704d8e 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -43,7 +43,7 @@ deno_lockfile.workspace = true deno_terminal.workspace = true deno_tls.workspace = true fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] } -file_test_runner = "0.5.0" +file_test_runner = "0.7.0" flaky_test = "=0.1.0" http.workspace = true http-body-util.workspace = true @@ -53,10 +53,8 @@ once_cell.workspace = true os_pipe.workspace = true pretty_assertions.workspace = true serde.workspace = true -serde_repr.workspace = true test_util.workspace = true tokio.workspace = true -tokio-util.workspace = true tower-lsp.workspace = true trust-dns-client = "=0.22.0" trust-dns-server = "=0.22.1" diff --git a/tests/ffi/src/lib.rs b/tests/ffi/src/lib.rs index f6ee31eb88ff57..09c2afb3deb40d 100644 --- a/tests/ffi/src/lib.rs +++ b/tests/ffi/src/lib.rs @@ -1,5 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] #![allow(clippy::undocumented_unsafe_blocks)] use std::os::raw::c_void; diff --git a/tests/ffi/tests/integration_tests.rs b/tests/ffi/tests/integration_tests.rs index 0ad95254ce52e4..d0ac6e1049995e 100644 --- a/tests/ffi/tests/integration_tests.rs +++ b/tests/ffi/tests/integration_tests.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] + use pretty_assertions::assert_eq; use std::process::Command; use test_util::deno_cmd; diff --git a/tests/integration/coverage_tests.rs b/tests/integration/coverage_tests.rs index a8a7ca0232e7e5..6e9a1454ca895e 100644 --- a/tests/integration/coverage_tests.rs +++ b/tests/integration/coverage_tests.rs @@ -555,3 +555,57 @@ File | Branch % | Line % | ", ); } + +#[test] +fn test_collect_summary_with_no_matches() { + let context: TestContext = TestContext::default(); + let temp_dir: &TempDir = context.temp_dir(); + let temp_dir_path: PathRef = PathRef::new(temp_dir.path().join("cov")); + + let empty_test_dir: PathRef = temp_dir_path.join("empty_dir"); + empty_test_dir.create_dir_all(); + + let output: util::TestCommandOutput = context + .new_command() + .args_vec(vec![ + "test".to_string(), + "--quiet".to_string(), + "--allow-read".to_string(), + format!("--coverage={}", temp_dir_path.as_path().display()), + empty_test_dir.as_path().to_str().unwrap().to_string(), + ]) + .run(); + + output.assert_exit_code(1); + + let actual: &str = output.combined_output(); + let expected_message: &str = "error: No test modules found"; + assert_contains!(actual, expected_message); + + // Check the contents of the coverage directory, ignoring 'empty_dir' + let mut unexpected_contents: Vec<std::path::PathBuf> = Vec::new(); + for entry in std::fs::read_dir(temp_dir_path.as_path()) + .unwrap() + .flatten() + { + if entry.file_name() != "empty_dir" { + // Ignore the 'empty_dir' + unexpected_contents.push(entry.path()); + } + } + + // Report unexpected contents + if !unexpected_contents.is_empty() { + eprintln!("Unexpected files or directories in the coverage directory:"); + for path in &unexpected_contents { + eprintln!("{:?}", path); + } + } + + // Assert that the coverage directory is otherwise empty + assert!( + unexpected_contents.is_empty(), + "Expected the coverage directory to be empty except for 'empty_dir', but found: {:?}", + unexpected_contents + ); +} diff --git a/tests/integration/inspector_tests.rs b/tests/integration/inspector_tests.rs index d18375a831bb5f..6a0f9111e1a9f9 100644 --- a/tests/integration/inspector_tests.rs +++ b/tests/integration/inspector_tests.rs @@ -21,6 +21,7 @@ use test_util as util; use tokio::net::TcpStream; use tokio::time::timeout; use url::Url; +use util::assert_contains; use util::assert_starts_with; use util::DenoChild; use util::TestContextBuilder; @@ -94,12 +95,18 @@ impl InspectorTester { F: FnMut(&str) -> bool + 'static, { let stdout = child.stdout.take().unwrap(); - let stdout_lines = - std::io::BufReader::new(stdout).lines().map(|r| r.unwrap()); + let stdout_lines = std::io::BufReader::new(stdout).lines().map(|r| { + let line = r.unwrap(); + eprintln!("STDOUT: {}", line); + line + }); let stderr = child.stderr.take().unwrap(); - let mut stderr_lines = - std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); + let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| { + let line = r.unwrap(); + eprintln!("STDERR: {}", line); + line + }); let uri = extract_ws_url_from_stderr(&mut stderr_lines); @@ -810,7 +817,6 @@ async fn inspector_break_on_first_line_in_test() { let script = util::testdata_path().join("inspector/inspector_test.js"); let child = util::deno_cmd() .arg("test") - .arg("--quiet") .arg(inspect_flag_with_unique_port("--inspect-brk")) .arg(script) .env("NO_COLOR", "1") @@ -877,10 +883,7 @@ async fn inspector_break_on_first_line_in_test() { assert_starts_with!(&tester.stdout_line(), "running 1 
test from"); let line = tester.stdout_line(); - assert!( - &line.contains("basic test ... ok"), - "Missing content: {line}" - ); + assert_contains!(line, "basic test ... ok"); tester.child.kill().unwrap(); tester.child.wait().unwrap(); @@ -907,6 +910,7 @@ async fn inspector_with_ts_files() { let mut tester = InspectorTester::create(child, notification_filter).await; tester.assert_stderr_for_inspect_brk(); + assert_eq!(&tester.stderr_line(), "Debugger session started."); tester .send_many(&[ @@ -926,19 +930,19 @@ async fn inspector_with_ts_files() { // receive messages with sources from this test let script1 = tester.recv().await; - assert!(script1.contains("testdata/inspector/test.ts")); + assert_contains!(script1, "testdata/inspector/test.ts"); let script1_id = { let v: serde_json::Value = serde_json::from_str(&script1).unwrap(); v["params"]["scriptId"].as_str().unwrap().to_string() }; let script2 = tester.recv().await; - assert!(script2.contains("testdata/inspector/foo.ts")); + assert_contains!(script2, "testdata/inspector/foo.ts"); let script2_id = { let v: serde_json::Value = serde_json::from_str(&script2).unwrap(); v["params"]["scriptId"].as_str().unwrap().to_string() }; let script3 = tester.recv().await; - assert!(script3.contains("testdata/inspector/bar.js")); + assert_contains!(script3, "testdata/inspector/bar.js"); let script3_id = { let v: serde_json::Value = serde_json::from_str(&script3).unwrap(); v["params"]["scriptId"].as_str().unwrap().to_string() @@ -996,10 +1000,12 @@ async fn inspector_with_ts_files() { ) .await; + let line = tester.stderr_line(); + assert_contains!(test_util::strip_ansi_codes(&line), "Check"); assert_eq!( - &tester.stdout_line(), - "Program finished. Waiting for inspector to disconnect to exit the process..." - ); + &tester.stderr_line(), + "Program finished. Waiting for inspector to disconnect to exit the process..." 
+ ); tester.child.kill().unwrap(); tester.child.wait().unwrap(); @@ -1194,7 +1200,6 @@ async fn inspector_break_on_first_line_npm_esm() { .new_command() .args_vec([ "run", - "--quiet", &inspect_flag_with_unique_port("--inspect-brk"), "npm:@denotest/bin/cli-esm", "this", @@ -1262,7 +1267,6 @@ async fn inspector_break_on_first_line_npm_cjs() { .new_command() .args_vec([ "run", - "--quiet", &inspect_flag_with_unique_port("--inspect-brk"), "npm:@denotest/bin/cli-cjs", "this", @@ -1331,7 +1335,6 @@ async fn inspector_error_with_npm_import() { .new_command() .args_vec([ "run", - "--quiet", "-A", &inspect_flag_with_unique_port("--inspect-brk"), &script.to_string_lossy(), @@ -1394,7 +1397,6 @@ async fn inspector_wait() { .new_command() .args_vec([ "run", - "--quiet", "-A", &inspect_flag_with_unique_port("--inspect-wait"), &script.to_string_lossy(), diff --git a/tests/integration/js_unit_tests.rs b/tests/integration/js_unit_tests.rs index 2bf78034e944b7..cbae4a0b8c4518 100644 --- a/tests/integration/js_unit_tests.rs +++ b/tests/integration/js_unit_tests.rs @@ -94,6 +94,7 @@ util::unit_test_factory!( text_encoding_test, timers_test, tls_test, + tls_sni_test, truncate_test, tty_color_test, tty_test, @@ -129,7 +130,7 @@ fn js_unit_test(test: String) { .arg("--no-prompt"); // TODO(mmastrac): it would be better to just load a test CA for all tests - let deno = if test == "websocket_test" { + let deno = if test == "websocket_test" || test == "tls_sni_test" { deno.arg("--unsafely-ignore-certificate-errors") } else { deno diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index 9d82e0afd1eb18..1f7758a9dabc14 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -12,70 +12,13 @@ use test_util::assert_starts_with; use test_util::assertions::assert_json_subset; use test_util::deno_cmd_with_deno_dir; use test_util::env_vars_for_npm_tests; +use test_util::lsp::range_of; +use test_util::lsp::source_file; use test_util::lsp::LspClient; use test_util::testdata_path; use test_util::TestContextBuilder; use tower_lsp::lsp_types as lsp; -/// Helper to get the `lsp::Range` of the `n`th occurrence of -/// `text` in `src`. `n` is zero-based, like most indexes. -fn range_of_nth( - n: usize, - text: impl AsRef<str>, - src: impl AsRef<str>, -) -> lsp::Range { - let text = text.as_ref(); - - let src = src.as_ref(); - - let start = src - .match_indices(text) - .nth(n) - .map(|(i, _)| i) - .unwrap_or_else(|| panic!("couldn't find text {text} in source {src}")); - let end = start + text.len(); - let mut line = 0; - let mut col = 0; - let mut byte_idx = 0; - - let pos = |line, col| lsp::Position { - line, - character: col, - }; - - let mut start_pos = None; - let mut end_pos = None; - for c in src.chars() { - if byte_idx == start { - start_pos = Some(pos(line, col)); - } - if byte_idx == end { - end_pos = Some(pos(line, col)); - break; - } - if c == '\n' { - line += 1; - col = 0; - } else { - col += c.len_utf16() as u32; - } - byte_idx += c.len_utf8(); - } - if start_pos.is_some() && end_pos.is_none() { - // range extends to end of string - end_pos = Some(pos(line, col)); - } - - let (start, end) = (start_pos.unwrap(), end_pos.unwrap()); - lsp::Range { start, end } -} - -/// Helper to get the `lsp::Range` of the first occurrence of -/// `text` in `src`. Equivalent to `range_of_nth(0, text, src)`. 
-fn range_of(text: impl AsRef<str>, src: impl AsRef<str>) -> lsp::Range { - range_of_nth(0, text, src) -} - #[test] fn lsp_startup_shutdown() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -3566,10 +3509,18 @@ fn lsp_semantic_tokens() { } #[test] -fn lsp_code_lens() { +fn lsp_code_lens_references() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "codeLens": { + "references": true, + } + }, + })); client.did_open(json!({ "textDocument": { "uri": "file:///a/file.ts", @@ -3624,6 +3575,24 @@ fn lsp_code_lens_references() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 3, "character": 2 }, + "end": { "line": 3, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } + }, { + "range": { + "start": { "line": 7, "character": 2 }, + "end": { "line": 7, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }]) ); let res = client.write_request( @@ -3741,10 +3710,19 @@ fn lsp_code_lens_references() { } #[test] -fn lsp_code_lens_impl() { +fn lsp_code_lens_implementations() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "codeLens": { + "implementations": true, + "references": true, + } + }, + })); client.did_open( json!({ "textDocument": { @@ -3783,6 +3761,15 @@ fn lsp_code_lens_implementations() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 1, "character": 2 }, + "end": { "line": 1, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }, { "range": { "start": { "line": 4, "character": 6 }, @@ -3792,6 +3779,15 @@ fn lsp_code_lens_implementations() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 5, "character": 2 }, + "end": { "line": 5, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }, { "range": { "start": { "line": 10, "character": 10 }, @@ -4187,6 +4183,15 @@ fn lsp_code_lens_non_doc_nav_tree() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "codeLens": { + "implementations": true, + "references": true, + } + }, + })); client.did_open(json!({ "textDocument": { "uri": "file:///a/file.ts", @@ -4245,6 +4250,15 @@ fn lsp_nav_tree_updates() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "codeLens": { + "implementations": true, + "references": true, + } + }, + })); client.did_open( json!({ "textDocument": { @@ -4283,6 +4297,15 @@ fn lsp_nav_tree_updates() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 1, "character": 2 }, + "end": { "line": 1, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }, { "range": { "start": { "line": 4, "character": 6 }, @@ -4292,6 +4315,15 @@ fn lsp_nav_tree_updates() { "specifier": 
"file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 5, "character": 2 }, + "end": { "line": 5, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }, { "range": { "start": { "line": 10, "character": 10 }, @@ -4367,6 +4399,15 @@ fn lsp_nav_tree_updates() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 1, "character": 2 }, + "end": { "line": 1, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }, { "range": { "start": { "line": 4, "character": 6 }, @@ -4376,6 +4417,15 @@ fn lsp_nav_tree_updates() { "specifier": "file:///a/file.ts", "source": "references" } + }, { + "range": { + "start": { "line": 5, "character": 2 }, + "end": { "line": 5, "character": 3 } + }, + "data": { + "specifier": "file:///a/file.ts", + "source": "references" + } }]) ); client.shutdown(); @@ -7138,6 +7188,43 @@ fn lsp_npm_completions_auto_import_and_quick_fix_no_import_map() { client.shutdown(); } +#[test] +fn lsp_completions_using_decl() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.did_open(json!({ + "textDocument": { + "uri": "file:///a/file.ts", + "languageId": "typescript", + "version": 1, + "text": r#"function makeResource() { + return { + [Symbol.dispose]() { + }, + }; +} + +using resource = makeResource(); + +res"# + } + })); + + let list = client.get_completion_list( + "file:///a/file.ts", + (9, 3), + json!({ + "triggerKind": 2, + "triggerCharacter": "." + }), + ); + assert!(list.items.iter().any(|i| i.label == "resource")); + assert!(!list.is_incomplete); + + client.shutdown(); +} + #[test] fn lsp_npm_always_caches() { // npm specifiers should always be cached even when not specified @@ -8867,6 +8954,34 @@ fn lsp_diagnostics_deno_types() { client.shutdown(); } +#[test] +fn lsp_root_with_global_reference_types() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); + let file = source_file( + temp_dir.path().join("file.ts"), + "import 'http://localhost:4545/subdir/foo_types.d.ts'; Foo.bar;", + ); + let file2 = source_file( + temp_dir.path().join("file2.ts"), + r#"/// "#, + ); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.write_request( + "workspace/executeCommand", + json!({ + "command": "deno.cache", + "arguments": [[], file2.uri()], + }), + ); + let diagnostics = client.did_open_file(&file); + assert_eq!(json!(diagnostics.all()), json!([])); +} + #[test] fn lsp_diagnostics_refresh_dependents() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -12106,15 +12221,19 @@ fn lsp_deno_future_env_byonm() { } #[test] -fn lsp_sloppy_imports_warn() { +fn lsp_sloppy_imports() { let context = TestContextBuilder::new().use_temp_cwd().build(); let temp_dir = context.temp_dir(); let temp_dir = temp_dir.path(); temp_dir .join("deno.json") .write(r#"{ "unstable": ["sloppy-imports"] }"#); - // should work when exists on the fs and when doesn't + // for sloppy imports, the file must exist on the file system + // to be resolved correctly temp_dir.join("a.ts").write("export class A {}"); + temp_dir.join("b.ts").write("export class B {}"); + temp_dir.join("c.js").write("export class C {}"); + temp_dir.join("c.d.ts").write("export class C {}"); let mut client = context.new_lsp_command().build(); 
client.initialize(|builder| { builder.set_root_uri(temp_dir.uri_dir()); @@ -12161,137 +12280,67 @@ fn lsp_sloppy_imports_warn() { ), }, })); - assert_eq!( - diagnostics.messages_with_source("deno"), - lsp::PublishDiagnosticsParams { - uri: temp_dir.join("file.ts").uri_file(), - diagnostics: vec![ - lsp::Diagnostic { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 19 - }, - end: lsp::Position { - line: 0, - character: 24 - } - }, - severity: Some(lsp::DiagnosticSeverity::INFORMATION), - code: Some(lsp::NumberOrString::String("redirect".to_string())), - source: Some("deno".to_string()), - message: format!( - "The import of \"{}\" was redirected to \"{}\".", - temp_dir.join("a").uri_file(), - temp_dir.join("a.ts").uri_file() - ), - data: Some(json!({ - "specifier": temp_dir.join("a").uri_file(), - "redirect": temp_dir.join("a.ts").uri_file() - })), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range { - start: lsp::Position { - line: 1, - character: 19 - }, - end: lsp::Position { - line: 1, - character: 27 - } - }, - severity: Some(lsp::DiagnosticSeverity::INFORMATION), - code: Some(lsp::NumberOrString::String("redirect".to_string())), - source: Some("deno".to_string()), - message: format!( - "The import of \"{}\" was redirected to \"{}\".", - temp_dir.join("b.js").uri_file(), - temp_dir.join("b.ts").uri_file() - ), - data: Some(json!({ - "specifier": temp_dir.join("b.js").uri_file(), - "redirect": temp_dir.join("b.ts").uri_file() - })), - ..Default::default() - } - ], - version: Some(1), - } + + assert_eq!(json!(diagnostics.all()), json!([])); + + client.shutdown(); +} + +#[test] +fn lsp_sloppy_imports_prefers_dts() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir(); + let temp_dir = temp_dir.path(); + + temp_dir + .join("deno.json") + .write(r#"{ "unstable": ["sloppy-imports"] }"#); + + let mut client: LspClient = context + .new_lsp_command() + .set_root_dir(temp_dir.clone()) + .build(); + client.initialize_default(); + + temp_dir.join("a.js").write("export const foo: number;"); + + let a_dts = source_file(temp_dir.join("a.d.ts"), "export const foo = 3;"); + let file = source_file( + temp_dir.join("file.ts"), + "import { foo } from './a.js';\nconsole.log(foo);", ); + let diagnostics = client.did_open_file(&file); + // no warnings because "a.js" exists + assert_eq!(diagnostics.all().len(), 0); - let res = client.write_request( - "textDocument/codeAction", + let diagnostics = client.did_open_file(&a_dts); + assert_eq!(diagnostics.all().len(), 0, "Got {:#?}", diagnostics.all()); + + let response = client.write_request( + "textDocument/references", json!({ - "textDocument": { - "uri": temp_dir.join("file.ts").uri_file() - }, - "range": { - "start": { "line": 0, "character": 19 }, - "end": { "line": 0, "character": 24 } - }, + "textDocument": a_dts.identifier(), + "position": a_dts.range_of("foo").start, "context": { - "diagnostics": [{ - "range": { - "start": { "line": 0, "character": 19 }, - "end": { "line": 0, "character": 24 } - }, - "severity": 3, - "code": "redirect", - "source": "deno", - "message": format!( - "The import of \"{}\" was redirected to \"{}\".", - temp_dir.join("a").uri_file(), - temp_dir.join("a.ts").uri_file() - ), - "data": { - "specifier": temp_dir.join("a").uri_file(), - "redirect": temp_dir.join("a.ts").uri_file(), - }, - }], - "only": ["quickfix"] + "includeDeclaration": false } }), ); - assert_eq!( - res, - json!([{ - "title": "Update specifier to its redirected specifier.", - 
"kind": "quickfix", - "diagnostics": [{ - "range": { - "start": { "line": 0, "character": 19 }, - "end": { "line": 0, "character": 24 } - }, - "severity": 3, - "code": "redirect", - "source": "deno", - "message": format!( - "The import of \"{}\" was redirected to \"{}\".", - temp_dir.join("a").uri_file(), - temp_dir.join("a.ts").uri_file() - ), - "data": { - "specifier": temp_dir.join("a").uri_file(), - "redirect": temp_dir.join("a.ts").uri_file() - }, - }], - "edit": { - "changes": { - temp_dir.join("file.ts").uri_file(): [{ - "range": { - "start": { "line": 0, "character": 19 }, - "end": { "line": 0, "character": 24 } - }, - "newText": "\"./a.ts\"" - }] - } + assert_json_subset( + response, + json!([ + { + "uri": file.uri(), + // the import + "range": file.range_of("foo"), + }, + { + "uri": file.uri(), + // the usage + "range": file.range_of_nth(1, "foo"), } - }]) + ]), ); - - client.shutdown(); } #[test] @@ -12677,3 +12726,87 @@ fn lsp_ts_code_fix_any_param() { panic!("failed to find 'Infer parameter types from usage' fix in fixes: {fixes:#?}"); } + +#[test] +fn lsp_semantic_token_caching() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir().path(); + + let mut client: LspClient = context + .new_lsp_command() + .collect_perf() + .set_root_dir(temp_dir.clone()) + .build(); + client.initialize_default(); + + let a = source_file( + temp_dir.join("a.ts"), + r#" + export const a = 1; + export const b = 2; + export const bar = () => "bar"; + function foo(fun: (number, number, number) => number, c: number) { + const double = (x) => x * 2; + return fun(double(a), b, c); + }"#, + ); + + client.did_open_file(&a); + + // requesting a range won't cache the tokens, so this will + // be computed + let res = client.write_request( + "textDocument/semanticTokens/range", + json!({ + "textDocument": a.identifier(), + "range": { + "start": a.range_of("const bar").start, + "end": a.range_of("}").end, + } + }), + ); + + assert_eq!( + client + .perf_wait_for_measure("lsp.semantic_tokens_range") + .measure_count("tsc.request.getEncodedSemanticClassifications"), + 1, + ); + + // requesting for the full doc should compute and cache the tokens + let _full = client.write_request( + "textDocument/semanticTokens/full", + json!({ + "textDocument": a.identifier(), + }), + ); + + assert_eq!( + client + .perf_wait_for_measure("lsp.semantic_tokens_full") + .measure_count("tsc.request.getEncodedSemanticClassifications"), + 2, + ); + + // use the cached tokens + let res_cached = client.write_request( + "textDocument/semanticTokens/range", + json!({ + "textDocument": a.identifier(), + "range": { + "start": a.range_of("const bar").start, + "end": a.range_of("}").end, + } + }), + ); + + // make sure we actually used the cache + assert_eq!( + client + .perf_wait_for_measure("lsp.semantic_tokens_range") + .measure_count("tsc.request.getEncodedSemanticClassifications"), + 2, + ); + + assert_eq!(res, res_cached); +} diff --git a/tests/integration/mod.rs b/tests/integration/mod.rs index 30cc9a791b1e0d..59bf0db3729472 100644 --- a/tests/integration/mod.rs +++ b/tests/integration/mod.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] + // These files have `_tests.rs` suffix to make it easier to tell which file is // the test (ex. `lint_tests.rs`) and which is the implementation (ex. 
`lint.rs`) // when both are open, especially for two tabs in VS Code diff --git a/tests/integration/node_unit_tests.rs b/tests/integration/node_unit_tests.rs index a034897ef225fa..b067f3121d617d 100644 --- a/tests/integration/node_unit_tests.rs +++ b/tests/integration/node_unit_tests.rs @@ -202,12 +202,3 @@ itest!(unhandled_rejection_web_process { envs: env_vars_for_npm_tests(), http_server: true, }); - -// Ensure that Web `onrejectionhandled` is fired before -// Node's `process.on('rejectionHandled')`. -itest!(rejection_handled_web_process { - args: "run -A --quiet node/rejection_handled_web_process.ts", - output: "node/rejection_handled_web_process.ts.out", - envs: env_vars_for_npm_tests(), - http_server: true, -}); diff --git a/tests/integration/repl_tests.rs b/tests/integration/repl_tests.rs index 4dc6ab44c3f498..cdcab8e182c59b 100644 --- a/tests/integration/repl_tests.rs +++ b/tests/integration/repl_tests.rs @@ -895,7 +895,7 @@ fn repl_with_quiet_flag() { assert!(!out.contains("Deno")); assert!(!out.contains("exit using ctrl+d, ctrl+c, or close()")); assert_ends_with!(out, "\"done\"\n"); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } #[test] @@ -959,7 +959,7 @@ fn npm_packages() { ); assert_contains!(out, "hello"); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } { @@ -975,7 +975,7 @@ fn npm_packages() { ); assert_contains!(out, "hello"); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } { @@ -989,7 +989,7 @@ fn npm_packages() { assert_contains!(out, "[Module: null prototype] {"); assert_contains!(out, "Chalk: [class Chalk],"); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } { @@ -1005,7 +1005,7 @@ fn npm_packages() { out, "error: npm package 'asdfawe52345asdf' does not exist" ); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } { @@ -1021,7 +1021,7 @@ fn npm_packages() { ); assert_contains!(out, "no"); - assert!(err.is_empty()); + assert!(err.is_empty(), "Error: {}", err); } } diff --git a/tests/integration/run_tests.rs b/tests/integration/run_tests.rs index 88ddfb31859134..f3fc18fee25d9d 100644 --- a/tests/integration/run_tests.rs +++ b/tests/integration/run_tests.rs @@ -13,6 +13,7 @@ use deno_core::serde_json::json; use deno_core::url; use deno_fetch::reqwest; use deno_tls::rustls; +use deno_tls::rustls::ClientConnection; use deno_tls::rustls_pemfile; use deno_tls::TlsStream; use pretty_assertions::assert_eq; @@ -233,12 +234,6 @@ itest!(_044_bad_resource { exit_code: 1, }); -itest!(_045_proxy { - args: "run -L debug --allow-net --allow-env --allow-run --allow-read --reload --quiet run/045_proxy_test.ts", - output: "run/045_proxy_test.ts.out", - http_server: true, -}); - itest!(_046_tsx { args: "run --quiet --reload run/046_jsx_test.tsx", output: "run/046_jsx_test.tsx.out", @@ -4615,8 +4610,8 @@ fn file_fetcher_preserves_permissions() { .args("repl --quiet") .with_pty(|mut console| { console.write_line( - "const a = await import('http://localhost:4545/run/019_media_types.ts');", - ); + "const a = await import('http://localhost:4545/run/019_media_types.ts');", + ); console.expect("Allow?"); console.human_delay(); console.write_line_raw("y"); @@ -5388,8 +5383,11 @@ async fn listen_tls_alpn() { let tcp_stream = tokio::net::TcpStream::connect("localhost:4504") .await .unwrap(); - let mut tls_stream = - TlsStream::new_client_side(tcp_stream, cfg, hostname, None); + let mut tls_stream = TlsStream::new_client_side( + tcp_stream, + ClientConnection::new(cfg, 
hostname).unwrap(), + None, + ); let handshake = tls_stream.handshake().await.unwrap(); @@ -5437,8 +5435,11 @@ async fn listen_tls_alpn_fail() { let tcp_stream = tokio::net::TcpStream::connect("localhost:4505") .await .unwrap(); - let mut tls_stream = - TlsStream::new_client_side(tcp_stream, cfg, hostname, None); + let mut tls_stream = TlsStream::new_client_side( + tcp_stream, + ClientConnection::new(cfg, hostname).unwrap(), + None, + ); tls_stream.handshake().await.unwrap_err(); diff --git a/tests/napi/src/lib.rs b/tests/napi/src/lib.rs index 1b924648353396..f6fe6e189ab46b 100644 --- a/tests/napi/src/lib.rs +++ b/tests/napi/src/lib.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + #![allow(clippy::all)] +#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] #![allow(clippy::undocumented_unsafe_blocks)] use std::ffi::c_void; diff --git a/tests/napi/tests/napi_tests.rs b/tests/napi/tests/napi_tests.rs index 671699651c9634..1c9b1ba94dc8d6 100644 --- a/tests/napi/tests/napi_tests.rs +++ b/tests/napi/tests/napi_tests.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] + use std::process::Command; use test_util::deno_cmd; use test_util::deno_config_path; @@ -80,8 +83,8 @@ fn napi_tests() { if !output.status.success() { eprintln!("exit code {:?}", output.status.code()); - println!("stdout {stdout}"); - println!("stderr {stderr}"); + println!("stdout {}", stdout); + println!("stderr {}", stderr); } assert!(output.status.success()); } diff --git a/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/api.js b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/api.js new file mode 100644 index 00000000000000..831428ef73d3b0 --- /dev/null +++ b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/api.js @@ -0,0 +1 @@ +module.exports.main = 1; diff --git a/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/index.js b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/index.js new file mode 100644 index 00000000000000..9e9b27a0a8a55b --- /dev/null +++ b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +// specifier here is the same as in sub/index.js +__exportStar(require("./api"), exports); +__exportStar(require("./sub"), exports); \ No newline at end of file diff --git a/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/package.json b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/package.json new file mode 100644 index 00000000000000..0264413a3ec16c --- /dev/null +++ b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/package.json @@ -0,0 +1,4 @@ +{ + "name": "@denotest/cjs-reexport-same-specifier-in-sub-folder", + "version": "1.0.0" +} \ No newline at end of file diff --git a/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/api.js b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/api.js new file mode 100644 index 00000000000000..bba5ee087f9109 --- /dev/null +++ b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/api.js @@ -0,0 +1 @@ +module.exports.sub = 2; diff --git a/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/index.js b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/index.js new file mode 100644 index 00000000000000..48f78249e03c4e --- /dev/null +++ b/tests/registry/npm/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0/sub/index.js @@ -0,0 +1,12 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +__exportStar(require("./api"), exports); \ No newline at end of file diff --git a/tests/specs/install/future_install_local_deno/install.out b/tests/specs/install/future_install_local_deno/install.out index efb77d8f2257f1..713507f029c8aa 100644 --- a/tests/specs/install/future_install_local_deno/install.out +++ b/tests/specs/install/future_install_local_deno/install.out @@ -1,4 +1,5 @@ ⚠️ `deno install` behavior will change in Deno 2. To preserve the current behavior use the `-g` or `--global` flag. 
+[UNORDERED_START] Download http://localhost:4545/v1/extensionless Download http://localhost:4545/subdir/mod1.ts Download http://localhost:4545/subdir/subdir2/mod2.ts @@ -8,3 +9,4 @@ Download http://127.0.0.1:4250/@denotest/add/1.0.0_meta.json Download http://127.0.0.1:4250/@denotest/add/1.0.0/mod.ts Download http://localhost:4260/@denotest/esm-basic Download http://localhost:4260/@denotest/esm-basic/1.0.0.tgz +[UNORDERED_END] diff --git a/tests/specs/jsr/deps/main.out b/tests/specs/jsr/deps/main.out index 48f75c0819e144..ecea42a0d4fd08 100644 --- a/tests/specs/jsr/deps/main.out +++ b/tests/specs/jsr/deps/main.out @@ -1,9 +1,11 @@ Download http://127.0.0.1:4250/@denotest/deps/meta.json Download http://127.0.0.1:4250/@denotest/deps/1.0.0_meta.json +[UNORDERED_START] Download http://127.0.0.1:4250/@denotest/module-graph/meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/meta.json Download http://127.0.0.1:4250/@denotest/module-graph/1.4.0_meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/0.1.1_meta.json +[UNORDERED_END] [UNORDERED_START] Download http://127.0.0.1:4250/@denotest/deps/1.0.0/mod.ts Download http://127.0.0.1:4250/@denotest/module-graph/1.4.0/other.ts diff --git a/tests/specs/jsr/deps/main_info.out b/tests/specs/jsr/deps/main_info.out index 0abdfa94017400..ba0c571bb1478a 100644 --- a/tests/specs/jsr/deps/main_info.out +++ b/tests/specs/jsr/deps/main_info.out @@ -1,9 +1,11 @@ Download http://127.0.0.1:4250/@denotest/deps/meta.json Download http://127.0.0.1:4250/@denotest/deps/1.0.0_meta.json +[UNORDERED_START] Download http://127.0.0.1:4250/@denotest/module-graph/meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/meta.json Download http://127.0.0.1:4250/@denotest/module-graph/1.4.0_meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/0.1.1_meta.json +[UNORDERED_END] [UNORDERED_START] Download http://127.0.0.1:4250/@denotest/deps/1.0.0/mod.ts Download http://127.0.0.1:4250/@denotest/module-graph/1.4.0/other.ts diff --git a/tests/specs/jsr/import_jsr_via_https/main.out b/tests/specs/jsr/import_jsr_via_https/main.out index cb4c27fb0742c2..29f98c2cf54903 100644 --- a/tests/specs/jsr/import_jsr_via_https/main.out +++ b/tests/specs/jsr/import_jsr_via_https/main.out @@ -1,4 +1,5 @@ Download http://127.0.0.1:4250/@denotest/deps/1.0.0_meta.json +[UNORDERED_START] Download http://127.0.0.1:4250/@denotest/deps/1.0.0/mod.ts Download http://127.0.0.1:4250/@denotest/module-graph/meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/meta.json @@ -7,4 +8,5 @@ Download http://127.0.0.1:4250/@denotest/no-module-graph/0.1.1_meta.json Download http://127.0.0.1:4250/@denotest/no-module-graph/0.1.1/mod.ts Download http://127.0.0.1:4250/@denotest/no-module-graph/0.1.1/TestClass.ts Download http://127.0.0.1:4250/@denotest/module-graph/1.4.0/other.ts +[UNORDERED_END] 0.1.1 diff --git a/tests/specs/jsr/no_unused_params/main.ts b/tests/specs/jsr/no_unused_params/main.ts index e8b12ca8902c1b..54665c107e8ca2 100644 --- a/tests/specs/jsr/no_unused_params/main.ts +++ b/tests/specs/jsr/no_unused_params/main.ts @@ -1,4 +1,4 @@ -import * as inner from "jsr:@denotest/add"; +import * as inner from "jsr:@denotest/add@1"; export function add(a: number, b: number): number { return inner.add(a, b); diff --git a/tests/specs/jsr/subset_type_graph/main.check.out b/tests/specs/jsr/subset_type_graph/main.check.out index f46610c0a5db9f..35890e57f36410 100644 --- a/tests/specs/jsr/subset_type_graph/main.check.out +++ 
b/tests/specs/jsr/subset_type_graph/main.check.out @@ -1,7 +1,9 @@ +[UNORDERED_START] Download http://127.0.0.1:4250/@denotest/subset-type-graph/meta.json Download http://127.0.0.1:4250/@denotest/subset-type-graph-invalid/meta.json Download http://127.0.0.1:4250/@denotest/subset-type-graph/0.1.0_meta.json Download http://127.0.0.1:4250/@denotest/subset-type-graph-invalid/0.1.0_meta.json +[UNORDERED_END] [UNORDERED_START] Download http://127.0.0.1:4250/@denotest/subset-type-graph/0.1.0/mod.ts Download http://127.0.0.1:4250/@denotest/subset-type-graph-invalid/0.1.0/mod.ts diff --git a/tests/specs/mod.rs b/tests/specs/mod.rs index f367f5c773b989..a153322db62a6d 100644 --- a/tests/specs/mod.rs +++ b/tests/specs/mod.rs @@ -6,7 +6,6 @@ use std::collections::HashMap; use std::collections::HashSet; use std::panic::AssertUnwindSafe; use std::rc::Rc; -use std::sync::Arc; use deno_core::anyhow::Context; use deno_core::serde_json; @@ -174,7 +173,7 @@ pub fn main() { file_test_runner::run_tests( &root_category, file_test_runner::RunOptions { parallel: true }, - Arc::new(run_test), + run_test, ); } diff --git a/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/__test__.jsonc b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/__test__.jsonc new file mode 100644 index 00000000000000..5517e693d6ed29 --- /dev/null +++ b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run main.ts", + "output": "main.out" +} diff --git a/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.out b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.out new file mode 100644 index 00000000000000..321d995b8aee09 --- /dev/null +++ b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.out @@ -0,0 +1,7 @@ +Download http://localhost:4260/@denotest/cjs-reexport-same-specifier-in-sub-folder +Download http://localhost:4260/@denotest/cjs-reexport-same-specifier-in-sub-folder/1.0.0.tgz +[Module: null prototype] { + default: { main: [Getter], sub: [Getter] }, + main: 1, + sub: 2 +} diff --git a/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.ts b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.ts new file mode 100644 index 00000000000000..fd0bf7ebac816e --- /dev/null +++ b/tests/specs/node/cjs_reexport_same_specifier_in_sub_folder/main.ts @@ -0,0 +1,3 @@ +import * as module from "npm:@denotest/cjs-reexport-same-specifier-in-sub-folder"; + +console.log(module); diff --git a/tests/specs/node/rejection_handled_web_process/__test__.jsonc b/tests/specs/node/rejection_handled_web_process/__test__.jsonc new file mode 100644 index 00000000000000..8f60df4f588b89 --- /dev/null +++ b/tests/specs/node/rejection_handled_web_process/__test__.jsonc @@ -0,0 +1,6 @@ +// Ensure that Web `onrejectionhandled` is fired before +// Node's `process.on('rejectionHandled')`. 
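+//
+// For context, a minimal sketch (hypothetical, not part of this test) of the
+// ordering contract the script below asserts:
+//
+//   globalThis.addEventListener("rejectionhandled", () => {}); // fires first (Web)
+//   process.on("rejectionHandled", () => {});                  // fires second (Node)
+//   const p = Promise.reject(1);
+//   setTimeout(() => p.catch(() => {}), 100); // a late handler triggers both events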
+{ + "args": "run -A --quiet rejection_handled_web_process.ts", + "output": "rejection_handled_web_process.ts.out" +} diff --git a/tests/testdata/node/rejection_handled_web_process.ts b/tests/specs/node/rejection_handled_web_process/rejection_handled_web_process.ts similarity index 72% rename from tests/testdata/node/rejection_handled_web_process.ts rename to tests/specs/node/rejection_handled_web_process/rejection_handled_web_process.ts index a5136ca3dfea02..c7957082f7c682 100644 --- a/tests/testdata/node/rejection_handled_web_process.ts +++ b/tests/specs/node/rejection_handled_web_process/rejection_handled_web_process.ts @@ -3,6 +3,8 @@ import process from "node:process"; console.log(chalk.red("Hello world!")); +const { promise, resolve } = Promise.withResolvers<void>(); + globalThis.addEventListener("unhandledrejection", (e) => { console.log('globalThis.addEventListener("unhandledrejection");'); e.preventDefault(); @@ -14,6 +16,7 @@ globalThis.addEventListener("rejectionhandled", (_) => { process.on("rejectionHandled", (_) => { console.log("Node rejectionHandled"); + resolve(); }); const a = Promise.reject(1); @@ -21,6 +24,12 @@ setTimeout(() => { a.catch(() => console.log("Added catch handler to the promise")); }, 100); -setTimeout(() => { +const exitTimeout = setTimeout(() => { + console.error("timeout expired"); + Deno.exit(1); +}, 30_000); + +promise.then(() => { console.log("Success"); -}, 1000); + clearTimeout(exitTimeout); +}); diff --git a/tests/testdata/node/rejection_handled_web_process.ts.out b/tests/specs/node/rejection_handled_web_process/rejection_handled_web_process.ts.out similarity index 100% rename from tests/testdata/node/rejection_handled_web_process.ts.out rename to tests/specs/node/rejection_handled_web_process/rejection_handled_web_process.ts.out diff --git a/tests/specs/node/worker_threads_cache/__test__.jsonc b/tests/specs/node/worker_threads_cache/__test__.jsonc new file mode 100644 index 00000000000000..a47fed572dcd24 --- /dev/null +++ b/tests/specs/node/worker_threads_cache/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "tempDir": true, + "args": "run -A main.ts", + "output": "main.out" +} diff --git a/tests/specs/node/worker_threads_cache/main.out b/tests/specs/node/worker_threads_cache/main.out new file mode 100644 index 00000000000000..d14c028e5b7516 --- /dev/null +++ b/tests/specs/node/worker_threads_cache/main.out @@ -0,0 +1,2 @@ +[Module: null prototype] { default: true } +[Module: null prototype] { default: false } diff --git a/tests/specs/node/worker_threads_cache/main.ts b/tests/specs/node/worker_threads_cache/main.ts new file mode 100644 index 00000000000000..9703ac8f63ae0a --- /dev/null +++ b/tests/specs/node/worker_threads_cache/main.ts @@ -0,0 +1,13 @@ +import fs from "node:fs/promises"; +import { isMainThread, Worker } from "node:worker_threads"; + +await fs.writeFile("mod.mjs", "export default " + isMainThread); + +const path = new URL("mod.mjs", import.meta.url); +const i = await import(path.href); +console.log(i); + +if (isMainThread) { + const worker = new Worker(new URL("main.ts", import.meta.url)); + worker.on("message", (msg) => console.log(msg)); +} diff --git a/tests/specs/permission/proc_self_fd/__test__.jsonc b/tests/specs/permission/proc_self_fd/__test__.jsonc new file mode 100644 index 00000000000000..8d4d1ed43ddf5e --- /dev/null +++ b/tests/specs/permission/proc_self_fd/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "run -A main.js", + "output": "hi\n\n0\n", + "exitCode": 123 +} diff --git a/tests/specs/permission/proc_self_fd/main.js
b/tests/specs/permission/proc_self_fd/main.js new file mode 100644 index 00000000000000..86d8334cbb9d25 --- /dev/null +++ b/tests/specs/permission/proc_self_fd/main.js @@ -0,0 +1,18 @@ +// This test is Linux/Darwin only +if (Deno.build.os !== "linux" && Deno.build.os !== "darwin") { + console.log("hi\n\n0"); + Deno.exit(123); +} + +const cmd = new Deno.Command("/usr/bin/env", { + args: [ + "bash", + "-c", + [Deno.execPath(), "run", "--allow-read", "reader.ts", '<(echo "hi")'].join( + " ", + ), + ], +}).spawn(); + +console.log((await cmd.status).code); +Deno.exit(123); diff --git a/tests/specs/permission/proc_self_fd/reader.ts b/tests/specs/permission/proc_self_fd/reader.ts new file mode 100644 index 00000000000000..4b3587fa2fcd34 --- /dev/null +++ b/tests/specs/permission/proc_self_fd/reader.ts @@ -0,0 +1 @@ +console.log(Deno.readTextFileSync(Deno.args[0])); diff --git a/tests/specs/publish/byonm_dep/publish.out b/tests/specs/publish/byonm_dep/publish.out index f3b8a0dccd9368..a7433f86fc3552 100644 --- a/tests/specs/publish/byonm_dep/publish.out +++ b/tests/specs/publish/byonm_dep/publish.out @@ -2,5 +2,6 @@ Check file:///[WILDLINE]/mod.ts Checking for slow types in the public API... Check file:///[WILDLINE]/mod.ts Simulating publish of @scope/package@0.0.0 with files: + file:///[WILDLINE]/deno.jsonc (300B) file:///[WILDLINE]/mod.ts (129B) Warning Aborting due to --dry-run diff --git a/tests/specs/publish/excluded_deno_jsonc/__test__.jsonc b/tests/specs/publish/excluded_deno_jsonc/__test__.jsonc new file mode 100644 index 00000000000000..4e8607ae897fd0 --- /dev/null +++ b/tests/specs/publish/excluded_deno_jsonc/__test__.jsonc @@ -0,0 +1,14 @@ +{ + "tempDir": true, + "steps": [{ + "args": [ + "eval", + "Deno.writeTextFileSync('.gitignore', 'deno.jsonc')" + ], + "output": "[WILDCARD]" + }, { + "args": "publish --dry-run", + "output": "mod.out", + "exitCode": 0 + }] +} diff --git a/tests/specs/publish/excluded_deno_jsonc/deno.jsonc b/tests/specs/publish/excluded_deno_jsonc/deno.jsonc new file mode 100644 index 00000000000000..fe4300ad63f053 --- /dev/null +++ b/tests/specs/publish/excluded_deno_jsonc/deno.jsonc @@ -0,0 +1,5 @@ +{ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./mod.ts" +} diff --git a/tests/specs/publish/excluded_deno_jsonc/mod.out b/tests/specs/publish/excluded_deno_jsonc/mod.out new file mode 100644 index 00000000000000..7c8db25a2332fc --- /dev/null +++ b/tests/specs/publish/excluded_deno_jsonc/mod.out @@ -0,0 +1,6 @@ +Check file:///[WILDLINE]mod.ts +Checking for slow types in the public API... 
+Simulating publish of @scope/pkg@1.0.0 with files: + file:///[WILDLINE]/deno.jsonc (74B) + file:///[WILDLINE]/mod.ts (22B) +Warning Aborting due to --dry-run diff --git a/tests/specs/publish/excluded_deno_jsonc/mod.ts b/tests/specs/publish/excluded_deno_jsonc/mod.ts new file mode 100644 index 00000000000000..816fdefaa881a9 --- /dev/null +++ b/tests/specs/publish/excluded_deno_jsonc/mod.ts @@ -0,0 +1,2 @@ +export class Test { +} diff --git a/tests/specs/publish/missing_constraint/__test__.jsonc b/tests/specs/publish/missing_constraint/__test__.jsonc new file mode 100644 index 00000000000000..06a91f5b65d95e --- /dev/null +++ b/tests/specs/publish/missing_constraint/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "publish --dry-run", + "output": "publish.out", + "exitCode": 1 +} diff --git a/tests/specs/publish/missing_constraint/deno.json b/tests/specs/publish/missing_constraint/deno.json new file mode 100644 index 00000000000000..89f6db90cf0c5a --- /dev/null +++ b/tests/specs/publish/missing_constraint/deno.json @@ -0,0 +1,9 @@ +{ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./mod.ts", + "imports": { + "basic": "npm:@denotest/esm-basic", + "add": "jsr:@denotest/add" + } +} diff --git a/tests/specs/publish/missing_constraint/mod.ts b/tests/specs/publish/missing_constraint/mod.ts new file mode 100644 index 00000000000000..59e40d241316b8 --- /dev/null +++ b/tests/specs/publish/missing_constraint/mod.ts @@ -0,0 +1,7 @@ +import { add } from "add"; +import * as basic from "basic"; +import * as deps from "jsr:@denotest/deps"; + +console.log(add(1, 2)); +console.log(deps); +console.log(basic); diff --git a/tests/specs/publish/missing_constraint/publish.out b/tests/specs/publish/missing_constraint/publish.out new file mode 100644 index 00000000000000..846612979e8fdb --- /dev/null +++ b/tests/specs/publish/missing_constraint/publish.out @@ -0,0 +1,37 @@ +[WILDCARD] +Checking for slow types in the public API... 
+Check file:///[WILDLINE]/mod.ts +error[missing-constraint]: specifier 'jsr:@denotest/add' is missing a version constraint + --> [WILDLINE]mod.ts:[WILDLINE] + | +1 | import { add } from "add"; + | ^^^^^ the specifier + = hint: specify a version constraint for the specifier in the import map + + info: the specifier resolved to version 1.0.0 today, but will resolve to a different + info: major version if one is published in the future and potentially break + docs: https://jsr.io/go/missing-constraint + +error[missing-constraint]: specifier 'npm:@denotest/esm-basic' is missing a version constraint + --> [WILDLINE]mod.ts:[WILDLINE] + | +2 | import * as basic from "basic"; + | ^^^^^^^ the specifier + = hint: specify a version constraint for the specifier in the import map + + info: the specifier resolved to version 1.0.0 today, but will resolve to a different + info: major version if one is published in the future and potentially break + docs: https://jsr.io/go/missing-constraint + +error[missing-constraint]: specifier 'jsr:@denotest/deps' is missing a version constraint + --> [WILDLINE]mod.ts:[WILDLINE] + | +3 | import * as deps from "jsr:@denotest/deps"; + | ^^^^^^^^^^^^^^^^^^^^ the specifier + = hint: specify a version constraint for the specifier + + info: the specifier resolved to version 1.0.0 today, but will resolve to a different + info: major version if one is published in the future and potentially break + docs: https://jsr.io/go/missing-constraint + +error: Found 3 problems diff --git a/tests/specs/publish/missing_constraint_jsx_import_source/__test__.jsonc b/tests/specs/publish/missing_constraint_jsx_import_source/__test__.jsonc new file mode 100644 index 00000000000000..1b96278a08bdbb --- /dev/null +++ b/tests/specs/publish/missing_constraint_jsx_import_source/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "publish --token 'sadfasdf'", + "output": "mod.out", + "exitCode": 1 +} diff --git a/tests/specs/publish/missing_constraint_jsx_import_source/foo.tsx b/tests/specs/publish/missing_constraint_jsx_import_source/foo.tsx new file mode 100644 index 00000000000000..120e8b3346b562 --- /dev/null +++ b/tests/specs/publish/missing_constraint_jsx_import_source/foo.tsx @@ -0,0 +1,5 @@ +import { renderToString } from "npm:preact-render-to-string@6"; + +export default function render() { + return renderToString(
<div>foo.tsx</div>
); +} diff --git a/tests/specs/publish/missing_constraint_jsx_import_source/jsr.jsonc b/tests/specs/publish/missing_constraint_jsx_import_source/jsr.jsonc new file mode 100644 index 00000000000000..7aea0884289482 --- /dev/null +++ b/tests/specs/publish/missing_constraint_jsx_import_source/jsr.jsonc @@ -0,0 +1,11 @@ +{ + "name": "@foo/bar", + "version": "1.0.0", + "exports": { + ".": "./mod.ts" + }, + "compilerOptions": { + "jsx": "react-jsx", + "jsxImportSource": "npm:preact" + } +} diff --git a/tests/specs/publish/missing_constraint_jsx_import_source/mod.out b/tests/specs/publish/missing_constraint_jsx_import_source/mod.out new file mode 100644 index 00000000000000..d1da06be8146a0 --- /dev/null +++ b/tests/specs/publish/missing_constraint_jsx_import_source/mod.out @@ -0,0 +1,17 @@ +[WILDCARD] +Checking for slow types in the public API... +Check file:///[WILDCARD]/mod.ts +error[missing-constraint]: specifier 'npm:preact/jsx-runtime' is missing a version constraint + --> [WILDLINE] + = hint: specify a version constraint for the specifier + + info: the specifier resolved to version 10.19.6 today, but will resolve to a different + info: major version if one is published in the future and potentially break + docs: https://jsr.io/go/missing-constraint + +warning[unsupported-jsx-tsx]: JSX and TSX files are currently not supported + --> [WILDLINE]foo.tsx + + info: follow https://github.com/jsr-io/jsr/issues/24 for updates + +error: Found 1 problem diff --git a/tests/specs/publish/missing_constraint_jsx_import_source/mod.ts b/tests/specs/publish/missing_constraint_jsx_import_source/mod.ts new file mode 100644 index 00000000000000..9935bf5eecf111 --- /dev/null +++ b/tests/specs/publish/missing_constraint_jsx_import_source/mod.ts @@ -0,0 +1,5 @@ +import fooTsx from "./foo.tsx"; + +export function renderTsx() { + console.log(fooTsx()); +} diff --git a/tests/specs/publish/package_json/package.json b/tests/specs/publish/package_json/package.json index c1b171f4c9ccbb..4239110bd7e811 100644 --- a/tests/specs/publish/package_json/package.json +++ b/tests/specs/publish/package_json/package.json @@ -2,6 +2,6 @@ "name": "@deno/foo", "version": "0.0.1", "dependencies": { - "picocolors": "*" + "picocolors": "1" } } diff --git a/tests/specs/publish/unsupported_jsx_tsx/foo.jsx b/tests/specs/publish/unsupported_jsx_tsx/foo.jsx index 021c2d49eae45c..120e8b3346b562 100644 --- a/tests/specs/publish/unsupported_jsx_tsx/foo.jsx +++ b/tests/specs/publish/unsupported_jsx_tsx/foo.jsx @@ -1,4 +1,4 @@ -import { renderToString } from "npm:preact-render-to-string"; +import { renderToString } from "npm:preact-render-to-string@6"; export default function render() { return renderToString(
<div>foo.tsx</div>
); diff --git a/tests/specs/publish/unsupported_jsx_tsx/foo.tsx b/tests/specs/publish/unsupported_jsx_tsx/foo.tsx index 021c2d49eae45c..120e8b3346b562 100644 --- a/tests/specs/publish/unsupported_jsx_tsx/foo.tsx +++ b/tests/specs/publish/unsupported_jsx_tsx/foo.tsx @@ -1,4 +1,4 @@ -import { renderToString } from "npm:preact-render-to-string"; +import { renderToString } from "npm:preact-render-to-string@6"; export default function render() { return renderToString(
<div>foo.tsx</div>
); diff --git a/tests/specs/publish/unsupported_jsx_tsx/jsr.jsonc b/tests/specs/publish/unsupported_jsx_tsx/jsr.jsonc index 7aea0884289482..e411bf54b6d9da 100644 --- a/tests/specs/publish/unsupported_jsx_tsx/jsr.jsonc +++ b/tests/specs/publish/unsupported_jsx_tsx/jsr.jsonc @@ -6,6 +6,6 @@ }, "compilerOptions": { "jsx": "react-jsx", - "jsxImportSource": "npm:preact" + "jsxImportSource": "npm:preact@10" } } diff --git a/tests/specs/run/045_proxy/__test__.jsonc b/tests/specs/run/045_proxy/__test__.jsonc new file mode 100644 index 00000000000000..d4fb7f60cda513 --- /dev/null +++ b/tests/specs/run/045_proxy/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -L debug --allow-net --allow-env --allow-run --allow-read --reload --quiet proxy_test.ts", + "output": "proxy_test.ts.out" +} diff --git a/tests/testdata/run/045_programmatic_proxy_client.ts b/tests/specs/run/045_proxy/programmatic_proxy_client.ts similarity index 100% rename from tests/testdata/run/045_programmatic_proxy_client.ts rename to tests/specs/run/045_proxy/programmatic_proxy_client.ts diff --git a/tests/testdata/run/045_proxy_client.ts b/tests/specs/run/045_proxy/proxy_client.ts similarity index 100% rename from tests/testdata/run/045_proxy_client.ts rename to tests/specs/run/045_proxy/proxy_client.ts diff --git a/tests/testdata/run/045_proxy_test.ts b/tests/specs/run/045_proxy/proxy_test.ts similarity index 71% rename from tests/testdata/run/045_proxy_test.ts rename to tests/specs/run/045_proxy/proxy_test.ts index fcb898c779bf86..d3386f0d7b3a47 100644 --- a/tests/testdata/run/045_proxy_test.ts +++ b/tests/specs/run/045_proxy/proxy_test.ts @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -import { Server } from "../../../tests/util/std/http/server.ts"; -import { assertEquals } from "../../../tests/util/std/assert/mod.ts"; +import { Server } from "../../../util/std/http/server.ts"; const addr = Deno.args[1] || "localhost:4555"; @@ -30,25 +29,33 @@ async function handler(req: Request): Promise { }); } +function assertSuccessOutput(output: Deno.CommandOutput) { + if (output.code !== 0) { + console.error("STDOUT", new TextDecoder().decode(output.stdout)); + console.error("STDERR", new TextDecoder().decode(output.stderr)); + throw new Error(`Expected exit code 0, was ${output.code}`); + } +} + async function testFetch() { - const { code } = await new Deno.Command(Deno.execPath(), { + const output = await new Deno.Command(Deno.execPath(), { args: [ "run", "--quiet", "--reload", "--allow-net", - "run/045_proxy_client.ts", + "proxy_client.ts", ], env: { HTTP_PROXY: `http://${addr}`, }, }).output(); - assertEquals(code, 0); + assertSuccessOutput(output); } async function testModuleDownload() { - const { code } = await new Deno.Command(Deno.execPath(), { + const output = await new Deno.Command(Deno.execPath(), { args: [ "cache", "--reload", @@ -60,17 +67,17 @@ async function testModuleDownload() { }, }).output(); - assertEquals(code, 0); + assertSuccessOutput(output); } async function testFetchNoProxy() { - const { code } = await new Deno.Command(Deno.execPath(), { + const output = await new Deno.Command(Deno.execPath(), { args: [ "run", "--quiet", "--reload", "--allow-net", - "run/045_proxy_client.ts", + "proxy_client.ts", ], env: { HTTP_PROXY: "http://not.exising.proxy.server", @@ -78,11 +85,11 @@ async function testFetchNoProxy() { }, }).output(); - assertEquals(code, 0); + assertSuccessOutput(output); } async function testModuleDownloadNoProxy() { - const { code } = await new 
Deno.Command(Deno.execPath(), { + const output = await new Deno.Command(Deno.execPath(), { args: [ "cache", "--reload", @@ -95,21 +102,22 @@ async function testModuleDownloadNoProxy() { }, }).output(); - assertEquals(code, 0); + assertSuccessOutput(output); } async function testFetchProgrammaticProxy() { - const { code } = await new Deno.Command(Deno.execPath(), { + const output = await new Deno.Command(Deno.execPath(), { args: [ "run", "--quiet", "--reload", "--allow-net=localhost:4545,localhost:4555", "--unstable", - "run/045_programmatic_proxy_client.ts", + "programmatic_proxy_client.ts", ], }).output(); - assertEquals(code, 0); + + assertSuccessOutput(output); } proxyServer(); diff --git a/tests/testdata/run/045_proxy_test.ts.out b/tests/specs/run/045_proxy/proxy_test.ts.out similarity index 100% rename from tests/testdata/run/045_proxy_test.ts.out rename to tests/specs/run/045_proxy/proxy_test.ts.out diff --git a/tests/specs/run/keep_comments_ts/__test__.jsonc b/tests/specs/run/keep_comments_ts/__test__.jsonc new file mode 100644 index 00000000000000..5517e693d6ed29 --- /dev/null +++ b/tests/specs/run/keep_comments_ts/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run main.ts", + "output": "main.out" +} diff --git a/tests/specs/run/keep_comments_ts/main.out b/tests/specs/run/keep_comments_ts/main.out new file mode 100644 index 00000000000000..8cf85ee7138955 --- /dev/null +++ b/tests/specs/run/keep_comments_ts/main.out @@ -0,0 +1,4 @@ +function test() { + // this comment should be in output + return 1 + 1; +} diff --git a/tests/specs/run/keep_comments_ts/main.ts b/tests/specs/run/keep_comments_ts/main.ts new file mode 100644 index 00000000000000..ff58eff2e770fe --- /dev/null +++ b/tests/specs/run/keep_comments_ts/main.ts @@ -0,0 +1,7 @@ +function test() { + // this comment should be in output + return 1 + 1; +} + +// should include the comments because people rely on this behavior +console.log(test.toString()); diff --git a/tests/specs/task/npx_installed_pkg_non_byonm/__test__.jsonc b/tests/specs/task/npx_installed_pkg_non_byonm/__test__.jsonc new file mode 100644 index 00000000000000..ae9e640e6487d8 --- /dev/null +++ b/tests/specs/task/npx_installed_pkg_non_byonm/__test__.jsonc @@ -0,0 +1,11 @@ +{ + "tempDir": true, + "steps": [{ + "commandName": "npm", + "args": "install", + "output": "[WILDCARD]" + }, { + "args": "task say", + "output": "main.out" + }] +} diff --git a/tests/specs/task/npx_installed_pkg_non_byonm/deno.jsonc b/tests/specs/task/npx_installed_pkg_non_byonm/deno.jsonc new file mode 100644 index 00000000000000..00303820ba9ee4 --- /dev/null +++ b/tests/specs/task/npx_installed_pkg_non_byonm/deno.jsonc @@ -0,0 +1,7 @@ +{ + // not byonm + "nodeModulesDir": true, + "tasks": { + "say": "npx cowsay moo" + } +} diff --git a/tests/specs/task/npx_installed_pkg_non_byonm/main.out b/tests/specs/task/npx_installed_pkg_non_byonm/main.out new file mode 100644 index 00000000000000..db58009ce2c3d8 --- /dev/null +++ b/tests/specs/task/npx_installed_pkg_non_byonm/main.out @@ -0,0 +1,9 @@ +Task say npx cowsay moo + _____ +< moo > + ----- + \ ^__^ + \ (oo)\_______ + (__)\ )\/\ + ||----w | + || || diff --git a/tests/specs/task/npx_installed_pkg_non_byonm/package.json b/tests/specs/task/npx_installed_pkg_non_byonm/package.json new file mode 100644 index 00000000000000..7a519e140ae518 --- /dev/null +++ b/tests/specs/task/npx_installed_pkg_non_byonm/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "cowsay": "*" + } +} diff --git a/tests/testdata/coverage/complex_expected.lcov 
b/tests/testdata/coverage/complex_expected.lcov index 5f0983aa8d2772..94b86465ae85d0 100644 --- a/tests/testdata/coverage/complex_expected.lcov +++ b/tests/testdata/coverage/complex_expected.lcov @@ -11,6 +11,11 @@ FNF:4 FNH:2 BRF:0 BRH:0 +DA:1,1 +DA:13,1 +DA:14,1 +DA:15,1 +DA:16,1 DA:17,2 DA:18,2 DA:19,2 @@ -21,6 +26,9 @@ DA:24,2 DA:25,2 DA:26,2 DA:27,2 +DA:29,1 +DA:30,1 +DA:31,1 DA:32,1 DA:33,1 DA:34,1 @@ -30,6 +38,8 @@ DA:38,2 DA:39,2 DA:40,2 DA:42,2 +DA:44,1 +DA:45,1 DA:46,0 DA:47,0 DA:48,0 @@ -39,13 +49,19 @@ DA:52,0 DA:53,0 DA:54,0 DA:56,0 +DA:58,1 +DA:59,1 DA:60,1 +DA:62,1 +DA:63,1 DA:64,0 DA:65,0 DA:66,0 DA:68,0 +DA:70,1 DA:71,0 +DA:73,1 DA:74,1 -LH:21 -LF:35 +LH:37 +LF:51 end_of_record diff --git a/tests/testdata/coverage/complex_expected.out b/tests/testdata/coverage/complex_expected.out index bed3d93068aba5..3d5f6a0ab6a3ad 100644 --- a/tests/testdata/coverage/complex_expected.out +++ b/tests/testdata/coverage/complex_expected.out @@ -1,4 +1,4 @@ -cover [WILDCARD]/coverage/complex.ts ... 60.000% (21/35) +cover [WILDCARD]/coverage/complex.ts ... 72.549% (37/51) 46 | export function unused( 47 | foo: string, 48 | bar: string, diff --git a/tests/testdata/inspector/inspector_test.js b/tests/testdata/inspector/inspector_test.js index 86cd48854ca329..e1ac7b6359f34b 100644 --- a/tests/testdata/inspector/inspector_test.js +++ b/tests/testdata/inspector/inspector_test.js @@ -1,3 +1,6 @@ Deno.test("basic test", () => { - console.log("test has finished running"); + const value = 1 + 1; + if (value !== 2) { + throw new Error("failed"); + } }); diff --git a/tests/testdata/task/npx/non_existent.out b/tests/testdata/task/npx/non_existent.out index 5df04917e69cdc..37b0be22037259 100644 --- a/tests/testdata/task/npx/non_existent.out +++ b/tests/testdata/task/npx/non_existent.out @@ -1,2 +1,4 @@ Task non-existent npx this-command-should-not-exist-for-you -npx: could not resolve command 'this-command-should-not-exist-for-you' +npm ERR! code E404 +npm ERR! 
404 Not Found - GET http://localhost:4260/this-command-should-not-exist-for-you +[WILDCARD] diff --git a/tests/unit/symlink_test.ts b/tests/unit/symlink_test.ts index 310c3693055d73..0ee4a36fda58c8 100644 --- a/tests/unit/symlink_test.ts +++ b/tests/unit/symlink_test.ts @@ -39,6 +39,24 @@ Deno.test( }, ); +Deno.test( + { + ignore: Deno.build.os !== "windows", + permissions: { read: true, write: true }, + }, + function symlinkSyncJunction() { + const testDir = Deno.makeTempDirSync(); + const oldname = testDir + "/oldname"; + const newname = testDir + "/newname"; + Deno.mkdirSync(oldname); + Deno.symlinkSync(oldname, newname, { type: "junction" }); + const newNameInfoLStat = Deno.lstatSync(newname); + const newNameInfoStat = Deno.statSync(newname); + assert(newNameInfoLStat.isSymlink); + assert(newNameInfoStat.isDirectory); + }, +); + Deno.test( { permissions: { read: false, write: false } }, function symlinkSyncPerm() { @@ -96,6 +114,24 @@ Deno.test( }, ); +Deno.test( + { + ignore: Deno.build.os !== "windows", + permissions: { read: true, write: true }, + }, + async function symlinkJunction() { + const testDir = Deno.makeTempDirSync(); + const oldname = testDir + "/oldname"; + const newname = testDir + "/newname"; + Deno.mkdirSync(oldname); + await Deno.symlink(oldname, newname, { type: "junction" }); + const newNameInfoLStat = Deno.lstatSync(newname); + const newNameInfoStat = Deno.statSync(newname); + assert(newNameInfoLStat.isSymlink, "NOT SYMLINK"); + assert(newNameInfoStat.isDirectory, "NOT DIRECTORY"); + }, +); + Deno.test( { permissions: { read: true, write: true } }, async function symlinkAlreadyExist() { diff --git a/tests/unit/tls_sni_test.ts b/tests/unit/tls_sni_test.ts new file mode 100644 index 00000000000000..404f8016e3a2bc --- /dev/null +++ b/tests/unit/tls_sni_test.ts @@ -0,0 +1,60 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
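+//
+// Background (a hedged sketch of the mechanism under test, not an assertion of
+// the implementation): with SNI the client names the desired host inside the
+// TLS ClientHello, and the server must choose a certificate before the
+// handshake completes. The internal resolver hook exercised below behaves
+// roughly like:
+//
+//   // hypothetical shape; `resolverSymbol` is an unstable internal API
+//   type CertKey = { cert: string; key: string };
+//   const resolve: (sni: string) => CertKey = (sni) => keys[sni];
+//
+// Returning an unparsable cert/key pair makes the handshake fail on both
+// sides, which is what the "fail-*" server names below assert.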
+import { assertEquals, assertRejects } from "./test_util.ts"; +// @ts-expect-error TypeScript (as of 3.7) does not support indexing namespaces by symbol +const { resolverSymbol, serverNameSymbol } = Deno[Deno.internal]; + +const cert = Deno.readTextFileSync("tests/testdata/tls/localhost.crt"); +const key = Deno.readTextFileSync("tests/testdata/tls/localhost.key"); +const certEcc = Deno.readTextFileSync("tests/testdata/tls/localhost_ecc.crt"); +const keyEcc = Deno.readTextFileSync("tests/testdata/tls/localhost_ecc.key"); + +Deno.test( + { permissions: { net: true, read: true } }, + async function listenResolver() { + const sniRequests: string[] = []; + const keys: Record<string, { cert: string; key: string }> = { + "server-1": { cert, key }, + "server-2": { cert: certEcc, key: keyEcc }, + "fail-server-3": { cert: "(invalid)", key: "(bad)" }, + }; + const opts: unknown = { + hostname: "localhost", + port: 0, + [resolverSymbol]: (sni: string) => { + sniRequests.push(sni); + return keys[sni]!; + }, + }; + const listener = Deno.listenTls( + <Deno.ListenTlsOptions> opts, + ); + + for ( + const server of ["server-1", "server-2", "fail-server-3", "fail-server-4"] + ) { + const conn = await Deno.connectTls({ + hostname: "localhost", + [serverNameSymbol]: server, + port: listener.addr.port, + }); + const serverConn = await listener.accept(); + if (server.startsWith("fail-")) { + await assertRejects(async () => await conn.handshake()); + await assertRejects(async () => await serverConn.handshake()); + } else { + await conn.handshake(); + await serverConn.handshake(); + } + conn.close(); + serverConn.close(); + } + + assertEquals(sniRequests, [ + "server-1", + "server-2", + "fail-server-3", + "fail-server-4", + ]); + listener.close(); + }, +); diff --git a/tests/unit/webgpu_test.ts b/tests/unit/webgpu_test.ts index 4b65e003384402..f3101029728c86 100644 --- a/tests/unit/webgpu_test.ts +++ b/tests/unit/webgpu_test.ts @@ -100,11 +100,6 @@ Deno.test({ stagingBuffer.unmap(); device.destroy(); - - // TODO(lucacasonato): webgpu spec should add a explicit destroy method for - // adapters. - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); Deno.test({ @@ -210,11 +205,6 @@ Deno.test({ outputBuffer.unmap(); device.destroy(); - - // TODO(lucacasonato): webgpu spec should add a explicit destroy method for - // adapters.
- const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); Deno.test({ @@ -223,8 +213,8 @@ Deno.test({ const adapter = await navigator.gpu.requestAdapter(); assert(adapter); assert(adapter.features); - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); + const device = await adapter.requestDevice(); + device.destroy(); }); Deno.test({ @@ -243,8 +233,6 @@ Deno.test({ ); device.destroy(); - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); Deno.test(function getPreferredCanvasFormat() { @@ -313,8 +301,6 @@ Deno.test({ ); device.destroy(); - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); Deno.test({ @@ -409,8 +395,6 @@ Deno.test({ // NOTE: GPUQueue.copyExternalImageToTexture needs to be validated the argument of copySize property's length when its a sequence, but it is not implemented yet device.destroy(); - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); Deno.test({ @@ -510,8 +494,6 @@ Deno.test({ // NOTE: GPUQueue.copyExternalImageToTexture needs to be validated the argument of destination.origin property's length when its a sequence, but it is not implemented yet device.destroy(); - const resources = Object.keys(Deno.resources()); - Deno.close(Number(resources[resources.length - 1])); }); async function checkIsWsl() { diff --git a/tests/unit_node/async_hooks_test.ts b/tests/unit_node/async_hooks_test.ts index 8d94180cf41b89..46d7dd785f6737 100644 --- a/tests/unit_node/async_hooks_test.ts +++ b/tests/unit_node/async_hooks_test.ts @@ -130,3 +130,8 @@ Deno.test(function asyncResourceStub() { const resource = new AsyncResource("dbquery"); assert(typeof resource.asyncId() === "number"); }); + +Deno.test(function emitDestroyStub() { + const resource = new AsyncResource("foo"); + assert(typeof resource.emitDestroy === "function"); +}); diff --git a/tests/unit_node/http_test.ts b/tests/unit_node/http_test.ts index c298f2abb2f50a..c57027549c8467 100644 --- a/tests/unit_node/http_test.ts +++ b/tests/unit_node/http_test.ts @@ -380,7 +380,12 @@ Deno.test("[node/http] send request with non-chunked body", async () => { req.write("world"); req.end(); - await servePromise; + await Promise.all([ + servePromise, + // wait 100ms because of the socket.setTimeout(100) above + // in order to not cause a flaky test sanitizer failure + await new Promise((resolve) => setTimeout(resolve, 100)), + ]); }); Deno.test("[node/http] send request with chunked body", async () => { diff --git a/tests/unit_node/os_test.ts b/tests/unit_node/os_test.ts index 9ce9fc9eb73735..810c2251864981 100644 --- a/tests/unit_node/os_test.ts +++ b/tests/unit_node/os_test.ts @@ -47,6 +47,14 @@ Deno.test({ }, }); +Deno.test({ + name: "home directory when HOME is not set", + fn() { + Deno.env.delete("HOME"); + assertEquals(typeof os.homedir(), "string"); + }, +}); + Deno.test({ name: "tmp directory is a string", fn() { diff --git a/tests/unit_node/process_test.ts b/tests/unit_node/process_test.ts index b92be2f3cab97b..6b90a30ba50031 100644 --- a/tests/unit_node/process_test.ts +++ b/tests/unit_node/process_test.ts @@ -6,6 +6,7 @@ import process, { argv, argv0 as importedArgv0, env, + geteuid, pid as importedPid, platform as importedPlatform, } from "node:process"; @@ -879,6 +880,7 @@ Deno.test("process.geteuid", () => { if (Deno.build.os === "windows") { 
assertEquals(process.geteuid, undefined); } else { + assert(geteuid); assert(typeof process.geteuid?.() === "number"); } }); @@ -1083,3 +1085,12 @@ Deno.test({ process.setSourceMapsEnabled(true); // noop }, }); + +// Regression test for https://github.com/denoland/deno/issues/23761 +Deno.test({ + name: "process.uptime without this", + fn() { + const v = (0, process.uptime)(); + assert(v >= 0); + }, +}); diff --git a/tests/unit_node/worker_threads_test.ts b/tests/unit_node/worker_threads_test.ts index f46d982fe643c4..e16bc89666ab00 100644 --- a/tests/unit_node/worker_threads_test.ts +++ b/tests/unit_node/worker_threads_test.ts @@ -516,6 +516,32 @@ Deno.test({ }, }); +Deno.test({ + name: "[node/worker_threads] Returns terminate promise with exit code", + async fn() { + const deferred = Promise.withResolvers<void>(); + const worker = new workerThreads.Worker( + ` + import { parentPort } from "node:worker_threads"; + parentPort.postMessage("ok"); + `, + { + eval: true, + }, + ); + + worker.on("message", (data) => { + assertEquals(data, "ok"); + deferred.resolve(); + }); + + await deferred.promise; + const promise = worker.terminate(); + assertEquals(typeof promise.then, "function"); + assertEquals(await promise, 0); + }, +}); + Deno.test({ name: "[node/worker_threads] MessagePort.on all message listeners are invoked", diff --git a/tests/util/server/src/lib.rs b/tests/util/server/src/lib.rs index ec9154af24b089..ee5348049d454a 100644 --- a/tests/util/server/src/lib.rs +++ b/tests/util/server/src/lib.rs @@ -1,5 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +#![allow(clippy::print_stdout)] +#![allow(clippy::print_stderr)] + use std::collections::HashMap; use std::env; use std::io::Write; diff --git a/tests/util/server/src/lsp.rs b/tests/util/server/src/lsp.rs index c4219b9423011c..7c48bae23edcc6 100644 --- a/tests/util/server/src/lsp.rs +++ b/tests/util/server/src/lsp.rs @@ -470,6 +470,7 @@ pub struct LspClientBuilder { use_diagnostic_sync: bool, deno_dir: TempDir, envs: HashMap, + collect_perf: bool, } impl LspClientBuilder { @@ -488,6 +489,7 @@ impl LspClientBuilder { use_diagnostic_sync: true, deno_dir, envs: Default::default(), + collect_perf: false, } } @@ -514,6 +516,15 @@ impl LspClientBuilder { self } + /// Whether to collect performance records (marks / measures, as emitted + /// by the lsp in the `performance` module). + /// Implies `capture_stderr`. + pub fn collect_perf(mut self) -> Self { + self.capture_stderr = true; + self.collect_perf = true; + self + } + /// Whether to use the synchronization messages to better sync diagnostics /// between the test client and server.
pub fn use_diagnostic_sync(mut self, value: bool) -> Self { @@ -577,10 +588,12 @@ impl LspClientBuilder { let stdin = child.stdin.take().unwrap(); let writer = io::BufWriter::new(stdin); - let stderr_lines_rx = if self.capture_stderr { + let (stderr_lines_rx, perf_rx) = if self.capture_stderr { let stderr = child.stderr.take().unwrap(); let print_stderr = self.print_stderr; let (tx, rx) = mpsc::channel::<String>(); + let (perf_tx, perf_rx) = + self.collect_perf.then(mpsc::channel::<PerfRecord>).unzip(); std::thread::spawn(move || { let stderr = BufReader::new(stderr); for line in stderr.lines() { match line { Ok(line) => { if print_stderr { eprintln!("{}", line); } + if let Some(tx) = perf_tx.as_ref() { + // look for perf records + if line.starts_with('{') && line.ends_with("},") { + match serde_json::from_str::<PerfRecord>( + line.trim_end_matches(','), + ) { + Ok(record) => { + tx.send(record).unwrap(); + continue; + } + Err(err) => { + eprintln!("failed to parse perf record: {:#}", err); + } + } + } + } tx.send(line).unwrap(); } Err(err) => { @@ -597,9 +626,9 @@ } } }); - Some(rx) + (Some(rx), perf_rx) } else { - None + (None, None) }; Ok(LspClient { @@ -613,10 +642,94 @@ stderr_lines_rx, config: json!("{}"), supports_workspace_configuration: false, + perf: perf_rx.map(Perf::new), }) } } +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase", tag = "type")] +/// A performance record, emitted by the `lsp::performance` +/// module. +pub enum PerfRecord { + Mark(PerfMark), + Measure(PerfMeasure), +} + +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PerfMeasure { + name: String, + count: u32, + duration: f64, +} + +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PerfMark { + name: String, + #[serde(default)] + count: Option<u32>, + #[serde(default)] + args: Option<serde_json::Value>, +} + +#[derive(Debug)] +pub struct Perf { + records: Vec<PerfRecord>, + measures_counts: HashMap<String, u32>, + rx: mpsc::Receiver<PerfRecord>, +} + +impl Perf { + fn new(rx: mpsc::Receiver<PerfRecord>) -> Self { + Self { + records: Default::default(), + measures_counts: Default::default(), + rx, + } + } + fn drain_until(&mut self, f: impl Fn(&PerfRecord) -> bool) { + let timeout_time = + Instant::now().checked_add(Duration::from_secs(5)).unwrap(); + let mut found = false; + loop { + while let Ok(record) = self.rx.try_recv() { + if let PerfRecord::Measure(measure) = &record { + *self + .measures_counts + .entry(measure.name.clone()) + .or_default() += 1; + } + if f(&record) { + found = true; + } + self.records.push(record); + } + + if found { + break; + } + + std::thread::sleep(Duration::from_millis(20)); + + if Instant::now() > timeout_time { + panic!("timed out waiting for perf record"); + } + } + } + pub fn measures(&self) -> impl IntoIterator<Item = &PerfMeasure> { + self.records.iter().filter_map(|record| match record { + PerfRecord::Measure(measure) => Some(measure), + _ => None, + }) + } + + pub fn measure_count(&self, name: &str) -> u32 { + self.measures_counts.get(name).copied().unwrap_or_default() + } +} + pub struct LspClient { child: Child, reader: LspStdoutReader, @@ -628,6 +741,7 @@ pub struct LspClient { stderr_lines_rx: Option<mpsc::Receiver<String>>, config: serde_json::Value, supports_workspace_configuration: bool, + perf: Option<Perf>, } impl Drop for LspClient { @@ -661,6 +775,17 @@ impl LspClient { self.reader.pending_len() } + /// Collects performance records until a measure with the given name is + /// emitted.
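+  ///
+  /// Illustrative usage (a sketch; the measure names are the ones recorded by
+  /// the semantic-tokens test above, not an exhaustive list):
+  ///
+  /// ```ignore
+  /// let perf = client.perf_wait_for_measure("lsp.semantic_tokens_full");
+  /// assert_eq!(
+  ///   perf.measure_count("tsc.request.getEncodedSemanticClassifications"),
+  ///   1,
+  /// );
+  /// ```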
+ pub fn perf_wait_for_measure(&mut self, name: &str) -> &Perf { + let perf = self + .perf + .as_mut() + .expect("must setup with client_builder.collect_perf()"); + perf.drain_until(|record| matches!(record, PerfRecord::Measure(measure) if measure.name == name)); + perf + } + #[track_caller] pub fn wait_until_stderr_line( &self, @@ -707,8 +832,9 @@ impl LspClient { "cache": null, "certificateStores": null, "codeLens": { - "implementations": true, - "references": true, + "implementations": false, + "references": false, + "referencesAllFunctions": false, "test": true, }, "config": null, @@ -732,6 +858,9 @@ impl LspClient { "tlsCertificate": null, "unsafelyIgnoreCertificateErrors": null, "unstable": false, + // setting this causes performance records to be logged + // to stderr + "internalDebug": self.perf.is_some(), } }), ) } @@ -778,6 +907,12 @@ impl LspClient { } } + pub fn did_open_file(&mut self, file: &SourceFile) -> CollectedDiagnostics { + self.did_open(json!({ + "textDocument": file.text_document(), + })) + } + pub fn did_open(&mut self, params: Value) -> CollectedDiagnostics { self.did_open_raw(params); self.read_diagnostics() @@ -1138,6 +1273,130 @@ impl CollectedDiagnostics { } } +#[derive(Debug, Clone)] +pub struct SourceFile { + path: PathRef, + src: String, + lang: &'static str, + version: i32, +} + +impl SourceFile { + pub fn new(path: PathRef, src: String) -> Self { + path.write(&src); + Self::new_in_mem(path, src) + } + + pub fn new_in_mem(path: PathRef, src: String) -> Self { + let lang = match path.as_path().extension().unwrap().to_str().unwrap() { + "js" => "javascript", + "ts" | "d.ts" => "typescript", + "json" => "json", + other => panic!("unsupported file extension: {other}"), + }; + Self { + path, + src, + lang, + version: 1, + } + } + + pub fn range_of(&self, text: &str) -> lsp::Range { + range_of(text, &self.src) + } + + pub fn range_of_nth(&self, n: usize, text: &str) -> lsp::Range { + range_of_nth(n, text, &self.src) + } + + pub fn uri(&self) -> lsp::Url { + self.path.uri_file() + } + + pub fn text_document(&self) -> lsp::TextDocumentItem { + lsp::TextDocumentItem { + uri: self.uri(), + language_id: self.lang.to_string(), + version: self.version, + text: self.src.clone(), + } + } + + pub fn identifier(&self) -> lsp::TextDocumentIdentifier { + lsp::TextDocumentIdentifier { uri: self.uri() } + } +} + +/// Helper to create a `SourceFile` and write its contents to disk. +pub fn source_file(path: PathRef, src: impl AsRef<str>) -> SourceFile { + SourceFile::new(path, src.as_ref().to_string()) +} + +/// Helper to create a `SourceFile` in memory without writing to disk. +pub fn source_file_in_mem(path: PathRef, src: impl AsRef<str>) -> SourceFile { + SourceFile::new_in_mem(path, src.as_ref().to_string()) +} + +/// Helper to get the `lsp::Range` of the `n`th occurrence of +/// `text` in `src`. `n` is zero-based, like most indexes.
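+/// For example (a sketch; LSP positions are zero-based, with UTF-16 column
+/// offsets):
+///
+/// ```ignore
+/// // "foo" occurs twice; n = 1 selects the second occurrence.
+/// let range = range_of_nth(1, "foo", "foo bar\nfoo baz");
+/// assert_eq!(range.start, lsp::Position { line: 1, character: 0 });
+/// assert_eq!(range.end, lsp::Position { line: 1, character: 3 });
+/// ```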
+/// Helper to get the `lsp::Range` of the `n`th occurrence of
+/// `text` in `src`. `n` is zero-based, like most indexes.
+pub fn range_of_nth(
+  n: usize,
+  text: impl AsRef<str>,
+  src: impl AsRef<str>,
+) -> lsp::Range {
+  let text = text.as_ref();
+  let src = src.as_ref();
+  let start = src
+    .match_indices(text)
+    .nth(n)
+    .map(|(i, _)| i)
+    .unwrap_or_else(|| panic!("couldn't find text {text} in source {src}"));
+  let end = start + text.len();
+  let mut line = 0;
+  let mut col = 0;
+  let mut byte_idx = 0;
+  let pos = |line, col| lsp::Position {
+    line,
+    character: col,
+  };
+  let mut start_pos = None;
+  let mut end_pos = None;
+  for c in src.chars() {
+    if byte_idx == start {
+      start_pos = Some(pos(line, col));
+    }
+    if byte_idx == end {
+      end_pos = Some(pos(line, col));
+      break;
+    }
+    if c == '\n' {
+      line += 1;
+      col = 0;
+    } else {
+      col += c.len_utf16() as u32;
+    }
+    byte_idx += c.len_utf8();
+  }
+  if start_pos.is_some() && end_pos.is_none() {
+    // range extends to end of string
+    end_pos = Some(pos(line, col));
+  }
+  let (start, end) = (start_pos.unwrap(), end_pos.unwrap());
+  lsp::Range { start, end }
+}
+
+/// Helper to get the `lsp::Range` of the first occurrence of
+/// `text` in `src`. Equivalent to `range_of_nth(0, text, src)`.
+pub fn range_of(text: impl AsRef<str>, src: impl AsRef<str>) -> lsp::Range {
+  range_of_nth(0, text, src)
+}
+
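// Illustrative sketch (editor's addition, not part of the diff): columns are
// counted in UTF-16 code units per the LSP spec, and `n` is zero-based, so
// the second "foo" in the source below starts at line 1, character 4.
fn range_of_nth_example() {
  let src = "foo;\nlet foo = 1;\n";
  let range = range_of_nth(1, "foo", src);
  assert_eq!(
    range,
    lsp::Range {
      start: lsp::Position { line: 1, character: 4 },
      end: lsp::Position { line: 1, character: 7 },
    }
  );
}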
 #[cfg(test)]
 mod tests {
   use super::*;
diff --git a/tests/util/server/src/pty.rs b/tests/util/server/src/pty.rs
index 9b2a5eb5d7bea8..d6102c1d634cff 100644
--- a/tests/util/server/src/pty.rs
+++ b/tests/util/server/src/pty.rs
@@ -209,7 +209,12 @@ impl Pty {
 
   #[track_caller]
   fn read_until_condition(&mut self, condition: impl FnMut(&mut Self) -> bool) {
-    self.read_until_condition_with_timeout(condition, Duration::from_secs(15));
+    let duration = if std::env::var_os("CI").is_some() {
+      Duration::from_secs(30)
+    } else {
+      Duration::from_secs(15)
+    };
+    self.read_until_condition_with_timeout(condition, duration);
   }
 
   #[track_caller]
diff --git a/tests/util/server/src/test_server.rs b/tests/util/server/src/test_server.rs
index 19e33f9f54d304..3ae3eaa7d94949 100644
--- a/tests/util/server/src/test_server.rs
+++ b/tests/util/server/src/test_server.rs
@@ -1,5 +1,8 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+#![allow(clippy::print_stdout)]
+#![allow(clippy::print_stderr)]
+
 fn main() {
   setup_panic_hook();
   test_server::servers::run_all_servers();
diff --git a/tests/wpt/runner/expectation.json b/tests/wpt/runner/expectation.json
index f20cd78f110260..1f2dfa68475776 100644
--- a/tests/wpt/runner/expectation.json
+++ b/tests/wpt/runner/expectation.json
@@ -2795,7 +2795,26 @@
       "XSLTProcessor interface: operation reset()",
       "Window interface: attribute event",
       "idl_test setup",
-      "ShadowRoot interface: attribute clonable"
+      "ShadowRoot interface: attribute clonable",
+      "Document interface: operation prepend((Node or TrustedScript or DOMString)...)",
+      "Document interface: operation append((Node or TrustedScript or DOMString)...)",
+      "Document interface: operation replaceChildren((Node or TrustedScript or DOMString)...)",
+      "DocumentType interface: operation before((Node or TrustedScript or DOMString)...)",
+      "DocumentType interface: operation after((Node or TrustedScript or DOMString)...)",
+      "DocumentType interface: operation replaceWith((Node or TrustedScript or DOMString)...)",
+      "DocumentFragment interface: operation prepend((Node or TrustedScript or DOMString)...)",
+      "DocumentFragment interface: operation append((Node or TrustedScript or DOMString)...)",
+      "DocumentFragment interface: operation replaceChildren((Node or TrustedScript or DOMString)...)",
+      "ShadowRoot interface: attribute serializable",
+      "Element interface: operation prepend((Node or TrustedScript or DOMString)...)",
+      "Element interface: operation append((Node or TrustedScript or DOMString)...)",
+      "Element interface: operation replaceChildren((Node or TrustedScript or DOMString)...)",
+      "Element interface: operation before((Node or TrustedScript or DOMString)...)",
+      "Element interface: operation after((Node or TrustedScript or DOMString)...)",
+      "Element interface: operation replaceWith((Node or TrustedScript or DOMString)...)",
+      "CharacterData interface: operation before((Node or TrustedScript or DOMString)...)",
+      "CharacterData interface: operation after((Node or TrustedScript or DOMString)...)",
+      "CharacterData interface: operation replaceWith((Node or TrustedScript or DOMString)...)"
     ],
     "idlharness.window.html?include=Node": [
       "Node interface: existence and properties of interface object",
@@ -3296,8 +3315,18 @@
       "valueOf.any.html": true,
       "valueOf.any.worker.html": true
     },
-    "idlharness.any.html": true,
-    "idlharness.any.worker.html": true,
+    "idlharness.any.html": [
+      "Memory interface: operation toFixedLengthBuffer()",
+      "Memory interface: operation toResizableBuffer()",
+      "Memory interface: [object WebAssembly.Memory] must inherit property \"toFixedLengthBuffer()\" with the proper type",
+      "Memory interface: [object WebAssembly.Memory] must inherit property \"toResizableBuffer()\" with the proper type"
+    ],
+    "idlharness.any.worker.html": [
+      "Memory interface: operation toFixedLengthBuffer()",
+      "Memory interface: operation toResizableBuffer()",
+      "Memory interface: [object WebAssembly.Memory] must inherit property \"toFixedLengthBuffer()\" with the proper type",
+      "Memory interface: [object WebAssembly.Memory] must inherit property \"toResizableBuffer()\" with the proper type"
+    ],
     "instance": {
       "constructor-bad-imports.any.html": true,
      "constructor-bad-imports.any.worker.html": true,
@@ -4512,7 +4541,15 @@
       ": Setting .hash = ' ' Trailing space should be encoded",
       ": Setting .hash = ' ' Trailing space should be encoded",
       ": Setting .hash = '\u0000' Trailing C0 control should be encoded",
-      ": Setting .hash = '\u0000' Trailing C0 control should be encoded"
+      ": Setting .hash = '\u0000' Trailing C0 control should be encoded",
+      ": Setting .host = 'example.com?stuff:8080' Stuff after a ? delimiter is ignored, trailing 'port'",
+      ": Setting .host = 'example.com?stuff:8080' Stuff after a ? delimiter is ignored, trailing 'port'",
+      ": Setting .host = 'example.com:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      ": Setting .host = 'example.com:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      ": Setting .host = '[::1]:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      ": Setting .host = '[::1]:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      ": Setting .host = '[::1]' IPv6 without port",
+      ": Setting .host = '[::1]' IPv6 without port"
     ],
     "url-setters-a-area.window.html?include=file": [
       ": Setting .protocol = 'http' Can’t switch from file URL with no host",
@@ -4599,7 +4636,9 @@
       "URL: Setting .pathname = '//p'",
       "URL: Setting .pathname = 'p' Drop /. from path",
       "URL: Setting .search = '' Do not drop trailing spaces from non-trailing opaque paths",
-      "URL: Setting .search = ''"
+      "URL: Setting .search = ''",
+      "URL: Setting .host = 'example.com:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      "URL: Setting .host = '[::1]:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error"
     ],
     "url-setters.any.html?include=file": [
       "URL: Setting .pathname = '\\\\' File URLs and (back)slashes",
@@ -4621,7 +4660,9 @@
       "URL: Setting .pathname = '//p'",
       "URL: Setting .pathname = 'p' Drop /. from path",
       "URL: Setting .search = '' Do not drop trailing spaces from non-trailing opaque paths",
-      "URL: Setting .search = ''"
+      "URL: Setting .search = ''",
+      "URL: Setting .host = 'example.com:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error",
+      "URL: Setting .host = '[::1]:invalid' Anything other than ASCII digit stops the port parser in a setter but is not an error"
     ],
     "url-setters.any.worker.html?include=file": [
       "URL: Setting .pathname = '\\\\' File URLs and (back)slashes",
@@ -6603,8 +6644,8 @@
     ],
     "forbidden-method.any.html": true,
     "forbidden-method.any.worker.html": true,
-    "request-bad-port.any.html": false,
-    "request-bad-port.any.worker.html": false,
+    "request-bad-port.any.html": true,
+    "request-bad-port.any.worker.html": true,
     "request-cache-default-conditional.any.html": true,
     "request-cache-default-conditional.any.worker.html": true,
     "request-cache-default.any.html": [
@@ -8698,9 +8739,7 @@
     "blob-url.any.worker-module.html": [
       "Revoking a blob URL immediately after calling import will not fail"
     ],
-    "blob-url-workers.window.html": [
-      "A revoked blob URL will not resolve in a worker even if it's in the window's module graph"
-    ],
+    "blob-url-workers.window.html": [],
     "microtasks": {
       "basic.any.html": [
         "import() should not drain the microtask queue if it fails during specifier resolution",
diff --git a/tests/wpt/suite b/tests/wpt/suite
index daa07cf3c47652..5e8f71d73049d4 160000
--- a/tests/wpt/suite
+++ b/tests/wpt/suite
@@ -1 +1 @@
-Subproject commit daa07cf3c47652ed67e637f2a39bbc34f91cfe10
+Subproject commit 5e8f71d73049d4fca2a8cbc62d40e821400f1624
diff --git a/tools/lint.js b/tools/lint.js
index 2bb91f3d569019..d4fb4e9a018259 100755
--- a/tools/lint.js
+++ b/tools/lint.js
@@ -162,6 +162,12 @@ async function clippy() {
       "warnings",
       "--deny",
       "clippy::unused_async",
+      // generally prefer the `log` crate, but ignore
+      // these print_* rules if necessary
+      "--deny",
+      "clippy::print_stderr",
+      "--deny",
+      "clippy::print_stdout",
     ],
     stdout: "inherit",
     stderr: "inherit",
@@ -210,12 +216,12 @@ async function ensureNoNewITests() {
     // Read the comment above. Please don't increase these numbers!
     "lsp_tests.rs": 0,
     "node_compat_tests.rs": 4,
-    "node_unit_tests.rs": 3,
+    "node_unit_tests.rs": 2,
     "npm_tests.rs": 98,
     "pm_tests.rs": 0,
     "publish_tests.rs": 0,
     "repl_tests.rs": 0,
-    "run_tests.rs": 373,
+    "run_tests.rs": 372,
     "shared_library_tests.rs": 0,
     "task_tests.rs": 30,
     "test_tests.rs": 77,
diff --git a/tools/wgpu_sync.js b/tools/wgpu_sync.js
index 5844c68a21906e..4939983c2aaacd 100755
--- a/tools/wgpu_sync.js
+++ b/tools/wgpu_sync.js
@@ -77,8 +77,7 @@ async function patchCargo() {
     (data) =>
       data
         .replace(/^wgpu-core = .*/m, `wgpu-core = "${V_WGPU}"`)
-        .replace(/^wgpu-types = .*/m, `wgpu-types = "${V_WGPU}"`)
-        .replace(/^wgpu-hal = .*/m, `wgpu-hal = "${V_WGPU}"`),
+        .replace(/^wgpu-types = .*/m, `wgpu-types = "${V_WGPU}"`),
   );
 }