From cd704d28eba856cb40c218746bc4a9f00fb2464e Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson Date: Wed, 9 Mar 2022 08:22:11 +0100 Subject: [PATCH 01/23] export CanonicalIndexError (#44524) --- base/exports.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/base/exports.jl b/base/exports.jl index 2d790f16b7986..dff6b0c9bc208 100644 --- a/base/exports.jl +++ b/base/exports.jl @@ -122,6 +122,7 @@ export Cwstring, # Exceptions + CanonicalIndexError, CapturedException, CompositeException, DimensionMismatch, From 6e8804b3e8736ad51b3fb658e9c18b831995bb1d Mon Sep 17 00:00:00 2001 From: Shuhei Kadowaki <40514306+aviatesk@users.noreply.github.com> Date: Wed, 9 Mar 2022 16:34:11 +0900 Subject: [PATCH 02/23] follow up #44448, correct `findall`/`findsup` for `OverlayMethodTable` (#44511) - respect world range of failed lookup into `OverlayMethodTable`: - fix calculation of merged valid world range: - make `findsup` return valid `WorldRange` unconditionally, and enable more strict world validation within `abstract_invoke`: - fix the default `limit::Int` value of `findall` --- base/compiler/abstractinterpretation.jl | 7 ++-- base/compiler/methodtable.jl | 46 ++++++++++++++----------- base/reflection.jl | 6 ++-- 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/base/compiler/abstractinterpretation.jl b/base/compiler/abstractinterpretation.jl index a191120e306b7..62d92b766c6ca 100644 --- a/base/compiler/abstractinterpretation.jl +++ b/base/compiler/abstractinterpretation.jl @@ -1482,11 +1482,10 @@ function abstract_invoke(interp::AbstractInterpreter, (; fargs, argtypes)::ArgIn types = rewrap_unionall(Tuple{ft, unwrap_unionall(types).parameters...}, types)::Type nargtype = Tuple{ft, nargtype.parameters...} argtype = Tuple{ft, argtype.parameters...} - result = findsup(types, method_table(interp)) - result === nothing && return CallMeta(Any, false) - match, valid_worlds = result - method = match.method + match, valid_worlds = findsup(types, method_table(interp)) + match === nothing && return CallMeta(Any, false) update_valid_age!(sv, valid_worlds) + method = match.method (ti, env::SimpleVector) = ccall(:jl_type_intersection_with_env, Any, (Any, Any), nargtype, method.sig)::SimpleVector (; rt, edge) = result = abstract_call_method(interp, method, ti, env, false, sv) edge !== nothing && add_backedge!(edge::MethodInstance, sv) diff --git a/base/compiler/methodtable.jl b/base/compiler/methodtable.jl index 841a020b43a8d..f68cdd52d1b06 100644 --- a/base/compiler/methodtable.jl +++ b/base/compiler/methodtable.jl @@ -47,29 +47,28 @@ given signature `sig`. If no applicable methods are found, an empty result is returned. If the number of applicable methods exceeded the specified limit, `missing` is returned. 
""" -function findall(@nospecialize(sig::Type), table::InternalMethodTable; limit::Int=typemax(Int)) +function findall(@nospecialize(sig::Type), table::InternalMethodTable; limit::Int=Int(typemax(Int32))) return _findall(sig, nothing, table.world, limit) end -function findall(@nospecialize(sig::Type), table::OverlayMethodTable; limit::Int=typemax(Int)) +function findall(@nospecialize(sig::Type), table::OverlayMethodTable; limit::Int=Int(typemax(Int32))) result = _findall(sig, table.mt, table.world, limit) result === missing && return missing - if !isempty(result) - if all(match->match.fully_covers, result) - # no need to fall back to the internal method table - return result - else - # merge the match results with the internal method table - fallback_result = _findall(sig, nothing, table.world, limit) - return MethodLookupResult( - vcat(result.matches, fallback_result.matches), - WorldRange(min(result.valid_worlds.min_world, fallback_result.valid_worlds.min_world), - max(result.valid_worlds.max_world, fallback_result.valid_worlds.max_world)), - result.ambig | fallback_result.ambig) - end + nr = length(result) + if nr β‰₯ 1 && result[nr].fully_covers + # no need to fall back to the internal method table + return result end # fall back to the internal method table - return _findall(sig, nothing, table.world, limit) + fallback_result = _findall(sig, nothing, table.world, limit) + fallback_result === missing && return missing + # merge the fallback match results with the internal method table + return MethodLookupResult( + vcat(result.matches, fallback_result.matches), + WorldRange( + max(result.valid_worlds.min_world, fallback_result.valid_worlds.min_world), + min(result.valid_worlds.max_world, fallback_result.valid_worlds.max_world)), + result.ambig | fallback_result.ambig) end function _findall(@nospecialize(sig::Type), mt::Union{Nothing,Core.MethodTable}, world::UInt, limit::Int) @@ -102,17 +101,22 @@ function findsup(@nospecialize(sig::Type), table::InternalMethodTable) end function findsup(@nospecialize(sig::Type), table::OverlayMethodTable) - result = _findsup(sig, table.mt, table.world) - result === nothing || return result - return _findsup(sig, nothing, table.world) # fall back to the internal method table + match, valid_worlds = _findsup(sig, table.mt, table.world) + match !== nothing && return match, valid_worlds + # fall back to the internal method table + fallback_match, fallback_valid_worlds = _findsup(sig, nothing, table.world) + return fallback_match, WorldRange( + max(valid_worlds.min_world, fallback_valid_worlds.min_world), + min(valid_worlds.max_world, fallback_valid_worlds.max_world)) end function _findsup(@nospecialize(sig::Type), mt::Union{Nothing,Core.MethodTable}, world::UInt) min_valid = RefValue{UInt}(typemin(UInt)) max_valid = RefValue{UInt}(typemax(UInt)) - result = ccall(:jl_gf_invoke_lookup_worlds, Any, (Any, Any, UInt, Ptr{Csize_t}, Ptr{Csize_t}), + match = ccall(:jl_gf_invoke_lookup_worlds, Any, (Any, Any, UInt, Ptr{Csize_t}, Ptr{Csize_t}), sig, mt, world, min_valid, max_valid)::Union{MethodMatch, Nothing} - return result === nothing ? 
result : (result, WorldRange(min_valid[], max_valid[])) + valid_worlds = WorldRange(min_valid[], max_valid[]) + return match, valid_worlds end isoverlayed(::MethodTableView) = error("unsatisfied MethodTableView interface") diff --git a/base/reflection.jl b/base/reflection.jl index 0ae3b087fc022..95fb81c8859d6 100644 --- a/base/reflection.jl +++ b/base/reflection.jl @@ -1347,11 +1347,11 @@ end print_statement_costs(args...; kwargs...) = print_statement_costs(stdout, args...; kwargs...) function _which(@nospecialize(tt::Type), world=get_world_counter()) - result = Core.Compiler._findsup(tt, nothing, world) - if result === nothing + match, _ = Core.Compiler._findsup(tt, nothing, world) + if match === nothing error("no unique matching method found for the specified argument types") end - return first(result)::Core.MethodMatch + return match end """ From 4d042949c8ea8e9bd8f0b77ede89098f6c01775e Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Wed, 9 Mar 2022 08:13:00 -0800 Subject: [PATCH 03/23] [libblastrampoline] Ugrade to v5.0.2 (#44531) This includes CBLAS workarounds for `cblas_sdot` and `cblas_ddot` which are necessary now that https://github.com/JuliaLang/julia/pull/44271 uses these symbols instead of just `sdot_` and `ddot_` directly. --- deps/Versions.make | 2 +- deps/checksums/blastrampoline | 64 +++++++++++------------ stdlib/libblastrampoline_jll/Project.toml | 2 +- 3 files changed, 34 insertions(+), 34 deletions(-) diff --git a/deps/Versions.make b/deps/Versions.make index 1d510ee4911b6..88c8ff1d3d3d3 100644 --- a/deps/Versions.make +++ b/deps/Versions.make @@ -75,7 +75,7 @@ OBJCONV_JLL_NAME := Objconv OBJCONV_JLL_VER := 2.49.1+0 # blastrampoline -BLASTRAMPOLINE_VER := 5.0.1 +BLASTRAMPOLINE_VER := 5.0.2 BLASTRAMPOLINE_JLL_NAME := libblastrampoline # OpenBLAS diff --git a/deps/checksums/blastrampoline b/deps/checksums/blastrampoline index d97f2fedd926b..8c55d40b44065 100644 --- a/deps/checksums/blastrampoline +++ b/deps/checksums/blastrampoline @@ -1,34 +1,34 @@ blastrampoline-d32042273719672c6669f6442a0be5605d434b70.tar.gz/md5/f380e4238a2dec186ecfe9598f75b824 blastrampoline-d32042273719672c6669f6442a0be5605d434b70.tar.gz/sha512/00437a96b57d99cef946257480e38e1dfdf325c46bc4a1619f5067565dfb7d9f668b0c8415badb0879b933cb1972f3c4e6be4c9e63a8a85728033e2183373819 -libblastrampoline.v5.0.1+0.aarch64-apple-darwin.tar.gz/md5/8b2b28517ef5db95a0b440f1a936422e -libblastrampoline.v5.0.1+0.aarch64-apple-darwin.tar.gz/sha512/3d479efc47b8c81fa85fd4d2a868a48304051432b92af90a2bcd2142673f2c422419731b8941f987aed429064532e8634ce3ea8f8d71222cf2d9b9e1e8ba2f7f -libblastrampoline.v5.0.1+0.aarch64-linux-gnu.tar.gz/md5/23e53049a0c30c8d24482a25954ee497 -libblastrampoline.v5.0.1+0.aarch64-linux-gnu.tar.gz/sha512/c5ba3609e5c47066d8a10acdd1c13e25a78bea6003a39a354000c832aeb1cf04a29392089600b10f0d6e5544aa910412bb50f238ac1f81d55ac15f70aaeb3161 -libblastrampoline.v5.0.1+0.aarch64-linux-musl.tar.gz/md5/5b6770a56cf3632473726a6da3da8ac4 -libblastrampoline.v5.0.1+0.aarch64-linux-musl.tar.gz/sha512/13f01e51b754a7bb4f78d0217380923e353499815872694718922a842fb1d41774e83ec07305b0ca9df2b054e26a626c20e685127e467b3bbb5adb3b9de3c7d3 -libblastrampoline.v5.0.1+0.armv6l-linux-gnueabihf.tar.gz/md5/32f33430008184705b37afcce7d09fdc -libblastrampoline.v5.0.1+0.armv6l-linux-gnueabihf.tar.gz/sha512/2af9ce233b473f2c81c4ba2e82253a88a519e4cbfa2cd410b27b1f1d7d06559376dd3743951105dbaa784310cce378516978b0d56bd8a196e2eb6c5fb7e6e969 -libblastrampoline.v5.0.1+0.armv6l-linux-musleabihf.tar.gz/md5/52da6ab8b5a9e03aebb032441b668d65 
-libblastrampoline.v5.0.1+0.armv6l-linux-musleabihf.tar.gz/sha512/a6f1a375b61642e0b2fd235e27b5bf7e0cd1ff308cdfef27b904f62dfb9ac2bc8fa4e9a7869851310da90af4797994d86d581354070810ffedf3deea5afcc388 -libblastrampoline.v5.0.1+0.armv7l-linux-gnueabihf.tar.gz/md5/08fe2bf27a14e6a6fc4f6b394051aac9 -libblastrampoline.v5.0.1+0.armv7l-linux-gnueabihf.tar.gz/sha512/fdf8d054c67ca3e60dfc7739e02e28817d4510558341d9666b3ccc35818d1ea835a30676cfbe66bbb119c5574c683f1626088119dfc672bf730c87811835e48e -libblastrampoline.v5.0.1+0.armv7l-linux-musleabihf.tar.gz/md5/836fdbe9e759c71b3c7ae6be2ff6cd6a -libblastrampoline.v5.0.1+0.armv7l-linux-musleabihf.tar.gz/sha512/6333f8b9270f51c9e520ef8eee52c1796659bd7574c4e8cc04774d3b5e0574af63e5b252cc3340a62bf44771970331839083a528f402bc7929f32cffdbeba876 -libblastrampoline.v5.0.1+0.i686-linux-gnu.tar.gz/md5/11f127c422a4b51d6cd0abe370176c25 -libblastrampoline.v5.0.1+0.i686-linux-gnu.tar.gz/sha512/ad8510a804637ed144ee931a11629ee86e3c29e36be394c1f020a04e68b64a04a0eaa976961a993be0693b1f57b687f18dd25d3313aafa217a9140913dc9849d -libblastrampoline.v5.0.1+0.i686-linux-musl.tar.gz/md5/c865cd79d083de137714df55dfd015c9 -libblastrampoline.v5.0.1+0.i686-linux-musl.tar.gz/sha512/99f4938626f84e5636231f34842c6877be5ac0d528f7bcae6b15d51b82d0daa06eb7d086a28f2c516234a989dd384f932886303f13cbac33f972fbf64b16dfb9 -libblastrampoline.v5.0.1+0.i686-w64-mingw32.tar.gz/md5/e9e2cbb1c90b691fd06f4df81674d36a -libblastrampoline.v5.0.1+0.i686-w64-mingw32.tar.gz/sha512/c32a7449476f994f8d1bdb576959d6cc54018ac33be2d691b8627467ff5870deac7427e83f275db9b675c5d92bd13254979b06da33b782d6de3b49b1a6dda19c -libblastrampoline.v5.0.1+0.powerpc64le-linux-gnu.tar.gz/md5/5904dce9e258e4bdf71493e6cdc5fb20 -libblastrampoline.v5.0.1+0.powerpc64le-linux-gnu.tar.gz/sha512/e10761289aaf985e96e0c908f988218450b54b78a5ba0ca67b509d63c422849471b38e952b93e1de0ffa92d9b8e76b16cfd733a05940203213f7f10cdb953dc9 -libblastrampoline.v5.0.1+0.x86_64-apple-darwin.tar.gz/md5/2d15a24ce47dc67ef575ca514530734e -libblastrampoline.v5.0.1+0.x86_64-apple-darwin.tar.gz/sha512/5209953e6ed72c5840b926c2c50e67f3ef2e8612877e5c6c4962e687870a9c4f95ab83fab1db77419ffdd21e303e5a951a86d21979cbd2e2e8b9d65a2b86a693 -libblastrampoline.v5.0.1+0.x86_64-linux-gnu.tar.gz/md5/67092e794898efbe1d75bbaf19912538 -libblastrampoline.v5.0.1+0.x86_64-linux-gnu.tar.gz/sha512/cc117c4d6d7a34fc7abfff4d40584f63b3ed80a2aa8be887f22a65b25e9196a2173d624bda77e8a1f2c401792c090948ad0a9069af3e48ee886664e1b2dd771f -libblastrampoline.v5.0.1+0.x86_64-linux-musl.tar.gz/md5/32f65fa0681d81ab4f5a84d18ec0ef40 -libblastrampoline.v5.0.1+0.x86_64-linux-musl.tar.gz/sha512/177f25c3108af15653726871b110d77e0a5e94b06bd6996503f83b7dd7c0d9877beff5eeadbdff4952ac606fcec426c04a97566efc2d88d75ed380e566ffe0c0 -libblastrampoline.v5.0.1+0.x86_64-unknown-freebsd.tar.gz/md5/12494ac279b869c740712b8f774edadf -libblastrampoline.v5.0.1+0.x86_64-unknown-freebsd.tar.gz/sha512/6b896996f20552bb05d22fb314b6b9ad8e4359aec31f90fe7029cd13d37e6db1c305a87d9622ff4b036b155a12a5b305a8fd56e4074149bad8c3e6a225c70c5d -libblastrampoline.v5.0.1+0.x86_64-w64-mingw32.tar.gz/md5/4fdbfc6384ba4dbc74eda97dff919511 -libblastrampoline.v5.0.1+0.x86_64-w64-mingw32.tar.gz/sha512/e752486b9e6f6ed293a42337f432c8dcb86246523864674be5ff35fcc49f8cc848f77c41b2af1903959938f620d68b1de6028afc662f9e893045308eef72d624 +libblastrampoline.v5.0.2+0.aarch64-apple-darwin.tar.gz/md5/bfa0772f1683f307cd7f4300bc99bf6b 
+libblastrampoline.v5.0.2+0.aarch64-apple-darwin.tar.gz/sha512/e96e8d053678e979a32c893f54d76e682919184b9c1674c105651865b35f70a32ad93f545086b1b6a93e43db6c54e0d93822ba8647d0d4b16ab771a1a2bf223c +libblastrampoline.v5.0.2+0.aarch64-linux-gnu.tar.gz/md5/15a2d3ad9e744724e41209693225be58 +libblastrampoline.v5.0.2+0.aarch64-linux-gnu.tar.gz/sha512/49035889785405e8d66077ad1182b4b527c724a41c88447c3ae1ce3a0c970b27faef8035797bc376b34e96bdd54c0dce3a5c4c77615674f587cb46663f25a8dd +libblastrampoline.v5.0.2+0.aarch64-linux-musl.tar.gz/md5/ebc86535c2123b41a1f666b249e54bc6 +libblastrampoline.v5.0.2+0.aarch64-linux-musl.tar.gz/sha512/5355277d720a4ce4083d95e783f517209195b619dbd83a1fa93c3492bf25ee3c2782f9569a2b3f645e05416ef15226c6683288252db7c361d619dc7d4e4c298a +libblastrampoline.v5.0.2+0.armv6l-linux-gnueabihf.tar.gz/md5/3f0807f2c390e318930e2240fcb886f6 +libblastrampoline.v5.0.2+0.armv6l-linux-gnueabihf.tar.gz/sha512/88927035941e109ac7c7bb483d1415755966048b89d5d48f2fcd071349d85bf3f4f16b59179531d85a18435f31ced499b5a5ffeda0e80a6160ce9b65d8da616e +libblastrampoline.v5.0.2+0.armv6l-linux-musleabihf.tar.gz/md5/8e9038163fa3d8c2b875fd481ee5031d +libblastrampoline.v5.0.2+0.armv6l-linux-musleabihf.tar.gz/sha512/01edfbadfaff63313ca3626cc1b6b674ea9667ba54fa33464c31f027e640415d13312d07d3b02d3550e45fdb02730cc4850db994420f258c126dfc8f4920640d +libblastrampoline.v5.0.2+0.armv7l-linux-gnueabihf.tar.gz/md5/37db09f2d2c82ceb91604552c79d5d55 +libblastrampoline.v5.0.2+0.armv7l-linux-gnueabihf.tar.gz/sha512/d1de56c26c49d632c36281f9af097667b4e3c693a8e900fd6ae64b1c9b267449e2424945e940ad17418bedf02837cc585b084aca77b08f9b8d77ba32cf33558b +libblastrampoline.v5.0.2+0.armv7l-linux-musleabihf.tar.gz/md5/d6ebdfefbad51826d12dd36ae691550e +libblastrampoline.v5.0.2+0.armv7l-linux-musleabihf.tar.gz/sha512/75c17548f063ae7b007f074498630a77a05cdd736a5e28e49ba35dca045bf8f5aada5df2b58defb628e63c04d1a603ebb3e568a3d36e80e3fd8bebd9948c594c +libblastrampoline.v5.0.2+0.i686-linux-gnu.tar.gz/md5/c84eaf618af0011a0e2db597eb1213f3 +libblastrampoline.v5.0.2+0.i686-linux-gnu.tar.gz/sha512/c2d2e50429889fdb9de96a624428c43799d3043c444c15ea0a7e2c96dcfa11ec52376509e9d946e42cda9d60d44c701645e3586dd704a766c7195cc1c3074cce +libblastrampoline.v5.0.2+0.i686-linux-musl.tar.gz/md5/65bdcc9547bbcc15f3c1e75508b023c7 +libblastrampoline.v5.0.2+0.i686-linux-musl.tar.gz/sha512/15498b8b20c4374474cbab7288cac244f5a73f84f484190c30f9b2cd5b5f9cf56e772e786d92c6ba218f83dfc32cd91b6c12f6f4b9dbfb6d8d044ba5015ebf19 +libblastrampoline.v5.0.2+0.i686-w64-mingw32.tar.gz/md5/baa2ee8448fd5bf37aee4b73604f1c6e +libblastrampoline.v5.0.2+0.i686-w64-mingw32.tar.gz/sha512/a57dc29daf42073889a2bf1b5e9dcc063942058dba2e6b515ab791e6c7cde0b0f7e42d0efd3c957853f768d52ac8984c5a14cc26db92244f9b3ec80f48827b79 +libblastrampoline.v5.0.2+0.powerpc64le-linux-gnu.tar.gz/md5/7118a09ce0d6d93db2f86965de3f03a4 +libblastrampoline.v5.0.2+0.powerpc64le-linux-gnu.tar.gz/sha512/2bb254a813255f640c3940753d010556a822c45c26e520bf679108add828c5c927cfcc98c4e0083132f61df3c9ea64ef47d39d5d849e08154a4cf2b196834331 +libblastrampoline.v5.0.2+0.x86_64-apple-darwin.tar.gz/md5/36302044a0fd8b4585620ba081340b71 +libblastrampoline.v5.0.2+0.x86_64-apple-darwin.tar.gz/sha512/05dd71287ff4d025c9ff0a41485310e224dc7222fb5350223998745329fb9f6dfa883f1f670fb014ef888cda011c144c3b5d713e941732d644466d33fd3c3506 +libblastrampoline.v5.0.2+0.x86_64-linux-gnu.tar.gz/md5/4979d46cb033375fa69b560f661c219b 
+libblastrampoline.v5.0.2+0.x86_64-linux-gnu.tar.gz/sha512/ee12c621bc25effd01c58cc13790cb9b915521c3c81f9bd89f93b12cec543eb49b2ea12a5f79c1a25ba17d08125f52629e3fba45e0b28cc0c7f5fec5627485cb +libblastrampoline.v5.0.2+0.x86_64-linux-musl.tar.gz/md5/acbcd598ea31d5082aff096f76d700b7 +libblastrampoline.v5.0.2+0.x86_64-linux-musl.tar.gz/sha512/84838e69d671f13dd96cc0993b3fab721d449c2abc30576ab6905e5c8c4927e7b499cf6cf3f9cb65806980c46439bb4b256b4ab880026c2d0a10cfac6f419afb +libblastrampoline.v5.0.2+0.x86_64-unknown-freebsd.tar.gz/md5/eec09438c820e053fb35d29bc6f3858d +libblastrampoline.v5.0.2+0.x86_64-unknown-freebsd.tar.gz/sha512/3094af269dd19c5460f1a8adff8871e33cb6132b943009dcf96e5c65e532a26ce54cb3c4ca1a8f615c7e59eae63fabcea40f86a4dbbfb53f446e5982198faf0a +libblastrampoline.v5.0.2+0.x86_64-w64-mingw32.tar.gz/md5/6859880e47b1b071ac67eabfbea6ea9b +libblastrampoline.v5.0.2+0.x86_64-w64-mingw32.tar.gz/sha512/6fb59a652d1456fb5aa58631aa88f298571f1d6ea8a196884dcb30b29eab05de310fa706d3b40955975a611d0464e22be36577eede54f765374bedf4cf02469d diff --git a/stdlib/libblastrampoline_jll/Project.toml b/stdlib/libblastrampoline_jll/Project.toml index 63cad3d67acf8..97a5485e883a9 100644 --- a/stdlib/libblastrampoline_jll/Project.toml +++ b/stdlib/libblastrampoline_jll/Project.toml @@ -1,6 +1,6 @@ name = "libblastrampoline_jll" uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" -version = "5.0.1+0" +version = "5.0.2+0" [deps] Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" From 80346c1cc0625ed988c158072587ab2fdf191e86 Mon Sep 17 00:00:00 2001 From: Christine Flood Date: Wed, 9 Mar 2022 12:10:59 -0500 Subject: [PATCH 04/23] Put off major collections to improve GC times. (#44215) * Updated GC Heuristics to avoid a full GC unless absolutely necessary. This helps with https://github.com/JuliaLang/julia/issues/40644 and other programs which suffer from non-productive full collections. --- src/gc-debug.c | 16 +++++++++++++ src/gc.c | 61 ++++++++++++++++++++------------------------------ src/gc.h | 5 +++++ 3 files changed, 45 insertions(+), 37 deletions(-) diff --git a/src/gc-debug.c b/src/gc-debug.c index 8403a9f9f2e1b..6363512ae2c01 100644 --- a/src/gc-debug.c +++ b/src/gc-debug.c @@ -977,6 +977,22 @@ void gc_time_sweep_pause(uint64_t gc_end_t, int64_t actual_allocd, jl_ns2ms(gc_postmark_end - gc_premark_end), sweep_full ? 
"full" : "quick", -gc_num.allocd / 1024); } + +void gc_time_summary(int sweep_full, uint64_t start, uint64_t end, + uint64_t freed, uint64_t live, uint64_t interval, + uint64_t pause) +{ + if (sweep_full > 0) + jl_safe_printf("%ld Major collection: estimate freed = %ld + live = %ldm new interval = %ldm time = %ldms\n", + end - start, freed, live/1024/1024, + interval/1024/1024, pause/1000000 ); + else + jl_safe_printf("%ld Minor collection: estimate freed = %ld live = %ldm + new interval = %ldm time = %ldms\n", + end - start, freed, live/1024/1024, + interval/1024/1024, pause/1000000 ); +} #endif void jl_gc_debug_init(void) diff --git a/src/gc.c b/src/gc.c index 4f9fb761b2797..044c9b8fc1446 100644 --- a/src/gc.c +++ b/src/gc.c @@ -653,9 +653,8 @@ static int prev_sweep_full = 1; // Full collection heuristics static int64_t live_bytes = 0; static int64_t promoted_bytes = 0; -static int64_t last_full_live = 0; // live_bytes after last full collection static int64_t last_live_bytes = 0; // live_bytes at last collection -static int64_t grown_heap_age = 0; // # of collects since live_bytes grew and remained +static int64_t t_start = 0; // Time GC starts; #ifdef __GLIBC__ // maxrss at last malloc_trim static int64_t last_trim_maxrss = 0; @@ -3140,43 +3139,23 @@ static int _jl_gc_collect(jl_ptls_t ptls, jl_gc_collection_t collection) int nptr = 0; for (int i = 0;i < jl_n_threads;i++) nptr += jl_all_tls_states[i]->heap.remset_nptr; - int large_frontier = nptr*sizeof(void*) >= default_collect_interval; // many pointers in the intergen frontier => "quick" mark is not quick - // trigger a full collection if the number of live bytes doubles since the last full - // collection and then remains at least that high for a while. - if (grown_heap_age == 0) { - if (live_bytes > 2 * last_full_live) - grown_heap_age = 1; - } - else if (live_bytes >= last_live_bytes) { - grown_heap_age++; - } + + // many pointers in the intergen frontier => "quick" mark is not quick + int large_frontier = nptr*sizeof(void*) >= default_collect_interval; int sweep_full = 0; int recollect = 0; - if ((large_frontier || - ((not_freed_enough || promoted_bytes >= gc_num.interval) && - (promoted_bytes >= default_collect_interval || prev_sweep_full)) || - grown_heap_age > 1) && gc_num.pause > 1) { - sweep_full = 1; - } + // update heuristics only if this GC was automatically triggered if (collection == JL_GC_AUTO) { - if (sweep_full) { - if (large_frontier) - gc_num.interval = last_long_collect_interval; - if (not_freed_enough || large_frontier) { - if (gc_num.interval <= 2*(max_collect_interval/5)) { - gc_num.interval = 5 * (gc_num.interval / 2); - } - } - last_long_collect_interval = gc_num.interval; + if (not_freed_enough) { + gc_num.interval = gc_num.interval * 2; } - else { - // reset interval to default, or at least half of live_bytes - int64_t half = live_bytes/2; - if (default_collect_interval < half && half <= max_collect_interval) - gc_num.interval = half; - else - gc_num.interval = default_collect_interval; + if (large_frontier) { + sweep_full = 1; + } + if (gc_num.interval > max_collect_interval) { + sweep_full = 1; + gc_num.interval = max_collect_interval; } } if (gc_sweep_always_full) { @@ -3249,10 +3228,17 @@ static int _jl_gc_collect(jl_ptls_t ptls, jl_gc_collection_t collection) gc_num.allocd = 0; last_live_bytes = live_bytes; live_bytes += -gc_num.freed + gc_num.since_sweep; - if (prev_sweep_full) { - last_full_live = live_bytes; - grown_heap_age = 0; + + if (collection == JL_GC_AUTO) { + // If the current interval is 
larger than half the live data decrease the interval + int64_t half = live_bytes/2; + if (gc_num.interval > half) gc_num.interval = half; + // But never go below default + if (gc_num.interval < default_collect_interval) gc_num.interval = default_collect_interval; } + + gc_time_summary(sweep_full, t_start, gc_end_t, gc_num.freed, live_bytes, gc_num.interval, pause); + prev_sweep_full = sweep_full; gc_num.pause += !recollect; gc_num.total_time += pause; @@ -3420,6 +3406,7 @@ void jl_gc_init(void) #endif jl_gc_mark_sp_t sp = {NULL, NULL, NULL, NULL}; gc_mark_loop(NULL, sp); + t_start = jl_hrtime(); } // callback for passing OOM errors from gmp diff --git a/src/gc.h b/src/gc.h index 544486d933e10..d50e32c1a0444 100644 --- a/src/gc.h +++ b/src/gc.h @@ -557,6 +557,9 @@ void gc_time_mark_pause(int64_t t0, int64_t scanned_bytes, void gc_time_sweep_pause(uint64_t gc_end_t, int64_t actual_allocd, int64_t live_bytes, int64_t estimate_freed, int sweep_full); +void gc_time_summary(int sweep_full, uint64_t start, uint64_t end, + uint64_t freed, uint64_t live, uint64_t interval, + uint64_t pause); #else #define gc_time_pool_start() STATIC_INLINE void gc_time_count_page(int freedall, int pg_skpd) JL_NOTSAFEPOINT @@ -582,6 +585,8 @@ STATIC_INLINE void gc_time_count_mallocd_array(int bits) JL_NOTSAFEPOINT #define gc_time_mark_pause(t0, scanned_bytes, perm_scanned_bytes) #define gc_time_sweep_pause(gc_end_t, actual_allocd, live_bytes, \ estimate_freed, sweep_full) +#define gc_time_summary(sweep_full, start, end, freed, live, \ + interval, pause) #endif #ifdef MEMFENCE From 8076517c979c703ebed3f1ca5494be2eddfdc30f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Wed, 9 Mar 2022 19:46:38 +0000 Subject: [PATCH 05/23] [Makefile] Fix codesign of libjulia when installing it on macOS (#44510) * [Makefile] Fix codesign of libjulia when installing it on macOS * Add shell sript for codesigning and use it in Makefile --- Makefile | 21 +++++++++++---------- contrib/codesign.sh | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 10 deletions(-) create mode 100755 contrib/codesign.sh diff --git a/Makefile b/Makefile index 952b9a00c1e63..df6e998d47c35 100644 --- a/Makefile +++ b/Makefile @@ -374,14 +374,22 @@ endif ifneq ($(LOADER_BUILD_DEP_LIBS),$(LOADER_INSTALL_DEP_LIBS)) # Next, overwrite relative path to libjulia-internal in our loader if $$(LOADER_BUILD_DEP_LIBS) != $$(LOADER_INSTALL_DEP_LIBS) $(call stringreplace,$(DESTDIR)$(shlibdir)/libjulia.$(JL_MAJOR_MINOR_SHLIB_EXT),$(LOADER_BUILD_DEP_LIBS)$$,$(LOADER_INSTALL_DEP_LIBS)) +ifeq ($(OS),Darwin) + # Codesign the libjulia we just modified + $(JULIAHOME)/contrib/codesign.sh "$(MACOS_CODESIGN_IDENTITY)" "$(DESTDIR)$(shlibdir)/libjulia.$(JL_MAJOR_MINOR_SHLIB_EXT)" +endif ifeq ($(BUNDLE_DEBUG_LIBS),1) $(call stringreplace,$(DESTDIR)$(shlibdir)/libjulia-debug.$(JL_MAJOR_MINOR_SHLIB_EXT),$(LOADER_DEBUG_BUILD_DEP_LIBS)$$,$(LOADER_DEBUG_INSTALL_DEP_LIBS)) +ifeq ($(OS),Darwin) + # Codesign the libjulia we just modified + $(JULIAHOME)/contrib/codesign.sh "$(MACOS_CODESIGN_IDENTITY)" "$(DESTDIR)$(shlibdir)/libjulia-debug.$(JL_MAJOR_MINOR_SHLIB_EXT)" +endif endif endif - # On FreeBSD, remove the build's libdir from each library's RPATH ifeq ($(OS),FreeBSD) + # On FreeBSD, remove the build's libdir from each library's RPATH $(JULIAHOME)/contrib/fixup-rpath.sh "$(PATCHELF)" $(DESTDIR)$(libdir) $(build_libdir) $(JULIAHOME)/contrib/fixup-rpath.sh "$(PATCHELF)" $(DESTDIR)$(private_libdir) $(build_libdir) 
$(JULIAHOME)/contrib/fixup-rpath.sh "$(PATCHELF)" $(DESTDIR)$(bindir) $(build_libdir) @@ -428,16 +436,9 @@ endif ifeq ($(OS), WINNT) cd $(BUILDROOT)/julia-$(JULIA_COMMIT)/bin && rm -f llvm* llc.exe lli.exe opt.exe LTO.dll bugpoint.exe macho-dump.exe endif - # If we're on macOS, and we have a codesigning identity, then codesign the binary-dist tarball! ifeq ($(OS),Darwin) -ifneq ($(MACOS_CODESIGN_IDENTITY),) - echo "Codesigning with identity $(MACOS_CODESIGN_IDENTITY)"; \ - MACHO_FILES=$$(find "$(BUILDROOT)/julia-$(JULIA_COMMIT)" -type f -perm -0111 | cut -d: -f1); \ - for f in $${MACHO_FILES}; do \ - echo "Codesigning $${f}..."; \ - codesign -s "$(MACOS_CODESIGN_IDENTITY)" --option=runtime --entitlements $(JULIAHOME)/contrib/mac/app/Entitlements.plist -vvv --timestamp --deep --force "$${f}"; \ - done -endif + # If we're on macOS, and we have a codesigning identity, then codesign the binary-dist tarball! + $(JULIAHOME)/contrib/codesign.sh "$(MACOS_CODESIGN_IDENTITY)" "$(BUILDROOT)/julia-$(JULIA_COMMIT)" endif cd $(BUILDROOT) && $(TAR) zcvf $(JULIA_BINARYDIST_FILENAME).tar.gz julia-$(JULIA_COMMIT) diff --git a/contrib/codesign.sh b/contrib/codesign.sh new file mode 100755 index 0000000000000..03866c4bb1ac1 --- /dev/null +++ b/contrib/codesign.sh @@ -0,0 +1,38 @@ +#!/bin/sh +# This file is a part of Julia. License is MIT: https://julialang.org/license + +# Codesign binary files for macOS. + +usage() { + echo "Usage: ${0} MACOS_CODESIGN_IDENTITY FILE-OR-DIRECTORY" + exit 0 +} + +# Default codesign identity to `-` if not provided +if [ -z "${1}" ]; then + MACOS_CODESIGN_IDENTITY="-" + ENTITLEMENTS="" +else + MACOS_CODESIGN_IDENTITY="${1}" + ENTITLEMENTS="--entitlements $(dirname "${0}")/mac/app/Entitlements.plist" +fi + +if [ "${#}" -eq 2 ]; then + if [ -f "${2}" ]; then + # Codesign only the given file + MACHO_FILES="${2}" + elif [ -d "${2}" ]; then + # Find all files in the given directory + MACHO_FILES=$(find "${2}" -type f -perm -0111 | cut -d: -f1) + else + usage + fi +else + usage +fi + +echo "Codesigning with identity ${MACOS_CODESIGN_IDENTITY}" +for f in ${MACHO_FILES}; do + echo "Codesigning ${f}..." 
+ codesign -s "${MACOS_CODESIGN_IDENTITY}" --option=runtime ${ENTITLEMENTS} -vvv --timestamp --deep --force "${f}" +done From e90dec983feb88c1e802e53ee85cdb9fd4ee3514 Mon Sep 17 00:00:00 2001 From: Dilum Aluthge Date: Thu, 10 Mar 2022 18:54:29 -0500 Subject: [PATCH 06/23] [`master` branch] CI (Buildkite): remove the `.buildkite` folder (#43053) * [`master` branch] CI (Buildkite): remove the `.buildkite` folder * Update .gitignore Co-authored-by: Elliot Saba --- .buildkite-external-version | 1 + .buildkite/README.md | 7 - .buildkite/cryptic_repo_keys/.gitignore | 5 - .buildkite/cryptic_repo_keys/README.md | 6 - .../cryptic_repo_keys/repo_key.2297e5e7 | Bin 256 -> 0 bytes .buildkite/pipelines/experimental/0_webui.yml | 22 -- .buildkite/pipelines/experimental/README.md | 7 - .../experimental/launch_unsigned_builders.yml | 6 - .buildkite/pipelines/main/0_webui.yml | 23 -- .buildkite/pipelines/main/README.md | 15 -- .../main/launch_unsigned_builders.yml | 33 --- .buildkite/pipelines/main/misc/doctest.yml | 34 --- .buildkite/pipelines/main/misc/embedding.yml | 31 --- .buildkite/pipelines/main/misc/llvmpasses.yml | 51 ---- .buildkite/pipelines/main/misc/sanitizers.yml | 46 ---- .../main/misc/signed_pipeline_test.yml | 18 -- .../misc/signed_pipeline_test.yml.signature | 1 - .buildkite/pipelines/main/misc/whitespace.yml | 21 -- .../main/platforms/package_linux.arches | 7 - .../main/platforms/package_linux.yml | 54 ----- .../main/platforms/tester_linux.arches | 25 -- .../pipelines/main/platforms/tester_linux.yml | 120 --------- .buildkite/pipelines/scheduled/0_webui.yml | 27 --- .buildkite/pipelines/scheduled/README.md | 5 - .../scheduled/coverage/coverage_linux64.yml | 44 ---- .../coverage/coverage_linux64.yml.signature | 1 - .../scheduled/coverage/run_tests_parallel.jl | 29 --- .../scheduled/coverage/upload_coverage.jl | 228 ------------------ .../scheduled/launch_unsigned_jobs.yml | 8 - .../no_bb/no_bb_package_linux.arches | 2 - .../scheduled/no_bb/no_bb_tester_linux.arches | 10 - .buildkite/secrets/.gitignore | 11 - .buildkite/secrets/ssh_docs_deploy.encrypted | Bin 1856 -> 0 bytes .buildkite/secrets/ssh_docs_deploy.pub | 1 - .buildkite/utilities/platforms/platforms.sh | 76 ------ .buildkite/utilities/rr/rr_capture.jl | 201 --------------- .gitignore | 7 +- 37 files changed, 3 insertions(+), 1180 deletions(-) create mode 100644 .buildkite-external-version delete mode 100644 .buildkite/README.md delete mode 100644 .buildkite/cryptic_repo_keys/.gitignore delete mode 100644 .buildkite/cryptic_repo_keys/README.md delete mode 100644 .buildkite/cryptic_repo_keys/repo_key.2297e5e7 delete mode 100644 .buildkite/pipelines/experimental/0_webui.yml delete mode 100644 .buildkite/pipelines/experimental/README.md delete mode 100644 .buildkite/pipelines/experimental/launch_unsigned_builders.yml delete mode 100644 .buildkite/pipelines/main/0_webui.yml delete mode 100644 .buildkite/pipelines/main/README.md delete mode 100644 .buildkite/pipelines/main/launch_unsigned_builders.yml delete mode 100644 .buildkite/pipelines/main/misc/doctest.yml delete mode 100644 .buildkite/pipelines/main/misc/embedding.yml delete mode 100644 .buildkite/pipelines/main/misc/llvmpasses.yml delete mode 100644 .buildkite/pipelines/main/misc/sanitizers.yml delete mode 100644 .buildkite/pipelines/main/misc/signed_pipeline_test.yml delete mode 100644 .buildkite/pipelines/main/misc/signed_pipeline_test.yml.signature delete mode 100644 .buildkite/pipelines/main/misc/whitespace.yml delete mode 100644 
.buildkite/pipelines/main/platforms/package_linux.arches delete mode 100644 .buildkite/pipelines/main/platforms/package_linux.yml delete mode 100644 .buildkite/pipelines/main/platforms/tester_linux.arches delete mode 100644 .buildkite/pipelines/main/platforms/tester_linux.yml delete mode 100644 .buildkite/pipelines/scheduled/0_webui.yml delete mode 100644 .buildkite/pipelines/scheduled/README.md delete mode 100644 .buildkite/pipelines/scheduled/coverage/coverage_linux64.yml delete mode 100644 .buildkite/pipelines/scheduled/coverage/coverage_linux64.yml.signature delete mode 100644 .buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl delete mode 100644 .buildkite/pipelines/scheduled/coverage/upload_coverage.jl delete mode 100644 .buildkite/pipelines/scheduled/launch_unsigned_jobs.yml delete mode 100644 .buildkite/pipelines/scheduled/no_bb/no_bb_package_linux.arches delete mode 100644 .buildkite/pipelines/scheduled/no_bb/no_bb_tester_linux.arches delete mode 100644 .buildkite/secrets/.gitignore delete mode 100644 .buildkite/secrets/ssh_docs_deploy.encrypted delete mode 100644 .buildkite/secrets/ssh_docs_deploy.pub delete mode 100755 .buildkite/utilities/platforms/platforms.sh delete mode 100644 .buildkite/utilities/rr/rr_capture.jl diff --git a/.buildkite-external-version b/.buildkite-external-version new file mode 100644 index 0000000000000..ba2906d0666cf --- /dev/null +++ b/.buildkite-external-version @@ -0,0 +1 @@ +main diff --git a/.buildkite/README.md b/.buildkite/README.md deleted file mode 100644 index b3f74f2b23137..0000000000000 --- a/.buildkite/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Buildkite - -This directory contains the Buildkite configuration files for Base Julia CI. - -The rootfs image definitions are located in the [rootfs-images](https://github.com/JuliaCI/rootfs-images) repository. - -The documentation for the Base Julia CI setup is located in the [base-buildkite-docs](https://github.com/JuliaCI/base-buildkite-docs) repository. diff --git a/.buildkite/cryptic_repo_keys/.gitignore b/.buildkite/cryptic_repo_keys/.gitignore deleted file mode 100644 index 8d18931dbcf7c..0000000000000 --- a/.buildkite/cryptic_repo_keys/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -# Ignore the unencrypted repo_key -repo_key - -# Ignore any agent keys (public or private) we have stored -agent_key* diff --git a/.buildkite/cryptic_repo_keys/README.md b/.buildkite/cryptic_repo_keys/README.md deleted file mode 100644 index 93ed17ce4757b..0000000000000 --- a/.buildkite/cryptic_repo_keys/README.md +++ /dev/null @@ -1,6 +0,0 @@ -## Cryptic repository keys - -This folder contains RSA-encrypted symmetric AES keys. -These are used by buildkite agents to decrypt the secrets embedded within this repository. -Each buildkite agent contains an RSA secret key that is used to unlock the symmetric AES key that was used to encrypt the secrets within this repository. -For more information, see the [`cryptic` buildkite plugin repository](https://github.com/staticfloat/cryptic-buildkite-plugin). 
diff --git a/.buildkite/cryptic_repo_keys/repo_key.2297e5e7 b/.buildkite/cryptic_repo_keys/repo_key.2297e5e7 deleted file mode 100644 index 2ab9198b4ce2d7c7a9327935e18aaef5943629d2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 256 zcmV+b0ssE4{g>pIb#Eq&=vC$Qr^8S_O6!cOYE`=}q%}#an@eaJd$RWGY{B`_JI5h4 zkH`Ok?yS%>4AD6gqiiL&E|`(uBGp1*D8z1}bTOSJGyXKmUCrMUN%Q;gJ{M_gVt6DF z#aRD2;iY!|^9jgEcBXhT=jxw2yplIkGkb~?zh)kpuL|wT!R7{E{^jQhsqTt2Woi5X z6&zlfSSPoyKVU`TIJAL8I6(F*9}>v<(M$c2U_*AHb;CSzxbLIaZK0;0gYIZ?hzwB# zq^tZ&SA{ud8h!8dRd!Rzp7pWdE3F(RM#CkMHZoXOfk*SkHmzrDCiOUb@l`xQjdi^f GB)!2VDuP}B diff --git a/.buildkite/pipelines/experimental/0_webui.yml b/.buildkite/pipelines/experimental/0_webui.yml deleted file mode 100644 index e62750d9d8cd5..0000000000000 --- a/.buildkite/pipelines/experimental/0_webui.yml +++ /dev/null @@ -1,22 +0,0 @@ -# This file represents what is put into the webUI. -# It is purely for keeping track of the changes we make to the webUI configuration; modifying this file has no effect. -# We use the `cryptic` buildkite plugin to provide secrets management, which requires some integration into the WebUI's steps. -agents: - queue: "julia" - sandbox.jl: "true" -steps: - - label: ":unlock: Unlock secrets, launch pipelines" - plugins: - - staticfloat/cryptic#v1: - # Our list of pipelines that should be launched (but don't require a signature) - # These pipelines can be modified by any contributor and CI will still run. - # Build secrets will not be available in these pipelines (or their children) - # but some of our signed pipelines can wait upon the completion of these unsigned - # pipelines. - unsigned_pipelines: - - .buildkite/pipelines/experimental/launch_unsigned_builders.yml - # Our signed pipelines must have a `signature` or `signature_file` parameter that - # verifies the treehash of the pipeline itself and the inputs listed in `inputs` - # signed_pipelines: - # - pipeline: .buildkite/pipelines/experimental/misc/foo_bar_baz.yml - # signature_file: .buildkite/pipelines/experimental/misc/foo_bar_baz.yml.signature diff --git a/.buildkite/pipelines/experimental/README.md b/.buildkite/pipelines/experimental/README.md deleted file mode 100644 index f92aac7a1af02..0000000000000 --- a/.buildkite/pipelines/experimental/README.md +++ /dev/null @@ -1,7 +0,0 @@ -## Experimental pipeline (`master` branch only) - -This is the [`julia-master->experimental`](https://buildkite.com/julialang/julia-master-experimental) pipeline. - -We use this pipeline for builders that are not yet stable enough to go into the main pipeline. - -These builders are triggered by GitHub webhook events, such as pushes and pull requests. diff --git a/.buildkite/pipelines/experimental/launch_unsigned_builders.yml b/.buildkite/pipelines/experimental/launch_unsigned_builders.yml deleted file mode 100644 index 04d82a6e39a5e..0000000000000 --- a/.buildkite/pipelines/experimental/launch_unsigned_builders.yml +++ /dev/null @@ -1,6 +0,0 @@ -steps: - - label: ":buildkite: Launch unsigned pipelines" - commands: | - true - agents: - queue: julia diff --git a/.buildkite/pipelines/main/0_webui.yml b/.buildkite/pipelines/main/0_webui.yml deleted file mode 100644 index bc40534c15dae..0000000000000 --- a/.buildkite/pipelines/main/0_webui.yml +++ /dev/null @@ -1,23 +0,0 @@ -# This file represents what is put into the webUI. -# It is purely for keeping track of the changes we make to the webUI configuration; modifying this file has no effect. 
-# We use the `cryptic` buildkite plugin to provide secrets management, which requires some integration into the WebUI's steps. -agents: - queue: "julia" - sandbox.jl: "true" -steps: - - label: ":unlock: Unlock secrets, launch pipelines" - plugins: - - staticfloat/cryptici#v1: - # Our list of pipelines that should be launched (but don't require a signature) - # These pipelines can be modified by any contributor and CI will still run. - # Build secrets will not be available in these pipelines (or their children) - # but some of our signed pipelines can wait upon the completion of these unsigned - # pipelines. - unsigned_pipelines: - - .buildkite/pipelines/main/launch_unsigned_builders.yml - - # Our signed pipelines must have a `signature` or `signature_file` parameter that - # verifies the treehash of the pipeline itself and the inputs listed in `inputs` - signed_pipelines: - - pipeline: .buildkite/pipelines/main/misc/signed_pipeline_test.yml - signature_file: .buildkite/pipelines/main/misc/signed_pipeline_test.yml.signature diff --git a/.buildkite/pipelines/main/README.md b/.buildkite/pipelines/main/README.md deleted file mode 100644 index 6b9d67bd7cc3a..0000000000000 --- a/.buildkite/pipelines/main/README.md +++ /dev/null @@ -1,15 +0,0 @@ -## Main pipeline - -This is the main pipeline. It contains most of the builders. These builders are triggered by GitHub webhook events, such as pushes and pull requests. - -We have a different main pipeline for each permanent branch. - -For example: - -| Permanent Branch | Pipeline | -| ---------------- | -------------------------------------------------------------------------------- | -| `master` | [`julia-master`](https://buildkite.com/julialang/julia-master) | -| `release-1.6` | [`julia-release-1.6`](https://buildkite.com/julialang/julia-release-1-dot-6) | -| `release-1.7` | [`julia-release-1.7`](https://buildkite.com/julialang/julia-release-1-dot-7) | - -(This is not a complete list.) diff --git a/.buildkite/pipelines/main/launch_unsigned_builders.yml b/.buildkite/pipelines/main/launch_unsigned_builders.yml deleted file mode 100644 index 2b6794ed13bd1..0000000000000 --- a/.buildkite/pipelines/main/launch_unsigned_builders.yml +++ /dev/null @@ -1,33 +0,0 @@ -# This file launches all the build jobs that _don't_ require secrets access. -# These jobs can pass their output off to jobs that do require secrets access, -# but those privileged steps require signing before they can be run. -# -# Yes, this is creating another layer of indirection; the flow now looks like: -# -# [webui] -> launch_unsigned_builders.yml -> misc/whitespace.yml -# -# when we could theoretically just have the `webui` launch `misc/whitespace.yml`, -# however this raises the bar for contributors to add new (unsigned) steps to -# our CI configuration, so I'd rather live with an extra layer of indirection -# and only need to touch the webui configuration when we need to alter -# something about the privileged steps. - -steps: - - label: ":buildkite: Launch unsigned jobs" - commands: | - # Launch the miscellaneous jobs in alphabetical order. - buildkite-agent pipeline upload .buildkite/pipelines/main/misc/doctest.yml - buildkite-agent pipeline upload .buildkite/pipelines/main/misc/embedding.yml - buildkite-agent pipeline upload .buildkite/pipelines/main/misc/llvmpasses.yml - buildkite-agent pipeline upload .buildkite/pipelines/main/misc/sanitizers.yml - - # Launch all of the platform jobs. 
- bash .buildkite/utilities/platforms/platforms.sh .buildkite/pipelines/main/platforms/package_linux.arches .buildkite/pipelines/main/platforms/package_linux.yml - bash .buildkite/utilities/platforms/platforms.sh .buildkite/pipelines/main/platforms/tester_linux.arches .buildkite/pipelines/main/platforms/tester_linux.yml - - # Launch the `whitespace` job last. Uploading it last actually causes it to start - # first. We want this job to start first because we want it to finish as quickly - # as possible. - buildkite-agent pipeline upload .buildkite/pipelines/main/misc/whitespace.yml - agents: - queue: julia diff --git a/.buildkite/pipelines/main/misc/doctest.yml b/.buildkite/pipelines/main/misc/doctest.yml deleted file mode 100644 index fd5feb071f076..0000000000000 --- a/.buildkite/pipelines/main/misc/doctest.yml +++ /dev/null @@ -1,34 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "doctest" - key: doctest - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/package_linux.x86_64.tar.gz - rootfs_treehash: "2a058481b567f0e91b9aa3ce4ad4f09e6419355a" - uid: 1000 - gid: 1000 - workspaces: - # Include `/cache/repos` so that our `git` version introspection works. - - "/cache/repos:/cache/repos" - commands: | - echo "--- Build Julia from source" - make --output-sync -j 6 - - echo "--- Print Julia version info" - ./julia -e 'using InteractiveUtils; InteractiveUtils.versioninfo()' - - echo "--- Build Julia docs" - make docs - - echo "--- Run Julia doctests" - JULIA_NUM_THREADS=1 make -C doc doctest=true - timeout_in_minutes: 45 diff --git a/.buildkite/pipelines/main/misc/embedding.yml b/.buildkite/pipelines/main/misc/embedding.yml deleted file mode 100644 index 1b8b84d38358a..0000000000000 --- a/.buildkite/pipelines/main/misc/embedding.yml +++ /dev/null @@ -1,31 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "embedding" - key: "embedding" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/package_linux.x86_64.tar.gz - rootfs_treehash: "2a058481b567f0e91b9aa3ce4ad4f09e6419355a" - uid: 1000 - gid: 1000 - workspaces: - # Include `/cache/repos` so that our `git` version introspection works. 
- - "/cache/repos:/cache/repos" - commands: | - prefix="/tmp/prefix" - echo "+++ Build julia, deploy to $${prefix:?}" - make --output-sync -j$${JULIA_CPU_THREADS:?} JULIA_PRECOMPILE=0 prefix=$${prefix:?} install - - embedding_output="/tmp/embedding-test" - echo "+++ Run embedding tests, deploy to $${embedding_output:?}" - mkdir -p "$${embedding_output:?}" - make --output-sync -j$${JULIA_CPU_THREADS:?} -C test/embedding JULIA="$${prefix:?}/bin/julia" BIN="$${embedding_output:?}" - timeout_in_minutes: 60 diff --git a/.buildkite/pipelines/main/misc/llvmpasses.yml b/.buildkite/pipelines/main/misc/llvmpasses.yml deleted file mode 100644 index 97ea2e0972b28..0000000000000 --- a/.buildkite/pipelines/main/misc/llvmpasses.yml +++ /dev/null @@ -1,51 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "analyzegc" - key: "analyzegc" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/llvm_passes.x86_64.tar.gz - rootfs_treehash: "c7a289a8cc544b234b1e2d7cbcce3e6815359ecd" - workspaces: - # Include `/cache/repos` so that our `git` version introspection works. - - "/cache/repos:/cache/repos" - commands: | - echo "--- Install in-tree LLVM dependencies" - make --output-sync -j$${JULIA_CPU_THREADS:?} -C deps install-llvm install-clang install-llvm-tools install-libuv install-utf8proc install-unwind - echo "+++ run clangsa/analyzegc" - make --output-sync -j$${JULIA_CPU_THREADS:?} -C test/clangsa - make --output-sync -j$${JULIA_CPU_THREADS:?} -C src analyzegc - timeout_in_minutes: 60 - - label: "llvmpasses" - key: "llvmpasses" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/package_linux.x86_64.tar.gz - rootfs_treehash: "2a058481b567f0e91b9aa3ce4ad4f09e6419355a" - uid: 1000 - gid: 1000 - workspaces: - - "/cache/repos:/cache/repos" - commands: | - echo "--- make release" - # Enable Julia assertions: FORCE_ASSERTIONS=1 - # Enable LLVM assertions: LLVM_ASSERTIONS=1 - export MAKE_ASSERT_FLAGS="FORCE_ASSERTIONS=1 LLVM_ASSERTIONS=1" - make --output-sync -j$${JULIA_CPU_THREADS:?} release JULIA_PRECOMPILE=0 $${MAKE_ASSERT_FLAGS:?} - echo "--- make src/install-analysis-deps" - make --output-sync -j$${JULIA_CPU_THREADS:?} -C src install-analysis-deps $${MAKE_ASSERT_FLAGS:?} - echo "+++ make test/llvmpasses" - make --output-sync -j$${JULIA_CPU_THREADS:?} -C test/llvmpasses $${MAKE_ASSERT_FLAGS:?} - timeout_in_minutes: 60 diff --git a/.buildkite/pipelines/main/misc/sanitizers.yml b/.buildkite/pipelines/main/misc/sanitizers.yml deleted file mode 100644 index d5e83fb91cf68..0000000000000 --- a/.buildkite/pipelines/main/misc/sanitizers.yml +++ /dev/null @@ -1,46 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "asan" - key: "asan" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - 
persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/llvm_passes.x86_64.tar.gz - rootfs_treehash: "c7a289a8cc544b234b1e2d7cbcce3e6815359ecd" - uid: 1000 - gid: 1000 - workspaces: - - "/cache/repos:/cache/repos" - timeout_in_minutes: 120 - if: | # We only run the `asan` job on Julia 1.8 and later. - (pipeline.slug != "julia-release-1-dot-6") && (pipeline.slug != "julia-release-1-dot-7") - commands: | - echo "--- Build julia-debug with ASAN" - contrib/asan/build.sh ./tmp/test-asan -j$${JULIA_CPU_THREADS:?} debug - - label: "tsan" - key: "tsan" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/llvm_passes.x86_64.tar.gz - rootfs_treehash: "c7a289a8cc544b234b1e2d7cbcce3e6815359ecd" - uid: 1000 - gid: 1000 - workspaces: - - "/cache/repos:/cache/repos" - timeout_in_minutes: 120 - if: | # We only run the `tsan` job on Julia 1.8 and later. - (pipeline.slug != "julia-release-1-dot-6") && (pipeline.slug != "julia-release-1-dot-7") - commands: | - echo "--- Build julia-debug runtime with TSAN" - contrib/tsan/build.sh ./tmp/test-tsan -j$${JULIA_CPU_THREADS:?} julia-src-debug diff --git a/.buildkite/pipelines/main/misc/signed_pipeline_test.yml b/.buildkite/pipelines/main/misc/signed_pipeline_test.yml deleted file mode 100644 index 1d59253d43bce..0000000000000 --- a/.buildkite/pipelines/main/misc/signed_pipeline_test.yml +++ /dev/null @@ -1,18 +0,0 @@ -agents: - queue: "julia" - os: "linux" - -## pipeline that showcases decryption of environment variable -steps: - - label: ":lock: :rocket: Signed pipeline test" - # We must accept the signed job id secret in order to propagate secrets - env: - BUILDKITE_PLUGIN_CRYPTIC_BASE64_SIGNED_JOB_ID_SECRET: ${BUILDKITE_PLUGIN_CRYPTIC_BASE64_SIGNED_JOB_ID_SECRET?} - depends_on: - plugins: - - staticfloat/cryptic#v1: - variables: - - SECRET_KEY="U2FsdGVkX18tb7st0SuQAvh4Yv4xENxOAu8q9XkmOeDVKBNY4FngEwK3xmiKUqaS" - commands: | - echo "SECRET_KEY: $${SECRET_KEY}" - diff --git a/.buildkite/pipelines/main/misc/signed_pipeline_test.yml.signature b/.buildkite/pipelines/main/misc/signed_pipeline_test.yml.signature deleted file mode 100644 index b0844748c486f..0000000000000 --- a/.buildkite/pipelines/main/misc/signed_pipeline_test.yml.signature +++ /dev/null @@ -1 +0,0 @@ -Salted__›τωJ0³Q?ΡΡν‚rΫ€‡gτ~ςdŒ·Ϋ›Ε§ΠΣ©‘βo¬λΨUjΚ€©²pζ)η$»U$ι Λχ·y@gZM}{³m‰ς©,Ϋ ΈKδάeΔrϋ \ No newline at end of file diff --git a/.buildkite/pipelines/main/misc/whitespace.yml b/.buildkite/pipelines/main/misc/whitespace.yml deleted file mode 100644 index b97de3ac677bb..0000000000000 --- a/.buildkite/pipelines/main/misc/whitespace.yml +++ /dev/null @@ -1,21 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "whitespace" - key: "whitespace" - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/package_linux.x86_64.tar.gz - rootfs_treehash: 
"2a058481b567f0e91b9aa3ce4ad4f09e6419355a" - workspaces: - - "/cache/repos:/cache/repos" - timeout_in_minutes: 10 - commands: | - make --output-sync -j$${JULIA_CPU_THREADS:?} check-whitespace diff --git a/.buildkite/pipelines/main/platforms/package_linux.arches b/.buildkite/pipelines/main/platforms/package_linux.arches deleted file mode 100644 index dec82f530a832..0000000000000 --- a/.buildkite/pipelines/main/platforms/package_linux.arches +++ /dev/null @@ -1,7 +0,0 @@ -# PLATFORM LABEL GROUP ALLOW_FAIL ARCH ARCH_ROOTFS MAKE_FLAGS TIMEOUT_BK TIMEOUT_RR RETRIES IS_RR IS_ST IS_MT ROOTFS_TAG ROOTFS_HASH -linux 32 . . 32 i686 . . . . . . . v4.8 b6dffc772ab4c2cd7fd4f83459308f6f0d89b957 -linux 64 . . 64 x86_64 . . . . . . . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -# linux aarch64 . . aarch64 aarch64 . . . . . . . .... ........................................ -# linux armv7l . . armv7l armv7l . . . . . . . .... ........................................ -# linux ppc64le . . ppc64le powerpc64le . . . . . . . .... ........................................ -musl 64 . . 64 x86_64 . . . . . . . v4.8 d13a47c87c38005bd5d97132e51789cafd852f90 diff --git a/.buildkite/pipelines/main/platforms/package_linux.yml b/.buildkite/pipelines/main/platforms/package_linux.yml deleted file mode 100644 index 06245543c73b0..0000000000000 --- a/.buildkite/pipelines/main/platforms/package_linux.yml +++ /dev/null @@ -1,54 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "package_${PLATFORM?}${LABEL?}" - key: package_${PLATFORM?}${LABEL?} - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/${ROOTFS_TAG?}/package_${PLATFORM?}.${ARCH_ROOTFS?}.tar.gz - rootfs_treehash: "${ROOTFS_HASH?}" - uid: 1000 - gid: 1000 - workspaces: - # Include `/cache/repos` so that our `git` version introspection works. 
- - "/cache/repos:/cache/repos" - timeout_in_minutes: ${TIMEOUT_BK?} - commands: | - echo "--- Print the full and short commit hashes" - SHORT_COMMIT_LENGTH=10 - SHORT_COMMIT=`echo $${BUILDKITE_COMMIT:?} | cut -c1-$${SHORT_COMMIT_LENGTH:?}` - ARTIFACT_FILE_EXTENSION="tar.gz" - ARTIFACT_FILENAME="julia-$${SHORT_COMMIT:?}-${PLATFORM?}${ARCH?}.$${ARTIFACT_FILE_EXTENSION:?}" - JULIA_BINARYDIST_FILENAME=`make print-JULIA_BINARYDIST_FILENAME ${MAKE_FLAGS?} | cut -c27- | tr -s ' '` - JULIA_BINARYDIST="$${JULIA_BINARYDIST_FILENAME:?}.$${ARTIFACT_FILE_EXTENSION:?}" - - echo "The full commit is: $${BUILDKITE_COMMIT:?}" - echo "The short commit is: $${SHORT_COMMIT:?}" - echo "The artifact filename will be: $${ARTIFACT_FILENAME:?}" - - echo "--- Build Julia from source" - rm -rf $${ARTIFACT_FILENAME:?} - make --output-sync -j 8 ${MAKE_FLAGS?} - - echo "--- Check that the working directory is clean" - if [ -z "$(git status --short)" ]; then echo "INFO: The working directory is clean."; else echo "ERROR: The working directory is dirty."; echo "Output of git status:"; git status; exit 1; fi - - echo "--- Print Julia version info" - ./julia -e 'using InteractiveUtils; InteractiveUtils.versioninfo()' - - echo "--- Create build artifacts" - make --output-sync -j 8 binary-dist ${MAKE_FLAGS?} - ls -l $${JULIA_BINARYDIST:?} - if [[ "$${JULIA_BINARYDIST:?}" != "$${ARTIFACT_FILENAME:?}" ]]; then - mv $${JULIA_BINARYDIST:?} $${ARTIFACT_FILENAME:?} - fi - ls -l $${ARTIFACT_FILENAME:?} - echo "--- Upload build artifacts" - buildkite-agent artifact upload $${ARTIFACT_FILENAME:?} diff --git a/.buildkite/pipelines/main/platforms/tester_linux.arches b/.buildkite/pipelines/main/platforms/tester_linux.arches deleted file mode 100644 index 000bcacd10b8f..0000000000000 --- a/.buildkite/pipelines/main/platforms/tester_linux.arches +++ /dev/null @@ -1,25 +0,0 @@ -# PLATFORM LABEL GROUP ALLOW_FAIL ARCH ARCH_ROOTFS MAKE_FLAGS TIMEOUT_BK TIMEOUT_RR RETRIES IS_RR IS_ST IS_MT ROOTFS_TAG ROOTFS_HASH -linux 32_g1 g1 . 32 i686 . 120 . . . . . v4.8 b6dffc772ab4c2cd7fd4f83459308f6f0d89b957 -linux 32_g2 g2 . 32 i686 . . . 3 . . . v4.8 b6dffc772ab4c2cd7fd4f83459308f6f0d89b957 - -linux 64_g1_mt g1 . 64 x86_64 . . . . . . yes v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64_g2_mt g2 . 64 x86_64 . . . 3 . . yes v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a - -linux 64_g1_st g1 . 64 x86_64 . . . . . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64_g2_st g2 . 64 x86_64 . . . 3 . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a - -linux 64_g1_rrst g1 . 64 x86_64 . 300 240 . yes yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64_g2_rrst g2 . 64 x86_64 . 180 120 3 yes yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64_g3_st g3 . 64 x86_64 . . . 3 . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a - -# linux aarch64_g1 g1 true aarch64 aarch64 . . . . . . . ---- ---------------------------------------- -# linux aarch64_g2 g2 true aarch64 aarch64 . . . . . . . ---- ---------------------------------------- - -# linux armv7l_g1 g1 true armv7l armv7l . . . . . . . ---- ---------------------------------------- -# linux armv7l_g2 g2 true armv7l armv7l . . . . . . . ---- ---------------------------------------- - -# linux ppc64le_g1 g1 true ppc64le powerpc64le . . . . . . . ---- ---------------------------------------- -# linux ppc64le_g2 g2 true ppc64le powerpc64le . . . . . . . ---- ---------------------------------------- - -musl 64_g1 g1 true 64 x86_64 . . . . . . . 
v4.8 d13a47c87c38005bd5d97132e51789cafd852f90 -musl 64_g2 g2 true 64 x86_64 . . . . . . . v4.8 d13a47c87c38005bd5d97132e51789cafd852f90 diff --git a/.buildkite/pipelines/main/platforms/tester_linux.yml b/.buildkite/pipelines/main/platforms/tester_linux.yml deleted file mode 100644 index 9deb81675db1c..0000000000000 --- a/.buildkite/pipelines/main/platforms/tester_linux.yml +++ /dev/null @@ -1,120 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: "tester_${PLATFORM?}${LABEL?}" - key: tester_${PLATFORM?}${LABEL?} - depends_on: package_${PLATFORM?}${ARCH?} - plugins: - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1.2: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/${ROOTFS_TAG?}/package_${PLATFORM?}.${ARCH_ROOTFS?}.tar.gz - # rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/${ROOTFS_TAG?}/tester${PLATFORM?}.${ARCH_ROOTFS?}.tar.gz - rootfs_treehash: "${ROOTFS_HASH?}" - uid: 1000 - gid: 1000 - workspaces: - # Include `/cache/repos` so that our `git` version introspection works. - - "/cache/repos:/cache/repos" - env: - JULIA_SHELL: "/bin/bash" - timeout_in_minutes: ${TIMEOUT_BK?} - retry: - automatic: - - exit_status: "*" - limit: ${RETRIES?} - soft_fail: ${ALLOW_FAIL?} - commands: | - echo "--- Print the full and short commit hashes" - SHORT_COMMIT_LENGTH=10 - SHORT_COMMIT=`echo $${BUILDKITE_COMMIT:?} | cut -c1-$${SHORT_COMMIT_LENGTH:?}` - JULIA_DIR="julia-$${SHORT_COMMIT:?}" - JULIA_BINARY="$${JULIA_DIR:?}/bin/julia" - ARTIFACT_FILE_EXTENSION="tar.gz" - ARTIFACT_FILENAME="julia-$${SHORT_COMMIT:?}-${PLATFORM?}${ARCH?}.$${ARTIFACT_FILE_EXTENSION:?}" - echo "The full commit is: $${BUILDKITE_COMMIT:?}" - echo "The short commit is: $${SHORT_COMMIT:?}" - echo "The artifact filename will be: $${ARTIFACT_FILENAME:?}" - echo "The Julia directory name will be: $${JULIA_DIR:?}" - echo "The Julia binary will be: $${JULIA_BINARY:?}" - - echo "--- Download build artifacts" - rm -rf $${ARTIFACT_FILENAME:?} - buildkite-agent artifact download $${ARTIFACT_FILENAME:?} . - - echo "--- Extract build artifacts" - rm -rf $${JULIA_DIR:?}/ - tar xzf $${ARTIFACT_FILENAME:?} $${JULIA_DIR:?}/ - - echo "--- Print Julia version info" - $${JULIA_BINARY:?} -e 'using InteractiveUtils; InteractiveUtils.versioninfo()' - echo "JULIA_CPU_THREADS is: $${JULIA_CPU_THREADS:?}" - $${JULIA_BINARY:?} -e '@info "" Sys.CPU_THREADS' - - echo "--- Set some environment variables" - export OPENBLAS_NUM_THREADS=8 - unset JULIA_DEPOT_PATH - unset JULIA_PKG_SERVER - - # Make sure that temp files and temp directories are created in a location that is - # backed by real storage. - export TMPDIR="$(pwd)/tmp" - mkdir -p $${TMPDIR:?} - - export NETWORK_RELATED_TESTS="Artifacts Downloads download LazyArtifacts LibGit2/online Pkg" - - if [[ "${GROUP?}" == "all" ]]; then - export TESTS="all LibGit2/online --ci" - elif [[ "${GROUP?}" == "all_except_pkg" ]]; then - export TESTS="all LibGit2/online --ci --skip Pkg" - elif [[ "${GROUP?}" == "g1" ]]; then - # Group 1: ALL tests EXCEPT the network-related tests. - export TESTS="all --ci --skip $${NETWORK_RELATED_TESTS:?}" - elif [[ "${GROUP?}" == "g2" ]]; then - # Group 2: ONLY the network-related tests. 
- # In Group 2, we use whatever the default setting is with regards to the Pkg server. - export TESTS="$${NETWORK_RELATED_TESTS:?} --ci" - elif [[ "${GROUP?}" == "g3" ]]; then - # Group 3: only Pkg. - # In Group 3, we explicitly opt-out of the Pkg server. - # The purpose of group 3 is to test the non-Pkg-server codepaths of Pkg. - export TESTS="Pkg --ci" - export JULIA_PKG_SERVER="" - else - echo "Invalid value for GROUP: ${GROUP?}" - exit 1 - fi - - export JULIA_TEST_RR_TIMEOUT="${TIMEOUT_RR?}" - - if [[ "${IS_RR?}" == "yes" ]]; then - export JULIA_CMD_FOR_TESTS="$${JULIA_BINARY:?} .buildkite/utilities/rr/rr_capture.jl $${JULIA_BINARY:?}" - export NCORES_FOR_TESTS="parse(Int, ENV[\"JULIA_RRCAPTURE_NUM_CORES\"])" - else - export JULIA_CMD_FOR_TESTS="$${JULIA_BINARY:?}" - export NCORES_FOR_TESTS="Sys.CPU_THREADS" - fi - - if [[ "${IS_ST?}" == "yes" ]]; then - export JULIA_NUM_THREADS=1 - fi - - if [[ "${IS_MT?}" == "yes" ]]; then - export JULIA_NUM_THREADS=16 - fi - - echo "--- Print the test group, list of test sets, and other useful environment variables" - echo "JULIA_CMD_FOR_TESTS is: $${JULIA_CMD_FOR_TESTS:?}" - echo "JULIA_NUM_THREADS is: $${JULIA_NUM_THREADS}" # Note: this environment variable might not be set - echo "NCORES_FOR_TESTS is: $${NCORES_FOR_TESTS:?}" - echo "OPENBLAS_NUM_THREADS is: $${OPENBLAS_NUM_THREADS:?}" - echo "GROUP is: ${GROUP?}" - echo "TESTS is: $${TESTS:?}" - - echo "--- Run the Julia test suite" - $${JULIA_CMD_FOR_TESTS:?} -e "Base.runtests(\"$${TESTS:?}\"; ncores = $${NCORES_FOR_TESTS:?})" diff --git a/.buildkite/pipelines/scheduled/0_webui.yml b/.buildkite/pipelines/scheduled/0_webui.yml deleted file mode 100644 index ad2216286bda7..0000000000000 --- a/.buildkite/pipelines/scheduled/0_webui.yml +++ /dev/null @@ -1,27 +0,0 @@ -# This file represents what is put into the webUI. -# It is purely for keeping track of the changes we make to the webUI configuration; modifying this file has no effect. -# We use the `cryptic` buildkite plugin to provide secrets management, which requires some integration into the WebUI's steps. -agents: - queue: "julia" - sandbox.jl: "true" -steps: - - label: ":unlock: Unlock secrets, launch pipelines" - plugins: - - staticfloat/cryptic#v1: - # Our list of pipelines that should be launched (but don't require a signature) - # These pipelines can be modified by any contributor and CI will still run. - # Build secrets will not be available in these pipelines (or their children) - # but some of our signed pipelines can wait upon the completion of these unsigned - # pipelines. 
- unsigned_pipelines: - - .buildkite/pipelines/scheduled/launch_unsigned_jobs.yml - - # Our signed pipelines must have a `signature` or `signature_file` parameter that - # verifies the treehash of the pipeline itself and the inputs listed in `inputs` - signed_pipelines: - - pipeline: .buildkite/pipelines/scheduled/coverage/coverage_linux64.yml - signature_file: .buildkite/pipelines/scheduled/coverage/coverage_linux64.yml.signature - inputs: - - .buildkite/pipelines/scheduled/coverage/coverage_linux64.yml - - .buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl - - .buildkite/pipelines/scheduled/coverage/upload_coverage.jl diff --git a/.buildkite/pipelines/scheduled/README.md b/.buildkite/pipelines/scheduled/README.md deleted file mode 100644 index ca071dceb2a44..0000000000000 --- a/.buildkite/pipelines/scheduled/README.md +++ /dev/null @@ -1,5 +0,0 @@ -## Scheduled pipeline (`master` branch only) - -This is the [`julia-master->scheduled`](https://buildkite.com/julialang/julia-master-scheduled) pipeline. - -We use this pipeline for scheduled builds. The builders in this pipeline run on a schedule once per day. They are not triggered by GitHub webhooks. diff --git a/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml b/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml deleted file mode 100644 index 8ebe53d1ab492..0000000000000 --- a/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml +++ /dev/null @@ -1,44 +0,0 @@ -agents: - queue: "julia" - # Only run on `sandbox.jl` machines (not `docker`-isolated ones) since we need nestable sandboxing - sandbox.jl: "true" - os: "linux" -steps: - - label: ":unlock: :coverage: Run coverage test" - # We must accept the signed job id secret in order to propagate secrets - env: - BUILDKITE_PLUGIN_CRYPTIC_BASE64_SIGNED_JOB_ID_SECRET: ${BUILDKITE_PLUGIN_CRYPTIC_BASE64_SIGNED_JOB_ID_SECRET?} - depends_on: - plugins: - - staticfloat/cryptic#v1: - variables: - - CODECOV_TOKEN="U2FsdGVkX19l0fhdBabbuiEdysyEabkJLRHfxm7CNRkuGbnwPV365sxxC7Czs/CVcws0N1oB4pVwALRRMe36oA==" - - COVERALLS_TOKEN="U2FsdGVkX19zopI0hMNzzi2UUOvNVFD8Y0iisFnO/ryVxU7Tit8ZEaeN+gxodRx4CosUUh192F1+q3dTMWRIvw==" - - JuliaCI/julia#v1: - # Drop default "registries" directory, so it is not persisted from execution to execution - persist_depot_dirs: packages,artifacts,compiled - version: '1.6' - - staticfloat/sandbox#v1: - rootfs_url: https://github.com/JuliaCI/rootfs-images/releases/download/v4.8/package_linux.x86_64.tar.gz - rootfs_treehash: "2a058481b567f0e91b9aa3ce4ad4f09e6419355a" - uid: 1000 - gid: 1000 - timeout_in_minutes: 360 # 360 minutes = 6 hours - commands: | - echo "--- Build Julia from source" - make --output-sync -j 6 - - echo "--- Print Julia version info" - ./julia -e 'using InteractiveUtils; InteractiveUtils.versioninfo()' - ./julia -e '@info "" Sys.CPU_THREADS' - # this is necessary to make sure that the LibGit2 tests passes - git config --global init.defaultBranch master - - echo "--- Run Julia tests in parallel with code coverage enabled" - export JULIA_NUM_THREADS=1 - export JULIA_WORKER_TIMEOUT=1200 # 1200 seconds = 20 minutes - ./julia -e 'import Distributed; @info "" Distributed.worker_timeout()' - ./julia .buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl - - echo "--- Process and upload coverage information" - ./julia .buildkite/pipelines/scheduled/coverage/upload_coverage.jl diff --git a/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml.signature b/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml.signature 
deleted file mode 100644 index 4ecec8e8bb72c..0000000000000 --- a/.buildkite/pipelines/scheduled/coverage/coverage_linux64.yml.signature +++ /dev/null @@ -1 +0,0 @@ -Salted__ΎIyρΌΦŒ…θ >yίNc kB€v„nΪ+ΛHvrލΆγΖΌ©r‘΅±η/ΝuY‘ž·ΩušυI‹iiEφΩ(«„v¬L©£!σ”„?Ο–v³ \ No newline at end of file diff --git a/.buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl b/.buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl deleted file mode 100644 index b6eed225f652d..0000000000000 --- a/.buildkite/pipelines/scheduled/coverage/run_tests_parallel.jl +++ /dev/null @@ -1,29 +0,0 @@ -# Important note: even if one or more tests fail, we will still exit with status code 0. -# -# The reason for this is that we always want to upload code coverage, even if some of the -# tests fail. Therefore, even if the `coverage_linux64` builder passes, you should not -# assume that all of the tests passed. If you want to know if all of the tests are passing, -# please look at the status of the `tester_*` builders (e.g. `tester_linux64`). - -const ncores = Sys.CPU_THREADS -@info "" Sys.CPU_THREADS -@info "" ncores - -script_native_yes = """ - Base.runtests(["cmdlineargs"]; ncores = $(ncores)) -""" -script_native_no = """ - Base.runtests(["all", "--skip", "cmdlineargs"]; ncores = $(ncores)) -""" - -base_cmd = `$(Base.julia_cmd()) --code-coverage=all` -cmd_native_yes = `$(base_cmd) --sysimage-native-code=yes -e $(script_native_yes)` -cmd_native_no = `$(base_cmd) --sysimage-native-code=no -e $(script_native_no)` - -@info "Running command" cmd_native_yes -p1 = run(pipeline(cmd_native_yes; stdin, stdout, stderr); wait = false) -wait(p1) - -@info "Running command" cmd_native_no -p2 = run(pipeline(cmd_native_no; stdin, stdout, stderr); wait = false) -wait(p2) diff --git a/.buildkite/pipelines/scheduled/coverage/upload_coverage.jl b/.buildkite/pipelines/scheduled/coverage/upload_coverage.jl deleted file mode 100644 index d995e97fc17fb..0000000000000 --- a/.buildkite/pipelines/scheduled/coverage/upload_coverage.jl +++ /dev/null @@ -1,228 +0,0 @@ -empty!(Base.DEPOT_PATH) -push!(Base.DEPOT_PATH, mktempdir(; cleanup = true)) - -import Pkg -import Logging -import TOML - -Pkg.add(; name = "Coverage", uuid = "a2441757-f6aa-5fb2-8edb-039e3f45d037", version = "1") -Pkg.precompile() - -import Coverage - -function process_folders() - # `Coverage.process_folder` will have a LOT of `@info` statements that will make the log - # way too long. So before we run `Coverage.process_folder`, we disable logging for `@info` - # statements. After we run `Coverage.process_folder`, we re-enable logging for `@info` - # statements. - Logging.disable_logging(Logging.Info) - fcs_base = Coverage.process_folder("base"); - fcs_stdlib = Coverage.process_folder("stdlib"); - Logging.disable_logging(Logging.Debug) - - fcs = Coverage.merge_coverage_counts( - fcs_base, - fcs_stdlib, - ); - - return fcs -end - -function get_external_stdlib_names(stdlib_dir::AbstractString) - filename_list = filter(x -> isfile(joinpath(stdlib_dir, x)), readdir(stdlib_dir)) - # find all of the files like `Pkg.version`, `Statistics.version`, etc. - regex_matches_or_nothing = match.(Ref(r"^([\w].*?)\.version$"), filename_list) - regex_matches = filter(x -> x !== nothing, regex_matches_or_nothing) - # get the names of the external stdlibs, like `Pkg`, `Statistics`, etc. - external_stdlib_names = only.(regex_matches) - unique!(external_stdlib_names) - sort!(external_stdlib_names) - @info "# Begin list of external stdlibs" - for (i, x) in enumerate(external_stdlib_names) - @info "$(i). 
$(x)" - end - @info "# End list of external stdlibs" - return external_stdlib_names -end - -function get_external_stdlib_prefixes(stdlib_dir::AbstractString) - external_stdlib_names = get_external_stdlib_names(stdlib_dir) - prefixes_1 = joinpath.(Ref(stdlib_dir), external_stdlib_names, Ref("")) - prefixes_2 = joinpath.(Ref(stdlib_dir), string.(external_stdlib_names, Ref("-"))) - prefixes = vcat(prefixes_1, prefixes_2) - unique!(prefixes) - sort!(prefixes) - # example of what `prefixes` might look like: - # 4-element Vector{String}: - # "stdlib/Pkg-" - # "stdlib/Pkg/" - # "stdlib/Statistics-" - # "stdlib/Statistics/" - return prefixes -end - -function print_coverage_summary(fc::Coverage.FileCoverage) - cov_lines, tot_lines = Coverage.get_summary(fc) - if cov_lines == tot_lines == 0 - cov_pct = 0 - else - cov_pct = floor(Int, cov_lines/tot_lines * 100) - end - pad_1 = 71 - pad_2 = 15 - pad_3 = 15 - col_1 = rpad(fc.filename, pad_1) - col_2 = rpad(string(cov_pct, " %"), pad_2) - col_3 = string( - rpad(string(cov_lines), pad_3), - string(tot_lines), - ) - @info "$(col_1) $(col_2) $(col_3)" - return nothing -end - -function print_coverage_summary( - fcs::Vector{Coverage.FileCoverage}, description::AbstractString, - ) - cov_lines, tot_lines = Coverage.get_summary(fcs) - if cov_lines == tot_lines == 0 - cov_pct = 0 - else - cov_pct = floor(Int, cov_lines/tot_lines * 100) - end - @info "$(description): $(cov_pct)% ($(cov_lines)/$(tot_lines))" - return (; cov_pct) -end - -function buildkite_env(name::String) - value = String(strip(ENV[name])) - if isempty(value) - throw(ErrorException("environment variable $(name) is empty")) - end - return value -end - -function buildkite_env(name_1::String, name_2::String, default::String) - value_1 = String(strip(ENV[name_1])) - value_2 = String(strip(ENV[name_2])) - !isempty(value_1) && return value_1 - !isempty(value_2) && return value_2 - return default -end - -function buildkite_branch_and_commit() - branch = buildkite_env("BUILDKITE_BRANCH") - commit = buildkite_env("BUILDKITE_COMMIT") - head_rev_parse = String(strip(read(`git rev-parse HEAD`, String))) - if strip(commit) == "HEAD" - commit = head_rev_parse - end - if commit !== head_rev_parse - msg = "mismatch" - @error msg commit head_rev_parse - throw(ErrorException(msg)) - end - if !occursin(r"^[a-f0-9]{40}$", commit) - msg = "BUILDKITE_COMMIT does not look like a long commit SHA" - @error msg commit - throw(ErrorException(msg)) - end - return (; branch, commit) -end - -function codecov_buildkite_add_local_to_kwargs() - branch, commit = buildkite_branch_and_commit() - kwargs = Coverage.Codecov.set_defaults( - Dict(); - branch, - commit, - ) - return kwargs -end - -function coveralls_buildkite_query_git_info() - branch, commit = buildkite_branch_and_commit() - remote_name = "origin" - remote = buildkite_env("BUILDKITE_REPO") - message = buildkite_env("BUILDKITE_MESSAGE") - author_name = buildkite_env( - "BUILDKITE_BUILD_AUTHOR", - "BUILDKITE_BUILD_CREATOR", - "", - ) - author_email = buildkite_env( - "BUILDKITE_BUILD_AUTHOR_EMAIL", - "BUILDKITE_BUILD_CREATOR_EMAIL", - "", - ) - remotes = [ - Dict( - "name" => remote_name, - "url" => remote, - ) - ] - head = Dict( - "id" => commit, - "author_name" => author_name, - "author_email" => author_email, - "committer_name" => author_name, - "committer_email" => author_email, - "message" => message, - ) - git_info = Dict( - "branch" => branch, - "remotes" => remotes, - "head" => head, - ) - return git_info -end - -const fcs = process_folders() - -# Only include 
source code files. Exclude test files, benchmarking files, etc. -filter!(fcs) do fc - occursin(r"^base\/", fc.filename) || occursin("/src/", fc.filename) -end; - -# Exclude all external stdlibs (stdlibs that live in external repos). -const external_stdlib_prefixes = get_external_stdlib_prefixes("stdlib") -filter!(fcs) do fc - all(x -> !startswith(fc.filename, x), external_stdlib_prefixes) -end; - -# Exclude all stdlib JLLs (stdlibs of the form `stdlib/*_jll/`). -filter!(fcs) do fc - !occursin(r"^stdlib\/[A-Za-z0-9]*?_jll\/", fc.filename) -end; - -sort!(fcs; by = fc -> fc.filename); - -print_coverage_summary.(fcs); -const total_cov_pct = print_coverage_summary(fcs, "Total").cov_pct - -let - git_info = coveralls_buildkite_query_git_info() - @info "" git_info - @info "" git_info["branch"] - @info "" git_info["head"] - - # In order to upload to Coveralls, you need to have the `COVERALLS_TOKEN` environment variable defined. - Coverage.Coveralls.submit_local(fcs, git_info) -end - -let - kwargs = codecov_buildkite_add_local_to_kwargs() - @info "" kwargs - - # In order to upload to Codecov, you need to have the `CODECOV_TOKEN` environment variable defined. - Coverage.Codecov.submit_generic(fcs, kwargs) -end - -if total_cov_pct < 50 - msg = string( - "The total coverage is less than 50%. This should never happen, ", - "so it means that something has probably gone wrong with the code coverage job.", - ) - @error msg total_cov_pct - throw(ErrorException(msg)) -end diff --git a/.buildkite/pipelines/scheduled/launch_unsigned_jobs.yml b/.buildkite/pipelines/scheduled/launch_unsigned_jobs.yml deleted file mode 100644 index 300c8d8466aea..0000000000000 --- a/.buildkite/pipelines/scheduled/launch_unsigned_jobs.yml +++ /dev/null @@ -1,8 +0,0 @@ -steps: - - label: ":buildkite: Launch unsigned jobs" - commands: | - # Launch all of the `USE_BINARYBUILDER=0` jobs. - bash .buildkite/utilities/platforms/platforms.sh .buildkite/pipelines/scheduled/no_bb/no_bb_package_linux.arches .buildkite/pipelines/main/platforms/package_linux.yml - bash .buildkite/utilities/platforms/platforms.sh .buildkite/pipelines/scheduled/no_bb/no_bb_tester_linux.arches .buildkite/pipelines/main/platforms/tester_linux.yml - agents: - queue: julia diff --git a/.buildkite/pipelines/scheduled/no_bb/no_bb_package_linux.arches b/.buildkite/pipelines/scheduled/no_bb/no_bb_package_linux.arches deleted file mode 100644 index dff2aab4591e2..0000000000000 --- a/.buildkite/pipelines/scheduled/no_bb/no_bb_package_linux.arches +++ /dev/null @@ -1,2 +0,0 @@ -# PLATFORM LABEL GROUP ALLOW_FAIL ARCH ARCH_ROOTFS MAKE_FLAGS TIMEOUT_BK TIMEOUT_RR RETRIES IS_RR IS_ST IS_MT ROOTFS_TAG ROOTFS_HASH -linux 64src . . 64src x86_64 USE_BINARYBUILDER=0 180 . . . . . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a diff --git a/.buildkite/pipelines/scheduled/no_bb/no_bb_tester_linux.arches b/.buildkite/pipelines/scheduled/no_bb/no_bb_tester_linux.arches deleted file mode 100644 index 0b1fbdf63b796..0000000000000 --- a/.buildkite/pipelines/scheduled/no_bb/no_bb_tester_linux.arches +++ /dev/null @@ -1,10 +0,0 @@ -# PLATFORM LABEL GROUP ALLOW_FAIL ARCH ARCH_ROOTFS MAKE_FLAGS TIMEOUT_BK TIMEOUT_RR RETRIES IS_RR IS_ST IS_MT ROOTFS_TAG ROOTFS_HASH -linux 64src_g1_mt g1 . 64src x86_64 . . . . . . yes v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64src_g2_mt g2 . 64src x86_64 . . . 3 . . yes v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a - -linux 64src_g1_st g1 . 64src x86_64 . . . . . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64src_g2_st g2 . 
64src x86_64 . . . 3 . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a - -linux 64src_g1_rrst g1 . 64src x86_64 . 300 240 . yes yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64src_g2_rrst g2 . 64src x86_64 . 180 120 3 yes yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a -linux 64src_g3_st g3 . 64src x86_64 . . . 3 . yes . v4.8 2a058481b567f0e91b9aa3ce4ad4f09e6419355a diff --git a/.buildkite/secrets/.gitignore b/.buildkite/secrets/.gitignore deleted file mode 100644 index 2a84f48682a04..0000000000000 --- a/.buildkite/secrets/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -# Ignore everything -* - -# Don't ignore this `.gitignore` file -!.gitignore - -# Don't ignore encrypted files -!*.encrypted - -# Don't ignore public keys, that's fine to include -!*.pub diff --git a/.buildkite/secrets/ssh_docs_deploy.encrypted b/.buildkite/secrets/ssh_docs_deploy.encrypted deleted file mode 100644 index 8b7e2ffe279402c74a57005d327220a4806500d0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1856 zcmV-G2fz4JVQh3|WM5x6_BQu$(kQ%->4|0fa6`t(&xPEIE=okh>cTg)MnE6hWl4%W z%<3W-{MyzND_s>N=jAkwv=;0N%hTRP{Cvd@J_VNs`*gk&F>exm{qkM5tsA}DTlf{R zE3wfuA1RqY3nBxTkXkcD(Homn*7scj70QL=N#-$yyBFcv>M0p_5i7lvUA=JQhG3Z+2M#8S>T*R*KB;@y_|LL#USeKun zkfFh$9|tFr_kr7O0Ql?;<^&yF6V>Ef_^4H5{F9V5G3qwi<-e|_aeeqH6Owo8;Rrxo zER^|Tk@}rbeW>kdJH7Id@zC2jnZu)5rJL0_p-$QD*LUAw?J(|;mrWh`aEL}&i${cZ zY)9c(4&4g(YI$*P$Uy@y4*%%H4eDuo_PIMyknL`x$yy}5&8_!DWuxhtrLErV$9Yk!bromBYjTI}xQ6_n+A$HgyXBLV)@ybZI~VFDh6B2@^*>Lj*b-pbw{G=x0sV~412 zD^63?S_Jp7Yw_f9vE6|`Rzeh$kOKcHQ#g#$nc5-bokDPt0>54uD*tsi`jrC?C6Hnp zyta39hntcy$WBN?9Lzx69D+`$2(Ofh)~8PNRvU@V*T`0o;K)roVPfVBwCkalL{maZ5qqQLtJG; zPN^s$@56cDc1#rP?@^P~J=N7j6JB`kMqKKE%1zt~u}KiyTt{NsNUL5c$j^c_AHyOl ziX5j1M3+!k&H%g}NZ9n}!wQ^c!`@wtk70?|6IWO^3+RzA&t;XE@Qgeo*f|$?HNc#v zroCb}MgdxKJ1&nVgVk(ADAtVfMA*sdWr5li%*zneS9atN$?KHX6#h92?9s;f57^ya zwO}Q*b*P=ao`JFwLr9tH*%M&;EB`lZ)XvM)%znRP_lHz~^Y^V4r$)=d@Q8q5N;|riRzw4GteZ)2amD zu<@cbLZp~&>IO(=XVO6c62;Kj*;x)plTO2Q!68R*?$j|bnm_Eoh%i^v_6mn*HF7a1 zZv_!pari;`$#s97Z*XA>vg~`325A;w0W0Vw{1h%&{fKK1oM`Eeg^I~}WtpZ^ch{yd z%lS?c8IqGcJCDb!LI2wP9R_NG@}l;~Tj&xjC}98gTC|vs2iXz^DkT-j6{m~aFz`xf zH_+Cd`skfz8WSz>I<$zoRitWpm@@!zcJ$K3rn~Y0ebJLgz)Un_68_D1Lk7K)o=w}v zm`MzC4hhfT2d8758RTL=q+sKAxexZ1n7v-c4%Bw=c zvfVjEpgF=yzWJ;HC;fqYJg&3car(huXIu zTy-=%EaXCo$MpXLIYe&JpuG9O*g=d9-maj{#}IEyzeRcUguycE(_agoB<=P>B46VH za)c3lWQrd2sWAp6c;OeEjOnGInOh?7zR#b7V%4x0$sB#%9_K{@9Y6rJ+^iAg+yv4` z7~bPZGm>{d1NhK}DMKyfLwaxAH z;(X}Vd;1P}+WzJ5u1k`KR6bB$I$}XI5B6xx%}Z|)Z4mFEyU3=${b) z-YJO2(&)IoTz6MidbNwn3Mv#8(69wQmsj6Y1I3Svw5o+r(*XNBO_<%$mmh?vjKI1ze;=FMc9)PSS&U0!++8Co}z4{($e zWfbj0!zl{njIk$UDNDGhSLFu*Fg|WQBX7T2=Dm3rb)_pHap strip |> lowercase - result = parse(Bool, value)::Bool - return result -end - -const is_buildkite = get_bool_from_env("BUILDKITE", false) -const always_save_rr_trace = get_bool_from_env("JULIA_ALWAYS_SAVE_RR_TRACE", false) - -function get_from_env(name::AbstractString) - if is_buildkite - value = ENV[name] - else - value = get(ENV, name, "") - end - result = convert(String, strip(value))::String - return result -end - -function my_exit(process::Base.Process) - wait(process) - - @info( - "", - process.exitcode, - process.termsignal, - ) - - # Pass the exit code back up - if process.termsignal != 0 - ccall(:raise, Cvoid, (Cint,), process.termsignal) - - # If for some reason the signal did not cause an exit, we'll exit manually. - # We need to make sure that we exit with a non-zero exit code. 
- if process.exitcode != 0 - exit(process.exitcode) - else - exit(1) - end - end - exit(process.exitcode) -end - -if Base.VERSION < v"1.6" - throw(ErrorException("The `$(basename(@__FILE__))` script requires Julia 1.6 or greater")) -end - -if length(ARGS) < 1 - throw(ErrorException("Usage: julia $(basename(@__FILE__)) [command...]")) -end - -@info "We will run the command under rr" - -const build_number = get_from_env("BUILDKITE_BUILD_NUMBER") -const job_name = get_from_env("BUILDKITE_STEP_KEY") -const commit_full = get_from_env("BUILDKITE_COMMIT") -const commit_short = first(commit_full, 10) -const JULIA_TEST_RR_TIMEOUT = get(ENV, "JULIA_TEST_RR_TIMEOUT", "120") -const timeout_minutes = parse(Int, JULIA_TEST_RR_TIMEOUT) -const JULIA_TEST_NUM_CORES = get(ENV, "JULIA_TEST_NUM_CORES", "8") -const julia_test_num_cores_int = parse(Int, JULIA_TEST_NUM_CORES) -const num_cores = min( - 8, - Sys.CPU_THREADS, - julia_test_num_cores_int + 1, -) - -ENV["JULIA_RRCAPTURE_NUM_CORES"] = "$(num_cores)" - -@info( - "", - build_number, - job_name, - commit_full, - commit_short, - timeout_minutes, - num_cores, -) - -const dumps_dir = joinpath(pwd(), "dumps") -const temp_parent_dir = joinpath(pwd(), "temp_for_rr") - -mkpath(dumps_dir) -mkpath(temp_parent_dir) - -proc = nothing - -mktempdir(temp_parent_dir) do dir - Pkg.activate(dir) - Pkg.add("rr_jll") - Pkg.add("Zstd_jll") - - rr_jll = Base.require(Base.PkgId(Base.UUID((0xe86bdf43_55f7_5ea2_9fd0_e7daa2c0f2b4)), "rr_jll")) - zstd_jll = Base.require(Base.PkgId(Base.UUID((0x3161d3a3_bdf6_5164_811a_617609db77b4)), "Zstd_jll")) - rr(func) = Base.invokelatest(rr_jll.rr, func; adjust_LIBPATH=false) - rr() do rr_path - capture_script_path = joinpath(dir, "capture_output.sh") - loader = Sys.WORD_SIZE == 64 ? "/lib64/ld-linux-x86-64.so.2" : "/lib/ld-linux.so.2" - open(capture_script_path, "w") do io - write(io, """ - #!/bin/bash - - $(rr_path) record --nested=detach "\$@" > >(tee -a $(dir)/stdout.log) 2> >(tee -a $(dir)/stderr.log >&2) - """) - end - chmod(capture_script_path, 0o755) - - new_env = copy(ENV) - new_env["_RR_TRACE_DIR"] = joinpath(dir, "rr_traces") - new_env["RR_LOG"] = "all:debug" - new_env["RR_UNDER_RR_LOG"] = "all:debug" - new_env["RR_LOG_BUFFER"]="100000" - new_env["JULIA_RR"] = capture_script_path - t_start = time() - global proc = run(setenv(`$(rr_path) record --num-cores=$(num_cores) $ARGS`, new_env), (stdin, stdout, stderr); wait=false) - - # Start asynchronous timer that will kill `rr` - @async begin - sleep(timeout_minutes * 60) - - # If we've exceeded the timeout and `rr` is still running, kill it. - if isopen(proc) - println(stderr, "\n\nProcess timed out (with a timeout of $(timeout_minutes) minutes). Signalling `rr` for force-cleanup!") - kill(proc, Base.SIGTERM) - - # Give `rr` a chance to cleanup and upload. - # Note: this time period includes the time to upload the `rr` trace files - # as Buildkite artifacts, so make sure it is long enough to allow the - # uploads to finish. - cleanup_minutes = 30 - sleep(cleanup_minutes * 60) - - if isopen(proc) - println(stderr, "\n\n`rr` failed to cleanup and upload within $(cleanup_minutes) minutes, killing and exiting immediately!") - kill(proc, Base.SIGKILL) - exit(1) - end - end - end - - # Wait for `rr` to finish, either through naturally finishing its run, or `SIGTERM`. 
- wait(proc) - process_failed = !success(proc) - - if process_failed || always_save_rr_trace || is_buildkite - println(stderr, "`rr` returned $(proc.exitcode), packing and uploading traces...") - - if !isdir(joinpath(dir, "rr_traces")) - println(stderr, "No `rr_traces` directory! Did `rr` itself fail?") - exit(1) - end - - # Clean up non-traces - rm(joinpath(dir, "rr_traces", "latest-trace")) - rm(joinpath(dir, "rr_traces", "cpu_lock")) - - # Create a directory for the pack files to go - pack_dir = joinpath(dir, "pack") - mkdir(pack_dir) - - # Pack all traces - trace_dirs = [joinpath(dir, "rr_traces", f) for f in readdir(joinpath(dir, "rr_traces"))] - filter!(isdir, trace_dirs) - run(ignorestatus(`$(rr_path) pack --pack-dir=$pack_dir $(trace_dirs)`)) - - # Tar it up - mkpath(dumps_dir) - date_str = Dates.format(Dates.now(), Dates.dateformat"yyyy_mm_dd_HH_MM_SS") - dst_file_name = string( - "rr", - "--build_$(build_number)", - "--$(job_name)", - "--commit_$(commit_short)", - "--$(date_str)", - ".tar.zst", - ) - dst_full_path = joinpath(dumps_dir, dst_file_name) - zstd_jll.zstdmt() do zstdp - tarproc = open(`$(zstdp) -o $(dst_full_path)`, "w") - Tar.create(dir, tarproc) - close(tarproc.in) - end - - @info "The `rr` trace file has been saved to: $(dst_full_path)" - if is_buildkite - @info "Since this is a Buildkite run, we will upload the `rr` trace file." - cd(dumps_dir) do - run(`buildkite-agent artifact upload $(dst_file_name)`) - end - end - end - - end -end - -@info "Finished running the command under rr" -my_exit(proc) diff --git a/.gitignore b/.gitignore index ca14ec31874d4..69a0fc0c8dab0 100644 --- a/.gitignore +++ b/.gitignore @@ -34,8 +34,5 @@ .idea/* .vscode/* -# Buildkite: cryptic plugin -# Ignore the unencrypted repo_key -repo_key -# Ignore any agent keys (public or private) we have stored -agent_key* +# Buildkite: Ignore entire .buildkite directory +/.buildkite From f5d15571b3f1745f7783509e336c4d9f7246cdd9 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 11 Mar 2022 12:10:29 -0500 Subject: [PATCH 07/23] [LLVM/ABI] Don't pass null pointer to byVal attribute (#44555) --- src/abi_x86_64.cpp | 1 - src/ccall.cpp | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/abi_x86_64.cpp b/src/abi_x86_64.cpp index 2a06ee6be36a6..43e539b8386ce 100644 --- a/src/abi_x86_64.cpp +++ b/src/abi_x86_64.cpp @@ -202,7 +202,6 @@ bool needPassByRef(jl_datatype_t *dt, AttrBuilder &ab, LLVMContext &ctx, Type *T else if (jl_is_structtype(dt)) { // spill to memory even though we would ordinarily pass // it in registers - Type* Ty = preferred_llvm_type(dt, false, ctx); ab.addByValAttr(Ty); return true; } diff --git a/src/ccall.cpp b/src/ccall.cpp index 04d8b5525cd30..a7e8b0f4daa7c 100644 --- a/src/ccall.cpp +++ b/src/ccall.cpp @@ -1098,6 +1098,7 @@ std::string generate_func_sig(const char *fname) } // Whether or not LLVM wants us to emit a pointer to the data + assert(t && "LLVM type should not be null"); bool byRef = abi->needPassByRef((jl_datatype_t*)tti, ab, LLVMCtx, t); if (jl_is_cpointer_type(tti)) { From 45ab66452ef08cfcb62aa091037ce0a78367a762 Mon Sep 17 00:00:00 2001 From: Juan Ignacio Polanco Date: Fri, 11 Mar 2022 21:34:33 +0100 Subject: [PATCH 08/23] `range` uses `TwicePrecision` when possible (part 2) (#44528) --- base/twiceprecision.jl | 18 ++++++++++++++--- test/ranges.jl | 46 ++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 59 insertions(+), 5 deletions(-) diff --git a/base/twiceprecision.jl b/base/twiceprecision.jl index 6d91431e47abb..860f2d23185cc 100644 
--- a/base/twiceprecision.jl +++ b/base/twiceprecision.jl @@ -453,8 +453,14 @@ end step(r::StepRangeLen{T,TwicePrecision{T},TwicePrecision{T}}) where {T<:AbstractFloat} = T(r.step) step(r::StepRangeLen{T,TwicePrecision{T},TwicePrecision{T}}) where {T} = T(r.step) -range_start_step_length(a, st::IEEEFloat, len::Integer) = - range_start_step_length(oftype(st, a), st, len) +range_start_step_length(a::Real, st::IEEEFloat, len::Integer) = + range_start_step_length(promote(a, st)..., len) + +range_start_step_length(a::IEEEFloat, st::Real, len::Integer) = + range_start_step_length(promote(a, st)..., len) + +range_start_step_length(a::IEEEFloat, st::IEEEFloat, len::Integer) = + range_start_step_length(promote(a, st)..., len) function range_start_step_length(a::T, st::T, len::Integer) where T<:IEEEFloat len = len + 0 # promote with Int @@ -474,7 +480,13 @@ function range_start_step_length(a::T, st::T, len::Integer) where T<:IEEEFloat steprangelen_hp(T, a, st, 0, len, 1) end -function range_step_stop_length(step::IEEEFloat, stop, len::Integer) +range_step_stop_length(step::Real, stop::IEEEFloat, len::Integer) = + range_step_stop_length(promote(step, stop)..., len) + +range_step_stop_length(step::IEEEFloat, stop::Real, len::Integer) = + range_step_stop_length(promote(step, stop)..., len) + +function range_step_stop_length(step::IEEEFloat, stop::IEEEFloat, len::Integer) r = range_start_step_length(stop, negate(step), len) reverse(r) end diff --git a/test/ranges.jl b/test/ranges.jl index 536c1f4710d9d..c448f4b99e201 100644 --- a/test/ranges.jl +++ b/test/ranges.jl @@ -1618,9 +1618,51 @@ end @test x == [0.0, 0.2, 0.4, 0.6, 0.8] end - let x = @inferred range(stop=1, step=0.2, length=5) + let x = @inferred range(0.0, step=2, length=5) @test x isa StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}} - @test x == [0.2, 0.4, 0.6, 0.8, 1.0] + @test x == [0.0, 2.0, 4.0, 6.0, 8.0] + @test x === range(0.0, step=2.0, length=5) + @test x === range(0.0f0, step=2e0, length=5) + @test x === range(0e0, step=2.0f0, length=5) + end + + # start::IEEEFloat and step::Complex + let x = @inferred range(2.0, step=1im, length=3) + @test typeof(x) === StepRangeLen{ComplexF64, Float64, Complex{Int}, Int} + @test x == range(2, step=1im, length=3) # compare with integer range + @test x == 2.0 .+ [0im, 1im, 2im] + end + + # start::Complex and step::IEEEFloat + let x = @inferred range(2im, step=1.0, length=3) + @test typeof(x) === StepRangeLen{ComplexF64, Complex{Int}, Float64, Int} + @test x == range(2im, step=1, length=3) # compare with integer range + end + + # stop::IEEEFloat and step::Complex + let x = @inferred range(stop=2.0, step=1im, length=3) + @test typeof(x) === StepRangeLen{ComplexF64, ComplexF64, Complex{Int}, Int} + @test x == range(stop=2, step=1im, length=3) # compare with integer range + @test x == 2.0 .- [2im, 1im, 0im] + end + + # stop::Complex and step::IEEEFloat + let x = @inferred range(stop=2im, step=1.0, length=3) + @test typeof(x) === StepRangeLen{ComplexF64, ComplexF64, Float64, Int} + @test x == range(stop=2im, step=1, length=3) # compare with integer range + end + + let x = @inferred range(stop=10, step=2.0, length=5) + @test x isa StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}} + @test x === @inferred range(stop=10.0, step=2.0, length=5) + @test x === @inferred range(stop=10f0, step=2.0, length=5) + @test x === @inferred range(stop=10e0, step=2.0f0, length=5) + @test x == [2, 4, 6, 8, 10] + end + + let x = @inferred range(stop=10.0, step=2, 
length=4) + @test x isa StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}} + @test x == [4.0, 6.0, 8.0, 10.0] end end From 5ec1f9f92dbdb236d2f2abc0972db76acf8812bc Mon Sep 17 00:00:00 2001 From: DilumAluthgeBot <43731525+DilumAluthgeBot@users.noreply.github.com> Date: Fri, 11 Mar 2022 16:46:16 -0500 Subject: [PATCH 09/23] =?UTF-8?q?=F0=9F=A4=96=20Bump=20the=20Pkg=20stdlib?= =?UTF-8?q?=20from=20544bb894=20to=2053cefb5c=20(#44544)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Dilum Aluthge --- .../Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/md5 | 1 + .../Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/sha512 | 1 + .../Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/md5 | 1 - .../Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/sha512 | 1 - stdlib/Pkg.version | 2 +- 5 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/md5 create mode 100644 deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/sha512 delete mode 100644 deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/md5 delete mode 100644 deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/sha512 diff --git a/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/md5 b/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/md5 new file mode 100644 index 0000000000000..67449851aa6ff --- /dev/null +++ b/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/md5 @@ -0,0 +1 @@ +ac0a70456feb4e06acb355ff0d7d1e5d diff --git a/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/sha512 b/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/sha512 new file mode 100644 index 0000000000000..bd4f6c4324792 --- /dev/null +++ b/deps/checksums/Pkg-53cefb5c6ce8ee2710b581825130c477b6dd5d96.tar.gz/sha512 @@ -0,0 +1 @@ +297c3b2afc50faad364d12992daecff0c92908d01d904a8f9470d05e1d139b726ddec36ceefc876b90313e3b034e59186c96bb6c72ff90efc403aa9db00510d2 diff --git a/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/md5 b/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/md5 deleted file mode 100644 index b47caaa8119a5..0000000000000 --- a/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/md5 +++ /dev/null @@ -1 +0,0 @@ -d1ac17546b48a5d0479e0235eebcd1ad diff --git a/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/sha512 b/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/sha512 deleted file mode 100644 index 221eb09df1673..0000000000000 --- a/deps/checksums/Pkg-544bb89495649376b1a91a90fe0b3f7d0758e42b.tar.gz/sha512 +++ /dev/null @@ -1 +0,0 @@ -c7247caa820f1231070572628e9b9d24b0370d4278b476ebe68c35fcc1f329a7e4daaf20a5a327ec8fcd8960f64da96e92704557a5b3c64bb6e7f651f4f8dea8 diff --git a/stdlib/Pkg.version b/stdlib/Pkg.version index 3d5151f47e066..418dc641b6e40 100644 --- a/stdlib/Pkg.version +++ b/stdlib/Pkg.version @@ -1,4 +1,4 @@ PKG_BRANCH = master -PKG_SHA1 = 544bb89495649376b1a91a90fe0b3f7d0758e42b +PKG_SHA1 = 53cefb5c6ce8ee2710b581825130c477b6dd5d96 PKG_GIT_URL := https://github.com/JuliaLang/Pkg.jl.git PKG_TAR_URL = https://api.github.com/repos/JuliaLang/Pkg.jl/tarball/$1 From 6be86a380b09d0f02404140cb042f1ffb06c3442 Mon Sep 17 00:00:00 2001 From: Takafumi Arakaki Date: Fri, 11 Mar 2022 16:59:07 -0800 Subject: [PATCH 10/23] Fix a concurrency bug in `iterate(::Dict)` (#44534) --- 
NEWS.md | 8 ++++++-- base/dict.jl | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/NEWS.md b/NEWS.md index 1ce80e71dd90c..137a501586f3b 100644 --- a/NEWS.md +++ b/NEWS.md @@ -30,9 +30,13 @@ Build system changes New library functions --------------------- +Library changes +--------------- -New library features --------------------- +* A known concurrency issue of `iterate` methods on `Dict` and other derived objects such + as `keys(::Dict)`, `values(::Dict)`, and `Set` is fixed. These methods of `iterate` can + now be called on a dictionary or set shared by arbitrary tasks provided that there are no + tasks mutating the dictionary or set ([#44534]). Standard library changes diff --git a/base/dict.jl b/base/dict.jl index 53f760999482f..12ff26fbebe17 100644 --- a/base/dict.jl +++ b/base/dict.jl @@ -703,7 +703,7 @@ end @propagate_inbounds _iterate(t::Dict{K,V}, i) where {K,V} = i == 0 ? nothing : (Pair{K,V}(t.keys[i],t.vals[i]), i == typemax(Int) ? 0 : i+1) @propagate_inbounds function iterate(t::Dict) - _iterate(t, skip_deleted_floor!(t)) + _iterate(t, skip_deleted(t, t.idxfloor)) end @propagate_inbounds iterate(t::Dict, i) = _iterate(t, skip_deleted(t, i)) From d291c8b16c632339d5fbff8bc503db27dfb4afea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Sat, 12 Mar 2022 02:27:48 +0000 Subject: [PATCH 11/23] [Make.inc] Require C11 standard (#44556) * [Make.inc] Require C11 standard Julia effectively requires C11 because of the use of `_Atomic`. This is shown when compiling with `-pedantic`. * Add missing header file needed for `ssize_t` * Change C standard also in `contrib/julia-config.jl` --- Make.inc | 4 ++-- contrib/julia-config.jl | 2 +- src/support/ios.h | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Make.inc b/Make.inc index da83dd3ea567f..1be3cb7ad80d7 100644 --- a/Make.inc +++ b/Make.inc @@ -485,7 +485,7 @@ endif ifeq ($(USEGCC),1) CC := $(CROSS_COMPILE)gcc CXX := $(CROSS_COMPILE)g++ -JCFLAGS := -std=gnu99 -pipe $(fPIC) -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 +JCFLAGS := -std=gnu11 -pipe $(fPIC) -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 # AArch64 needs this flag to generate the .eh_frame used by libunwind JCPPFLAGS := -fasynchronous-unwind-tables JCXXFLAGS := -pipe $(fPIC) -fno-rtti -std=c++14 @@ -500,7 +500,7 @@ endif ifeq ($(USECLANG),1) CC := $(CROSS_COMPILE)clang CXX := $(CROSS_COMPILE)clang++ -JCFLAGS := -std=gnu99 -pipe $(fPIC) -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 +JCFLAGS := -std=gnu11 -pipe $(fPIC) -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 # AArch64 needs this flag to generate the .eh_frame used by libunwind JCPPFLAGS := -fasynchronous-unwind-tables JCXXFLAGS := -pipe $(fPIC) -fno-rtti -pedantic -std=c++14 diff --git a/contrib/julia-config.jl b/contrib/julia-config.jl index ad275c078c49c..9c6e39216d817 100755 --- a/contrib/julia-config.jl +++ b/contrib/julia-config.jl @@ -77,7 +77,7 @@ end function cflags(doframework) flags = IOBuffer() - print(flags, "-std=gnu99") + print(flags, "-std=gnu11") if doframework include = shell_escape(frameworkDir()) print(flags, " -F", include) diff --git a/src/support/ios.h b/src/support/ios.h index e5d83ec974a2b..9d0f42d6d1bc4 100644 --- a/src/support/ios.h +++ b/src/support/ios.h @@ -4,6 +4,7 @@ #define JL_IOS_H #include +#include #include "analyzer_annotations.h" #ifdef __cplusplus From a25c25af4c30b14e9fc9c3bd4d4ab528862c0ec3 Mon Sep 17 00:00:00 2001 From: Jameson Nash Date: Sat, 12 Mar 2022 02:00:23 -0500 Subject: [PATCH 12/23] libuv: bump version to v2-1.44.1 (#44543) 
Includes several fixes: https://github.com/JuliaLang/libuv/compare/3a63bf71de62c64097989254e4f03212e3bf5fc8...c2d8a538b79e135176c2b0653e04d287aada2891 --- base/libc.jl | 6 +- deps/checksums/libuv | 68 +++++----- deps/libuv.version | 4 +- src/sys.c | 225 ---------------------------------- stdlib/LibUV_jll/Project.toml | 2 +- 5 files changed, 40 insertions(+), 265 deletions(-) diff --git a/base/libc.jl b/base/libc.jl index 38b62847eaeb4..d5a54909a9700 100644 --- a/base/libc.jl +++ b/base/libc.jl @@ -433,7 +433,7 @@ end function getpwuid(uid::Unsigned, throw_error::Bool=true) ref_pd = Ref(Cpasswd()) - ret = ccall(:jl_os_get_passwd, Cint, (Ref{Cpasswd}, Culong), ref_pd, uid) + ret = ccall(:uv_os_get_passwd2, Cint, (Ref{Cpasswd}, Culong), ref_pd, uid) if ret != 0 throw_error && Base.uv_error("getpwuid", ret) return @@ -452,7 +452,7 @@ function getpwuid(uid::Unsigned, throw_error::Bool=true) end function getgrgid(gid::Unsigned, throw_error::Bool=true) ref_gp = Ref(Cgroup()) - ret = ccall(:jl_os_get_group, Cint, (Ref{Cgroup}, Culong), ref_gp, gid) + ret = ccall(:uv_os_get_group, Cint, (Ref{Cgroup}, Culong), ref_gp, gid) if ret != 0 throw_error && Base.uv_error("getgrgid", ret) return @@ -471,7 +471,7 @@ function getgrgid(gid::Unsigned, throw_error::Bool=true) gp.gid, members, ) - ccall(:jl_os_free_group, Cvoid, (Ref{Cgroup},), ref_gp) + ccall(:uv_os_free_group, Cvoid, (Ref{Cgroup},), ref_gp) return gp end diff --git a/deps/checksums/libuv b/deps/checksums/libuv index 6c90c1b2115c7..1974ea787d228 100644 --- a/deps/checksums/libuv +++ b/deps/checksums/libuv @@ -1,34 +1,34 @@ -LibUV.v2.0.1+5.aarch64-apple-darwin.tar.gz/md5/54a94c839c561f5b74601d6d2bd5bf1e -LibUV.v2.0.1+5.aarch64-apple-darwin.tar.gz/sha512/bba06826461a4f35abbe54ba5266d9bf354d22e1f33d75f4273a917ce92437432d8b2cc9d4b4670164c14542e896ee97396a1c34ce0f653d6a2787ab4b6160bb -LibUV.v2.0.1+5.aarch64-linux-gnu.tar.gz/md5/b2680a3cebeb850bfec0df820e27072c -LibUV.v2.0.1+5.aarch64-linux-gnu.tar.gz/sha512/9c5611ae653642ef0060c46235fa2d2e0e4094804fb52629456ae4e5deed7e5fcc88640537799d11d824b6c0c00e75fa2bbddc0206e69c587ae3a77b68e11366 -LibUV.v2.0.1+5.aarch64-linux-musl.tar.gz/md5/a50cea6c75ea4093851cd7420168a59e -LibUV.v2.0.1+5.aarch64-linux-musl.tar.gz/sha512/51ed9be7dec0546cba4822eb116188c15c464ef155df03f0d5d8e9431ba8fe4c23dffde33c3331ef6e7ef3f8135b025fe26b01f036ab193aa340020f9d3bcb6e -LibUV.v2.0.1+5.armv6l-linux-gnueabihf.tar.gz/md5/1b6750b5c85c5f456a448325a77bee06 -LibUV.v2.0.1+5.armv6l-linux-gnueabihf.tar.gz/sha512/06decd104aad78de07101576fab5c0200867c332d12f1cb0cbe8c558c0c2c84c918e5772fbfc62f6ce80437ad68ae97e3d180c97dd40383c80d5e81fee96ecd7 -LibUV.v2.0.1+5.armv6l-linux-musleabihf.tar.gz/md5/54e9820e027e97af7f324d7b5c12fee1 -LibUV.v2.0.1+5.armv6l-linux-musleabihf.tar.gz/sha512/a30353cbf74bf698e38fd357e57fec03345a4ce71e971d9eb034aa211b536dc83b994da533df914a65ba3f5babc7ab66423ed12da665b67c050a8e799cdeada6 -LibUV.v2.0.1+5.armv7l-linux-gnueabihf.tar.gz/md5/252f5fc6d094edea5faef71630f4ba83 -LibUV.v2.0.1+5.armv7l-linux-gnueabihf.tar.gz/sha512/79ebe1e57cefa243219525fdebad35765736534a4b036f2487d6dfa0376a685c8e9f16259bbce83155baebe5ceeeff2592933b597ceafa724060ffd4dd63b0c4 -LibUV.v2.0.1+5.armv7l-linux-musleabihf.tar.gz/md5/39bc81ad36519ee9261a662d444c13b4 -LibUV.v2.0.1+5.armv7l-linux-musleabihf.tar.gz/sha512/97a312f2a42a2377458ff5d5356905fb469c9c30f9ae3fa7d091c7e2cdab3a7ea813e1142fb7d08f2e0000a3d8388fb5fe0d82d3ff646310924439ba99f02903 -LibUV.v2.0.1+5.i686-linux-gnu.tar.gz/md5/ca4b4a317b62cd48f4277bba5ebb9b80 
-LibUV.v2.0.1+5.i686-linux-gnu.tar.gz/sha512/2cf17359c976b10a2e0e08d92b43ef2d113a0071748209ad6b2896d9578cb3e96b55f7c72a7c7243ded244b95945c67ea3aa248c1513b5fd37ea714154e04c2d -LibUV.v2.0.1+5.i686-linux-musl.tar.gz/md5/7f088f43c6ae4029e9d90c2881cf2509 -LibUV.v2.0.1+5.i686-linux-musl.tar.gz/sha512/b3653bd4cd95b2d4247b4b83215bfb756e211a3cc02e7e7ca1887e820cb1a7d461397d7259057b63e51825dc344e2f20e904d17defeba59584ddc54df94f1ccc -LibUV.v2.0.1+5.i686-w64-mingw32.tar.gz/md5/8ec8f225a708ebb95fd6dbe6039c386d -LibUV.v2.0.1+5.i686-w64-mingw32.tar.gz/sha512/fd9575300a65af9b7c3a59451646a5f617fd9df0fcae21db02f0f1e9c689605b1e75d12f0ee46654cb8d2b44ac044d2b44b34f9c6d008c19d41b001a69e40c6e -LibUV.v2.0.1+5.powerpc64le-linux-gnu.tar.gz/md5/54c51f81a0b69687f0cbfce63b530991 -LibUV.v2.0.1+5.powerpc64le-linux-gnu.tar.gz/sha512/79a9daa826432da8f389bbb6788720f0bdf0e6a09a16b8296f0ead8e0eae175a72a0690e4ffa5e5d8169e22f596a8ad41607eb836d3f55b217bcf74885e707e0 -LibUV.v2.0.1+5.x86_64-apple-darwin.tar.gz/md5/9ea7e5bf6107f0773e7cdb875d831939 -LibUV.v2.0.1+5.x86_64-apple-darwin.tar.gz/sha512/07b5137c94adaf1c024373b27c2a2a0e77b20cc87f536551e6080b59bd47f65d6ccaaf40ec14068e9e24140c07ad518ef749c09d93fcc36b0507c4ed6acc7032 -LibUV.v2.0.1+5.x86_64-linux-gnu.tar.gz/md5/c4feae1cb61b43ab38b8adb80f8cb46f -LibUV.v2.0.1+5.x86_64-linux-gnu.tar.gz/sha512/cef015385abca586215796c7d2420a4b2496b8a50a62bd9c483d76bb00adb4e3decefe17ba8398353166818bb23b758d3bdb311965849ea68f8b68377c1b08bc -LibUV.v2.0.1+5.x86_64-linux-musl.tar.gz/md5/47f23d12e6c2094604f168c6c40ca131 -LibUV.v2.0.1+5.x86_64-linux-musl.tar.gz/sha512/abe0d74ceabc2d7efc80c1e8d0a6938205bea883257c43a637fc739c82a7085d4f0109c22d0f67e332aa14bed60433dd739676e0237fd28aba6a15c82d3e41f4 -LibUV.v2.0.1+5.x86_64-unknown-freebsd.tar.gz/md5/6a6eeb9108db8a30f776685d4f98a853 -LibUV.v2.0.1+5.x86_64-unknown-freebsd.tar.gz/sha512/e08961cfeb904145b67c2833e6ea3f91b90bc9c8948cfd61399c7d10b1a9cffe17728a6c906a9d791b71da406d8012014b7dcde70ed445084d21e99563cdd377 -LibUV.v2.0.1+5.x86_64-w64-mingw32.tar.gz/md5/7d592fefa8b295e09b4640bd999aa358 -LibUV.v2.0.1+5.x86_64-w64-mingw32.tar.gz/sha512/b4e738c5d86ad27171289f284e35124c6bcf94fc55512622563c6be75027de5033672100008e283aced530c71a6bb1da038872719e1073566d5979278ea76e0b -libuv-3a63bf71de62c64097989254e4f03212e3bf5fc8.tar.gz/md5/a385b594c170085018bc954e50cb42cc -libuv-3a63bf71de62c64097989254e4f03212e3bf5fc8.tar.gz/sha512/5415e992a20498ae29c09bfdb4819857d15be83367488e9fbd8c5f6a460da4cd2d0dff7eaa6087a4bcf6dee6d1c873acbe5751f5594851c978456665d6a21cf9 +LibUV.v2.0.1+6.aarch64-apple-darwin.tar.gz/md5/bff12bc642215646c8c03f2003a3c5ef +LibUV.v2.0.1+6.aarch64-apple-darwin.tar.gz/sha512/9c0bb5e648d1e967caec07c700e4657c97ea9db8b48625887eb4e91af286be62380f5c85bc51bc51c87ed6104ffc26bbd498f501e3892ca1d41eb96bab88d955 +LibUV.v2.0.1+6.aarch64-linux-gnu.tar.gz/md5/af5b11ff1354c591990285e29840d83d +LibUV.v2.0.1+6.aarch64-linux-gnu.tar.gz/sha512/67f6c6a7c780b15b9e4b317c44450a325f6966fd2948d28e113f7d4b0c2893b8b5f9b1eb6da73cce683fa7176b5587e1c73b5b1faaf09d2ad378d8b085a75392 +LibUV.v2.0.1+6.aarch64-linux-musl.tar.gz/md5/2bda667ab6f9b7f8962ec675272be6b2 +LibUV.v2.0.1+6.aarch64-linux-musl.tar.gz/sha512/271772a7acff9d2cce1ab36a46f0807bf2f30a00227d0cfbcbb8eac4c583e0bd406c6406a7e9b5afa720e844b1b2bcc01ec60cae3d907d0d004a7a40ed182397 +LibUV.v2.0.1+6.armv6l-linux-gnueabihf.tar.gz/md5/5765a268e960ebbff2e7f6a386435b06 +LibUV.v2.0.1+6.armv6l-linux-gnueabihf.tar.gz/sha512/31d1a223b57dfd859f6a6633c75b53507b99a3eeccbef9d47f12e0dbf1e4b5a77e489348bda625f0cb6ecf5450edcb751d4fc4603beebb01fde73aceb7ae6d2b 
+LibUV.v2.0.1+6.armv6l-linux-musleabihf.tar.gz/md5/be91036ac0626c1b5a9b28a15026e942 +LibUV.v2.0.1+6.armv6l-linux-musleabihf.tar.gz/sha512/0e8a338f84ce24ba99357110aa6982956a9970715202005ac4a748d3a78cb75816a9063b3ad5a96569261966792f87fe698777d33b6fa428068ec07ceb944fdf +LibUV.v2.0.1+6.armv7l-linux-gnueabihf.tar.gz/md5/921038ac4396791a555e1c2a8f5af558 +LibUV.v2.0.1+6.armv7l-linux-gnueabihf.tar.gz/sha512/45519d49d857721f025bdb08522e3c08262f264b8a00bc36d9ca4bd05d6a32ce0b1b40ba7c9cfc98bbd1201e6b4592632aa8852652abb61604bcd324abc17c76 +LibUV.v2.0.1+6.armv7l-linux-musleabihf.tar.gz/md5/06b404efd3d62d107f9331ab85deb893 +LibUV.v2.0.1+6.armv7l-linux-musleabihf.tar.gz/sha512/3e73341346060df832fcc591bc447f713a8188c06f22961ae03cba4620d524edae7b84e63ac8fd5b675abb62bf0e12f176468f09e7014fbb8df6cc763dda12b6 +LibUV.v2.0.1+6.i686-linux-gnu.tar.gz/md5/e6b31595a27a91bf34b7a5aeae48d459 +LibUV.v2.0.1+6.i686-linux-gnu.tar.gz/sha512/b59516d2340ed469be8d86dc903e3497867b522082dc6096683b23fec4b03bdc5e0c643bc2cf36ca49c2dfa11689946bd5f7e92bd68978ff2a409935203ba533 +LibUV.v2.0.1+6.i686-linux-musl.tar.gz/md5/49a84d0c90ec136b933fcd939f371716 +LibUV.v2.0.1+6.i686-linux-musl.tar.gz/sha512/1abff45b3a0894b78d20e31c4dcda8673a3e3b6d3e8fa89e8f57da115ae8feff58bcb16cd3107b4c768e9c6bfb777864056fab47de5b2babead3eaa508b2e748 +LibUV.v2.0.1+6.i686-w64-mingw32.tar.gz/md5/6ef4d726e171dc8f2aaa5603180b154b +LibUV.v2.0.1+6.i686-w64-mingw32.tar.gz/sha512/0699afa096208829d7b3795ee150a94e2e0446a17e77c204a7e013f63f51791df0f8c8416c0549809cb0d0c3b1f52fb525310153a68f80652e6c8def9bf17903 +LibUV.v2.0.1+6.powerpc64le-linux-gnu.tar.gz/md5/72cc19fa36b7803a4973c3913c720d46 +LibUV.v2.0.1+6.powerpc64le-linux-gnu.tar.gz/sha512/694d96e8127e4a206496388db4f09d0af0673818f5168fc3ffaa9bd15da132d5af843f068c89f057a0c62404f1e3171725b86e1cdade3e27a3f0e8b6be8e9b2c +LibUV.v2.0.1+6.x86_64-apple-darwin.tar.gz/md5/e3c076ab2aaf47f423f9de96bcd50faa +LibUV.v2.0.1+6.x86_64-apple-darwin.tar.gz/sha512/3a3e31ccb0e2a1c1aec1b2ac52ff33f7116ef84452d70bb0f680a276411a5a9ff4aad5e5533bb7d3d981f168974a94f1ea90d41b4ddc6dab1a334f16000bf812 +LibUV.v2.0.1+6.x86_64-linux-gnu.tar.gz/md5/752545518774845ee93933fce9c9516c +LibUV.v2.0.1+6.x86_64-linux-gnu.tar.gz/sha512/458494e07a096793552ee4f9e0bd302d160186e20d702e7c0691b50984692c5725042faa49df0b1595b3d6f2459bd6d73225af1385e4ff5a9d7e4dd5baaa4dae +LibUV.v2.0.1+6.x86_64-linux-musl.tar.gz/md5/6988efa401aaf11e82a916632b26141e +LibUV.v2.0.1+6.x86_64-linux-musl.tar.gz/sha512/95abfa548c8581be9f512041c1b904532ab8e62610e70b2e184d6638d1bb2552883d946565e3071e6c8f3127a524313d432df370d6d6361a5f0ce5d3c60649ec +LibUV.v2.0.1+6.x86_64-unknown-freebsd.tar.gz/md5/5e35a7220027cd6a8ded93611fed1a57 +LibUV.v2.0.1+6.x86_64-unknown-freebsd.tar.gz/sha512/218b2f40bc1c49d91c9457b9014d536b6fd6b1f6c3704a6aeec2739bcf2ecbadda1bfd36a9ef84ffb2aebd1cb6b1903276658259d4a2d873cd61780a9762934d +LibUV.v2.0.1+6.x86_64-w64-mingw32.tar.gz/md5/1aa9e7ff08da10c79984ac470b31a701 +LibUV.v2.0.1+6.x86_64-w64-mingw32.tar.gz/sha512/675adf9330de80fee97e9ebf7a6de7763a3cafad20b6aa9e009832a590a1a20272578861bb357e3ca41961a247e2be178e4455ad107951d88ce8d3467504c235 +libuv-c2d8a538b79e135176c2b0653e04d287aada2891.tar.gz/md5/51f0f453f4531b68911f122aa429e545 +libuv-c2d8a538b79e135176c2b0653e04d287aada2891.tar.gz/sha512/44bcf449afa899327769a180c110ee93e3a5d4f1b24a31b77d6873fcbc213e34f0f0b3937b52c5677d10123476e7752c2101a1770d07da12d726dc931a8209ed diff --git a/deps/libuv.version b/deps/libuv.version index 7339533223083..0f5057c40991f 100644 --- a/deps/libuv.version +++ b/deps/libuv.version @@ -1,2 +1,2 @@ 
-LIBUV_BRANCH=julia-uv2-1.42.0 -LIBUV_SHA1=3a63bf71de62c64097989254e4f03212e3bf5fc8 +LIBUV_BRANCH=julia-uv2-1.44.1 +LIBUV_SHA1=c2d8a538b79e135176c2b0653e04d287aada2891 diff --git a/src/sys.c b/src/sys.c index 39682b24ef0b0..8f2be76e648c8 100644 --- a/src/sys.c +++ b/src/sys.c @@ -242,231 +242,6 @@ JL_DLLEXPORT unsigned long jl_geteuid(void) #endif } -JL_DLLEXPORT int jl_os_get_passwd(uv_passwd_t *pwd, unsigned long uid) -{ -#ifdef _OS_WINDOWS_ - return UV_ENOTSUP; -#else - // taken directly from libuv - struct passwd pw; - struct passwd* result; - char* buf; - size_t bufsize; - size_t name_size; - size_t homedir_size; - size_t shell_size; - size_t gecos_size; - long initsize; - int r; - - if (pwd == NULL) - return UV_EINVAL; - - initsize = sysconf(_SC_GETPW_R_SIZE_MAX); - - if (initsize <= 0) - bufsize = 4096; - else - bufsize = (size_t) initsize; - - buf = NULL; - - for (;;) { - free(buf); - buf = (char*)malloc(bufsize); - - if (buf == NULL) - return UV_ENOMEM; - - r = getpwuid_r(uid, &pw, buf, bufsize, &result); - - if (r != ERANGE) - break; - - bufsize *= 2; - } - - if (r != 0) { - free(buf); - return -r; - } - - if (result == NULL) { - free(buf); - return UV_ENOENT; - } - - /* Allocate memory for the username, gecos, shell, and home directory. */ - name_size = strlen(pw.pw_name) + 1; - homedir_size = strlen(pw.pw_dir) + 1; - shell_size = strlen(pw.pw_shell) + 1; - -#ifdef __MVS__ - gecos_size = 0; /* pw_gecos does not exist on zOS. */ -#else - if (pw.pw_gecos != NULL) - gecos_size = strlen(pw.pw_gecos) + 1; - else - gecos_size = 0; -#endif - - pwd->username = (char*)malloc(name_size + - homedir_size + - shell_size + - gecos_size); - - if (pwd->username == NULL) { - free(buf); - return UV_ENOMEM; - } - - /* Copy the username */ - memcpy(pwd->username, pw.pw_name, name_size); - - /* Copy the home directory */ - pwd->homedir = pwd->username + name_size; - memcpy(pwd->homedir, pw.pw_dir, homedir_size); - - /* Copy the shell */ - pwd->shell = pwd->homedir + homedir_size; - memcpy(pwd->shell, pw.pw_shell, shell_size); - - /* Copy the gecos field */ -#ifdef __MVS__ - pwd->gecos = NULL; /* pw_gecos does not exist on zOS. */ -#else - if (pw.pw_gecos == NULL) { - pwd->gecos = NULL; - } else { - pwd->gecos = pwd->shell + shell_size; - memcpy(pwd->gecos, pw.pw_gecos, gecos_size); - } -#endif - - /* Copy the uid and gid */ - pwd->uid = pw.pw_uid; - pwd->gid = pw.pw_gid; - - free(buf); - - return 0; -#endif -} - -typedef struct jl_group_s { - char* groupname; - unsigned long gid; - char** members; -} jl_group_t; - -JL_DLLEXPORT int jl_os_get_group(jl_group_t *grp, unsigned long gid) -{ -#ifdef _OS_WINDOWS_ - return UV_ENOTSUP; -#else - // modified directly from uv_os_get_password - struct group gp; - struct group* result; - char* buf; - char* gr_mem; - size_t bufsize; - size_t name_size; - long members; - size_t mem_size; - long initsize; - int r; - - if (grp == NULL) - return UV_EINVAL; - - initsize = sysconf(_SC_GETGR_R_SIZE_MAX); - - if (initsize <= 0) - bufsize = 4096; - else - bufsize = (size_t) initsize; - - buf = NULL; - - for (;;) { - free(buf); - buf = (char*)malloc(bufsize); - - if (buf == NULL) - return UV_ENOMEM; - - r = getgrgid_r(gid, &gp, buf, bufsize, &result); - - if (r != ERANGE) - break; - - bufsize *= 2; - } - - if (r != 0) { - free(buf); - return -r; - } - - if (result == NULL) { - free(buf); - return UV_ENOENT; - } - - /* Allocate memory for the groupname and members. 
*/ - name_size = strlen(gp.gr_name) + 1; - members = 0; - mem_size = sizeof(char*); - for (r = 0; gp.gr_mem[r] != NULL; r++) { - mem_size += strlen(gp.gr_mem[r]) + 1 + sizeof(char*); - members++; - } - - gr_mem = (char*)malloc(name_size + mem_size); - if (gr_mem == NULL) { - free(buf); - return UV_ENOMEM; - } - - /* Copy the members */ - grp->members = (char**) gr_mem; - grp->members[members] = NULL; - gr_mem = (char*) ((char**) gr_mem + members + 1); - for (r = 0; r < members; r++) { - grp->members[r] = gr_mem; - gr_mem = stpcpy(gr_mem, gp.gr_mem[r]) + 1; - } - assert(gr_mem == (char*)grp->members + mem_size); - - /* Copy the groupname */ - grp->groupname = gr_mem; - memcpy(grp->groupname, gp.gr_name, name_size); - gr_mem += name_size; - - /* Copy the gid */ - grp->gid = gp.gr_gid; - - free(buf); - - return 0; -#endif -} - -JL_DLLEXPORT void jl_os_free_group(jl_group_t *grp) -{ - if (grp == NULL) - return; - - /* - The memory for is allocated in a single uv__malloc() call. The base of the - pointer is stored in grp->members, so that is the only field that needs - to be freed. - */ - free(grp->members); - grp->members = NULL; - grp->groupname = NULL; -} - // --- buffer manipulation --- JL_DLLEXPORT jl_array_t *jl_take_buffer(ios_t *s) diff --git a/stdlib/LibUV_jll/Project.toml b/stdlib/LibUV_jll/Project.toml index ec084417b7744..9441fbf857263 100644 --- a/stdlib/LibUV_jll/Project.toml +++ b/stdlib/LibUV_jll/Project.toml @@ -1,6 +1,6 @@ name = "LibUV_jll" uuid = "183b4373-6708-53ba-ad28-60e28bb38547" -version = "2.0.1+5" +version = "2.0.1+6" [deps] Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" From 3c34dac8b04bd44cf5df19735dcbcc9f55b4d7d3 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Fri, 11 Mar 2022 23:14:23 -0800 Subject: [PATCH 13/23] Bump libblastrampoline to v5.1.0 (#44574) This should fix some serious issues with calling {c,z}dot{c,u} on Windows x86_64. See [0] for more details, and [1] for a test case. 
[0] https://github.com/JuliaLinearAlgebra/libblastrampoline/commit/082924837dec5df66eb08e19aec0bf81ac78c3be [1] https://github.com/JuliaLinearAlgebra/libblastrampoline/commit/2ef456212b4765e97d9557f35adc2c52a3aac346 --- deps/Versions.make | 2 +- deps/checksums/blastrampoline | 64 +++++++++++------------ stdlib/libblastrampoline_jll/Project.toml | 2 +- 3 files changed, 34 insertions(+), 34 deletions(-) diff --git a/deps/Versions.make b/deps/Versions.make index 88c8ff1d3d3d3..77d568ee7c6b5 100644 --- a/deps/Versions.make +++ b/deps/Versions.make @@ -75,7 +75,7 @@ OBJCONV_JLL_NAME := Objconv OBJCONV_JLL_VER := 2.49.1+0 # blastrampoline -BLASTRAMPOLINE_VER := 5.0.2 +BLASTRAMPOLINE_VER := 5.1.0 BLASTRAMPOLINE_JLL_NAME := libblastrampoline # OpenBLAS diff --git a/deps/checksums/blastrampoline b/deps/checksums/blastrampoline index 8c55d40b44065..3b5e4359e43ec 100644 --- a/deps/checksums/blastrampoline +++ b/deps/checksums/blastrampoline @@ -1,34 +1,34 @@ blastrampoline-d32042273719672c6669f6442a0be5605d434b70.tar.gz/md5/f380e4238a2dec186ecfe9598f75b824 blastrampoline-d32042273719672c6669f6442a0be5605d434b70.tar.gz/sha512/00437a96b57d99cef946257480e38e1dfdf325c46bc4a1619f5067565dfb7d9f668b0c8415badb0879b933cb1972f3c4e6be4c9e63a8a85728033e2183373819 -libblastrampoline.v5.0.2+0.aarch64-apple-darwin.tar.gz/md5/bfa0772f1683f307cd7f4300bc99bf6b -libblastrampoline.v5.0.2+0.aarch64-apple-darwin.tar.gz/sha512/e96e8d053678e979a32c893f54d76e682919184b9c1674c105651865b35f70a32ad93f545086b1b6a93e43db6c54e0d93822ba8647d0d4b16ab771a1a2bf223c -libblastrampoline.v5.0.2+0.aarch64-linux-gnu.tar.gz/md5/15a2d3ad9e744724e41209693225be58 -libblastrampoline.v5.0.2+0.aarch64-linux-gnu.tar.gz/sha512/49035889785405e8d66077ad1182b4b527c724a41c88447c3ae1ce3a0c970b27faef8035797bc376b34e96bdd54c0dce3a5c4c77615674f587cb46663f25a8dd -libblastrampoline.v5.0.2+0.aarch64-linux-musl.tar.gz/md5/ebc86535c2123b41a1f666b249e54bc6 -libblastrampoline.v5.0.2+0.aarch64-linux-musl.tar.gz/sha512/5355277d720a4ce4083d95e783f517209195b619dbd83a1fa93c3492bf25ee3c2782f9569a2b3f645e05416ef15226c6683288252db7c361d619dc7d4e4c298a -libblastrampoline.v5.0.2+0.armv6l-linux-gnueabihf.tar.gz/md5/3f0807f2c390e318930e2240fcb886f6 -libblastrampoline.v5.0.2+0.armv6l-linux-gnueabihf.tar.gz/sha512/88927035941e109ac7c7bb483d1415755966048b89d5d48f2fcd071349d85bf3f4f16b59179531d85a18435f31ced499b5a5ffeda0e80a6160ce9b65d8da616e -libblastrampoline.v5.0.2+0.armv6l-linux-musleabihf.tar.gz/md5/8e9038163fa3d8c2b875fd481ee5031d -libblastrampoline.v5.0.2+0.armv6l-linux-musleabihf.tar.gz/sha512/01edfbadfaff63313ca3626cc1b6b674ea9667ba54fa33464c31f027e640415d13312d07d3b02d3550e45fdb02730cc4850db994420f258c126dfc8f4920640d -libblastrampoline.v5.0.2+0.armv7l-linux-gnueabihf.tar.gz/md5/37db09f2d2c82ceb91604552c79d5d55 -libblastrampoline.v5.0.2+0.armv7l-linux-gnueabihf.tar.gz/sha512/d1de56c26c49d632c36281f9af097667b4e3c693a8e900fd6ae64b1c9b267449e2424945e940ad17418bedf02837cc585b084aca77b08f9b8d77ba32cf33558b -libblastrampoline.v5.0.2+0.armv7l-linux-musleabihf.tar.gz/md5/d6ebdfefbad51826d12dd36ae691550e -libblastrampoline.v5.0.2+0.armv7l-linux-musleabihf.tar.gz/sha512/75c17548f063ae7b007f074498630a77a05cdd736a5e28e49ba35dca045bf8f5aada5df2b58defb628e63c04d1a603ebb3e568a3d36e80e3fd8bebd9948c594c -libblastrampoline.v5.0.2+0.i686-linux-gnu.tar.gz/md5/c84eaf618af0011a0e2db597eb1213f3 -libblastrampoline.v5.0.2+0.i686-linux-gnu.tar.gz/sha512/c2d2e50429889fdb9de96a624428c43799d3043c444c15ea0a7e2c96dcfa11ec52376509e9d946e42cda9d60d44c701645e3586dd704a766c7195cc1c3074cce 
-libblastrampoline.v5.0.2+0.i686-linux-musl.tar.gz/md5/65bdcc9547bbcc15f3c1e75508b023c7 -libblastrampoline.v5.0.2+0.i686-linux-musl.tar.gz/sha512/15498b8b20c4374474cbab7288cac244f5a73f84f484190c30f9b2cd5b5f9cf56e772e786d92c6ba218f83dfc32cd91b6c12f6f4b9dbfb6d8d044ba5015ebf19 -libblastrampoline.v5.0.2+0.i686-w64-mingw32.tar.gz/md5/baa2ee8448fd5bf37aee4b73604f1c6e -libblastrampoline.v5.0.2+0.i686-w64-mingw32.tar.gz/sha512/a57dc29daf42073889a2bf1b5e9dcc063942058dba2e6b515ab791e6c7cde0b0f7e42d0efd3c957853f768d52ac8984c5a14cc26db92244f9b3ec80f48827b79 -libblastrampoline.v5.0.2+0.powerpc64le-linux-gnu.tar.gz/md5/7118a09ce0d6d93db2f86965de3f03a4 -libblastrampoline.v5.0.2+0.powerpc64le-linux-gnu.tar.gz/sha512/2bb254a813255f640c3940753d010556a822c45c26e520bf679108add828c5c927cfcc98c4e0083132f61df3c9ea64ef47d39d5d849e08154a4cf2b196834331 -libblastrampoline.v5.0.2+0.x86_64-apple-darwin.tar.gz/md5/36302044a0fd8b4585620ba081340b71 -libblastrampoline.v5.0.2+0.x86_64-apple-darwin.tar.gz/sha512/05dd71287ff4d025c9ff0a41485310e224dc7222fb5350223998745329fb9f6dfa883f1f670fb014ef888cda011c144c3b5d713e941732d644466d33fd3c3506 -libblastrampoline.v5.0.2+0.x86_64-linux-gnu.tar.gz/md5/4979d46cb033375fa69b560f661c219b -libblastrampoline.v5.0.2+0.x86_64-linux-gnu.tar.gz/sha512/ee12c621bc25effd01c58cc13790cb9b915521c3c81f9bd89f93b12cec543eb49b2ea12a5f79c1a25ba17d08125f52629e3fba45e0b28cc0c7f5fec5627485cb -libblastrampoline.v5.0.2+0.x86_64-linux-musl.tar.gz/md5/acbcd598ea31d5082aff096f76d700b7 -libblastrampoline.v5.0.2+0.x86_64-linux-musl.tar.gz/sha512/84838e69d671f13dd96cc0993b3fab721d449c2abc30576ab6905e5c8c4927e7b499cf6cf3f9cb65806980c46439bb4b256b4ab880026c2d0a10cfac6f419afb -libblastrampoline.v5.0.2+0.x86_64-unknown-freebsd.tar.gz/md5/eec09438c820e053fb35d29bc6f3858d -libblastrampoline.v5.0.2+0.x86_64-unknown-freebsd.tar.gz/sha512/3094af269dd19c5460f1a8adff8871e33cb6132b943009dcf96e5c65e532a26ce54cb3c4ca1a8f615c7e59eae63fabcea40f86a4dbbfb53f446e5982198faf0a -libblastrampoline.v5.0.2+0.x86_64-w64-mingw32.tar.gz/md5/6859880e47b1b071ac67eabfbea6ea9b -libblastrampoline.v5.0.2+0.x86_64-w64-mingw32.tar.gz/sha512/6fb59a652d1456fb5aa58631aa88f298571f1d6ea8a196884dcb30b29eab05de310fa706d3b40955975a611d0464e22be36577eede54f765374bedf4cf02469d +libblastrampoline.v5.1.0+0.aarch64-apple-darwin.tar.gz/md5/edf090a17d862c33d611875058438757 +libblastrampoline.v5.1.0+0.aarch64-apple-darwin.tar.gz/sha512/a3413c7d46c04318a5bebf10d6f930d04b5997d4be6be4e2748a7b60f968d2f2be7de140eee6c699962a12e8439f68f144e5323dea17d91587e82f97aaaaaa24 +libblastrampoline.v5.1.0+0.aarch64-linux-gnu.tar.gz/md5/fe88a410d795f805756488915679edbd +libblastrampoline.v5.1.0+0.aarch64-linux-gnu.tar.gz/sha512/cbd31304278ea67ddc0f766c4647275c87829cf5377c3851153b7568015f4f016fd0f3e095f479c33d23a50f4af8c38bae4555b02dcbf45a04b6e5a0dd3504a8 +libblastrampoline.v5.1.0+0.aarch64-linux-musl.tar.gz/md5/d4d8c393eb28953297b37a7bae79ed2e +libblastrampoline.v5.1.0+0.aarch64-linux-musl.tar.gz/sha512/3b5dca87e089ac10486f75663b4cf7d404c71b040231b04e1ec5110d13f30ac620b4cb880040106273866d465da9bdda5643887534de8e35668a7ab545422216 +libblastrampoline.v5.1.0+0.armv6l-linux-gnueabihf.tar.gz/md5/8b5f2fbd5442bf31bd10836ffd177968 +libblastrampoline.v5.1.0+0.armv6l-linux-gnueabihf.tar.gz/sha512/f1d6314c785afc0aaa3ebcf8a532312e676ca41d427b9c2abdea88c700df4d6a7cb5cfa54d65493e5c3d711a64062a20a5de7e3b75feee0653115cee7de05446 +libblastrampoline.v5.1.0+0.armv6l-linux-musleabihf.tar.gz/md5/8ed3013c644ab3be5dce013fb23fd413 
+libblastrampoline.v5.1.0+0.armv6l-linux-musleabihf.tar.gz/sha512/da40cbb0114d46a66ae41284d36dc855aa52dcd3993643858308f18c5d8eedbf92fc8ee57d3e3cc2153f29670b40bc03a8dd01d5b49dde210c8a7a2d471a59b7 +libblastrampoline.v5.1.0+0.armv7l-linux-gnueabihf.tar.gz/md5/23b8ef9ea92a8d474646d814c0c91577 +libblastrampoline.v5.1.0+0.armv7l-linux-gnueabihf.tar.gz/sha512/97789adc18a54b953ce8696b484a4314e734a8092a27f81f43c1ae269b592b18ba7c67082396220a1906ffb075895c34462be976e0059aded9f6a6948abb1672 +libblastrampoline.v5.1.0+0.armv7l-linux-musleabihf.tar.gz/md5/d5a47ebe37a4a234ee6a4f3cf830e8c5 +libblastrampoline.v5.1.0+0.armv7l-linux-musleabihf.tar.gz/sha512/65366692c074576733e3b3f15d011e326d6a1e2357055a1a0159db31cdd7d5ff0e9aba9a33c1f2a949e128ac10b72776a3f76907df4cadcf7e67ace934cf4ef0 +libblastrampoline.v5.1.0+0.i686-linux-gnu.tar.gz/md5/14a342ab1bd16ef61d747e99acc97e6a +libblastrampoline.v5.1.0+0.i686-linux-gnu.tar.gz/sha512/8eca984912e69af769f06cd2b38d1df9d724e4e42d6d5b2fcb77a8e74b2aa9f9c31beb36d634e5da28d4d2f0838957f5c5cd336db616768d8ffb60217fe92edc +libblastrampoline.v5.1.0+0.i686-linux-musl.tar.gz/md5/201e6c737df0c0e2f4327c395133969f +libblastrampoline.v5.1.0+0.i686-linux-musl.tar.gz/sha512/778daa7a0d3a6fb8d6480a14123e874009f0fdc5f1d3411518f8d9975c45ca418e88d71db72af8465d4064f4c177d0abb70bc568df3a4c765eed7c5aeddca428 +libblastrampoline.v5.1.0+0.i686-w64-mingw32.tar.gz/md5/8ddf4dec49fac4888f94f90143126e5f +libblastrampoline.v5.1.0+0.i686-w64-mingw32.tar.gz/sha512/388b797f4c86f0ea090058acaff0eed34c42d45092c001410d11a4a4da93668c1729453290872cd44615ee517d62546f4dc42005240a6c36e40e7152f5c9cf5c +libblastrampoline.v5.1.0+0.powerpc64le-linux-gnu.tar.gz/md5/db626123ab94b489ac8b4d395b2f5cf4 +libblastrampoline.v5.1.0+0.powerpc64le-linux-gnu.tar.gz/sha512/8c96f518dea82057fe85bdb2ee867cc7abc33e9c53fe94dd84d097a16268630c22082db7fc003dadfc4749400f3465564088e05cabd6844c31b870319432c433 +libblastrampoline.v5.1.0+0.x86_64-apple-darwin.tar.gz/md5/65b9aae2f749ec608b61412aa1921d65 +libblastrampoline.v5.1.0+0.x86_64-apple-darwin.tar.gz/sha512/38e974c9260614d855b0b13f78e72bbd65aa889e88101d25441dd4e78ce37baf81bab7de1950d71d8e35b32d62fb88ac9c3f39ab5a4aff11d00619441bc003f8 +libblastrampoline.v5.1.0+0.x86_64-linux-gnu.tar.gz/md5/0ab01f256277b4ea96f6d83c50891b99 +libblastrampoline.v5.1.0+0.x86_64-linux-gnu.tar.gz/sha512/2b2178d74beb1c12e348f6469777d31116f26229c243d5e08a6ac36a74c3eb38854c1d82429d0e7cabee259d0d5220c47c334a561ea5caac6f61d91aa6b34f52 +libblastrampoline.v5.1.0+0.x86_64-linux-musl.tar.gz/md5/52a9da4586daa6572b8fe2c13db6268a +libblastrampoline.v5.1.0+0.x86_64-linux-musl.tar.gz/sha512/04abc5a0b6f80f10d1fccceee8a0e1c58aba76a45e3f6662ce4115d9d39d20dd05b3859434037d21bf6c5088a5a428565cd86e1cf6d1676666ce7e3eb1921b80 +libblastrampoline.v5.1.0+0.x86_64-unknown-freebsd.tar.gz/md5/f2b66517937a7647086ba96acc81c6a6 +libblastrampoline.v5.1.0+0.x86_64-unknown-freebsd.tar.gz/sha512/c19654b97928bdba36ccf3dbecf8ca994a46929c29c5c120d2d17062128a3df8927230fe7c418d6f780557abb8ce94b6a6a023bddcd3aeb91c8302cdbfe2b39e +libblastrampoline.v5.1.0+0.x86_64-w64-mingw32.tar.gz/md5/4b50ad8399c733ee5d60ce1ad00e1e5e +libblastrampoline.v5.1.0+0.x86_64-w64-mingw32.tar.gz/sha512/6a0f1d061350d53dd2a030ba11a0ac02c5ae598cd2c21dda39f95d81a2b0f43a454d60cf32c2fc0546df074181100e2d247d229d62c4a6b94bc7b697b02f0e0e diff --git a/stdlib/libblastrampoline_jll/Project.toml b/stdlib/libblastrampoline_jll/Project.toml index 97a5485e883a9..44dd330f000a6 100644 --- a/stdlib/libblastrampoline_jll/Project.toml +++ b/stdlib/libblastrampoline_jll/Project.toml @@ -1,6 +1,6 @@ name 
= "libblastrampoline_jll" uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" -version = "5.0.2+0" +version = "5.1.0+0" [deps] Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" From 258ddc07d413e4c5889a6594a5b93ac6e1508018 Mon Sep 17 00:00:00 2001 From: Oscar Smith Date: Sat, 12 Mar 2022 03:01:25 -0500 Subject: [PATCH 14/23] fix precision issue in Float64^Float64. (#44529) * improve accuracy for x^-3 --- base/intfuncs.jl | 1 - base/math.jl | 9 +++--- .../manual/complex-and-rational-numbers.md | 2 +- test/math.jl | 30 +++++++++++++++---- 4 files changed, 30 insertions(+), 12 deletions(-) diff --git a/base/intfuncs.jl b/base/intfuncs.jl index 3c2d9b4beec7b..44c7be0626126 100644 --- a/base/intfuncs.jl +++ b/base/intfuncs.jl @@ -321,7 +321,6 @@ const HWNumber = Union{HWReal, Complex{<:HWReal}, Rational{<:HWReal}} @inline literal_pow(::typeof(^), x::HWNumber, ::Val{3}) = x*x*x @inline literal_pow(::typeof(^), x::HWNumber, ::Val{-1}) = inv(x) @inline literal_pow(::typeof(^), x::HWNumber, ::Val{-2}) = (i=inv(x); i*i) -@inline literal_pow(::typeof(^), x::HWNumber, ::Val{-3}) = (i=inv(x); i*i*i) # don't use the inv(x) transformation here since float^p is slightly more accurate @inline literal_pow(::typeof(^), x::AbstractFloat, ::Val{p}) where {p} = x^p diff --git a/base/math.jl b/base/math.jl index 15bee7f1fccb4..0d20c7bf1cca5 100644 --- a/base/math.jl +++ b/base/math.jl @@ -1010,8 +1010,8 @@ end !isfinite(x) && return x*(y>0 || isnan(x)) x==0 && return abs(y)*Inf*(!(y>0)) logxhi,logxlo = Base.Math._log_ext(x) - xyhi = logxhi*y - xylo = logxlo*y + xyhi, xylo = two_mul(logxhi,y) + xylo = muladd(logxlo, y, xylo) hi = xyhi+xylo return Base.Math.exp_impl(hi, xylo-(hi-xyhi), Val(:β„―)) end @@ -1041,6 +1041,7 @@ end @assume_effects :terminates_locally @noinline function pow_body(x::Float64, n::Integer) y = 1.0 xnlo = ynlo = 0.0 + n == 3 && return x*x*x # keep compatibility with literal_pow if n < 0 rx = inv(x) n==-2 && return rx*rx #keep compatability with literal_pow @@ -1048,7 +1049,6 @@ end x = rx n = -n end - n == 3 && return x*x*x # keep compatibility with literal_pow while n > 1 if n&1 > 0 err = muladd(y, xnlo, x*ynlo) @@ -1065,8 +1065,9 @@ end end function ^(x::Float32, n::Integer) - n < 0 && return inv(x)^(-n) + n == -2 && return (i=inv(x); i*i) n == 3 && return x*x*x #keep compatibility with literal_pow + n < 0 && return Float32(Base.power_by_squaring(inv(Float64(x)),-n)) Float32(Base.power_by_squaring(Float64(x),n)) end @inline ^(x::Float16, y::Integer) = Float16(Float32(x) ^ y) diff --git a/doc/src/manual/complex-and-rational-numbers.md b/doc/src/manual/complex-and-rational-numbers.md index 94ad70982bbae..ac48e5b420f5e 100644 --- a/doc/src/manual/complex-and-rational-numbers.md +++ b/doc/src/manual/complex-and-rational-numbers.md @@ -36,7 +36,7 @@ julia> (-1 + 2im)^2 -3 - 4im julia> (-1 + 2im)^2.5 -2.7296244647840084 - 6.960664459571898im +2.729624464784009 - 6.9606644595719im julia> (-1 + 2im)^(1 + 1im) -0.27910381075826657 + 0.08708053414102428im diff --git a/test/math.jl b/test/math.jl index 34a21ca3335ad..3ae8703c09100 100644 --- a/test/math.jl +++ b/test/math.jl @@ -155,12 +155,6 @@ end @test x^y === T(big(x)^big(y)) @test x^1 === x @test x^yi === T(big(x)^yi) - # test (-x)^y for y larger than typemax(Int) - @test T(-1)^floatmax(T) === T(1) - @test prevfloat(T(-1))^floatmax(T) === T(Inf) - @test nextfloat(T(-1))^floatmax(T) === T(0.0) - # test for large negative exponent where error compensation matters - @test 0.9999999955206014^-1.0e8 == 1.565084574870928 @test (-x)^yi == x^yi @test (-x)^(yi+1) 
== -(x^(yi+1)) @test acos(x) β‰ˆ acos(big(x)) @@ -1323,6 +1317,30 @@ end end end +@testset "pow" begin + for T in (Float16, Float32, Float64) + for x in (0.0, -0.0, 1.0, 10.0, 2.0, Inf, NaN, -Inf, -NaN) + for y in (0.0, -0.0, 1.0, -3.0,-10.0 , Inf, NaN, -Inf, -NaN) + got, expected = T(x)^T(y), T(big(x))^T(y) + @test isnan_type(T, got) && isnan_type(T, expected) || (got === expected) + end + end + for _ in 1:2^16 + x=rand(T)*100; y=rand(T)*200-100 + got, expected = x^y, widen(x)^y + if isfinite(eps(T(expected))) + @test abs(expected-got) <= 1.3*eps(T(expected)) || (x,y) + end + end + # test (-x)^y for y larger than typemax(Int) + @test T(-1)^floatmax(T) === T(1) + @test prevfloat(T(-1))^floatmax(T) === T(Inf) + @test nextfloat(T(-1))^floatmax(T) === T(0.0) + end + # test for large negative exponent where error compensation matters + @test 0.9999999955206014^-1.0e8 == 1.565084574870928 +end + # Test that sqrt behaves correctly and doesn't exhibit fp80 double rounding. # This happened on old glibc versions. # Test case from https://sourceware.org/bugzilla/show_bug.cgi?id=14032. From 2cba553ec0b156ef298b68e7ddb50bfa4f5a29d1 Mon Sep 17 00:00:00 2001 From: Saransh Date: Sat, 12 Mar 2022 23:28:30 +0530 Subject: [PATCH 15/23] Create a table for badges in `README` (#44569) * Create a table for badges in README * Convert README table from Markdown to HTML --- README.md | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 368d971fed77b..abc6e9730f1e6 100644 --- a/README.md +++ b/README.md @@ -5,31 +5,31 @@ -Documentation: -[![Documentation][docs-img]][docs-url] - -[docs-img]: https://img.shields.io/badge/docs-v1-blue.svg "Documentation (version 1)" -[docs-url]: https://docs.julialang.org - -Continuous integration: -[![Continuous integration (master)][buildkite-master-img]][buildkite-master-url] - - -[buildkite-master-img]: https://badge.buildkite.com/f28e0d28b345f9fad5856ce6a8d64fffc7c70df8f4f2685cd8.svg?branch=master "Continuous integration (master)" -[buildkite-master-url]: https://buildkite.com/julialang/julia-master - -Code coverage: -[![Code coverage (Coveralls)][coveralls-img]][coveralls-url] -[![Code coverage (Codecov)][codecov-img]][codecov-url] - -[coveralls-img]: https://img.shields.io/coveralls/github/JuliaLang/julia/master.svg?label=coveralls "Code coverage (Coveralls)" -[coveralls-url]: https://coveralls.io/r/JuliaLang/julia?branch=master - -[codecov-img]: https://img.shields.io/codecov/c/github/JuliaLang/julia/master.svg?label=codecov "Code coverage (Codecov)" -[codecov-url]: https://codecov.io/github/JuliaLang/julia?branch=master + + + + + + + + + + + + + + + + +
      Documentation
+    </td>
+    <td>
+      Continuous integration
+    </td>
+    <td>
+      Code coverage
+    </td>
+  </tr>
## The Julia Language From e6c1525450fa27c4025094144b742011bb9cbb7e Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson Date: Sat, 12 Mar 2022 20:02:00 +0100 Subject: [PATCH 16/23] extend the API for `LazyString` a bit (#44581) --- base/strings/lazy.jl | 3 +++ test/strings/basic.jl | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/base/strings/lazy.jl b/base/strings/lazy.jl index b40fd9a5842b3..a2f872c40660e 100644 --- a/base/strings/lazy.jl +++ b/base/strings/lazy.jl @@ -61,3 +61,6 @@ iterate(s::LazyString, i::Integer) = iterate(String(s), i) isequal(a::LazyString, b::LazyString) = isequal(String(a), String(b)) ==(a::LazyString, b::LazyString) = (String(a) == String(b)) ncodeunits(s::LazyString) = ncodeunits(String(s)) +codeunit(s::LazyString) = codeunit(String(s)) +codeunit(s::LazyString, i::Integer) = codeunit(String(s), i) +isvalid(s::LazyString, i::Integer) = isvalid(String(s), i) diff --git a/test/strings/basic.jl b/test/strings/basic.jl index c1df87420d7da..b20c18e636db7 100644 --- a/test/strings/basic.jl +++ b/test/strings/basic.jl @@ -1100,4 +1100,8 @@ end let d = Dict(lazy"$(1+2) is 3" => 3) @test d["3 is 3"] == 3 end + l = lazy"1+2" + @test codeunit(l) == UInt8 + @test codeunit(l,2) == 0x2b + @test isvalid(l, 1) end From 24d526871d5203317fa17f93f9a4ef9fb34e8632 Mon Sep 17 00:00:00 2001 From: Antoine Levitt Date: Sat, 12 Mar 2022 20:29:50 +0100 Subject: [PATCH 17/23] Remove number / vector (#44358) * Remove number / vector * Fix test --- NEWS.md | 3 +++ stdlib/LinearAlgebra/src/generic.jl | 3 --- stdlib/LinearAlgebra/test/dense.jl | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/NEWS.md b/NEWS.md index 137a501586f3b..2fedf928137a7 100644 --- a/NEWS.md +++ b/NEWS.md @@ -46,6 +46,9 @@ Standard library changes #### LinearAlgebra +* The methods `a / b` and `b \ a` with `a` a scalar and `b` a vector, + which were equivalent to `a * pinv(b)`, have been removed due to the + risk of confusion with elementwise division ([#44358]). * We are now wholly reliant on libblastrampoline (LBT) for calling BLAS and LAPACK. OpenBLAS is shipped by default, but building the system image with other BLAS/LAPACK libraries is not diff --git a/stdlib/LinearAlgebra/src/generic.jl b/stdlib/LinearAlgebra/src/generic.jl index 676e965652e8f..aa38419614b73 100644 --- a/stdlib/LinearAlgebra/src/generic.jl +++ b/stdlib/LinearAlgebra/src/generic.jl @@ -1142,9 +1142,6 @@ function (/)(A::AbstractVecOrMat, B::AbstractVecOrMat) size(A,2) != size(B,2) && throw(DimensionMismatch("Both inputs should have the same number of columns")) return copy(adjoint(adjoint(B) \ adjoint(A))) end -# \(A::StridedMatrix,x::Number) = inv(A)*x Should be added at some point when the old elementwise version has been deprecated long enough -# /(x::Number,A::StridedMatrix) = x*inv(A) -/(x::Number, v::AbstractVector) = x*pinv(v) cond(x::Number) = iszero(x) ? 
Inf : 1.0 cond(x::Number, p) = cond(x) diff --git a/stdlib/LinearAlgebra/test/dense.jl b/stdlib/LinearAlgebra/test/dense.jl index f03bf4a953ac6..9bdc732d1f67a 100644 --- a/stdlib/LinearAlgebra/test/dense.jl +++ b/stdlib/LinearAlgebra/test/dense.jl @@ -1108,12 +1108,12 @@ end end function test_rdiv_pinv_consistency(a, b) - @test (a*b)/b β‰ˆ a*(b/b) β‰ˆ (a*b)*pinv(b) β‰ˆ a*(b*pinv(b)) - @test typeof((a*b)/b) == typeof(a*(b/b)) == typeof((a*b)*pinv(b)) == typeof(a*(b*pinv(b))) + @test a*(b/b) β‰ˆ (a*b)*pinv(b) β‰ˆ a*(b*pinv(b)) + @test typeof(a*(b/b)) == typeof((a*b)*pinv(b)) == typeof(a*(b*pinv(b))) end function test_ldiv_pinv_consistency(a, b) - @test a\(a*b) β‰ˆ (a\a)*b β‰ˆ (pinv(a)*a)*b β‰ˆ pinv(a)*(a*b) - @test typeof(a\(a*b)) == typeof((a\a)*b) == typeof((pinv(a)*a)*b) == typeof(pinv(a)*(a*b)) + @test (a\a)*b β‰ˆ (pinv(a)*a)*b β‰ˆ pinv(a)*(a*b) + @test typeof((a\a)*b) == typeof((pinv(a)*a)*b) == typeof(pinv(a)*(a*b)) end function test_div_pinv_consistency(a, b) test_rdiv_pinv_consistency(a, b) From b49a1b47985edc76526a5a006ddbe0c385a39f9f Mon Sep 17 00:00:00 2001 From: pchintalapudi <34727397+pchintalapudi@users.noreply.github.com> Date: Sat, 12 Mar 2022 14:35:38 -0500 Subject: [PATCH 18/23] Fix llvm powi intrinsic calls in fastmath.jl (#44580) --- base/fastmath.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/base/fastmath.jl b/base/fastmath.jl index c01a8a5b225f7..05a5ce0503e68 100644 --- a/base/fastmath.jl +++ b/base/fastmath.jl @@ -279,8 +279,8 @@ exp10_fast(x::Union{Float32,Float64}) = Base.Math.exp10_fast(x) # builtins -pow_fast(x::Float32, y::Integer) = ccall("llvm.powi.f32", llvmcall, Float32, (Float32, Int32), x, y) -pow_fast(x::Float64, y::Integer) = ccall("llvm.powi.f64", llvmcall, Float64, (Float64, Int32), x, y) +pow_fast(x::Float32, y::Integer) = ccall("llvm.powi.f32.i32", llvmcall, Float32, (Float32, Int32), x, y) +pow_fast(x::Float64, y::Integer) = ccall("llvm.powi.f64.i32", llvmcall, Float64, (Float64, Int32), x, y) pow_fast(x::FloatTypes, ::Val{p}) where {p} = pow_fast(x, p) # inlines already via llvm.powi @inline pow_fast(x, v::Val) = Base.literal_pow(^, x, v) From ceec252630f08bff7901b06fe94cdbfe93f37cdc Mon Sep 17 00:00:00 2001 From: Dilum Aluthge Date: Sat, 12 Mar 2022 19:00:50 -0500 Subject: [PATCH 19/23] CI (`Create Buildbot Statuses`): add `linux32` to the list (#44594) --- .github/workflows/statuses.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/statuses.yml b/.github/workflows/statuses.yml index 16c07f0f040cc..6280fda97854a 100644 --- a/.github/workflows/statuses.yml +++ b/.github/workflows/statuses.yml @@ -48,6 +48,7 @@ jobs: - run: | declare -a CONTEXT_LIST=( "buildbot/tester_freebsd64" + "buildbot/tester_linux32" "buildbot/tester_macos64" "buildbot/tester_win32" "buildbot/tester_win64" From ff88fa446f44b8bcde15cea8c29549a6fff65375 Mon Sep 17 00:00:00 2001 From: Jameson Nash Date: Sat, 12 Mar 2022 19:09:16 -0500 Subject: [PATCH 20/23] inference: refine PartialStruct lattice tmerge (#44404) * inference: fix tmerge lattice over issimpleenoughtype Previously we assumed only union type could have complexity that violated the tmerge lattice requirements, but other types can have that too. This lets us fix an issue with the PartialStruct comparison failing for undefined fields, mentioned in #43784. 
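
  For example (a minimal sketch mirroring the new tests added to
  test/compiler/inference.jl in this patch; `tmerge` and `βŠ‘` are the
  Core.Compiler lattice operations):

  ```julia
  using Core.Compiler: tmerge, βŠ‘

  init = Base.ImmutableDict{Number,Number}()  # `new()` leaves all three fields #undef
  a = Core.Const(init)
  b = Core.PartialStruct(typeof(init), Any[Core.Const(init), Any, ComplexF64])
  c = tmerge(a, b)
  # the merge must be an upper bound of both inputs; the undefined fields of
  # `init` previously made the Const-vs-PartialStruct βŠ‘ check return false
  @assert βŠ‘(a, c) && βŠ‘(b, c)
  ```
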
* inference: refine PartialStruct lattice tmerge Be more aggressive about merging fields to greatly accelerate convergence, but also compute anyrefine more correctly as we do now elsewhere (since #42831, a121721f975fc4105ed24ebd0ad1020d08d07a38) Move the tmeet algorithm, without changes, since it is a precise lattice operation, not a heuristic limit like tmerge. Close #43784 --- base/compiler/typelattice.jl | 86 ++++++++++++++++++++++- base/compiler/typelimits.jl | 132 +++++++++++++++++++++-------------- test/compiler/inference.jl | 28 ++++++-- 3 files changed, 189 insertions(+), 57 deletions(-) diff --git a/base/compiler/typelattice.jl b/base/compiler/typelattice.jl index bba9f41bf64d3..f6eb92a040f2a 100644 --- a/base/compiler/typelattice.jl +++ b/base/compiler/typelattice.jl @@ -200,7 +200,7 @@ The non-strict partial order over the type inference lattice. end for i in 1:nfields(a.val) # XXX: let's handle varargs later - isdefined(a.val, i) || return false + isdefined(a.val, i) || continue # since βˆ€ T Union{} βŠ‘ T βŠ‘(Const(getfield(a.val, i)), b.fields[i]) || return false end return true @@ -289,6 +289,48 @@ function is_lattice_equal(@nospecialize(a), @nospecialize(b)) return a βŠ‘ b && b βŠ‘ a end +# compute typeintersect over the extended inference lattice, +# as precisely as we can, +# where v is in the extended lattice, and t is a Type. +function tmeet(@nospecialize(v), @nospecialize(t)) + if isa(v, Const) + if !has_free_typevars(t) && !isa(v.val, t) + return Bottom + end + return v + elseif isa(v, PartialStruct) + has_free_typevars(t) && return v + widev = widenconst(v) + if widev <: t + return v + end + ti = typeintersect(widev, t) + valid_as_lattice(ti) || return Bottom + @assert widev <: Tuple + new_fields = Vector{Any}(undef, length(v.fields)) + for i = 1:length(new_fields) + vfi = v.fields[i] + if isvarargtype(vfi) + new_fields[i] = vfi + else + new_fields[i] = tmeet(vfi, widenconst(getfield_tfunc(t, Const(i)))) + if new_fields[i] === Bottom + return Bottom + end + end + end + return tuple_tfunc(new_fields) + elseif isa(v, Conditional) + if !(Bool <: t) + return Bottom + end + return v + end + ti = typeintersect(widenconst(v), t) + valid_as_lattice(ti) || return Bottom + return ti +end + widenconst(c::AnyConditional) = Bool widenconst((; val)::Const) = isa(val, Type) ? Type{val} : typeof(val) widenconst(m::MaybeUndef) = widenconst(m.typ) @@ -427,3 +469,45 @@ function stupdate1!(state::VarTable, change::StateUpdate) end return false end + +# compute typeintersect over the extended inference lattice, +# as precisely as we can, +# where v is in the extended lattice, and t is a Type. 
+function tmeet(@nospecialize(v), @nospecialize(t)) + if isa(v, Const) + if !has_free_typevars(t) && !isa(v.val, t) + return Bottom + end + return v + elseif isa(v, PartialStruct) + has_free_typevars(t) && return v + widev = widenconst(v) + if widev <: t + return v + end + ti = typeintersect(widev, t) + valid_as_lattice(ti) || return Bottom + @assert widev <: Tuple + new_fields = Vector{Any}(undef, length(v.fields)) + for i = 1:length(new_fields) + vfi = v.fields[i] + if isvarargtype(vfi) + new_fields[i] = vfi + else + new_fields[i] = tmeet(vfi, widenconst(getfield_tfunc(t, Const(i)))) + if new_fields[i] === Bottom + return Bottom + end + end + end + return tuple_tfunc(new_fields) + elseif isa(v, Conditional) + if !(Bool <: t) + return Bottom + end + return v + end + ti = typeintersect(widenconst(v), t) + valid_as_lattice(ti) || return Bottom + return ti +end diff --git a/base/compiler/typelimits.jl b/base/compiler/typelimits.jl index a7989777317c3..d25c77deb6d2e 100644 --- a/base/compiler/typelimits.jl +++ b/base/compiler/typelimits.jl @@ -298,11 +298,57 @@ union_count_abstract(x::Union) = union_count_abstract(x.a) + union_count_abstrac union_count_abstract(@nospecialize(x)) = !isdispatchelem(x) function issimpleenoughtype(@nospecialize t) - t = ignorelimited(t) return unionlen(t) + union_count_abstract(t) <= MAX_TYPEUNION_LENGTH && unioncomplexity(t) <= MAX_TYPEUNION_COMPLEXITY end +# A simplified type_more_complex query over the extended lattice +# (assumes typeb βŠ‘ typea) +function issimplertype(@nospecialize(typea), @nospecialize(typeb)) + typea = ignorelimited(typea) + typeb = ignorelimited(typeb) + typea isa MaybeUndef && (typea = typea.typ) # n.b. does not appear in inference + typeb isa MaybeUndef && (typeb = typeb.typ) # n.b. does not appear in inference + typea === typeb && return true + if typea isa PartialStruct + aty = widenconst(typea) + for i = 1:length(typea.fields) + ai = typea.fields[i] + bi = fieldtype(aty, i) + is_lattice_equal(ai, bi) && continue + tni = _typename(widenconst(ai)) + if tni isa Const + bi = (tni.val::Core.TypeName).wrapper + is_lattice_equal(ai, bi) && continue + end + bi = getfield_tfunc(typeb, Const(i)) + is_lattice_equal(ai, bi) && continue + # It is not enough for ai to be simpler than bi: it must exactly equal + # (for this, an invariant struct field, by contrast to + # type_more_complex above which handles covariant tuples). 
+ return false + end + elseif typea isa Type + return issimpleenoughtype(typea) + # elseif typea isa Const # fall-through good + elseif typea isa Conditional # follow issubconditional query + typeb isa Const && return true + typeb isa Conditional || return false + is_same_conditionals(typea, typeb) || return false + issimplertype(typea.vtype, typeb.vtype) || return false + issimplertype(typea.elsetype, typeb.elsetype) || return false + elseif typea isa InterConditional # ibid + typeb isa Const && return true + typeb isa InterConditional || return false + is_same_conditionals(typea, typeb) || return false + issimplertype(typea.vtype, typeb.vtype) || return false + issimplertype(typea.elsetype, typeb.elsetype) || return false + elseif typea isa PartialOpaque + # TODO + end + return true +end + # pick a wider type that contains both typea and typeb, # with some limits on how "large" it can get, # but without losing too much precision in common cases @@ -310,11 +356,13 @@ end function tmerge(@nospecialize(typea), @nospecialize(typeb)) typea === Union{} && return typeb typeb === Union{} && return typea + typea === typeb && return typea + suba = typea βŠ‘ typeb - suba && issimpleenoughtype(typeb) && return typeb + suba && issimplertype(typeb, typea) && return typeb subb = typeb βŠ‘ typea suba && subb && return typea - subb && issimpleenoughtype(typea) && return typea + subb && issimplertype(typea, typeb) && return typea # type-lattice for LimitedAccuracy wrapper # the merge create a slightly narrower type than needed, but we can't @@ -404,6 +452,7 @@ function tmerge(@nospecialize(typea), @nospecialize(typeb)) aty = widenconst(typea) bty = widenconst(typeb) if aty === bty + # must have egal here, since we do not create PartialStruct for non-concrete types typea_nfields = nfields_tfunc(typea) typeb_nfields = nfields_tfunc(typeb) isa(typea_nfields, Const) || return aty @@ -412,18 +461,40 @@ function tmerge(@nospecialize(typea), @nospecialize(typeb)) type_nfields === typeb_nfields.val::Int || return aty type_nfields == 0 && return aty fields = Vector{Any}(undef, type_nfields) - anyconst = false + anyrefine = false for i = 1:type_nfields ai = getfield_tfunc(typea, Const(i)) bi = getfield_tfunc(typeb, Const(i)) - ity = tmerge(ai, bi) - if ai === Union{} || bi === Union{} - ity = widenconst(ity) + ft = fieldtype(aty, i) + if is_lattice_equal(ai, bi) || is_lattice_equal(ai, ft) + # Since ai===bi, the given type has no restrictions on complexity. + # and can be used to refine ft + tyi = ai + elseif is_lattice_equal(bi, ft) + tyi = bi + else + # Otherwise choose between using the fieldtype or some other simple merged type. + # The wrapper type never has restrictions on complexity, + # so try to use that to refine the estimated type too. + tni = _typename(widenconst(ai)) + if tni isa Const && tni === _typename(widenconst(bi)) + # A tmeet call may cause tyi to become complex, but since the inputs were + # strictly limited to being egal, this has no restrictions on complexity. + # (Otherwise, we would need to use <: and take the narrower one without + # intersection. See the similar comment in abstract_call_method.) + tyi = typeintersect(ft, (tni.val::Core.TypeName).wrapper) + else + # Since aty===bty, the fieldtype has no restrictions on complexity. 
+ tyi = ft + end + end + fields[i] = tyi + if !anyrefine + anyrefine = has_nontrivial_const_info(tyi) || # constant information + tyi β‹€ ft # just a type-level information, but more precise than the declared type end - fields[i] = ity - anyconst |= has_nontrivial_const_info(ity) end - return anyconst ? PartialStruct(aty, fields) : aty + return anyrefine ? PartialStruct(aty, fields) : aty end end if isa(typea, PartialOpaque) && isa(typeb, PartialOpaque) && widenconst(typea) == widenconst(typeb) @@ -610,44 +681,3 @@ function tuplemerge(a::DataType, b::DataType) end return Tuple{p...} end - -# compute typeintersect over the extended inference lattice -# where v is in the extended lattice, and t is a Type -function tmeet(@nospecialize(v), @nospecialize(t)) - if isa(v, Const) - if !has_free_typevars(t) && !isa(v.val, t) - return Bottom - end - return v - elseif isa(v, PartialStruct) - has_free_typevars(t) && return v - widev = widenconst(v) - if widev <: t - return v - end - ti = typeintersect(widev, t) - valid_as_lattice(ti) || return Bottom - @assert widev <: Tuple - new_fields = Vector{Any}(undef, length(v.fields)) - for i = 1:length(new_fields) - vfi = v.fields[i] - if isvarargtype(vfi) - new_fields[i] = vfi - else - new_fields[i] = tmeet(vfi, widenconst(getfield_tfunc(t, Const(i)))) - if new_fields[i] === Bottom - return Bottom - end - end - end - return tuple_tfunc(new_fields) - elseif isa(v, Conditional) - if !(Bool <: t) - return Bottom - end - return v - end - ti = typeintersect(widenconst(v), t) - valid_as_lattice(ti) || return Bottom - return ti -end diff --git a/test/compiler/inference.jl b/test/compiler/inference.jl index 1349e7da398fb..5d160143483af 100644 --- a/test/compiler/inference.jl +++ b/test/compiler/inference.jl @@ -3999,15 +3999,33 @@ end @test βŠ‘(a, c) @test βŠ‘(b, c) - @test @eval Module() begin - const ginit = Base.ImmutableDict{Any,Any}() - Base.return_types() do - g = ginit + init = Base.ImmutableDict{Number,Number}() + a = Const(init) + b = Core.PartialStruct(typeof(init), Any[Const(init), Any, ComplexF64]) + c = Core.Compiler.tmerge(a, b) + @test βŠ‘(a, c) && βŠ‘(b, c) + @test c === typeof(init) + + a = Core.PartialStruct(typeof(init), Any[Const(init), ComplexF64, ComplexF64]) + c = Core.Compiler.tmerge(a, b) + @test βŠ‘(a, c) && βŠ‘(b, c) + @test c.fields[2] === Any # or Number + @test c.fields[3] === ComplexF64 + + b = Core.PartialStruct(typeof(init), Any[Const(init), ComplexF32, Union{ComplexF32,ComplexF64}]) + c = Core.Compiler.tmerge(a, b) + @test βŠ‘(a, c) + @test βŠ‘(b, c) + @test c.fields[2] === Complex + @test c.fields[3] === Complex + + global const ginit43784 = Base.ImmutableDict{Any,Any}() + @test Base.return_types() do + g = ginit43784 while true g = Base.ImmutableDict(g, 1=>2) end end |> only === Union{} - end end # Test that purity modeling doesn't accidentally introduce new world age issues From 0791aef08100fe58aa35bfc98ab35e071be71723 Mon Sep 17 00:00:00 2001 From: pchintalapudi <34727397+pchintalapudi@users.noreply.github.com> Date: Sat, 12 Mar 2022 22:34:22 -0500 Subject: [PATCH 21/23] Refactor JIT engine to make it friendlier to multiple contexts (#44573) --- src/jitlayers.cpp | 31 ++++++++++++++----------------- src/jitlayers.h | 1 + 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/src/jitlayers.cpp b/src/jitlayers.cpp index 231b37778eae5..bc8d5eb216130 100644 --- a/src/jitlayers.cpp +++ b/src/jitlayers.cpp @@ -68,8 +68,8 @@ void jl_dump_llvm_opt_impl(void *s) dump_llvm_opt_stream = (JL_STREAM*)s; } -static void 
jl_add_to_ee(std::unique_ptr m); static void jl_add_to_ee(std::unique_ptr &M, StringMap*> &NewExports); +static void jl_decorate_module(Module &M); static uint64_t getAddressForFunction(StringRef fname); void jl_link_global(GlobalVariable *GV, void *addr) @@ -130,7 +130,7 @@ static jl_callptr_t _jl_compile_codeinst( jl_compile_workqueue(emitted, params, CompilationPolicy::Default, context); if (params._shared_module) - jl_add_to_ee(std::unique_ptr(params._shared_module)); + jl_ExecutionEngine->addModule(std::unique_ptr(params._shared_module)); StringMap*> NewExports; StringMap NewGlobals; for (auto &global : params.globals) { @@ -236,10 +236,10 @@ int jl_compile_extern_c_impl(LLVMModuleRef llvmmod, void *p, void *sysimg, jl_va jl_jit_globals(params.globals); assert(params.workqueue.empty()); if (params._shared_module) - jl_add_to_ee(std::unique_ptr(params._shared_module)); + jl_ExecutionEngine->addModule(std::unique_ptr(params._shared_module)); } if (success && llvmmod == NULL) - jl_add_to_ee(std::unique_ptr(into)); + jl_ExecutionEngine->addModule(std::unique_ptr(into)); } if (jl_codegen_lock.count == 1 && measure_compile_time_enabled) jl_atomic_fetch_add_relaxed(&jl_cumulative_compile_time, (jl_hrtime() - compiler_start_time)); @@ -1016,6 +1016,8 @@ void JuliaOJIT::addGlobalMapping(StringRef Name, uint64_t Addr) void JuliaOJIT::addModule(std::unique_ptr M) { JL_TIMING(LLVM_MODULE_FINISH); + jl_decorate_module(*M); + shareStrings(*M); std::vector NewExports; for (auto &F : M->global_values()) { if (!F.isDeclaration() && F.getLinkage() == GlobalValue::ExternalLinkage) { @@ -1307,7 +1309,7 @@ void jl_merge_module(Module *dest, std::unique_ptr src) // optimize memory by turning long strings into memoized copies, instead of // making a copy per object file of output. 
-void jl_jit_share_data(Module &M) +void JuliaOJIT::shareStrings(Module &M) { std::vector erase; for (auto &GV : M.globals()) { @@ -1320,7 +1322,7 @@ void jl_jit_share_data(Module &M) if (data.size() > 16) { // only for long strings: keep short ones as values Type *T_size = Type::getIntNTy(GV.getContext(), sizeof(void*) * 8); Constant *v = ConstantExpr::getIntToPtr( - ConstantInt::get(T_size, (uintptr_t)data.data()), + ConstantInt::get(T_size, (uintptr_t)(*ES.intern(data)).data()), GV.getType()); GV.replaceAllUsesWith(v); erase.push_back(&GV); @@ -1330,26 +1332,21 @@ void jl_jit_share_data(Module &M) GV->eraseFromParent(); } -static void jl_add_to_ee(std::unique_ptr m) -{ +static void jl_decorate_module(Module &M) { #if defined(_CPU_X86_64_) && defined(_OS_WINDOWS_) // Add special values used by debuginfo to build the UnwindData table registration for Win64 - Type *T_uint32 = Type::getInt32Ty(m->getContext()); - ArrayType *atype = ArrayType::get(T_uint32, 3); // want 4-byte alignment of 12-bytes of data + ArrayType *atype = ArrayType::get(Type::getInt32Ty(M.getContext()), 3); // want 4-byte alignment of 12-bytes of data GlobalVariable *gvs[2] = { - new GlobalVariable(*m, atype, + new GlobalVariable(M, atype, false, GlobalVariable::InternalLinkage, ConstantAggregateZero::get(atype), "__UnwindData"), - new GlobalVariable(*m, atype, + new GlobalVariable(M, atype, false, GlobalVariable::InternalLinkage, ConstantAggregateZero::get(atype), "__catchjmp") }; gvs[0]->setSection(".text"); gvs[1]->setSection(".text"); - appendToCompilerUsed(*m, makeArrayRef((GlobalValue**)gvs, 2)); + appendToCompilerUsed(M, makeArrayRef((GlobalValue**)gvs, 2)); #endif - jl_jit_share_data(*m); - assert(jl_ExecutionEngine); - jl_ExecutionEngine->addModule(std::move(m)); } static int jl_add_to_ee( @@ -1393,7 +1390,7 @@ static int jl_add_to_ee( Queued.erase(CM->get()); jl_merge_module(M.get(), std::move(*CM)); } - jl_add_to_ee(std::move(M)); + jl_ExecutionEngine->addModule(std::move(M)); MergeUp = 0; } else { diff --git a/src/jitlayers.h b/src/jitlayers.h index 035e81bead88d..94e73665b3fba 100644 --- a/src/jitlayers.h +++ b/src/jitlayers.h @@ -242,6 +242,7 @@ class JuliaOJIT { private: std::string getMangledName(StringRef Name); std::string getMangledName(const GlobalValue *GV); + void shareStrings(Module &M); std::unique_ptr TM; DataLayout DL; From 98b4b069c0560c9a358f30a43737345e67562e69 Mon Sep 17 00:00:00 2001 From: Dilum Aluthge Date: Sat, 12 Mar 2022 23:41:40 -0500 Subject: [PATCH 22/23] CI (Buildkite): ignore any private keys, regardless of where in the repository they are found (#44597) --- .gitignore | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 69a0fc0c8dab0..2780210c41a9b 100644 --- a/.gitignore +++ b/.gitignore @@ -34,5 +34,11 @@ .idea/* .vscode/* -# Buildkite: Ignore entire .buildkite directory +# Buildkite: Ignore the entire .buildkite directory /.buildkite + +# Buildkite: Ignore the unencrypted repo_key +repo_key + +# Buildkite: Ignore any agent keys (public or private) we have stored +agent_key* From c72f48264540d5d3043d071ddbe07c6c20c8c9d3 Mon Sep 17 00:00:00 2001 From: Shuhei Kadowaki Date: Wed, 27 Oct 2021 02:17:26 +0900 Subject: [PATCH 23/23] optimizer: inline abstract union-split callsite MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Currently the optimizer handles abstract callsite only when there is a single dispatch candidate (in most cases), and so inlining and static-dispatch are prohibited when 
the callsite is union-split (in other word, union-split happens only when all the dispatch candidates are concrete). However, there are certain patterns of code (most notably our Julia-level compiler code) that inherently need to deal with abstract callsite. The following example is taken from `Core.Compiler` utility: ```julia julia> @inline isType(@nospecialize t) = isa(t, DataType) && t.name === Type.body.name isType (generic function with 1 method) julia> code_typed((Any,)) do x # abstract, but no union-split, successful inlining isType(x) end |> only CodeInfo( 1 ─ %1 = (x isa Main.DataType)::Bool └── goto #3 if not %1 2 ─ %3 = Ο€ (x, DataType) β”‚ %4 = Base.getfield(%3, :name)::Core.TypeName β”‚ %5 = Base.getfield(Type{T}, :name)::Core.TypeName β”‚ %6 = (%4 === %5)::Bool └── goto #4 3 ─ goto #4 4 β”„ %9 = Ο† (#2 => %6, #3 => false)::Bool └── return %9 ) => Bool julia> code_typed((Union{Type,Nothing},)) do x # abstract, union-split, unsuccessful inlining isType(x) end |> only CodeInfo( 1 ─ %1 = (isa)(x, Nothing)::Bool └── goto #3 if not %1 2 ─ goto #4 3 ─ %4 = Main.isType(x)::Bool └── goto #4 4 β”„ %6 = Ο† (#2 => false, #3 => %4)::Bool └── return %6 ) => Bool ``` (note that this is a limitation of the inlining algorithm, and so any user-provided hints like callsite inlining annotation doesn't help here) This commit enables inlining and static dispatch for abstract union-split callsite. The core idea here is that we can simulate our dispatch semantics by generating `isa` checks in order of the specialities of dispatch candidates: ```julia julia> code_typed((Union{Type,Nothing},)) do x # union-split, unsuccessful inlining isType(x) end |> only CodeInfo( 1 ─ %1 = (isa)(x, Nothing)::Bool └── goto #3 if not %1 2 ─ goto #9 3 ─ %4 = (isa)(x, Type)::Bool └── goto #8 if not %4 4 ─ %6 = Ο€ (x, Type) β”‚ %7 = (%6 isa Main.DataType)::Bool └── goto #6 if not %7 5 ─ %9 = Ο€ (%6, DataType) β”‚ %10 = Base.getfield(%9, :name)::Core.TypeName β”‚ %11 = Base.getfield(Type{T}, :name)::Core.TypeName β”‚ %12 = (%10 === %11)::Bool └── goto #7 6 ─ goto #7 7 β”„ %15 = Ο† (#5 => %12, #6 => false)::Bool └── goto #9 8 ─ Core.throw(ErrorException("fatal error in type inference (type bound)"))::Union{} └── unreachable 9 β”„ %19 = Ο† (#2 => false, #7 => %15)::Bool └── return %19 ) => Bool ``` Inlining/static-dispatch of abstract union-split callsite will improve the performance in such situations (and so this commit will improve the latency of our JIT compilation). Especially, this commit helps us avoid excessive specializations of `Core.Compiler` code by statically-resolving `@nospecialize`d callsites, and as the result, the # of precompiled statements is now reduced from `1956` ([`master`](dc45d776a900ef17581a842952c51297065afa3a)) to `1901` (this commit). And also, as a side effect, the implementation of our inlining algorithm gets much simplified now since we no longer need the previous special handlings for abstract callsites. One possible drawback would be increased code size. 
This change seems to certainly increase the size of sysimage, but I think these numbers are in an acceptable range: > [`master`](dc45d776a900ef17581a842952c51297065afa3a) ``` ❯ du -sh usr/lib/julia/* 17M usr/lib/julia/corecompiler.ji 188M usr/lib/julia/sys-o.a 164M usr/lib/julia/sys.dylib 23M usr/lib/julia/sys.dylib.dSYM 101M usr/lib/julia/sys.ji ``` > this commit ``` ❯ du -sh usr/lib/julia/* 17M usr/lib/julia/corecompiler.ji 190M usr/lib/julia/sys-o.a 166M usr/lib/julia/sys.dylib 23M usr/lib/julia/sys.dylib.dSYM 102M usr/lib/julia/sys.ji ``` --- base/compiler/ssair/inlining.jl | 130 +++++++++++--------------------- base/sort.jl | 2 +- test/compiler/inline.jl | 78 ++++++++++++++++++- 3 files changed, 120 insertions(+), 90 deletions(-) diff --git a/base/compiler/ssair/inlining.jl b/base/compiler/ssair/inlining.jl index ab93375db4d0e..8723c09aeb7c2 100644 --- a/base/compiler/ssair/inlining.jl +++ b/base/compiler/ssair/inlining.jl @@ -241,7 +241,7 @@ function cfg_inline_unionsplit!(ir::IRCode, idx::Int, push!(from_bbs, length(state.new_cfg_blocks)) # TODO: Right now we unconditionally generate a fallback block # in case of subtyping errors - This is probably unnecessary. - if i != length(cases) || (!fully_covered || (!params.trust_inference && isdispatchtuple(cases[i].sig))) + if i != length(cases) || (!fully_covered || (!params.trust_inference)) # This block will have the next condition or the final else case push!(state.new_cfg_blocks, BasicBlock(StmtRange(idx, idx))) push!(state.new_cfg_blocks[cond_bb].succs, length(state.new_cfg_blocks)) @@ -313,7 +313,6 @@ function ir_inline_item!(compact::IncrementalCompact, idx::Int, argexprs::Vector spec = item.spec::ResolvedInliningSpec sparam_vals = item.mi.sparam_vals def = item.mi.def::Method - inline_cfg = spec.ir.cfg linetable_offset::Int32 = length(linetable) # Append the linetable of the inlined function to our line table inlined_at = Int(compact.result[idx][:line]) @@ -471,8 +470,9 @@ function ir_inline_unionsplit!(compact::IncrementalCompact, idx::Int, join_bb = bbs[end] pn = PhiNode() local bb = compact.active_result_bb - @assert length(bbs) >= length(cases) - for i in 1:length(cases) + ncases = length(cases) + @assert length(bbs) >= ncases + for i = 1:ncases ithcase = cases[i] mtype = ithcase.sig::DataType # checked within `handle_cases!` case = ithcase.item @@ -480,8 +480,7 @@ function ir_inline_unionsplit!(compact::IncrementalCompact, idx::Int, cond = true nparams = fieldcount(atype) @assert nparams == fieldcount(mtype) - if i != length(cases) || !fully_covered || - (!params.trust_inference && isdispatchtuple(cases[i].sig)) + if i != ncases || !fully_covered || !params.trust_inference for i = 1:nparams a, m = fieldtype(atype, i), fieldtype(mtype, i) # If this is always true, we don't need to check for it @@ -538,7 +537,7 @@ function ir_inline_unionsplit!(compact::IncrementalCompact, idx::Int, bb += 1 # We're now in the fall through block, decide what to do if fully_covered - if !params.trust_inference && isdispatchtuple(cases[end].sig) + if !params.trust_inference e = Expr(:call, GlobalRef(Core, :throw), FATAL_TYPE_BOUND_ERROR) insert_node_here!(compact, NewInstruction(e, Union{}, line)) insert_node_here!(compact, NewInstruction(ReturnNode(), Union{}, line)) @@ -561,7 +560,7 @@ function batch_inline!(todo::Vector{Pair{Int, Any}}, ir::IRCode, linetable::Vect state = CFGInliningState(ir) for (idx, item) in todo if isa(item, UnionSplit) - cfg_inline_unionsplit!(ir, idx, item::UnionSplit, state, params) + cfg_inline_unionsplit!(ir, idx, 
item, state, params) else item = item::InliningTodo spec = item.spec::ResolvedInliningSpec @@ -1175,12 +1174,8 @@ function analyze_single_call!( sig::Signature, state::InliningState, todo::Vector{Pair{Int, Any}}) argtypes = sig.argtypes cases = InliningCase[] - local only_method = nothing # keep track of whether there is one matching method - local meth::MethodLookupResult + local any_fully_covered = false local handled_all_cases = true - local any_covers_full = false - local revisit_idx = nothing - for i in 1:length(infos) meth = infos[i].results if meth.ambig @@ -1191,66 +1186,20 @@ function analyze_single_call!( # No applicable methods; try next union split handled_all_cases = false continue - else - if length(meth) == 1 && only_method !== false - if only_method === nothing - only_method = meth[1].method - elseif only_method !== meth[1].method - only_method = false - end - else - only_method = false - end end - for (j, match) in enumerate(meth) - any_covers_full |= match.fully_covers - if !isdispatchtuple(match.spec_types) - if !match.fully_covers - handled_all_cases = false - continue - end - if revisit_idx === nothing - revisit_idx = (i, j) - else - handled_all_cases = false - revisit_idx = nothing - end - else - handled_all_cases &= handle_match!(match, argtypes, flag, state, cases) - end + for match in meth + handled_all_cases &= handle_match!(match, argtypes, flag, state, cases, true) + any_fully_covered |= match.fully_covers end end - atype = argtypes_to_type(argtypes) - if handled_all_cases && revisit_idx !== nothing - # If there's only one case that's not a dispatchtuple, we can - # still unionsplit by visiting all the other cases first. - # This is useful for code like: - # foo(x::Int) = 1 - # foo(@nospecialize(x::Any)) = 2 - # where we where only a small number of specific dispatchable - # cases are split off from an ::Any typed fallback. - (i, j) = revisit_idx - match = infos[i].results[j] - handled_all_cases &= handle_match!(match, argtypes, flag, state, cases, true) - elseif length(cases) == 0 && only_method isa Method - # if the signature is fully covered and there is only one applicable method, - # we can try to inline it even if the signature is not a dispatch tuple. - # -- But don't try it if we already tried to handle the match in the revisit_idx - # case, because that'll (necessarily) be the same method. - if length(infos) > 1 - (metharg, methsp) = ccall(:jl_type_intersection_with_env, Any, (Any, Any), - atype, only_method.sig)::SimpleVector - match = MethodMatch(metharg, methsp::SimpleVector, only_method, true) - else - @assert length(meth) == 1 - match = meth[1] - end - handle_match!(match, argtypes, flag, state, cases, true) || return nothing - any_covers_full = handled_all_cases = match.fully_covers + if !handled_all_cases + # if we've not seen all candidates, union split is valid only for dispatch tuples + filter!(case::InliningCase->isdispatchtuple(case.sig), cases) end - handle_cases!(ir, idx, stmt, atype, cases, any_covers_full && handled_all_cases, todo, state.params) + handle_cases!(ir, idx, stmt, argtypes_to_type(argtypes), cases, + handled_all_cases & any_fully_covered, todo, state.params) end # similar to `analyze_single_call!`, but with constant results @@ -1261,8 +1210,8 @@ function handle_const_call!( (; call, results) = cinfo infos = isa(call, MethodMatchInfo) ? 
MethodMatchInfo[call] : call.matches cases = InliningCase[] + local any_fully_covered = false local handled_all_cases = true - local any_covers_full = false local j = 0 for i in 1:length(infos) meth = infos[i].results @@ -1278,32 +1227,26 @@ function handle_const_call!( for match in meth j += 1 result = results[j] - any_covers_full |= match.fully_covers + any_fully_covered |= match.fully_covers if isa(result, ConstResult) case = const_result_item(result, state) push!(cases, InliningCase(result.mi.specTypes, case)) elseif isa(result, InferenceResult) - handled_all_cases &= handle_inf_result!(result, argtypes, flag, state, cases) + handled_all_cases &= handle_inf_result!(result, argtypes, flag, state, cases, true) else @assert result === nothing - handled_all_cases &= handle_match!(match, argtypes, flag, state, cases) + handled_all_cases &= handle_match!(match, argtypes, flag, state, cases, true) end end end - # if the signature is fully covered and there is only one applicable method, - # we can try to inline it even if the signature is not a dispatch tuple - atype = argtypes_to_type(argtypes) - if length(cases) == 0 - length(results) == 1 || return nothing - result = results[1] - isa(result, InferenceResult) || return nothing - handle_inf_result!(result, argtypes, flag, state, cases, true) || return nothing - spec_types = cases[1].sig - any_covers_full = handled_all_cases = atype <: spec_types + if !handled_all_cases + # if we've not seen all candidates, union split is valid only for dispatch tuples + filter!(case::InliningCase->isdispatchtuple(case.sig), cases) end - handle_cases!(ir, idx, stmt, atype, cases, any_covers_full && handled_all_cases, todo, state.params) + handle_cases!(ir, idx, stmt, argtypes_to_type(argtypes), cases, + handled_all_cases & any_fully_covered, todo, state.params) end function handle_match!( @@ -1313,7 +1256,6 @@ function handle_match!( allow_abstract || isdispatchtuple(spec_types) || return false item = analyze_method!(match, argtypes, flag, state) item === nothing && return false - _any(case->case.sig === spec_types, cases) && return true push!(cases, InliningCase(spec_types, item)) return true end @@ -1349,7 +1291,24 @@ function handle_cases!(ir::IRCode, idx::Int, stmt::Expr, @nospecialize(atype), handle_single_case!(ir, idx, stmt, cases[1].item, todo, params) elseif length(cases) > 0 isa(atype, DataType) || return nothing - all(case::InliningCase->isa(case.sig, DataType), cases) || return nothing + # `ir_inline_unionsplit!` is going to generate `isa` checks corresponding to the + # signatures of union-split dispatch candidates in order to simulate the dispatch + # semantics, and inline their bodies into each `isa`-conditional block -- and since + # we may deal with abstract union-split callsites here, these dispatch candidates + # need to be sorted in order of their signature specificity. + # Fortunately, ml_matches already sorted them in that way, so we can just process + # them in order, as far as we haven't changed their order somewhere up to this point. 
+ ncases = length(cases) + for i = 1:ncases + sigα΅’ = cases[i].sig + isa(sigα΅’, DataType) || return nothing + for j = i+1:ncases + sigβ±Ό = cases[j].sig + # since we already bail out from ambiguous case, we can use `morespecific` as + # a strict total order of specificity (in a case when they don't have a type intersection) + @assert !hasintersect(sigα΅’, sigβ±Ό) || morespecific(sigα΅’, sigβ±Ό) "invalid order of dispatch candidate" + end + end push!(todo, idx=>UnionSplit(fully_covered, atype, cases)) end return nothing @@ -1445,7 +1404,8 @@ function assemble_inline_todo!(ir::IRCode, state::InliningState) analyze_single_call!(ir, idx, stmt, infos, flag, sig, state, todo) end - todo + + return todo end function linear_inline_eligible(ir::IRCode) diff --git a/base/sort.jl b/base/sort.jl index d26e9a4b09332..981eea35d96ab 100644 --- a/base/sort.jl +++ b/base/sort.jl @@ -5,7 +5,7 @@ module Sort import ..@__MODULE__, ..parentmodule const Base = parentmodule(@__MODULE__) using .Base.Order -using .Base: copymutable, LinearIndices, length, (:), +using .Base: copymutable, LinearIndices, length, (:), iterate, eachindex, axes, first, last, similar, zip, OrdinalRange, AbstractVector, @inbounds, AbstractRange, @eval, @inline, Vector, @noinline, AbstractMatrix, AbstractUnitRange, isless, identity, eltype, >, <, <=, >=, |, +, -, *, !, diff --git a/test/compiler/inline.jl b/test/compiler/inline.jl index 7619d4e8a0308..275a59b036730 100644 --- a/test/compiler/inline.jl +++ b/test/compiler/inline.jl @@ -810,6 +810,76 @@ let @test invoke(Any[10]) === false end +# test union-split, non-dispatchtuple callsite inlining + +@constprop :none @noinline abstract_unionsplit(@nospecialize x::Any) = Base.inferencebarrier(:Any) +@constprop :none @noinline abstract_unionsplit(@nospecialize x::Number) = Base.inferencebarrier(:Number) +let src = code_typed1((Any,)) do x + abstract_unionsplit(x) + end + @test count(isinvoke(:abstract_unionsplit), src.code) == 2 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no fallback dispatch +end +let src = code_typed1((Union{Type,Number},)) do x + abstract_unionsplit(x) + end + @test count(isinvoke(:abstract_unionsplit), src.code) == 2 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no fallback dispatch +end + +@constprop :none @noinline abstract_unionsplit_fallback(@nospecialize x::Type) = Base.inferencebarrier(:Any) +@constprop :none @noinline abstract_unionsplit_fallback(@nospecialize x::Number) = Base.inferencebarrier(:Number) +let src = code_typed1((Any,)) do x + abstract_unionsplit_fallback(x) + end + @test count(isinvoke(:abstract_unionsplit_fallback), src.code) == 2 + @test count(iscall((src, abstract_unionsplit_fallback)), src.code) == 1 # fallback dispatch +end +let src = code_typed1((Union{Type,Number},)) do x + abstract_unionsplit_fallback(x) + end + @test count(isinvoke(:abstract_unionsplit_fallback), src.code) == 2 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no fallback dispatch +end + +@constprop :aggressive @inline abstract_unionsplit(c, @nospecialize x::Any) = (c && println("erase me"); typeof(x)) +@constprop :aggressive @inline abstract_unionsplit(c, @nospecialize x::Number) = (c && println("erase me"); typeof(x)) +let src = code_typed1((Any,)) do x + abstract_unionsplit(false, x) + end + @test count(iscall((src, typeof)), src.code) == 2 + @test count(isinvoke(:println), src.code) == 0 + @test count(iscall((src, println)), src.code) == 0 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no 
fallback dispatch +end +let src = code_typed1((Union{Type,Number},)) do x + abstract_unionsplit(false, x) + end + @test count(iscall((src, typeof)), src.code) == 2 + @test count(isinvoke(:println), src.code) == 0 + @test count(iscall((src, println)), src.code) == 0 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no fallback dispatch +end + +@constprop :aggressive @inline abstract_unionsplit_fallback(c, @nospecialize x::Type) = (c && println("erase me"); typeof(x)) +@constprop :aggressive @inline abstract_unionsplit_fallback(c, @nospecialize x::Number) = (c && println("erase me"); typeof(x)) +let src = code_typed1((Any,)) do x + abstract_unionsplit_fallback(false, x) + end + @test count(iscall((src, typeof)), src.code) == 2 + @test count(isinvoke(:println), src.code) == 0 + @test count(iscall((src, println)), src.code) == 0 + @test count(iscall((src, abstract_unionsplit_fallback)), src.code) == 1 # fallback dispatch +end +let src = code_typed1((Union{Type,Number},)) do x + abstract_unionsplit_fallback(false, x) + end + @test count(iscall((src, typeof)), src.code) == 2 + @test count(isinvoke(:println), src.code) == 0 + @test count(iscall((src, println)), src.code) == 0 + @test count(iscall((src, abstract_unionsplit)), src.code) == 0 # no fallback dispatch +end + # issue 43104 @inline isGoodType(@nospecialize x::Type) = @@ -1090,11 +1160,11 @@ end global x44200::Int = 0 function f44200() - global x = 0 - while x < 10 - x += 1 + global x44200 = 0 + while x44200 < 10 + x44200 += 1 end - x + x44200 end let src = code_typed1(f44200) @test count(x -> isa(x, Core.PiNode), src.code) == 0