diff --git a/base/inference.jl b/base/inference.jl
index 67156c16cee90..b43c320f025ef 100644
--- a/base/inference.jl
+++ b/base/inference.jl
@@ -139,6 +139,7 @@ mutable struct InferenceState
     # ssavalue sparsity and restart info
     ssavalue_uses::Vector{IntSet}
    ssavalue_defs::Vector{LineNum}
+    vararg_type_container #::Type

     backedges::Vector{Tuple{InferenceState, LineNum}} # call-graph backedges connecting from callee to caller
     callers_in_cycle::Vector{InferenceState}
@@ -190,6 +191,7 @@ mutable struct InferenceState
         atypes = unwrap_unionall(linfo.specTypes)
         nargs::Int = toplevel ? 0 : linfo.def.nargs
         la = nargs
+        vararg_type_container = nothing
         if la > 0
             if linfo.def.isva
                 if atypes == Tuple
@@ -198,8 +200,8 @@ mutable struct InferenceState
                     end
                     vararg_type = Tuple
                 else
-                    vararg_type = limit_tuple_depth(params, tupletype_tail(atypes, la))
-                    vararg_type = tuple_tfunc(vararg_type) # returns a Const object, if applicable
+                    vararg_type_container = limit_tuple_depth(params, tupletype_tail(atypes, la))
+                    vararg_type = tuple_tfunc(vararg_type_container) # returns a Const object, if applicable
                     vararg_type = rewrap(vararg_type, linfo.specTypes)
                 end
                 s_types[1][la] = VarState(vararg_type, false)
@@ -275,7 +277,7 @@ mutable struct InferenceState
             nargs, s_types, s_edges,
             Union{}, W, 1, n,
             cur_hand, handler_at, n_handlers,
-            ssavalue_uses, ssavalue_defs,
+            ssavalue_uses, ssavalue_defs, vararg_type_container,
             Vector{Tuple{InferenceState,LineNum}}(), # backedges
             Vector{InferenceState}(), # callers_in_cycle
             #=parent=#nothing,
@@ -369,6 +371,11 @@ isknownlength(t::DataType) = !isvatuple(t) ||
 # t[n:end]
 tupletype_tail(@nospecialize(t), n) = Tuple{t.parameters[n:end]...}

+function is_specializable_vararg_slot(arg, sv::InferenceState)
+    return (isa(arg, Slot) && slot_id(arg) == sv.nargs &&
+            isa(sv.vararg_type_container, DataType))
+end
+
 #### type-functions for builtins / intrinsics ####

@@ -1432,8 +1439,19 @@ end

 function tuple_tfunc(@nospecialize(argtype))
     if isa(argtype, DataType) && argtype.name === Tuple.name
-        p = Any[ isType(x) && !isa(x.parameters[1], TypeVar) ? typeof(x.parameters[1]) : x
-                 for x in argtype.parameters ]
+        p = Vector{Any}()
+        for x in argtype.parameters
+            if isType(x) && !isa(x.parameters[1], TypeVar)
+                xparam = x.parameters[1]
+                if isleaftype(xparam) || xparam === Bottom
+                    push!(p, typeof(xparam))
+                else
+                    push!(p, Type)
+                end
+            else
+                push!(p, x)
+            end
+        end
         t = Tuple{p...}
         # replace a singleton type with its equivalent Const object
         isdefined(t, :instance) && return Const(t.instance)
@@ -1827,6 +1845,10 @@ function precise_container_type(@nospecialize(arg), @nospecialize(typ), vtypes::
         arg = stmt.args[2]
     end

+    if is_specializable_vararg_slot(arg, sv)
+        return Any[rewrap_unionall(p, sv.linfo.specTypes) for p in sv.vararg_type_container.parameters]
+    end
+
     tti0 = widenconst(typ)
     tti = unwrap_unionall(tti0)
     if isa(arg, Expr) && arg.head === :call && (abstract_evals_to_constant(arg.args[1], svec, vtypes, sv) ||
@@ -4975,7 +4997,11 @@ function inlining_pass(e::Expr, sv::InferenceState, stmts, ins)
                 tmpv = newvar!(sv, t)
                 push!(newstmts, Expr(:(=), tmpv, aarg))
             end
-            tp = t.parameters
+            if is_specializable_vararg_slot(aarg, sv)
+                tp = sv.vararg_type_container.parameters
+            else
+                tp = t.parameters
+            end
             newargs[i-2] = Any[ mk_getfield(tmpv,j,tp[j]) for j=1:length(tp) ]
         else
             # not all args expandable
diff --git a/test/inference.jl b/test/inference.jl
index e27794368976b..358b70f47186f 100644
--- a/test/inference.jl
+++ b/test/inference.jl
@@ -922,6 +922,14 @@ let niter = 0
     @test niter == 4
 end

+# issue #22875
+
+typeargs = (Type{Int},)
+@test Base.Core.Inference.return_type((args...) -> one(args...), typeargs) === Int
+
+typeargs = (Type{Int},Type{Int},Type{Int},Type{Int},Type{Int},Type{Int})
+@test Base.Core.Inference.return_type(promote_type, typeargs) === Type{Int}
+
 # demonstrate that inference must converge
 # while doing constant propagation
 Base.@pure plus1(x) = x + 1
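For reference, a quick way to exercise the splatting pattern that the new tests cover (issue #22875) from the REPL. This is an illustrative sketch only, not part of the patch; it assumes a 0.7-dev build where inference is reachable as Base.Core.Inference (as in the tests above), and the name splat_one is made up for the example:

    # a varargs function that splats its vararg tuple into another call
    splat_one(args...) = one(args...)

    # with the precise vararg container tracked, a single Type{Int} argument
    # makes the splatted call infer as one(Int), i.e. Int
    Base.Core.Inference.return_type(splat_one, (Type{Int},)) === Int

    # promote_type over six Type{Int} arguments likewise stays precise
    Base.Core.Inference.return_type(promote_type,
        (Type{Int}, Type{Int}, Type{Int}, Type{Int}, Type{Int}, Type{Int})) === Type{Int}

Before this change, the vararg slot only carried the widened tuple type produced by tuple_tfunc (a Type{Int} element became DataType), so splatting it into one or promote_type lost the precise result; the new vararg_type_container field keeps the original container for precise_container_type and inlining_pass to consult.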