diff --git a/Compiler/src/Compiler.jl b/Compiler/src/Compiler.jl
index 68eab073b7e2d..e1c167e57ed08 100644
--- a/Compiler/src/Compiler.jl
+++ b/Compiler/src/Compiler.jl
@@ -38,7 +38,7 @@ else
using Core.Intrinsics, Core.IR
using Core: ABIOverride, Builtin, CodeInstance, IntrinsicFunction, MethodInstance, MethodMatch,
- MethodTable, PartialOpaque, SimpleVector, TypeofVararg,
+ MethodTable, MethodCache, PartialOpaque, SimpleVector, TypeofVararg,
_apply_iterate, apply_type, compilerbarrier, donotdelete, memoryref_isassigned,
memoryrefget, memoryrefnew, memoryrefoffset, memoryrefset!, print, println, show, svec,
typename, unsafe_write, write
diff --git a/Compiler/src/abstractinterpretation.jl b/Compiler/src/abstractinterpretation.jl
index a16bc06dca103..8d64575331e4a 100644
--- a/Compiler/src/abstractinterpretation.jl
+++ b/Compiler/src/abstractinterpretation.jl
@@ -363,15 +363,13 @@ function find_union_split_method_matches(interp::AbstractInterpreter, argtypes::
arg_n = split_argtypes[i]::Vector{Any}
sig_n = argtypes_to_type(arg_n)
sig_n === Bottom && continue
- mt = ccall(:jl_method_table_for, Any, (Any,), sig_n)
- mt === nothing && return FailedMethodMatch("Could not identify method table for call")
- mt = mt::MethodTable
thismatches = findall(sig_n, method_table(interp); limit = max_methods)
if thismatches === nothing
return FailedMethodMatch("For one of the union split cases, too many methods matched")
end
valid_worlds = intersect(valid_worlds, thismatches.valid_worlds)
thisfullmatch = any(match::MethodMatch->match.fully_covers, thismatches)
+ mt = Core.GlobalMethods
thisinfo = MethodMatchInfo(thismatches, mt, sig_n, thisfullmatch)
push!(infos, thisinfo)
for idx = 1:length(thismatches)
@@ -385,11 +383,6 @@ function find_union_split_method_matches(interp::AbstractInterpreter, argtypes::
end
function find_simple_method_matches(interp::AbstractInterpreter, @nospecialize(atype), max_methods::Int)
- mt = ccall(:jl_method_table_for, Any, (Any,), atype)
- if mt === nothing
- return FailedMethodMatch("Could not identify method table for call")
- end
- mt = mt::MethodTable
matches = findall(atype, method_table(interp); limit = max_methods)
if matches === nothing
# this means too many methods matched
@@ -397,6 +390,7 @@ function find_simple_method_matches(interp::AbstractInterpreter, @nospecialize(a
return FailedMethodMatch("Too many methods matched")
end
fullmatch = any(match::MethodMatch->match.fully_covers, matches)
+ mt = Core.GlobalMethods
info = MethodMatchInfo(matches, mt, atype, fullmatch)
applicable = MethodMatchTarget[MethodMatchTarget(matches[idx], info.edges, idx) for idx = 1:length(matches)]
return MethodMatches(applicable, info, matches.valid_worlds)
diff --git a/Compiler/src/stmtinfo.jl b/Compiler/src/stmtinfo.jl
index d108c671301b9..8f08748e1bc57 100644
--- a/Compiler/src/stmtinfo.jl
+++ b/Compiler/src/stmtinfo.jl
@@ -47,17 +47,16 @@ end
add_edges_impl(edges::Vector{Any}, info::MethodMatchInfo) = _add_edges_impl(edges, info)
function _add_edges_impl(edges::Vector{Any}, info::MethodMatchInfo, mi_edge::Bool=false)
if !fully_covering(info)
- # add legacy-style missing backedge info also
exists = false
for i in 2:length(edges)
- if edges[i] === info.mt && edges[i-1] == info.atype
+ if edges[i] === Core.GlobalMethods && edges[i-1] == info.atype
exists = true
break
end
end
if !exists
push!(edges, info.atype)
- push!(edges, info.mt)
+ push!(edges, Core.GlobalMethods)
end
end
nmatches = length(info.results)
diff --git a/Compiler/src/tfuncs.jl b/Compiler/src/tfuncs.jl
index f3ce3b010a345..9e07567a39adc 100644
--- a/Compiler/src/tfuncs.jl
+++ b/Compiler/src/tfuncs.jl
@@ -3199,15 +3199,12 @@ function _hasmethod_tfunc(interp::AbstractInterpreter, argtypes::Vector{Any}, sv
isdispatchelem(ft) || return CallMeta(Bool, Any, Effects(), NoCallInfo()) # check that we might not have a subtype of `ft` at runtime, before doing supertype lookup below
types = rewrap_unionall(Tuple{ft, unwrapped.parameters...}, types)::Type
end
- mt = ccall(:jl_method_table_for, Any, (Any,), types)
- if !isa(mt, MethodTable)
- return CallMeta(Bool, Any, EFFECTS_THROWS, NoCallInfo())
- end
match, valid_worlds = findsup(types, method_table(interp))
update_valid_age!(sv, valid_worlds)
if match === nothing
rt = Const(false)
vresults = MethodLookupResult(Any[], valid_worlds, true)
+ mt = Core.GlobalMethods
vinfo = MethodMatchInfo(vresults, mt, types, false) # XXX: this should actually be an info with invoke-type edge
else
rt = Const(true)
diff --git a/Compiler/src/typeinfer.jl b/Compiler/src/typeinfer.jl
index f8ea506dd3c99..dacde593fe1f2 100644
--- a/Compiler/src/typeinfer.jl
+++ b/Compiler/src/typeinfer.jl
@@ -767,7 +767,7 @@ function store_backedges(caller::CodeInstance, edges::SimpleVector)
if item isa Core.Binding
maybe_add_binding_backedge!(item, caller)
elseif item isa MethodTable
- ccall(:jl_method_table_add_backedge, Cvoid, (Any, Any, Any), item, invokesig, caller)
+ ccall(:jl_method_table_add_backedge, Cvoid, (Any, Any), invokesig, caller)
else
item::MethodInstance
ccall(:jl_method_instance_add_backedge, Cvoid, (Any, Any, Any), item, invokesig, caller)
diff --git a/Compiler/src/utilities.jl b/Compiler/src/utilities.jl
index a5f271b0c3ef9..fe8966c32fc17 100644
--- a/Compiler/src/utilities.jl
+++ b/Compiler/src/utilities.jl
@@ -158,10 +158,8 @@ end
function get_compileable_sig(method::Method, @nospecialize(atype), sparams::SimpleVector)
isa(atype, DataType) || return nothing
- mt = ccall(:jl_method_get_table, Any, (Any,), method)
- mt === nothing && return nothing
- return ccall(:jl_normalize_to_compilable_sig, Any, (Any, Any, Any, Any, Cint),
- mt, atype, sparams, method, #=int return_if_compileable=#1)
+ return ccall(:jl_normalize_to_compilable_sig, Any, (Any, Any, Any, Cint),
+ atype, sparams, method, #=int return_if_compileable=#1)
end
diff --git a/base/Base_compiler.jl b/base/Base_compiler.jl
index c85ad4547379c..91e765bac8a13 100644
--- a/base/Base_compiler.jl
+++ b/base/Base_compiler.jl
@@ -218,7 +218,7 @@ function Core.kwcall(kwargs::NamedTuple, ::typeof(invoke), f, T, args...)
return invoke(Core.kwcall, T, kwargs, f, args...)
end
# invoke does not have its own call cache, but kwcall for invoke does
-setfield!(typeof(invoke).name.mt, :max_args, 3, :monotonic) # invoke, f, T, args...
+setfield!(typeof(invoke).name, :max_args, Int32(3), :monotonic) # invoke, f, T, args...
# define applicable(f, T, args...; kwargs...), without kwargs wrapping
# to forward to applicable
@@ -252,7 +252,7 @@ function Core.kwcall(kwargs::NamedTuple, ::typeof(invokelatest), f, args...)
@inline
return Core.invokelatest(Core.kwcall, kwargs, f, args...)
end
-setfield!(typeof(invokelatest).name.mt, :max_args, 2, :monotonic) # invokelatest, f, args...
+setfield!(typeof(invokelatest).name, :max_args, Int32(2), :monotonic) # invokelatest, f, args...
"""
invoke_in_world(world, f, args...; kwargs...)
@@ -286,7 +286,7 @@ function Core.kwcall(kwargs::NamedTuple, ::typeof(invoke_in_world), world::UInt,
@inline
return Core.invoke_in_world(world, Core.kwcall, kwargs, f, args...)
end
-setfield!(typeof(invoke_in_world).name.mt, :max_args, 3, :monotonic) # invoke_in_world, world, f, args...
+setfield!(typeof(invoke_in_world).name, :max_args, Int32(3), :monotonic) # invoke_in_world, world, f, args...
# core operations & types
include("promotion.jl")
diff --git a/base/deprecated.jl b/base/deprecated.jl
index c5701adf1a420..e7ea1e15e7b50 100644
--- a/base/deprecated.jl
+++ b/base/deprecated.jl
@@ -211,7 +211,7 @@ macro deprecate(old, new, export_old=true)
maybe_export,
:($(esc(old)) = begin
$meta
- depwarn($"`$oldcall` is deprecated, use `$newcall` instead.", Core.Typeof($(esc(fnexpr))).name.mt.name)
+ depwarn($"`$oldcall` is deprecated, use `$newcall` instead.", Core.Typeof($(esc(fnexpr))).name.singletonname)
$(esc(new))
end))
else
@@ -222,7 +222,7 @@ macro deprecate(old, new, export_old=true)
export_old ? Expr(:export, esc(old)) : nothing,
:(function $(esc(old))(args...; kwargs...)
$meta
- depwarn($"`$old` is deprecated, use `$new` instead.", Core.Typeof($(esc(old))).name.mt.name)
+ depwarn($"`$old` is deprecated, use `$new` instead.", Core.Typeof($(esc(old))).name.singletonname)
$(esc(new))(args...; kwargs...)
end))
end
diff --git a/base/docs/bindings.jl b/base/docs/bindings.jl
index 5c65a35659f81..5a0e8e01762e2 100644
--- a/base/docs/bindings.jl
+++ b/base/docs/bindings.jl
@@ -42,6 +42,6 @@ end
aliasof(b::Binding) = defined(b) ? (a = aliasof(resolve(b), b); defined(a) ? a : b) : b
aliasof(d::DataType, b) = Binding(d.name.module, d.name.name)
-aliasof(λ::Function, b) = (m = typeof(λ).name.mt; Binding(m.module, m.name))
+aliasof(λ::Function, b) = (m = typeof(λ).name; Binding(m.module, m.singletonname))
aliasof(m::Module, b) = Binding(m, nameof(m))
aliasof(other, b) = b
diff --git a/base/errorshow.jl b/base/errorshow.jl
index 4a8003ad82b6f..8d90aae79be82 100644
--- a/base/errorshow.jl
+++ b/base/errorshow.jl
@@ -328,7 +328,7 @@ function showerror(io::IO, ex::MethodError)
print(io, "\nIn case you're trying to index into the array, use square brackets [] instead of parentheses ().")
end
# Check for local functions that shadow methods in Base
- let name = ft.name.mt.name
+ let name = ft.name.singletonname
if f_is_function && isdefined(Base, name)
basef = getfield(Base, name)
if basef !== f && hasmethod(basef, arg_types)
diff --git a/base/invalidation.jl b/base/invalidation.jl
index f26e7968d8a2a..34f260f7379fd 100644
--- a/base/invalidation.jl
+++ b/base/invalidation.jl
@@ -15,36 +15,6 @@ function iterate(gri::GlobalRefIterator, i = 1)
return ((b::Core.Binding).globalref, i+1)
end
-const TYPE_TYPE_MT = Type.body.name.mt
-const NONFUNCTION_MT = Core.MethodTable.name.mt
-function foreach_module_mtable(visit, m::Module, world::UInt)
- for gb in globalrefs(m)
- binding = gb.binding
- bpart = lookup_binding_partition(world, binding)
- if is_defined_const_binding(binding_kind(bpart))
- v = partition_restriction(bpart)
- uw = unwrap_unionall(v)
- name = gb.name
- if isa(uw, DataType)
- tn = uw.name
- if tn.module === m && tn.name === name && tn.wrapper === v && isdefined(tn, :mt)
- # this is the original/primary binding for the type (name/wrapper)
- mt = tn.mt
- if mt !== nothing && mt !== TYPE_TYPE_MT && mt !== NONFUNCTION_MT
- @assert mt.module === m
- visit(mt) || return false
- end
- end
- elseif isa(v, Core.MethodTable) && v.module === m && v.name === name
- # this is probably an external method table here, so let's
- # assume so as there is no way to precisely distinguish them
- visit(v) || return false
- end
- end
- end
- return true
-end
-
function foreachgr(visit, src::CodeInfo)
stmts = src.code
for i = 1:length(stmts)
diff --git a/base/methodshow.jl b/base/methodshow.jl
index 7fdefc9b7311f..dc3f564d70db7 100644
--- a/base/methodshow.jl
+++ b/base/methodshow.jl
@@ -81,7 +81,7 @@ function kwarg_decl(m::Method, kwtype = nothing)
if m.sig !== Tuple # OpaqueClosure or Builtin
kwtype = typeof(Core.kwcall)
sig = rewrap_unionall(Tuple{kwtype, NamedTuple, (unwrap_unionall(m.sig)::DataType).parameters...}, m.sig)
- kwli = ccall(:jl_methtable_lookup, Any, (Any, Any, UInt), kwtype.name.mt, sig, get_world_counter())
+ kwli = ccall(:jl_methtable_lookup, Any, (Any, UInt), sig, get_world_counter())
if kwli !== nothing
kwli = kwli::Method
slotnames = ccall(:jl_uncompress_argnames, Vector{Symbol}, (Any,), kwli.slot_syms)
@@ -259,10 +259,10 @@ function show_method(io::IO, m::Method; modulecolor = :light_black, digit_align_
end
function show_method_list_header(io::IO, ms::MethodList, namefmt::Function)
- mt = ms.mt
- name = mt.name
- hasname = isdefined(mt.module, name) &&
- typeof(getfield(mt.module, name)) <: Function
+ tn = ms.tn
+ name = tn.singletonname
+ hasname = isdefined(tn.module, name) &&
+ typeof(getfield(tn.module, name)) <: Function
n = length(ms)
m = n==1 ? "method" : "methods"
print(io, "# $n $m")
@@ -271,18 +271,18 @@ function show_method_list_header(io::IO, ms::MethodList, namefmt::Function)
if hasname
what = (startswith(sname, '@') ?
"macro"
- : mt.module === Core && mt.defs isa Core.TypeMapEntry && (mt.defs.func::Method).sig === Tuple ?
+ : tn.module === Core && tn.wrapper <: Core.Builtin ?
"builtin function"
: # else
"generic function")
print(io, " for ", what, " ", namedisplay, " from ")
- col = get!(() -> popfirst!(STACKTRACE_MODULECOLORS), STACKTRACE_FIXEDCOLORS, parentmodule_before_main(ms.mt.module))
+ col = get!(() -> popfirst!(STACKTRACE_MODULECOLORS), STACKTRACE_FIXEDCOLORS, parentmodule_before_main(tn.module))
- printstyled(io, ms.mt.module, color=col)
+ printstyled(io, tn.module, color=col)
elseif '#' in sname
print(io, " for anonymous function ", namedisplay)
- elseif mt === _TYPE_NAME.mt
+ elseif tn === _TYPE_NAME || iskindtype(tn.wrapper)
print(io, " for type constructor")
else
print(io, " for callable object")
@@ -293,6 +293,8 @@ end
# Determine the `modulecolor` value to pass to `show_method`
function _modulecolor(method::Method)
mmt = get_methodtable(method)
+ # TODO: this looks like a buggy bit of internal hacking, so disable for now
+ return nothing
if mmt === nothing || mmt.module === parentmodule(method)
return nothing
end
@@ -314,10 +316,10 @@ function _modulecolor(method::Method)
end
function show_method_table(io::IO, ms::MethodList, max::Int=-1, header::Bool=true)
- mt = ms.mt
- name = mt.name
- hasname = isdefined(mt.module, name) &&
- typeof(getfield(mt.module, name)) <: Function
+ tn = ms.tn
+ name = tn.singletonname
+ hasname = isdefined(tn.module, name) &&
+ typeof(getfield(tn.module, name)) <: Function
if header
show_method_list_header(io, ms, str -> "\""*str*"\"")
end
@@ -458,7 +460,6 @@ function show(io::IO, ::MIME"text/html", m::Method)
end
function show(io::IO, mime::MIME"text/html", ms::MethodList)
- mt = ms.mt
show_method_list_header(io, ms, str -> ""*str*"")
print(io, "
")
for meth in ms
diff --git a/base/operators.jl b/base/operators.jl
index d87e55498bd75..51729b852070d 100644
--- a/base/operators.jl
+++ b/base/operators.jl
@@ -633,7 +633,7 @@ function afoldl(op, a, bs...)
end
return y
end
-setfield!(typeof(afoldl).name.mt, :max_args, 34, :monotonic)
+setfield!(typeof(afoldl).name, :max_args, Int32(34), :monotonic)
for op in (:+, :*, :&, :|, :xor, :min, :max, :kron)
@eval begin
diff --git a/base/reflection.jl b/base/reflection.jl
index 2e976add50190..304683639b6d3 100644
--- a/base/reflection.jl
+++ b/base/reflection.jl
@@ -933,13 +933,7 @@ this is a compiler-generated name. For explicitly-declared subtypes of
`Function`, it is the name of the function's type.
"""
function nameof(f::Function)
- t = typeof(f)
- mt = t.name.mt
- if mt === Symbol.name.mt
- # uses shared method table, so name is not unique to this function type
- return nameof(t)
- end
- return mt.name
+ return typeof(f).name.singletonname
end
function nameof(f::Core.IntrinsicFunction)
diff --git a/base/runtime_internals.jl b/base/runtime_internals.jl
index dc78d35a0bc0d..b50d0c7b23881 100644
--- a/base/runtime_internals.jl
+++ b/base/runtime_internals.jl
@@ -1356,14 +1356,14 @@ hasproperty(x, s::Symbol) = s in propertynames(x)
Make method `m` uncallable and force recompilation of any methods that use(d) it.
"""
function delete_method(m::Method)
- ccall(:jl_method_table_disable, Cvoid, (Any, Any), get_methodtable(m), m)
+ ccall(:jl_method_table_disable, Cvoid, (Any,), m)
end
# type for reflecting and pretty-printing a subset of methods
mutable struct MethodList <: AbstractArray{Method,1}
ms::Array{Method,1}
- mt::Core.MethodTable
+ tn::Core.TypeName # contains module.singletonname globalref for altering some aspects of printing
end
size(m::MethodList) = size(m.ms)
@@ -1374,10 +1374,10 @@ function MethodList(mt::Core.MethodTable)
visit(mt) do m
push!(ms, m)
end
- return MethodList(ms, mt)
+ return MethodList(ms, Any.name)
end
-function matches_to_methods(ms::Array{Any,1}, mt::Core.MethodTable, mod)
+function matches_to_methods(ms::Array{Any,1}, tn::Core.TypeName, mod)
# Lack of specialization => a comprehension triggers too many invalidations via _collect, so collect the methods manually
ms = Method[(ms[i]::Core.MethodMatch).method for i in 1:length(ms)]
# Remove shadowed methods with identical type signatures
@@ -1392,7 +1392,7 @@ function matches_to_methods(ms::Array{Any,1}, mt::Core.MethodTable, mod)
mod === nothing || filter!(ms) do m
return parentmodule(m) ∈ mod
end
- return MethodList(ms, mt)
+ return MethodList(ms, tn)
end
"""
@@ -1414,7 +1414,7 @@ function methods(@nospecialize(f), @nospecialize(t),
world = get_world_counter()
world == typemax(UInt) && error("code reflection cannot be used from generated functions")
ms = _methods(f, t, -1, world)::Vector{Any}
- return matches_to_methods(ms, typeof(f).name.mt, mod)
+ return matches_to_methods(ms, typeof(f).name, mod)
end
methods(@nospecialize(f), @nospecialize(t), mod::Module) = methods(f, t, (mod,))
@@ -1425,7 +1425,7 @@ function methods_including_ambiguous(@nospecialize(f), @nospecialize(t))
min = RefValue{UInt}(typemin(UInt))
max = RefValue{UInt}(typemax(UInt))
ms = _methods_by_ftype(tt, nothing, -1, world, true, min, max, Ptr{Int32}(C_NULL))::Vector{Any}
- return matches_to_methods(ms, typeof(f).name.mt, nothing)
+ return matches_to_methods(ms, typeof(f).name, nothing)
end
function methods(@nospecialize(f),
@@ -1623,10 +1623,8 @@ end
function get_nospecializeinfer_sig(method::Method, @nospecialize(atype), sparams::SimpleVector)
isa(atype, DataType) || return method.sig
- mt = ccall(:jl_method_get_table, Any, (Any,), method)
- mt === nothing && return method.sig
- return ccall(:jl_normalize_to_compilable_sig, Any, (Any, Any, Any, Any, Cint),
- mt, atype, sparams, method, #=int return_if_compileable=#0)
+ return ccall(:jl_normalize_to_compilable_sig, Any, (Any, Any, Any, Cint),
+ atype, sparams, method, #=int return_if_compileable=#0)
end
is_nospecialized(method::Method) = method.nospecialize ≠ 0
diff --git a/base/show.jl b/base/show.jl
index 9864fc7e5dec8..14518be0f153e 100644
--- a/base/show.jl
+++ b/base/show.jl
@@ -39,16 +39,16 @@ end
function _isself(ft::DataType)
ftname = ft.name
- isdefined(ftname, :mt) || return false
- name = ftname.mt.name
- mod = parentmodule(ft) # NOTE: not necessarily the same as ft.name.mt.module
- return invokelatest(isdefinedglobal, mod, name) && ft == typeof(invokelatest(getglobal, mod, name))
+ name = ftname.singletonname
+ ftname.name === name && return false
+ mod = parentmodule(ft)
+ return invokelatest(isdefinedglobal, mod, name) && ft === typeof(invokelatest(getglobal, mod, name))
end
function show(io::IO, ::MIME"text/plain", f::Function)
get(io, :compact, false)::Bool && return show(io, f)
ft = typeof(f)
- name = ft.name.mt.name
+ name = ft.name.singletonname
if isa(f, Core.IntrinsicFunction)
print(io, f)
id = Core.Intrinsics.bitcast(Int32, f)
@@ -542,22 +542,20 @@ module UsesCoreAndBaseOnly
end
function show_function(io::IO, f::Function, compact::Bool, fallback::Function)
- ft = typeof(f)
- mt = ft.name.mt
- if mt === Symbol.name.mt
- # uses shared method table
+ fname = typeof(f).name
+ if fname.name === fname.singletonname
fallback(io, f)
elseif compact
- print(io, mt.name)
- elseif isdefined(mt, :module) && isdefinedglobal(mt.module, mt.name) &&
- getglobal(mt.module, mt.name) === f
+ print(io, fname.singletonname)
+ elseif isdefined(fname, :module) && isdefinedglobal(fname.module, fname.singletonname) && isconst(fname.module, fname.singletonname) &&
+ getglobal(fname.module, fname.singletonname) === f
# this used to call the removed internal function `is_exported_from_stdlib`, which effectively
# just checked for exports from Core and Base.
mod = get(io, :module, UsesCoreAndBaseOnly)
- if !(isvisible(mt.name, mt.module, mod) || mt.module === mod)
- print(io, mt.module, ".")
+ if !(isvisible(fname.singletonname, fname.module, mod) || fname.module === mod)
+ print(io, fname.module, ".")
end
- show_sym(io, mt.name)
+ show_sym(io, fname.singletonname)
else
fallback(io, f)
end
@@ -965,7 +963,7 @@ function show(io::IO, ::MIME"text/plain", @nospecialize(x::Type))
# give a helpful hint for function types
if x isa DataType && x !== UnionAll && !(get(io, :compact, false)::Bool)
tn = x.name::Core.TypeName
- globname = isdefined(tn, :mt) ? tn.mt.name : nothing
+ globname = tn.singletonname
if is_global_function(tn, globname)
print(io, " (singleton type of function ")
show_sym(io, globname)
@@ -1048,11 +1046,11 @@ function isvisible(sym::Symbol, parent::Module, from::Module)
end
function is_global_function(tn::Core.TypeName, globname::Union{Symbol,Nothing})
- if globname !== nothing
+ if globname !== nothing && isconcretetype(tn.wrapper) && tn !== DataType.name # ignore that typeof(DataType)===DataType, since it is valid but not useful
globname_str = string(globname::Symbol)
- if ('#' ∉ globname_str && '@' ∉ globname_str && isdefined(tn, :module) &&
- isdefinedglobal(tn.module, globname) &&
- isconcretetype(tn.wrapper) && isa(getglobal(tn.module, globname), tn.wrapper))
+ if '#' ∉ globname_str && '@' ∉ globname_str && isdefined(tn, :module) &&
+ isdefinedglobal(tn.module, globname) && isconst(tn.module, globname) &&
+ isa(getglobal(tn.module, globname), tn.wrapper)
return true
end
end
@@ -1083,7 +1081,7 @@ function show_type_name(io::IO, tn::Core.TypeName)
# intercept this case and print `UnionAll` instead.
return print(io, "UnionAll")
end
- globname = isdefined(tn, :mt) ? tn.mt.name : nothing
+ globname = tn.singletonname
globfunc = is_global_function(tn, globname)
sym = (globfunc ? globname : tn.name)::Symbol
globfunc && print(io, "typeof(")
@@ -2567,10 +2565,10 @@ function show_signature_function(io::IO, @nospecialize(ft), demangle=false, farg
uw = unwrap_unionall(ft)
if ft <: Function && isa(uw, DataType) && isempty(uw.parameters) && _isself(uw)
uwmod = parentmodule(uw)
- if qualified && !isexported(uwmod, uw.name.mt.name) && uwmod !== Main
+ if qualified && !isexported(uwmod, uw.name.singletonname) && uwmod !== Main
print_within_stacktrace(io, uwmod, '.', bold=true)
end
- s = sprint(show_sym, (demangle ? demangle_function_name : identity)(uw.name.mt.name), context=io)
+ s = sprint(show_sym, (demangle ? demangle_function_name : identity)(uw.name.singletonname), context=io)
print_within_stacktrace(io, s, bold=true)
elseif isType(ft) && (f = ft.parameters[1]; !isa(f, TypeVar))
uwf = unwrap_unionall(f)
diff --git a/base/strings/io.jl b/base/strings/io.jl
index c9c5de1d791d5..f3a0783e98a9b 100644
--- a/base/strings/io.jl
+++ b/base/strings/io.jl
@@ -51,7 +51,7 @@ function print(io::IO, xs...)
return nothing
end
-setfield!(typeof(print).name.mt, :max_args, 10, :monotonic)
+setfield!(typeof(print).name, :max_args, Int32(10), :monotonic)
"""
println([io::IO], xs...)
@@ -76,7 +76,7 @@ julia> takestring!(io)
"""
println(io::IO, xs...) = print(io, xs..., "\n")
-setfield!(typeof(println).name.mt, :max_args, 10, :monotonic)
+setfield!(typeof(println).name, :max_args, Int32(10), :monotonic)
## conversion of general objects to strings ##
"""
@@ -148,7 +148,7 @@ function print_to_string(xs...)
end
takestring!(s)
end
-setfield!(typeof(print_to_string).name.mt, :max_args, 10, :monotonic)
+setfield!(typeof(print_to_string).name, :max_args, Int32(10), :monotonic)
function string_with_env(env, xs...)
if isempty(xs)
diff --git a/base/summarysize.jl b/base/summarysize.jl
index 1e4d546e675aa..9dfd1431b84c7 100644
--- a/base/summarysize.jl
+++ b/base/summarysize.jl
@@ -139,7 +139,7 @@ end
function (ss::SummarySize)(obj::Core.TypeName)
key = pointer_from_objref(obj)
haskey(ss.seen, key) ? (return 0) : (ss.seen[key] = true)
- return Core.sizeof(obj) + (isdefined(obj, :mt) ? ss(obj.mt) : 0)
+ return Core.sizeof(obj)
end
function (ss::SummarySize)(obj::GenericMemory)
diff --git a/base/sysimg.jl b/base/sysimg.jl
index 8adb05ece0b2c..f5354c6ebea1f 100644
--- a/base/sysimg.jl
+++ b/base/sysimg.jl
@@ -15,8 +15,8 @@ ccall(:jl_init_restored_module, Cvoid, (Any,), Base)
include([mapexpr::Function,] path::AbstractString)
Evaluate the contents of the input source file in the global scope of the containing module.
-Every module (except those defined with `baremodule`) has its own
-definition of `include`, which evaluates the file in that module.
+Every `Module` (except those defined with `baremodule`) has a private 1-argument definition
+of `include`, which evaluates the file in that module, for use inside that module.
Returns the result of the last evaluated expression of the input file. During including,
a task-local include path is set to the directory containing the file. Nested calls to
`include` will search relative to that path. This function is typically used to load source
@@ -40,15 +40,18 @@ Use [`Base.include`](@ref) to evaluate a file into another module.
!!! compat "Julia 1.5"
Julia 1.5 is required for passing the `mapexpr` argument.
"""
-const include = Base.IncludeInto(Main)
+Base.IncludeInto
"""
eval(expr)
Evaluate an expression in the global scope of the containing module.
-Every `Module` (except those defined with `baremodule`) has its own 1-argument
-definition of `eval`, which evaluates expressions in that module.
+Every `Module` (except those defined with `baremodule`) has a private 1-argument definition
+of `eval`, which evaluates expressions in that module, for use inside that module.
"""
+Core.EvalInto
+
+const include = Base.IncludeInto(Main)
const eval = Core.EvalInto(Main)
# Ensure this file is also tracked
diff --git a/doc/src/devdocs/functions.md b/doc/src/devdocs/functions.md
index fb67dfd17c3e2..51df95ba0bc42 100644
--- a/doc/src/devdocs/functions.md
+++ b/doc/src/devdocs/functions.md
@@ -1,5 +1,6 @@
# Julia Functions
+
This document will explain how functions, method definitions, and method tables work.
## Method Tables
@@ -15,7 +16,7 @@ has a `TypeName`.
## [Function calls](@id Function-calls)
-Given the call `f(x, y)`, the following steps are performed: first, the method table to use is
+Given the call `f(x, y)`, the following steps are performed: first, the method cache to use is
accessed as `typeof(f).name.mt`. Second, an argument tuple type is formed, `Tuple{typeof(f), typeof(x), typeof(y)}`.
Note that the type of the function itself is the first element. This is because the type might
have parameters, and so needs to take part in dispatch. This tuple type is looked up in the method
@@ -187,7 +188,7 @@ is absent.
Finally there is the kwsorter definition:
```
-function (::Core.kwftype(typeof(circle)))(kws, circle, center, radius)
+function (::Core.kwcall)(kws, circle, center, radius)
if haskey(kws, :color)
color = kws.color
else
@@ -205,30 +206,6 @@ function (::Core.kwftype(typeof(circle)))(kws, circle, center, radius)
end
```
-The function `Core.kwftype(t)` creates the field `t.name.mt.kwsorter` (if it hasn't been created
-yet), and returns the type of that function.
-
-This design has the feature that call sites that don't use keyword arguments require no special
-handling; everything works as if they were not part of the language at all. Call sites that do
-use keyword arguments are dispatched directly to the called function's kwsorter. For example the
-call:
-
-```julia
-circle((0, 0), 1.0, color = red; other...)
-```
-
-is lowered to:
-
-```julia
-kwcall(merge((color = red,), other), circle, (0, 0), 1.0)
-```
-
-`kwcall` (also in`Core`) denotes a kwcall signature and dispatch.
-The keyword splatting operation (written as `other...`) calls the named tuple `merge` function.
-This function further unpacks each *element* of `other`, expecting each one to contain two values
-(a symbol and a value).
-Naturally, a more efficient implementation is available if all splatted arguments are named tuples.
-Notice that the original `circle` function is passed through, to handle closures.
## [Compiler efficiency issues](@id compiler-efficiency-issues)
diff --git a/doc/src/devdocs/types.md b/doc/src/devdocs/types.md
index a09df61e4881d..b63f1c315f457 100644
--- a/doc/src/devdocs/types.md
+++ b/doc/src/devdocs/types.md
@@ -199,7 +199,6 @@ TypeName
name: Symbol Array
defs: Nothing nothing
cache: Nothing nothing
- max_args: Int64 0
module: Module Core
: Int64 0
: Int64 0
diff --git a/src/builtins.c b/src/builtins.c
index 36b5d79ec0851..0e309ab912f9c 100644
--- a/src/builtins.c
+++ b/src/builtins.c
@@ -2476,14 +2476,11 @@ void jl_init_intrinsic_functions(void) JL_GC_DISABLED
jl_module_t *inm = jl_new_module_(jl_symbol("Intrinsics"), jl_core_module, 0, 1);
jl_set_initial_const(jl_core_module, jl_symbol("Intrinsics"), (jl_value_t*)inm, 0);
jl_mk_builtin_func(jl_intrinsic_type, jl_symbol("IntrinsicFunction"), jl_f_intrinsic_call);
- jl_mk_builtin_func(
- (jl_datatype_t*)jl_unwrap_unionall((jl_value_t*)jl_opaque_closure_type),
- jl_symbol("OpaqueClosure"), jl_f_opaque_closure_call);
+ jl_datatype_t *oc = (jl_datatype_t*)jl_unwrap_unionall((jl_value_t*)jl_opaque_closure_type);
// Save a reference to the just created OpaqueClosure method, so we can provide special
// codegen for it later.
- jl_opaque_closure_method = (jl_method_t*)jl_methtable_lookup(jl_opaque_closure_typename->mt,
- (jl_value_t*)jl_anytuple_type, 1);
+ jl_opaque_closure_method = jl_mk_builtin_func(oc, jl_symbol("OpaqueClosure"), jl_f_opaque_closure_call); // TODO: awkwardly not actually declared a Builtin, even though it relies on being handled by the special cases for Builtin everywhere else
#define ADD_I(name, nargs) add_intrinsic(inm, #name, name);
#define ADD_HIDDEN(name, nargs)
@@ -2533,6 +2530,8 @@ void jl_init_primitives(void) JL_GC_DISABLED
add_builtin("Module", (jl_value_t*)jl_module_type);
add_builtin("MethodTable", (jl_value_t*)jl_methtable_type);
+ add_builtin("GlobalMethods", (jl_value_t*)jl_method_table);
+ add_builtin("MethodCache", (jl_value_t*)jl_methcache_type);
add_builtin("Method", (jl_value_t*)jl_method_type);
add_builtin("CodeInstance", (jl_value_t*)jl_code_instance_type);
add_builtin("TypeMapEntry", (jl_value_t*)jl_typemap_entry_type);
@@ -2597,6 +2596,26 @@ void jl_init_primitives(void) JL_GC_DISABLED
add_builtin("AbstractString", (jl_value_t*)jl_abstractstring_type);
add_builtin("String", (jl_value_t*)jl_string_type);
+
+ // ensure that primitive types are fully allocated (since jl_init_types is incomplete)
+ assert(jl_atomic_load_relaxed(&jl_world_counter) == 1);
+ jl_module_t *core = jl_core_module;
+ jl_svec_t *bindings = jl_atomic_load_relaxed(&core->bindings);
+ jl_value_t **table = jl_svec_data(bindings);
+ for (size_t i = 0; i < jl_svec_len(bindings); i++) {
+ if (table[i] != jl_nothing) {
+ jl_binding_t *b = (jl_binding_t*)table[i];
+ jl_value_t *v = jl_get_binding_value_in_world(b, 1);
+ if (v) {
+ if (jl_is_unionall(v))
+ v = jl_unwrap_unionall(v);
+ if (jl_is_datatype(v)) {
+ jl_datatype_t *tt = (jl_datatype_t*)v;
+ tt->name->module = core;
+ }
+ }
+ }
+ }
}
#ifdef __cplusplus
diff --git a/src/clangsa/GCChecker.cpp b/src/clangsa/GCChecker.cpp
index af07ca2227839..09a034a9549d8 100644
--- a/src/clangsa/GCChecker.cpp
+++ b/src/clangsa/GCChecker.cpp
@@ -836,6 +836,7 @@ bool GCChecker::isGCTrackedType(QualType QT) {
Name.ends_with_insensitive("jl_typemap_t") ||
Name.ends_with_insensitive("jl_unionall_t") ||
Name.ends_with_insensitive("jl_methtable_t") ||
+ Name.ends_with_insensitive("jl_methcache_t") ||
Name.ends_with_insensitive("jl_cgval_t") ||
Name.ends_with_insensitive("jl_codectx_t") ||
Name.ends_with_insensitive("jl_ast_context_t") ||
diff --git a/src/codegen.cpp b/src/codegen.cpp
index c0aeb5bac1915..5aa175df270f3 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -7561,16 +7561,11 @@ static const char *derive_sigt_name(jl_value_t *jargty)
jl_datatype_t *dt = (jl_datatype_t*)jl_argument_datatype(jargty);
if ((jl_value_t*)dt == jl_nothing)
return NULL;
- jl_sym_t *name = dt->name->name;
- // if we have a kwcall, use that as the name anyways
- jl_methtable_t *mt = dt->name->mt;
- if (mt == jl_type_type_mt || mt == jl_nonfunction_mt || mt == NULL) {
- // our value for `name` from MethodTable is not good, try to come up with something better
- if (jl_is_type_type((jl_value_t*)dt)) {
- dt = (jl_datatype_t*)jl_argument_datatype(jl_tparam0(dt));
- if ((jl_value_t*)dt != jl_nothing) {
- name = dt->name->name;
- }
+ jl_sym_t *name = dt->name->singletonname;
+ if (jl_is_type_type((jl_value_t*)dt)) {
+ dt = (jl_datatype_t*)jl_argument_datatype(jl_tparam0(dt));
+ if ((jl_value_t*)dt != jl_nothing) {
+ name = dt->name->singletonname;
}
}
return jl_symbol_name(name);
@@ -7747,7 +7742,7 @@ const char *jl_generate_ccallable(Module *llvmmod, jl_value_t *nameval, jl_value
assert(jl_is_datatype(ft));
jl_value_t *ff = ft->instance;
assert(ff);
- const char *name = !jl_is_string(nameval) ? jl_symbol_name(ft->name->mt->name) : jl_string_data(nameval);
+ const char *name = !jl_is_string(nameval) ? jl_symbol_name(ft->name->singletonname) : jl_string_data(nameval);
jl_value_t *crt = declrt;
if (jl_is_abstract_ref_type(declrt)) {
declrt = jl_tparam0(declrt);
diff --git a/src/datatype.c b/src/datatype.c
index 0ccdd0b61d06f..eb25907647157 100644
--- a/src/datatype.c
+++ b/src/datatype.c
@@ -39,22 +39,30 @@ static jl_sym_t *jl_demangle_typename(jl_sym_t *s) JL_NOTSAFEPOINT
return _jl_symbol(&n[1], len);
}
+JL_DLLEXPORT jl_methcache_t *jl_new_method_cache(void)
+{
+ jl_task_t *ct = jl_current_task;
+ jl_methcache_t *mc =
+ (jl_methcache_t*)jl_gc_alloc(ct->ptls, sizeof(jl_methcache_t),
+ jl_methcache_type);
+ jl_atomic_store_relaxed(&mc->leafcache, (jl_genericmemory_t*)jl_an_empty_memory_any);
+ jl_atomic_store_relaxed(&mc->cache, jl_nothing);
+ JL_MUTEX_INIT(&mc->writelock, "methodtable->writelock");
+ return mc;
+}
+
JL_DLLEXPORT jl_methtable_t *jl_new_method_table(jl_sym_t *name, jl_module_t *module)
{
+ jl_methcache_t *mc = jl_new_method_cache();
+ JL_GC_PUSH1(&mc);
jl_task_t *ct = jl_current_task;
jl_methtable_t *mt =
- (jl_methtable_t*)jl_gc_alloc(ct->ptls, sizeof(jl_methtable_t),
- jl_methtable_type);
- mt->name = jl_demangle_typename(name);
- mt->module = module;
+ (jl_methtable_t*)jl_gc_alloc(ct->ptls, sizeof(jl_methtable_t), jl_methtable_type);
jl_atomic_store_relaxed(&mt->defs, jl_nothing);
- jl_atomic_store_relaxed(&mt->leafcache, (jl_genericmemory_t*)jl_an_empty_memory_any);
- jl_atomic_store_relaxed(&mt->cache, jl_nothing);
- jl_atomic_store_relaxed(&mt->max_args, 0);
- mt->backedges = NULL;
- JL_MUTEX_INIT(&mt->writelock, "methodtable->writelock");
- mt->offs = 0;
- mt->frozen = 0;
+ mt->cache = mc;
+ mt->name = name;
+ mt->module = module;
+ JL_GC_POP();
return mt;
}
@@ -67,21 +75,23 @@ JL_DLLEXPORT jl_typename_t *jl_new_typename_in(jl_sym_t *name, jl_module_t *modu
tn->name = name;
tn->module = module;
tn->wrapper = NULL;
+ tn->singletonname = jl_demangle_typename(name);
jl_atomic_store_relaxed(&tn->Typeofwrapper, NULL);
jl_atomic_store_relaxed(&tn->cache, jl_emptysvec);
jl_atomic_store_relaxed(&tn->linearcache, jl_emptysvec);
tn->names = NULL;
tn->hash = bitmix(bitmix(module ? module->build_id.lo : 0, name->hash), 0xa1ada1da);
- tn->_reserved = 0;
+ tn->_unused = 0;
tn->abstract = abstract;
tn->mutabl = mutabl;
tn->mayinlinealloc = 0;
- tn->mt = NULL;
tn->partial = NULL;
tn->atomicfields = NULL;
tn->constfields = NULL;
- jl_atomic_store_relaxed(&tn->cache_entry_count, 0);
+ tn->backedges = NULL;
tn->max_methods = 0;
+ jl_atomic_store_relaxed(&tn->max_args, 0);
+ jl_atomic_store_relaxed(&tn->cache_entry_count, 0);
tn->constprop_heustic = 0;
return tn;
}
@@ -861,18 +871,6 @@ JL_DLLEXPORT jl_datatype_t *jl_new_datatype(
}
else {
tn = jl_new_typename_in((jl_sym_t*)name, module, abstract, mutabl);
- if (super == jl_function_type || super == jl_builtin_type || is_anonfn_typename(jl_symbol_name(name))) {
- // Callable objects (including compiler-generated closures) get independent method tables
- // as an optimization
- tn->mt = jl_new_method_table(name, module);
- jl_gc_wb(tn, tn->mt);
- if (jl_svec_len(parameters) == 0 && !abstract)
- tn->mt->offs = 1;
- }
- else {
- // Everything else, gets to use the unified table
- tn->mt = jl_nonfunction_mt;
- }
}
t->name = tn;
jl_gc_wb(t, t->name);
diff --git a/src/gc-stock.c b/src/gc-stock.c
index 01453a30b2a4b..ce0502058baa7 100644
--- a/src/gc-stock.c
+++ b/src/gc-stock.c
@@ -2845,6 +2845,8 @@ static void gc_mark_roots(jl_gc_markqueue_t *mq)
gc_try_claim_and_push(mq, jl_main_module, NULL);
gc_heap_snapshot_record_gc_roots((jl_value_t*)jl_main_module, "main_module");
// invisible builtin values
+ gc_try_claim_and_push(mq, jl_method_table, NULL);
+ gc_heap_snapshot_record_gc_roots((jl_value_t*)jl_method_table, "global_method_table");
gc_try_claim_and_push(mq, jl_an_empty_vec_any, NULL);
gc_heap_snapshot_record_gc_roots((jl_value_t*)jl_an_empty_vec_any, "an_empty_vec_any");
gc_try_claim_and_push(mq, jl_module_init_order, NULL);
diff --git a/src/gf.c b/src/gf.c
index 5a960c33ef503..4bfe910ac6cae 100644
--- a/src/gf.c
+++ b/src/gf.c
@@ -27,6 +27,8 @@ extern "C" {
_Atomic(int) allow_new_worlds = 1;
JL_DLLEXPORT _Atomic(size_t) jl_world_counter = 1; // uses atomic acquire/release
jl_mutex_t world_counter_lock;
+jl_methtable_t *jl_method_table;
+
JL_DLLEXPORT size_t jl_get_world_counter(void) JL_NOTSAFEPOINT
{
jl_task_t *ct = jl_current_task;
@@ -41,31 +43,41 @@ JL_DLLEXPORT size_t jl_get_tls_world_age(void) JL_NOTSAFEPOINT
}
// Compute the maximum number of times to unroll Varargs{T}, based on
-// m->max_varargs (if specified) or a heuristic based on the maximum
-// number of non-varargs arguments in the provided method table.
+// m->max_varargs (if specified) or a heuristic based on the maximum number of
+// non-varargs arguments for the function type of the method signature.
//
// If provided, `may_increase` is set to 1 if the returned value is
// heuristic-based and has a chance of increasing in the future.
static size_t get_max_varargs(
jl_method_t *m,
- jl_methtable_t *kwmt,
- jl_methtable_t *mt,
uint8_t *may_increase) JL_NOTSAFEPOINT
{
size_t max_varargs = 1;
if (may_increase != NULL)
*may_increase = 0;
- if (m->max_varargs != UINT8_MAX)
+ if (m->max_varargs != UINT8_MAX) {
max_varargs = m->max_varargs;
- else if (kwmt != NULL && kwmt != jl_type_type_mt && kwmt != jl_nonfunction_mt && kwmt != jl_kwcall_mt) {
- if (may_increase != NULL)
- *may_increase = 1; // `max_args` can increase as new methods are inserted
-
- max_varargs = jl_atomic_load_relaxed(&kwmt->max_args) + 2;
- if (mt == jl_kwcall_mt)
- max_varargs += 2;
- max_varargs -= m->nargs;
+ }
+ else {
+ jl_datatype_t *dt1 = jl_nth_argument_datatype(m->sig, 1);
+ jl_datatype_t *dt;
+ if (jl_kwcall_type && dt1 == jl_kwcall_type)
+ dt = jl_nth_argument_datatype(m->sig, 3);
+ else
+ dt = dt1;
+ if (dt != NULL && !jl_is_type_type((jl_value_t*)dt) && dt != jl_kwcall_type) {
+ if (may_increase != NULL)
+ *may_increase = 1; // `max_args` can increase as new methods are inserted
+
+ max_varargs = jl_atomic_load_relaxed(&dt->name->max_args) + 2;
+ if (jl_kwcall_type && dt1 == jl_kwcall_type)
+ max_varargs += 2;
+ if (max_varargs > m->nargs)
+ max_varargs -= m->nargs;
+ else
+ max_varargs = 0;
+ }
}
return max_varargs;
}
@@ -104,9 +116,9 @@ void jl_call_tracer(tracer_cb callback, jl_value_t *tracee)
/// ----- Definitions for various internal TypeMaps ----- ///
-static int8_t jl_cachearg_offset(jl_methtable_t *mt)
+static int8_t jl_cachearg_offset(void)
{
- return mt->offs;
+ return 0;
}
/// ----- Insertion logic for special entries ----- ///
@@ -274,13 +286,13 @@ JL_DLLEXPORT jl_value_t *jl_specializations_lookup(jl_method_t *m, jl_value_t *t
return mi;
}
-JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_methtable_t *mt, jl_value_t *type, size_t world)
+JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_value_t *type, size_t world)
{
// TODO: this is sort of an odd lookup strategy (and the only user of
// jl_typemap_assoc_by_type with subtype=0), while normally jl_gf_invoke_lookup would be
// expected to be used instead
struct jl_typemap_assoc search = {type, world, NULL};
- jl_typemap_entry_t *sf = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&mt->defs), &search, jl_cachearg_offset(mt), /*subtype*/0);
+ jl_typemap_entry_t *sf = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&jl_method_table->defs), &search, 0, /*subtype*/0);
if (!sf)
return jl_nothing;
return sf->func.value;
@@ -288,7 +300,7 @@ JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_methtable_t *mt, jl_value_t *typ
// ----- MethodInstance specialization instantiation ----- //
-void jl_mk_builtin_func(jl_datatype_t *dt, jl_sym_t *sname, jl_fptr_args_t fptr) JL_GC_DISABLED
+jl_method_t *jl_mk_builtin_func(jl_datatype_t *dt, jl_sym_t *sname, jl_fptr_args_t fptr) JL_GC_DISABLED
{
jl_method_t *m = jl_new_method_uninit(jl_core_module);
m->name = sname;
@@ -302,31 +314,36 @@ void jl_mk_builtin_func(jl_datatype_t *dt, jl_sym_t *sname, jl_fptr_args_t fptr)
m->nospecialize = 0;
m->nospecialize = ~m->nospecialize;
- jl_methtable_t *mt = dt->name->mt;
jl_typemap_entry_t *newentry = NULL;
- JL_GC_PUSH2(&m, &newentry);
+ jl_datatype_t *tuptyp = NULL;
+ JL_GC_PUSH3(&m, &newentry, &tuptyp);
- newentry = jl_typemap_alloc(jl_anytuple_type, NULL, jl_emptysvec,
- (jl_value_t*)m, 1, ~(size_t)0);
- jl_typemap_insert(&mt->defs, (jl_value_t*)mt, newentry, jl_cachearg_offset(mt));
+ jl_value_t *params[2];
+ params[0] = dt->name->wrapper;
+ params[1] = jl_tparam0(jl_anytuple_type);
+ tuptyp = (jl_datatype_t*)jl_apply_tuple_type_v(params, 2);
- jl_method_instance_t *mi = jl_get_specialized(m, (jl_value_t*)jl_anytuple_type, jl_emptysvec);
+ jl_method_instance_t *mi = jl_get_specialized(m, (jl_value_t*)tuptyp, jl_emptysvec);
jl_atomic_store_relaxed(&m->unspecialized, mi);
jl_gc_wb(m, mi);
jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
(jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, jl_nothing, jl_nothing,
0, 1, ~(size_t)0, 0, jl_nothing, NULL, NULL);
- jl_mi_cache_insert(mi, codeinst);
jl_atomic_store_relaxed(&codeinst->specptr.fptr1, fptr);
jl_atomic_store_relaxed(&codeinst->invoke, jl_fptr_args);
+ jl_mi_cache_insert(mi, codeinst);
+
+ newentry = jl_typemap_alloc(tuptyp, NULL, jl_emptysvec,
+ (jl_value_t*)m, 1, ~(size_t)0);
+ jl_typemap_insert(&jl_method_table->defs, (jl_value_t*)jl_method_table, newentry, 0);
- newentry = jl_typemap_alloc(jl_anytuple_type, NULL, jl_emptysvec,
+ newentry = jl_typemap_alloc(tuptyp, NULL, jl_emptysvec,
(jl_value_t*)mi, 1, ~(size_t)0);
- jl_typemap_insert(&mt->cache, (jl_value_t*)mt, newentry, 0);
+ jl_typemap_insert(&jl_method_table->cache->cache, (jl_value_t*)jl_method_table->cache, newentry, 0);
- mt->frozen = 1;
JL_GC_POP();
+ return m;
}
// only relevant for bootstrapping. otherwise fairly broken.
@@ -537,7 +554,7 @@ JL_DLLEXPORT jl_code_instance_t *jl_get_method_inferred(
{
jl_value_t *owner = jl_nothing; // TODO: owner should be arg
jl_code_instance_t *codeinst = jl_atomic_load_relaxed(&mi->cache);
- while (codeinst) {
+ for (; codeinst; codeinst = jl_atomic_load_relaxed(&codeinst->next)) {
if (jl_atomic_load_relaxed(&codeinst->min_world) == min_world &&
jl_atomic_load_relaxed(&codeinst->max_world) == max_world &&
jl_egal(codeinst->owner, owner) &&
@@ -555,7 +572,6 @@ JL_DLLEXPORT jl_code_instance_t *jl_get_method_inferred(
if (e && jl_egal((jl_value_t*)e, (jl_value_t*)edges))
return codeinst;
}
- codeinst = jl_atomic_load_relaxed(&codeinst->next);
}
codeinst = jl_new_codeinst(
mi, owner, rettype, (jl_value_t*)jl_any_type, NULL, NULL,
@@ -760,9 +776,9 @@ JL_DLLEXPORT int jl_mi_try_insert(jl_method_instance_t *mi JL_ROOTING_ARGUMENT,
return ret;
}
-int foreach_mtable_in_module(
+static int foreach_typename_in_module(
jl_module_t *m,
- int (*visit)(jl_methtable_t *mt, void *env),
+ int (*visit)(jl_typename_t *tn, void *env),
void *env)
{
jl_svec_t *table = jl_atomic_load_relaxed(&m->bindings);
@@ -778,15 +794,50 @@ int foreach_mtable_in_module(
jl_typename_t *tn = ((jl_datatype_t*)uw)->name;
if (tn->module == m && tn->name == name && tn->wrapper == v) {
// this is the original/primary binding for the type (name/wrapper)
- jl_methtable_t *mt = tn->mt;
- if (mt != NULL && (jl_value_t*)mt != jl_nothing && mt != jl_type_type_mt && mt != jl_nonfunction_mt) {
- assert(mt->module == m);
- if (!visit(mt, env))
- return 0;
- }
+ if (!visit(((jl_datatype_t*)uw)->name, env))
+ return 0;
}
}
else if (jl_is_module(v)) {
+ jl_module_t *child = (jl_module_t*)v;
+ if (child != m && child->parent == m && child->name == name) {
+ // this is the original/primary binding for the submodule
+ if (!foreach_typename_in_module(child, visit, env))
+ return 0;
+ }
+ }
+ }
+ table = jl_atomic_load_relaxed(&m->bindings);
+ }
+ return 1;
+}
+
+static int jl_foreach_reachable_typename(int (*visit)(jl_typename_t *tn, void *env), jl_array_t *mod_array, void *env)
+{
+ for (size_t i = 0; i < jl_array_nrows(mod_array); i++) {
+ jl_module_t *m = (jl_module_t*)jl_array_ptr_ref(mod_array, i);
+ assert(jl_is_module(m));
+ if (m->parent == m) // some toplevel modules (really just Base) aren't actually
+ if (!foreach_typename_in_module(m, visit, env))
+ return 0;
+ }
+ return 1;
+}
+
+int foreach_mtable_in_module(
+ jl_module_t *m,
+ int (*visit)(jl_methtable_t *mt, void *env),
+ void *env)
+{
+ jl_svec_t *table = jl_atomic_load_relaxed(&m->bindings);
+ for (size_t i = 0; i < jl_svec_len(table); i++) {
+ jl_binding_t *b = (jl_binding_t*)jl_svecref(table, i);
+ if ((void*)b == jl_nothing)
+ break;
+ jl_sym_t *name = b->globalref->name;
+ jl_value_t *v = jl_get_latest_binding_value_if_const(b);
+ if (v) {
+ if (jl_is_module(v)) {
jl_module_t *child = (jl_module_t*)v;
if (child != m && child->parent == m && child->name == name) {
// this is the original/primary binding for the submodule
@@ -796,9 +847,7 @@ int foreach_mtable_in_module(
}
else if (jl_is_mtable(v)) {
jl_methtable_t *mt = (jl_methtable_t*)v;
- if (mt->module == m && mt->name == name) {
- // this is probably an external method table here, so let's
- // assume so as there is no way to precisely distinguish them
+ if (mt && mt != jl_method_table) {
if (!visit(mt, env))
return 0;
}
@@ -809,37 +858,23 @@ int foreach_mtable_in_module(
return 1;
}
-int jl_foreach_reachable_mtable(int (*visit)(jl_methtable_t *mt, void *env), void *env)
+
+int jl_foreach_reachable_mtable(int (*visit)(jl_methtable_t *mt, void *env), jl_array_t *mod_array, void *env)
{
- if (!visit(jl_type_type_mt, env))
+ if (!visit(jl_method_table, env))
return 0;
- if (!visit(jl_nonfunction_mt, env))
- return 0;
- jl_array_t *mod_array = jl_get_loaded_modules();
if (mod_array) {
- JL_GC_PUSH1(&mod_array);
- int i;
- for (i = 0; i < jl_array_nrows(mod_array); i++) {
+ for (size_t i = 0; i < jl_array_nrows(mod_array); i++) {
jl_module_t *m = (jl_module_t*)jl_array_ptr_ref(mod_array, i);
assert(jl_is_module(m));
if (m->parent == m) // some toplevel modules (really just Base) aren't actually
- if (!foreach_mtable_in_module(m, visit, env)) {
- JL_GC_POP();
+ if (!foreach_mtable_in_module(m, visit, env))
return 0;
- }
}
- JL_GC_POP();
- }
- else {
- if (!foreach_mtable_in_module(jl_main_module, visit, env))
- return 0;
- if (!foreach_mtable_in_module(jl_core_module, visit, env))
- return 0;
}
return 1;
}
-
jl_function_t *jl_typeinf_func JL_GLOBALLY_ROOTED = NULL;
JL_DLLEXPORT size_t jl_typeinf_world = 1;
@@ -923,7 +958,7 @@ static jl_value_t *inst_varargp_in_env(jl_value_t *decl, jl_svec_t *sparams)
return vm;
}
-static jl_value_t *ml_matches(jl_methtable_t *mt,
+static jl_value_t *ml_matches(jl_methtable_t *mt, jl_methcache_t *mc,
jl_tupletype_t *type, int lim, int include_ambiguous,
int intersections, size_t world, int cache_result,
size_t *min_valid, size_t *max_valid, int *ambig);
@@ -1130,9 +1165,10 @@ static void jl_compilation_sig(
// and the types we find should be bigger.
if (np >= nspec && jl_va_tuple_kind((jl_datatype_t*)decl) == JL_VARARG_UNBOUND) {
if (!*newparams) *newparams = tt->parameters;
- if (max_varargs > 0) {
+ if (max_varargs > 0 && nspec >= 2) {
type_i = jl_svecref(*newparams, nspec - 2);
- } else {
+ }
+ else {
// If max varargs is zero, always specialize to (Any...) since
// there is no preceding parameter to use for `type_i`
type_i = jl_bottom_type;
@@ -1207,15 +1243,11 @@ JL_DLLEXPORT int jl_isa_compileable_sig(
if (definition->isva) {
unsigned nspec_min = nargs + 1; // min number of arg values (including tail vararg)
unsigned nspec_max = INT32_MAX; // max number of arg values (including tail vararg)
- jl_methtable_t *mt = jl_method_table_for(decl);
- jl_methtable_t *kwmt = mt == jl_kwcall_mt ? jl_kwmethod_table_for(decl) : mt;
- if ((jl_value_t*)mt != jl_nothing) {
- // try to refine estimate of min and max
- uint8_t heuristic_used = 0;
- nspec_max = nspec_min = nargs + get_max_varargs(definition, kwmt, mt, &heuristic_used);
- if (heuristic_used)
- nspec_max = INT32_MAX; // new methods may be added, increasing nspec_min later
- }
+ // try to refine estimate of min and max
+ uint8_t heuristic_used = 0;
+ nspec_max = nspec_min = nargs + get_max_varargs(definition, &heuristic_used);
+ if (heuristic_used)
+ nspec_max = INT32_MAX; // new methods may be added, increasing nspec_min later
int isunbound = (jl_va_tuple_kind((jl_datatype_t*)decl) == JL_VARARG_UNBOUND);
if (jl_is_vararg(jl_tparam(type, np - 1))) {
if (!isunbound || np < nspec_min || np > nspec_max)
@@ -1404,18 +1436,18 @@ static inline jl_typemap_entry_t *lookup_leafcache(jl_genericmemory_t *leafcache
return NULL;
}
jl_method_instance_t *cache_method(
- jl_methtable_t *mt, _Atomic(jl_typemap_t*) *cache, jl_value_t *parent JL_PROPAGATES_ROOT,
+ jl_methtable_t *mt, jl_methcache_t *mc, _Atomic(jl_typemap_t*) *cache, jl_value_t *parent JL_PROPAGATES_ROOT,
jl_tupletype_t *tt, // the original tupletype of the signature
jl_method_t *definition,
size_t world, size_t min_valid, size_t max_valid,
jl_svec_t *sparams)
{
- // caller must hold the mt->writelock
+ // caller must hold the parent->writelock
// short-circuit (now that we hold the lock) if this entry is already present
- int8_t offs = mt ? jl_cachearg_offset(mt) : 1;
+ int8_t offs = mc ? jl_cachearg_offset() : 1;
{ // scope block
- if (mt) {
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
+ if (mc) {
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
jl_typemap_entry_t *entry = lookup_leafcache(leafcache, (jl_value_t*)tt, world);
if (entry)
return entry->func.linfo;
@@ -1428,10 +1460,23 @@ jl_method_instance_t *cache_method(
return entry->func.linfo;
}
+ jl_method_instance_t *newmeth = NULL;
+ if (definition->sig == (jl_value_t*)jl_anytuple_type && definition != jl_opaque_closure_method && !definition->is_for_opaque_closure) {
+ newmeth = jl_atomic_load_relaxed(&definition->unspecialized);
+ if (newmeth != NULL) { // handle builtin methods de-specialization (for invoke, or if the global cache entry somehow gets lost)
+ jl_tupletype_t *cachett = (jl_tupletype_t*)newmeth->specTypes;
+ assert(cachett != jl_anytuple_type);
+ jl_typemap_entry_t *newentry = jl_typemap_alloc(cachett, NULL, jl_emptysvec, (jl_value_t*)newmeth, min_valid, max_valid);
+ JL_GC_PUSH1(&newentry);
+ jl_typemap_insert(cache, parent, newentry, offs);
+ JL_GC_POP();
+ return newmeth;
+ }
+ }
+
jl_value_t *temp = NULL;
jl_value_t *temp2 = NULL;
jl_value_t *temp3 = NULL;
- jl_method_instance_t *newmeth = NULL;
jl_svec_t *newparams = NULL;
JL_GC_PUSH5(&temp, &temp2, &temp3, &newmeth, &newparams);
@@ -1439,8 +1484,7 @@ jl_method_instance_t *cache_method(
// so that we can minimize the number of required cache entries.
int cache_with_orig = 1;
jl_tupletype_t *compilationsig = tt;
- jl_methtable_t *kwmt = mt == jl_kwcall_mt ? jl_kwmethod_table_for(definition->sig) : mt;
- intptr_t max_varargs = get_max_varargs(definition, kwmt, mt, NULL);
+ intptr_t max_varargs = get_max_varargs(definition, NULL);
jl_compilation_sig(tt, sparams, definition, max_varargs, &newparams);
if (newparams) {
temp2 = jl_apply_tuple_type(newparams, 1);
@@ -1476,7 +1520,7 @@ jl_method_instance_t *cache_method(
// now examine what will happen if we chose to use this sig in the cache
size_t min_valid2 = 1;
size_t max_valid2 = ~(size_t)0;
- temp = ml_matches(mt, compilationsig, MAX_UNSPECIALIZED_CONFLICTS, 1, 1, world, 0, &min_valid2, &max_valid2, NULL);
+ temp = ml_matches(mt, mc, compilationsig, MAX_UNSPECIALIZED_CONFLICTS, 1, 1, world, 0, &min_valid2, &max_valid2, NULL);
int guards = 0;
if (temp == jl_nothing) {
cache_with_orig = 1;
@@ -1524,7 +1568,7 @@ jl_method_instance_t *cache_method(
guards++;
// alternative approach: insert sentinel entry
//jl_typemap_insert(cache, parent, (jl_tupletype_t*)matc->spec_types,
- // NULL, jl_emptysvec, /*guard*/NULL, jl_cachearg_offset(mt), other->min_world, other->max_world);
+ // NULL, jl_emptysvec, /*guard*/NULL, jl_cachearg_offset(), other->min_world, other->max_world);
}
}
assert(guards == jl_svec_len(guardsigs));
@@ -1584,7 +1628,7 @@ jl_method_instance_t *cache_method(
jl_typemap_entry_t *newentry = jl_typemap_alloc(cachett, simplett, guardsigs, (jl_value_t*)newmeth, min_valid, max_valid);
temp = (jl_value_t*)newentry;
- if (mt && cachett == tt && jl_svec_len(guardsigs) == 0 && tt->hash && !tt->hasfreetypevars) {
+ if (mc && cachett == tt && jl_svec_len(guardsigs) == 0 && tt->hash && !tt->hasfreetypevars) {
// we check `tt->hash` exists, since otherwise the NamedTuple
// constructor and `structdiff` method pollutes this lookup with a lot
// of garbage in the linear table search
@@ -1597,14 +1641,14 @@ jl_method_instance_t *cache_method(
jl_cache_type_(tt);
JL_UNLOCK(&typecache_lock); // Might GC
}
- jl_genericmemory_t *oldcache = jl_atomic_load_relaxed(&mt->leafcache);
+ jl_genericmemory_t *oldcache = jl_atomic_load_relaxed(&mc->leafcache);
jl_typemap_entry_t *old = (jl_typemap_entry_t*)jl_eqtable_get(oldcache, (jl_value_t*)tt, jl_nothing);
jl_atomic_store_relaxed(&newentry->next, old);
jl_gc_wb(newentry, old);
- jl_genericmemory_t *newcache = jl_eqtable_put(jl_atomic_load_relaxed(&mt->leafcache), (jl_value_t*)tt, (jl_value_t*)newentry, NULL);
+ jl_genericmemory_t *newcache = jl_eqtable_put(jl_atomic_load_relaxed(&mc->leafcache), (jl_value_t*)tt, (jl_value_t*)newentry, NULL);
if (newcache != oldcache) {
- jl_atomic_store_release(&mt->leafcache, newcache);
- jl_gc_wb(mt, newcache);
+ jl_atomic_store_release(&mc->leafcache, newcache);
+ jl_gc_wb(mc, newcache);
}
}
else {
@@ -1624,50 +1668,52 @@ jl_method_instance_t *cache_method(
return newmeth;
}
-static jl_method_match_t *_gf_invoke_lookup(jl_value_t *types JL_PROPAGATES_ROOT, jl_value_t *mt, size_t world, size_t *min_valid, size_t *max_valid);
+static jl_method_match_t *_gf_invoke_lookup(jl_value_t *types JL_PROPAGATES_ROOT, jl_methtable_t *mt, size_t world, size_t *min_valid, size_t *max_valid);
+
+JL_DLLEXPORT jl_typemap_entry_t *jl_mt_find_cache_entry(jl_methcache_t *mc JL_PROPAGATES_ROOT, jl_datatype_t *tt JL_MAYBE_UNROOTED JL_ROOTS_TEMPORARILY, size_t world)
+{ // exported only for debugging purposes, not for casual use
+ if (tt->isdispatchtuple) {
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
+ jl_typemap_entry_t *entry = lookup_leafcache(leafcache, (jl_value_t*)tt, world);
+ if (entry)
+ return entry;
+ }
+ JL_GC_PUSH1(&tt);
+ struct jl_typemap_assoc search = {(jl_value_t*)tt, world, NULL};
+ jl_typemap_entry_t *entry = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&mc->cache), &search, jl_cachearg_offset(), /*subtype*/1);
+ JL_GC_POP();
+ return entry;
+}
-static jl_method_instance_t *jl_mt_assoc_by_type(jl_methtable_t *mt JL_PROPAGATES_ROOT, jl_datatype_t *tt JL_MAYBE_UNROOTED, size_t world)
+static jl_method_instance_t *jl_mt_assoc_by_type(jl_methcache_t *mc JL_PROPAGATES_ROOT, jl_datatype_t *tt JL_MAYBE_UNROOTED, size_t world)
{
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
- jl_typemap_entry_t *entry = lookup_leafcache(leafcache, (jl_value_t*)tt, world);
+ jl_typemap_entry_t *entry = jl_mt_find_cache_entry(mc, tt, world);
if (entry)
return entry->func.linfo;
+ assert(tt->isdispatchtuple || tt->hasfreetypevars);
JL_TIMING(METHOD_LOOKUP_SLOW, METHOD_LOOKUP_SLOW);
jl_method_match_t *matc = NULL;
JL_GC_PUSH2(&tt, &matc);
- JL_LOCK(&mt->writelock);
- assert(tt->isdispatchtuple || tt->hasfreetypevars);
+ JL_LOCK(&mc->writelock);
jl_method_instance_t *mi = NULL;
- if (tt->isdispatchtuple) {
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
- jl_typemap_entry_t *entry = lookup_leafcache(leafcache, (jl_value_t*)tt, world);
- if (entry)
- mi = entry->func.linfo;
- }
-
- if (!mi) {
- struct jl_typemap_assoc search = {(jl_value_t*)tt, world, NULL};
- jl_typemap_entry_t *entry = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&mt->cache), &search, jl_cachearg_offset(mt), /*subtype*/1);
- if (entry)
- mi = entry->func.linfo;
- }
-
+ entry = jl_mt_find_cache_entry(mc, tt, world);
+ if (entry)
+ mi = entry->func.linfo;
if (!mi) {
size_t min_valid = 0;
size_t max_valid = ~(size_t)0;
- matc = _gf_invoke_lookup((jl_value_t*)tt, jl_nothing, world, &min_valid, &max_valid);
+ matc = _gf_invoke_lookup((jl_value_t*)tt, jl_method_table, world, &min_valid, &max_valid);
if (matc) {
jl_method_t *m = matc->method;
jl_svec_t *env = matc->sparams;
- mi = cache_method(mt, &mt->cache, (jl_value_t*)mt, tt, m, world, min_valid, max_valid, env);
+ mi = cache_method(jl_method_table, mc, &mc->cache, (jl_value_t*)mc, tt, m, world, min_valid, max_valid, env);
}
}
- JL_UNLOCK(&mt->writelock);
+ JL_UNLOCK(&mc->writelock);
JL_GC_POP();
return mi;
}
-
struct matches_env {
struct typemap_intersection_env match;
jl_typemap_entry_t *newentry;
@@ -1705,7 +1751,7 @@ static int get_intersect_visitor(jl_typemap_entry_t *oldentry, struct typemap_in
return 1;
}
-static jl_value_t *get_intersect_matches(jl_typemap_t *defs, jl_typemap_entry_t *newentry, jl_typemap_entry_t **replaced, int8_t offs, size_t world)
+static jl_value_t *get_intersect_matches(jl_typemap_t *defs, jl_typemap_entry_t *newentry, jl_typemap_entry_t **replaced, size_t world)
{
jl_tupletype_t *type = newentry->sig;
jl_tupletype_t *ttypes = (jl_tupletype_t*)jl_unwrap_unionall((jl_value_t*)type);
@@ -1724,7 +1770,7 @@ static jl_value_t *get_intersect_matches(jl_typemap_t *defs, jl_typemap_entry_t
/* .ti = */ NULL, /* .env = */ jl_emptysvec, /* .issubty = */ 0},
/* .newentry = */ newentry, /* .shadowed */ NULL, /* .replaced */ NULL};
JL_GC_PUSH3(&env.match.env, &env.match.ti, &env.shadowed);
- jl_typemap_intersection_visitor(defs, offs, &env.match);
+ jl_typemap_intersection_visitor(defs, 0, &env.match);
env.match.env = NULL;
env.match.ti = NULL;
*replaced = env.replaced;
@@ -1748,7 +1794,7 @@ static void method_overwrite(jl_typemap_entry_t *newentry, jl_method_t *oldvalue
jl_module_t *newmod = method->module;
jl_module_t *oldmod = oldvalue->module;
jl_datatype_t *dt = jl_nth_argument_datatype(oldvalue->sig, 1);
- if (dt == (jl_datatype_t*)jl_typeof(jl_kwcall_func))
+ if (jl_kwcall_type && dt == jl_kwcall_type)
dt = jl_nth_argument_datatype(oldvalue->sig, 3);
int anon = dt && is_anonfn_typename(jl_symbol_name(dt->name->name));
if ((jl_options.warn_overwrite == JL_OPTIONS_WARN_OVERWRITE_ON) ||
@@ -1774,18 +1820,20 @@ static void method_overwrite(jl_typemap_entry_t *newentry, jl_method_t *oldvalue
}
}
-static void update_max_args(jl_methtable_t *mt, jl_value_t *type)
+static void update_max_args(jl_value_t *type)
{
- if (mt == jl_type_type_mt || mt == jl_nonfunction_mt || mt == jl_kwcall_mt)
- return;
type = jl_unwrap_unionall(type);
+ jl_datatype_t *dt = jl_nth_argument_datatype(type, 1);
+ if (dt == NULL || dt == jl_kwcall_type || jl_is_type_type((jl_value_t*)dt))
+ return;
+ jl_typename_t *tn = dt->name;
assert(jl_is_datatype(type));
size_t na = jl_nparams(type);
if (jl_va_tuple_kind((jl_datatype_t*)type) == JL_VARARG_UNBOUND)
na--;
- // update occurs inside mt->writelock
- if (na > jl_atomic_load_relaxed(&mt->max_args))
- jl_atomic_store_relaxed(&mt->max_args, na);
+ // update occurs inside global writelock
+ if (na > jl_atomic_load_relaxed(&tn->max_args))
+ jl_atomic_store_relaxed(&tn->max_args, na);
}
jl_array_t *_jl_debug_method_invalidation JL_GLOBALLY_ROOTED = NULL;
@@ -1829,7 +1877,9 @@ static void invalidate_code_instance(jl_code_instance_t *replaced, size_t max_wo
jl_atomic_store_release(&replaced->max_world, max_world);
// recurse to all backedges to update their valid range also
_invalidate_backedges(replaced_mi, replaced, max_world, depth + 1);
- } else {
+ // TODO: should we visit all forward edges now and delete ourselves from all of those lists too?
+ }
+ else {
assert(jl_atomic_load_relaxed(&replaced->max_world) <= max_world);
}
JL_UNLOCK(&replaced_mi->def.method->writelock);
@@ -1893,6 +1943,33 @@ static void _invalidate_backedges(jl_method_instance_t *replaced_mi, jl_code_ins
JL_GC_POP();
}
+static int jl_type_intersection2(jl_value_t *t1, jl_value_t *t2, jl_value_t **isect JL_REQUIRE_ROOTED_SLOT, jl_value_t **isect2 JL_REQUIRE_ROOTED_SLOT)
+{
+ *isect2 = NULL;
+ int is_subty = 0;
+ *isect = jl_type_intersection_env_s(t1, t2, NULL, &is_subty);
+ if (*isect == jl_bottom_type)
+ return 0;
+ if (is_subty)
+ return 1;
+ // TODO: sometimes type intersection returns types with free variables
+ if (jl_has_free_typevars(t1) || jl_has_free_typevars(t2))
+ return 1;
+ // determine if type-intersection can be convinced to give a better, non-bad answer
+ // if the intersection was imprecise, see if we can do better by switching the types
+ *isect2 = jl_type_intersection(t2, t1);
+ if (*isect2 == jl_bottom_type) {
+ *isect = jl_bottom_type;
+ *isect2 = NULL;
+ return 0;
+ }
+ if (jl_types_egal(*isect2, *isect)) {
+ *isect2 = NULL;
+ }
+ return 1;
+}
+
+
enum morespec_options {
morespec_unknown,
morespec_isnot,
@@ -2010,39 +2087,128 @@ JL_DLLEXPORT void jl_method_instance_add_backedge(jl_method_instance_t *callee,
JL_UNLOCK(&callee->def.method->writelock);
}
-// add a backedge from a non-existent signature to caller
-JL_DLLEXPORT void jl_method_table_add_backedge(jl_methtable_t *mt, jl_value_t *typ, jl_code_instance_t *caller)
+
+struct _typename_add_backedge {
+ jl_value_t *typ;
+ jl_value_t *caller;
+};
+
+static void _typename_add_backedge(jl_typename_t *tn, void *env0)
{
- assert(jl_is_code_instance(caller));
- if (!jl_atomic_load_relaxed(&allow_new_worlds))
- return;
- JL_LOCK(&mt->writelock);
+ struct _typename_add_backedge *env = (struct _typename_add_backedge*)env0;
+ JL_GC_PROMISE_ROOTED(env->typ);
+ JL_GC_PROMISE_ROOTED(env->caller);
if (jl_atomic_load_relaxed(&allow_new_worlds)) {
- if (!mt->backedges) {
+ if (!tn->backedges) {
// lazy-init the backedges array
- mt->backedges = jl_alloc_vec_any(2);
- jl_gc_wb(mt, mt->backedges);
- jl_array_ptr_set(mt->backedges, 0, typ);
- jl_array_ptr_set(mt->backedges, 1, caller);
+ tn->backedges = jl_alloc_vec_any(2);
+ jl_gc_wb(tn, tn->backedges);
+ jl_array_ptr_set(tn->backedges, 0, env->typ);
+ jl_array_ptr_set(tn->backedges, 1, env->caller);
}
else {
// check if the edge is already present and avoid adding a duplicate
- size_t i, l = jl_array_nrows(mt->backedges);
+ size_t i, l = jl_array_nrows(tn->backedges);
// reuse an already cached instance of this type, if possible
// TODO: use jl_cache_type_(tt) like cache_method does, instead of this linear scan?
for (i = 1; i < l; i += 2) {
- if (jl_array_ptr_ref(mt->backedges, i) != (jl_value_t*)caller) {
- if (jl_types_equal(jl_array_ptr_ref(mt->backedges, i - 1), typ)) {
- typ = jl_array_ptr_ref(mt->backedges, i - 1);
+ if (jl_array_ptr_ref(tn->backedges, i) != env->caller) {
+ if (jl_types_equal(jl_array_ptr_ref(tn->backedges, i - 1), env->typ)) {
+ env->typ = jl_array_ptr_ref(tn->backedges, i - 1);
break;
}
}
}
- jl_array_ptr_1d_push(mt->backedges, typ);
- jl_array_ptr_1d_push(mt->backedges, (jl_value_t*)caller);
+ jl_array_ptr_1d_push(tn->backedges, env->typ);
+ jl_array_ptr_1d_push(tn->backedges, env->caller);
}
}
- JL_UNLOCK(&mt->writelock);
+}
+
+// add a backedge from a non-existent signature to caller
+JL_DLLEXPORT void jl_method_table_add_backedge(jl_value_t *typ, jl_code_instance_t *caller)
+{
+ assert(jl_is_code_instance(caller));
+ if (!jl_atomic_load_relaxed(&allow_new_worlds))
+ return;
+ // try to pick the best cache(s) for this typ edge
+ struct _typename_add_backedge env = {typ, (jl_value_t*)caller};
+ jl_methcache_t *mc = jl_method_table->cache;
+ JL_LOCK(&mc->writelock);
+ if (jl_atomic_load_relaxed(&allow_new_worlds))
+ jl_foreach_top_typename_for(_typename_add_backedge, typ, &env);
+ JL_UNLOCK(&mc->writelock);
+}
+
+struct _typename_invalidate_backedge {
+ jl_value_t *type;
+ jl_value_t **isect;
+ jl_value_t **isect2;
+ jl_method_t *const *d;
+ size_t n;
+ size_t max_world;
+ int invalidated;
+};
+
+static void _typename_invalidate_backedges(jl_typename_t *tn, void *env0)
+{
+ struct _typename_invalidate_backedge *env = (struct _typename_invalidate_backedge*)env0;
+ JL_GC_PROMISE_ROOTED(env->type);
+ JL_GC_PROMISE_ROOTED(env->isect); // isJuliaType considers jl_value_t** to be a julia object too
+ JL_GC_PROMISE_ROOTED(env->isect2); // isJuliaType considers jl_value_t** to be a julia object too
+ if (tn->backedges) {
+ jl_value_t **backedges = jl_array_ptr_data(tn->backedges);
+ size_t i, na = jl_array_nrows(tn->backedges);
+ size_t ins = 0;
+ for (i = 1; i < na; i += 2) {
+ jl_value_t *backedgetyp = backedges[i - 1];
+ JL_GC_PROMISE_ROOTED(backedgetyp);
+ int missing = 0;
+ if (jl_type_intersection2(backedgetyp, (jl_value_t*)env->type, env->isect, env->isect2)) {
+ // See if the intersection was actually already fully
+ // covered, but that the new method is ambiguous.
+ // -> no previous method: now there is one, need to update the missing edge
+ // -> one+ previously matching method(s):
+                //  -> more specific than all of them: need to update the missing edge
+ // -> some may have been ambiguous: now there is a replacement
+ // -> some may have been called: now there is a replacement (also will be detected in the loop later)
+ // -> less specific or ambiguous with any one of them: can ignore the missing edge (not missing)
+ // -> some may have been ambiguous: still are
+ // -> some may have been called: they may be partly replaced (will be detected in the loop later)
+ // c.f. `is_replacing`, which is a similar query, but with an existing method match to compare against
+ missing = 1;
+ for (size_t j = 0; j < env->n; j++) {
+ jl_method_t *m = env->d[j];
+ JL_GC_PROMISE_ROOTED(m);
+ if (jl_subtype(*env->isect, m->sig) || (*env->isect2 && jl_subtype(*env->isect2, m->sig))) {
+ // We now know that there actually was a previous
+ // method for this part of the type intersection.
+ if (!jl_type_morespecific(env->type, m->sig)) {
+ missing = 0;
+ break;
+ }
+ }
+ }
+ }
+ *env->isect = *env->isect2 = NULL;
+ if (missing) {
+ jl_code_instance_t *backedge = (jl_code_instance_t*)backedges[i];
+ JL_GC_PROMISE_ROOTED(backedge);
+ invalidate_code_instance(backedge, env->max_world, 0);
+ env->invalidated = 1;
+ if (_jl_debug_method_invalidation)
+ jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)backedgetyp);
+ }
+ else {
+ backedges[ins++] = backedges[i - 1];
+ backedges[ins++] = backedges[i - 0];
+ }
+ }
+ if (ins == 0)
+ tn->backedges = NULL;
+ else
+ jl_array_del_end(tn->backedges, na - ins);
+ }
}
struct invalidate_mt_env {
@@ -2128,35 +2294,36 @@ static jl_typemap_entry_t *do_typemap_search(jl_methtable_t *mt JL_PROPAGATES_RO
return (jl_typemap_entry_t *)closure;
}
-static void jl_method_table_invalidate(jl_methtable_t *mt, jl_method_t *replaced, size_t max_world)
+static void _method_table_invalidate(jl_methcache_t *mc, void *env0)
{
- if (jl_options.incremental && jl_generating_output())
- jl_error("Method deletion is not possible during Module precompile.");
- assert(!replaced->is_for_opaque_closure);
- assert(jl_atomic_load_relaxed(&jl_world_counter) == max_world);
- // drop this method from mt->cache
- struct disable_mt_env mt_cache_env;
- mt_cache_env.max_world = max_world;
- mt_cache_env.replaced = replaced;
- jl_typemap_visitor(jl_atomic_load_relaxed(&mt->cache), disable_mt_cache, (void*)&mt_cache_env);
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
+ // drop this method from mc->cache
+ jl_typemap_visitor(jl_atomic_load_relaxed(&mc->cache), disable_mt_cache, env0);
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
size_t i, l = leafcache->length;
for (i = 1; i < l; i += 2) {
jl_typemap_entry_t *oldentry = (jl_typemap_entry_t*)jl_genericmemory_ptr_ref(leafcache, i);
if (oldentry) {
while ((jl_value_t*)oldentry != jl_nothing) {
- disable_mt_cache(oldentry, (void*)&mt_cache_env);
+ disable_mt_cache(oldentry, env0);
oldentry = jl_atomic_load_relaxed(&oldentry->next);
}
}
}
+}
+
+static void jl_method_table_invalidate(jl_method_t *replaced, size_t max_world)
+{
+ if (jl_options.incremental && jl_generating_output())
+ jl_error("Method deletion is not possible during Module precompile.");
+ assert(!replaced->is_for_opaque_closure);
+ assert(jl_atomic_load_relaxed(&jl_world_counter) == max_world);
// Invalidate the backedges
int invalidated = 0;
jl_value_t *specializations = jl_atomic_load_relaxed(&replaced->specializations);
JL_GC_PUSH1(&specializations);
if (!jl_is_svec(specializations))
specializations = (jl_value_t*)jl_svec1(specializations);
- l = jl_svec_len(specializations);
+ size_t i, l = jl_svec_len(specializations);
for (i = 0; i < l; i++) {
jl_method_instance_t *mi = (jl_method_instance_t*)jl_svecref(specializations, i);
if ((jl_value_t*)mi != jl_nothing) {
@@ -2164,6 +2331,12 @@ static void jl_method_table_invalidate(jl_methtable_t *mt, jl_method_t *replaced
invalidate_backedges(mi, max_world, "jl_method_table_disable");
}
}
+
+ jl_methtable_t *mt = jl_method_get_table(replaced);
+ struct disable_mt_env mt_cache_env;
+ mt_cache_env.max_world = max_world;
+ mt_cache_env.replaced = replaced;
+ _method_table_invalidate(mt->cache, &mt_cache_env);
JL_GC_POP();
// XXX: this might have resolved an ambiguity, for which we have not tracked the edge here,
// and thus now introduce a mistake into inference
@@ -2200,15 +2373,17 @@ static int erase_method_backedges(jl_typemap_entry_t *def, void *closure)
static int erase_all_backedges(jl_methtable_t *mt, void *env)
{
- // removes all method caches
- // this might not be entirely safe (GC or MT), thus we only do it very early in bootstrapping
- JL_LOCK(&mt->writelock);
- mt->backedges = NULL;
- JL_UNLOCK(&mt->writelock);
- jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), erase_method_backedges, env);
+ return jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), erase_method_backedges, env);
+}
+
+static int erase_all_mc_backedges(jl_typename_t *tn, void *env)
+{
+ tn->backedges = NULL;
return 1;
}
+static int jl_foreach_reachable_typename(int (*visit)(jl_typename_t *tn, void *env), jl_array_t *mod_array, void *env);
+
JL_DLLEXPORT void jl_disable_new_worlds(void)
{
if (jl_generating_output())
@@ -2216,59 +2391,39 @@ JL_DLLEXPORT void jl_disable_new_worlds(void)
JL_LOCK(&world_counter_lock);
jl_atomic_store_relaxed(&allow_new_worlds, 0);
JL_UNLOCK(&world_counter_lock);
- jl_foreach_reachable_mtable(erase_all_backedges, (void*)NULL);
+ jl_array_t *mod_array = jl_get_loaded_modules();
+ JL_GC_PUSH1(&mod_array);
+ jl_foreach_reachable_mtable(erase_all_backedges, mod_array, (void*)NULL);
+
+ JL_LOCK(&jl_method_table->cache->writelock);
+ jl_foreach_reachable_typename(erase_all_mc_backedges, mod_array, (void*)NULL);
+ JL_UNLOCK(&jl_method_table->cache->writelock);
+ JL_GC_POP();
}
-JL_DLLEXPORT void jl_method_table_disable(jl_methtable_t *mt, jl_method_t *method)
+JL_DLLEXPORT void jl_method_table_disable(jl_method_t *method)
{
+ jl_methtable_t *mt = jl_method_get_table(method);
jl_typemap_entry_t *methodentry = do_typemap_search(mt, method);
JL_LOCK(&world_counter_lock);
if (!jl_atomic_load_relaxed(&allow_new_worlds))
jl_error("Method changes have been disabled via a call to disable_new_worlds.");
int enabled = jl_atomic_load_relaxed(&methodentry->max_world) == ~(size_t)0;
if (enabled) {
- JL_LOCK(&mt->writelock);
- // Narrow the world age on the method to make it uncallable
+ // Narrow the world age on the method to make it uncallable
size_t world = jl_atomic_load_relaxed(&jl_world_counter);
assert(method == methodentry->func.method);
jl_atomic_store_relaxed(&method->dispatch_status, 0);
assert(jl_atomic_load_relaxed(&methodentry->max_world) == ~(size_t)0);
jl_atomic_store_relaxed(&methodentry->max_world, world);
- jl_method_table_invalidate(mt, method, world);
+ jl_method_table_invalidate(method, world);
jl_atomic_store_release(&jl_world_counter, world + 1);
- JL_UNLOCK(&mt->writelock);
- }
+ }
JL_UNLOCK(&world_counter_lock);
if (!enabled)
jl_errorf("Method of %s already disabled", jl_symbol_name(method->name));
}
-static int jl_type_intersection2(jl_value_t *t1, jl_value_t *t2, jl_value_t **isect JL_REQUIRE_ROOTED_SLOT, jl_value_t **isect2 JL_REQUIRE_ROOTED_SLOT)
-{
- *isect2 = NULL;
- int is_subty = 0;
- *isect = jl_type_intersection_env_s(t1, t2, NULL, &is_subty);
- if (*isect == jl_bottom_type)
- return 0;
- if (is_subty)
- return 1;
- // TODO: sometimes type intersection returns types with free variables
- if (jl_has_free_typevars(t1) || jl_has_free_typevars(t2))
- return 1;
- // determine if type-intersection can be convinced to give a better, non-bad answer
- // if the intersection was imprecise, see if we can do better by switching the types
- *isect2 = jl_type_intersection(t2, t1);
- if (*isect2 == jl_bottom_type) {
- *isect = jl_bottom_type;
- *isect2 = NULL;
- return 0;
- }
- if (jl_types_egal(*isect2, *isect)) {
- *isect2 = NULL;
- }
- return 1;
-}
-
jl_typemap_entry_t *jl_method_table_add(jl_methtable_t *mt, jl_method_t *method, jl_tupletype_t *simpletype)
{
JL_TIMING(ADD_METHOD, ADD_METHOD);
@@ -2277,30 +2432,32 @@ jl_typemap_entry_t *jl_method_table_add(jl_methtable_t *mt, jl_method_t *method,
jl_timing_show_method(method, JL_TIMING_DEFAULT_BLOCK);
jl_typemap_entry_t *newentry = NULL;
JL_GC_PUSH1(&newentry);
- JL_LOCK(&mt->writelock);
// add our new entry
assert(jl_atomic_load_relaxed(&method->primary_world) == ~(size_t)0); // min-world
assert((jl_atomic_load_relaxed(&method->dispatch_status) & METHOD_SIG_LATEST_WHICH) == 0);
assert((jl_atomic_load_relaxed(&method->dispatch_status) & METHOD_SIG_LATEST_ONLY) == 0);
+ JL_LOCK(&mt->cache->writelock);
newentry = jl_typemap_alloc((jl_tupletype_t*)method->sig, simpletype, jl_emptysvec, (jl_value_t*)method, ~(size_t)0, 1);
- jl_typemap_insert(&mt->defs, (jl_value_t*)mt, newentry, jl_cachearg_offset(mt));
- update_max_args(mt, method->sig);
- JL_UNLOCK(&mt->writelock);
+ jl_typemap_insert(&mt->defs, (jl_value_t*)mt, newentry, 0);
+
+ if (mt == jl_method_table)
+ update_max_args(method->sig);
+ JL_UNLOCK(&mt->cache->writelock);
JL_GC_POP();
return newentry;
}
-void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
+void jl_method_table_activate(jl_typemap_entry_t *newentry)
{
JL_TIMING(ADD_METHOD, ADD_METHOD);
jl_method_t *method = newentry->func.method;
+ jl_methtable_t *mt = jl_method_get_table(method);
assert(jl_is_mtable(mt));
assert(jl_is_method(method));
jl_timing_show_method(method, JL_TIMING_DEFAULT_BLOCK);
jl_value_t *type = (jl_value_t*)newentry->sig;
jl_value_t *oldvalue = NULL;
jl_array_t *oldmi = NULL;
- JL_LOCK(&mt->writelock);
size_t world = jl_atomic_load_relaxed(&method->primary_world);
assert(world == jl_atomic_load_relaxed(&jl_world_counter) + 1); // min-world
assert((jl_atomic_load_relaxed(&method->dispatch_status) & METHOD_SIG_LATEST_WHICH) == 0);
@@ -2317,7 +2474,8 @@ void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
JL_GC_PUSH6(&oldvalue, &oldmi, &loctag, &isect, &isect2, &isect3);
jl_typemap_entry_t *replaced = NULL;
// then check what entries we replaced
- oldvalue = get_intersect_matches(jl_atomic_load_relaxed(&mt->defs), newentry, &replaced, jl_cachearg_offset(mt), max_world);
+ oldvalue = get_intersect_matches(jl_atomic_load_relaxed(&mt->defs), newentry, &replaced, max_world);
+
int invalidated = 0;
int only = !(jl_atomic_load_relaxed(&method->dispatch_status) & METHOD_SIG_PRECOMPILE_MANY); // will compute if this will be currently the only result that would returned from `ml_matches` given `sig`
if (replaced) {
@@ -2326,7 +2484,7 @@ void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
invalidated = 1;
method_overwrite(newentry, m);
// this is an optimized version of below, given we know the type-intersection is exact
- jl_method_table_invalidate(mt, m, max_world);
+ jl_method_table_invalidate(m, max_world);
int m_dispatch = jl_atomic_load_relaxed(&m->dispatch_status);
jl_atomic_store_relaxed(&m->dispatch_status, 0);
only = m_dispatch & METHOD_SIG_LATEST_ONLY;
@@ -2342,60 +2500,7 @@ void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
assert(jl_is_array(oldvalue));
d = (jl_method_t**)jl_array_ptr_data(oldvalue);
n = jl_array_nrows(oldvalue);
- }
- if (mt->backedges) {
- jl_value_t **backedges = jl_array_ptr_data(mt->backedges);
- size_t i, na = jl_array_nrows(mt->backedges);
- size_t ins = 0;
- for (i = 1; i < na; i += 2) {
- jl_value_t *backedgetyp = backedges[i - 1];
- JL_GC_PROMISE_ROOTED(backedgetyp);
- int missing = 0;
- if (jl_type_intersection2(backedgetyp, (jl_value_t*)type, &isect, &isect2)) {
- // See if the intersection was actually already fully
- // covered, but that the new method is ambiguous.
- // -> no previous method: now there is one, need to update the missing edge
- // -> one+ previously matching method(s):
- // -> more specific then all of them: need to update the missing edge
- // -> some may have been ambiguous: now there is a replacement
- // -> some may have been called: now there is a replacement (also will be detected in the loop later)
- // -> less specific or ambiguous with any one of them: can ignore the missing edge (not missing)
- // -> some may have been ambiguous: still are
- // -> some may have been called: they may be partly replaced (will be detected in the loop later)
- // c.f. `is_replacing`, which is a similar query, but with an existing method match to compare against
- missing = 1;
- size_t j;
- for (j = 0; j < n; j++) {
- jl_method_t *m = d[j];
- if (jl_subtype(isect, m->sig) || (isect2 && jl_subtype(isect2, m->sig))) {
- // We now know that there actually was a previous
- // method for this part of the type intersection.
- if (!jl_type_morespecific(type, m->sig)) {
- missing = 0;
- break;
- }
- }
- }
- }
- if (missing) {
- jl_code_instance_t *backedge = (jl_code_instance_t*)backedges[i];
- JL_GC_PROMISE_ROOTED(backedge);
- invalidate_code_instance(backedge, max_world, 0);
- invalidated = 1;
- if (_jl_debug_method_invalidation)
- jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)backedgetyp);
- }
- else {
- backedges[ins++] = backedges[i - 1];
- backedges[ins++] = backedges[i - 0];
- }
- }
- if (ins == 0)
- mt->backedges = NULL;
- else
- jl_array_del_end(mt->backedges, na - ins);
- }
- if (oldvalue) {
+
oldmi = jl_alloc_vec_any(0);
char *morespec = (char*)alloca(n);
memset(morespec, morespec_unknown, n);
@@ -2466,29 +2571,37 @@ void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
}
}
}
- if (jl_array_nrows(oldmi)) {
- // search mt->cache and leafcache and drop anything that might overlap with the new method
- // this is very cheap, so we don't mind being fairly conservative at over-approximating this
- struct invalidate_mt_env mt_cache_env;
- mt_cache_env.max_world = max_world;
- mt_cache_env.shadowed = oldmi;
- mt_cache_env.newentry = newentry;
- mt_cache_env.invalidated = 0;
-
- jl_typemap_visitor(jl_atomic_load_relaxed(&mt->cache), invalidate_mt_cache, (void*)&mt_cache_env);
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
- size_t i, l = leafcache->length;
- for (i = 1; i < l; i += 2) {
- jl_value_t *entry = jl_genericmemory_ptr_ref(leafcache, i);
- if (entry) {
- while (entry != jl_nothing) {
- invalidate_mt_cache((jl_typemap_entry_t*)entry, (void*)&mt_cache_env);
- entry = (jl_value_t*)jl_atomic_load_relaxed(&((jl_typemap_entry_t*)entry)->next);
- }
+ }
+
+ jl_methcache_t *mc = jl_method_table->cache;
+ JL_LOCK(&mc->writelock);
+ struct _typename_invalidate_backedge typename_env = {type, &isect, &isect2, d, n, max_world, invalidated};
+ jl_foreach_top_typename_for(_typename_invalidate_backedges, type, &typename_env);
+ invalidated |= typename_env.invalidated;
+ if (oldmi && jl_array_nrows(oldmi)) {
+ // search mc->cache and leafcache and drop anything that might overlap with the new method
+ // this is very cheap, so we don't mind being fairly conservative at over-approximating this
+ struct invalidate_mt_env mt_cache_env;
+ mt_cache_env.max_world = max_world;
+ mt_cache_env.shadowed = oldmi;
+ mt_cache_env.newentry = newentry;
+ mt_cache_env.invalidated = 0;
+
+ jl_typemap_visitor(jl_atomic_load_relaxed(&mc->cache), invalidate_mt_cache, (void*)&mt_cache_env);
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
+ size_t i, l = leafcache->length;
+ for (i = 1; i < l; i += 2) {
+ jl_value_t *entry = jl_genericmemory_ptr_ref(leafcache, i);
+ if (entry) {
+ while (entry != jl_nothing) {
+ invalidate_mt_cache((jl_typemap_entry_t*)entry, (void*)&mt_cache_env);
+ entry = (jl_value_t*)jl_atomic_load_relaxed(&((jl_typemap_entry_t*)entry)->next);
}
}
}
+ invalidated |= mt_cache_env.invalidated;
}
+ JL_UNLOCK(&mc->writelock);
}
if (invalidated && _jl_debug_method_invalidation) {
jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)method);
@@ -2497,7 +2610,6 @@ void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry)
}
jl_atomic_store_relaxed(&newentry->max_world, ~(size_t)0);
jl_atomic_store_relaxed(&method->dispatch_status, METHOD_SIG_LATEST_WHICH | (only ? METHOD_SIG_LATEST_ONLY : 0)); // TODO: this should be sequenced fully after the world counter store
- JL_UNLOCK(&mt->writelock);
JL_GC_POP();
}
@@ -2510,7 +2622,7 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
jl_error("Method changes have been disabled via a call to disable_new_worlds.");
size_t world = jl_atomic_load_relaxed(&jl_world_counter) + 1;
jl_atomic_store_relaxed(&method->primary_world, world);
- jl_method_table_activate(mt, newentry);
+ jl_method_table_activate(newentry);
jl_atomic_store_release(&jl_world_counter, world);
JL_UNLOCK(&world_counter_lock);
JL_GC_POP();
@@ -2562,13 +2674,15 @@ static jl_tupletype_t *lookup_arg_type_tuple(jl_value_t *arg1 JL_PROPAGATES_ROOT
JL_DLLEXPORT jl_value_t *jl_method_lookup_by_tt(jl_tupletype_t *tt, size_t world, jl_value_t *_mt)
{
jl_methtable_t *mt = NULL;
- if (_mt == jl_nothing)
- mt = jl_gf_ft_mtable(jl_tparam0(tt));
+ if (_mt == jl_nothing) {
+ mt = jl_method_table;
+ }
else {
- assert(jl_isa(_mt, (jl_value_t*)jl_methtable_type));
+ assert(jl_is_mtable(_mt));
mt = (jl_methtable_t*) _mt;
}
- jl_method_instance_t* mi = jl_mt_assoc_by_type(mt, tt, world);
+ jl_methcache_t *mc = mt->cache;
+ jl_method_instance_t *mi = jl_mt_assoc_by_type(mc, tt, world);
if (!mi)
return jl_nothing;
return (jl_value_t*) mi;
@@ -2577,13 +2691,13 @@ JL_DLLEXPORT jl_value_t *jl_method_lookup_by_tt(jl_tupletype_t *tt, size_t world
JL_DLLEXPORT jl_method_instance_t *jl_method_lookup(jl_value_t **args, size_t nargs, size_t world)
{
assert(nargs > 0 && "expected caller to handle this case");
- jl_methtable_t *mt = jl_gf_mtable(args[0]);
- jl_typemap_t *cache = jl_atomic_load_relaxed(&mt->cache); // XXX: gc root for this?
- jl_typemap_entry_t *entry = jl_typemap_assoc_exact(cache, args[0], &args[1], nargs, jl_cachearg_offset(mt), world);
+ jl_methcache_t *mc = jl_method_table->cache;
+ jl_typemap_t *cache = jl_atomic_load_relaxed(&mc->cache); // XXX: gc root for this?
+ jl_typemap_entry_t *entry = jl_typemap_assoc_exact(cache, args[0], &args[1], nargs, jl_cachearg_offset(), world);
if (entry)
return entry->func.linfo;
jl_tupletype_t *tt = arg_type_tuple(args[0], &args[1], nargs);
- return jl_mt_assoc_by_type(mt, tt, world);
+ return jl_mt_assoc_by_type(mc, tt, world);
}
// return a Vector{Any} of svecs, each describing a method match:
@@ -2606,10 +2720,9 @@ JL_DLLEXPORT jl_value_t *jl_matching_methods(jl_tupletype_t *types, jl_value_t *
if (unw == (jl_value_t*)jl_emptytuple_type || jl_tparam0(unw) == jl_bottom_type)
return (jl_value_t*)jl_an_empty_vec_any;
if (mt == jl_nothing)
- mt = (jl_value_t*)jl_method_table_for(unw);
- if (mt == jl_nothing)
- mt = NULL;
- return ml_matches((jl_methtable_t*)mt, types, lim, include_ambiguous, 1, world, 1, min_valid, max_valid, ambig);
+ mt = (jl_value_t*)jl_method_table;
+ jl_methcache_t *mc = ((jl_methtable_t*)mt)->cache;
+ return ml_matches((jl_methtable_t*)mt, mc, types, lim, include_ambiguous, 1, world, 1, min_valid, max_valid, ambig);
}
JL_DLLEXPORT jl_method_instance_t *jl_get_unspecialized(jl_method_t *def JL_PROPAGATES_ROOT)
@@ -3133,14 +3246,13 @@ JL_DLLEXPORT int32_t jl_invoke_api(jl_code_instance_t *codeinst)
return -1;
}
-JL_DLLEXPORT jl_value_t *jl_normalize_to_compilable_sig(jl_methtable_t *mt, jl_tupletype_t *ti, jl_svec_t *env, jl_method_t *m,
+JL_DLLEXPORT jl_value_t *jl_normalize_to_compilable_sig(jl_tupletype_t *ti, jl_svec_t *env, jl_method_t *m,
int return_if_compileable)
{
jl_tupletype_t *tt = NULL;
jl_svec_t *newparams = NULL;
JL_GC_PUSH2(&tt, &newparams);
- jl_methtable_t *kwmt = mt == jl_kwcall_mt ? jl_kwmethod_table_for(m->sig) : mt;
- intptr_t max_varargs = get_max_varargs(m, kwmt, mt, NULL);
+ intptr_t max_varargs = get_max_varargs(m, NULL);
jl_compilation_sig(ti, env, m, max_varargs, &newparams);
int is_compileable = ((jl_datatype_t*)ti)->isdispatchtuple;
if (newparams) {
@@ -3166,10 +3278,7 @@ jl_method_instance_t *jl_normalize_to_compilable_mi(jl_method_instance_t *mi JL_
jl_method_t *def = mi->def.method;
if (!jl_is_method(def) || !jl_is_datatype(mi->specTypes))
return mi;
- jl_methtable_t *mt = jl_method_get_table(def);
- if ((jl_value_t*)mt == jl_nothing)
- return mi;
- jl_value_t *compilationsig = jl_normalize_to_compilable_sig(mt, (jl_datatype_t*)mi->specTypes, mi->sparam_vals, def, 1);
+ jl_value_t *compilationsig = jl_normalize_to_compilable_sig((jl_datatype_t*)mi->specTypes, mi->sparam_vals, def, 1);
if (compilationsig == jl_nothing || jl_egal(compilationsig, mi->specTypes))
return mi;
jl_svec_t *env = NULL;
@@ -3190,30 +3299,28 @@ JL_DLLEXPORT jl_method_instance_t *jl_method_match_to_mi(jl_method_match_t *matc
jl_tupletype_t *ti = match->spec_types;
jl_method_instance_t *mi = NULL;
if (jl_is_datatype(ti)) {
- jl_methtable_t *mt = jl_method_get_table(m);
- assert(mt != NULL);
- if ((jl_value_t*)mt != jl_nothing) {
- // get the specialization, possibly also caching it
- if (mt_cache && ((jl_datatype_t*)ti)->isdispatchtuple) {
- // Since we also use this presence in the cache
- // to trigger compilation when producing `.ji` files,
- // inject it there now if we think it will be
- // used via dispatch later (e.g. because it was hinted via a call to `precompile`)
- JL_LOCK(&mt->writelock);
- mi = cache_method(mt, &mt->cache, (jl_value_t*)mt, ti, m, world, min_valid, max_valid, env);
- JL_UNLOCK(&mt->writelock);
- }
- else {
- jl_value_t *tt = jl_normalize_to_compilable_sig(mt, ti, env, m, 1);
- if (tt != jl_nothing) {
- JL_GC_PUSH2(&tt, &env);
- if (!jl_egal(tt, (jl_value_t*)ti)) {
- jl_value_t *ti = jl_type_intersection_env((jl_value_t*)tt, (jl_value_t*)m->sig, &env);
- assert(ti != jl_bottom_type); (void)ti;
- }
- mi = jl_specializations_get_linfo(m, (jl_value_t*)tt, env);
- JL_GC_POP();
+ // get the specialization, possibly also caching it
+ if (mt_cache && ((jl_datatype_t*)ti)->isdispatchtuple) {
+ // Since we also use this presence in the cache
+ // to trigger compilation when producing `.ji` files,
+ // inject it there now if we think it will be
+ // used via dispatch later (e.g. because it was hinted via a call to `precompile`)
+ jl_methcache_t *mc = jl_method_table->cache;
+ assert(mc);
+ JL_LOCK(&mc->writelock);
+ mi = cache_method(jl_method_get_table(m), mc, &mc->cache, (jl_value_t*)mc, ti, m, world, min_valid, max_valid, env);
+ JL_UNLOCK(&mc->writelock);
+ }
+ else {
+ jl_value_t *tt = jl_normalize_to_compilable_sig(ti, env, m, 1);
+ if (tt != jl_nothing) {
+ JL_GC_PUSH2(&tt, &env);
+ if (!jl_egal(tt, (jl_value_t*)ti)) {
+ jl_value_t *ti = jl_type_intersection_env((jl_value_t*)tt, (jl_value_t*)m->sig, &env);
+ assert(ti != jl_bottom_type); (void)ti;
}
+ mi = jl_specializations_get_linfo(m, (jl_value_t*)tt, env);
+ JL_GC_POP();
}
}
}
@@ -3582,7 +3689,6 @@ STATIC_INLINE jl_method_instance_t *jl_lookup_generic_(jl_value_t *F, jl_value_t
(callsite >> 16) & (N_CALL_CACHE - 1),
(callsite >> 24 | callsite << 8) & (N_CALL_CACHE - 1)};
jl_typemap_entry_t *entry = NULL;
- jl_methtable_t *mt = NULL;
int i;
// check each cache entry to see if it matches
//#pragma unroll
@@ -3609,8 +3715,8 @@ STATIC_INLINE jl_method_instance_t *jl_lookup_generic_(jl_value_t *F, jl_value_t
if (i == 4) {
// if no method was found in the associative cache, check the full cache
JL_TIMING(METHOD_LOOKUP_FAST, METHOD_LOOKUP_FAST);
- mt = jl_gf_mtable(F);
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
+ jl_methcache_t *mc = jl_method_table->cache;
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
entry = NULL;
int cache_entry_count = jl_atomic_load_relaxed(&((jl_datatype_t*)FT)->name->cache_entry_count);
if (leafcache != (jl_genericmemory_t*)jl_an_empty_memory_any && (cache_entry_count == 0 || cache_entry_count >= 8)) {
@@ -3620,8 +3726,8 @@ STATIC_INLINE jl_method_instance_t *jl_lookup_generic_(jl_value_t *F, jl_value_t
entry = lookup_leafcache(leafcache, (jl_value_t*)tt, world);
}
if (entry == NULL) {
- jl_typemap_t *cache = jl_atomic_load_relaxed(&mt->cache); // XXX: gc root required?
- entry = jl_typemap_assoc_exact(cache, F, args, nargs, jl_cachearg_offset(mt), world);
+ jl_typemap_t *cache = jl_atomic_load_relaxed(&mc->cache); // XXX: gc root required?
+ entry = jl_typemap_assoc_exact(cache, F, args, nargs, jl_cachearg_offset(), world);
if (entry == NULL) {
last_alloc = jl_options.malloc_log ? jl_gc_diff_total_bytes() : 0;
if (tt == NULL) {
@@ -3649,7 +3755,8 @@ STATIC_INLINE jl_method_instance_t *jl_lookup_generic_(jl_value_t *F, jl_value_t
else {
assert(tt);
// cache miss case
- mfunc = jl_mt_assoc_by_type(mt, tt, world);
+ jl_methcache_t *mc = jl_method_table->cache;
+ mfunc = jl_mt_assoc_by_type(mc, tt, world);
if (jl_options.malloc_log)
jl_gc_sync_total_bytes(last_alloc); // discard allocation count from compilation
if (mfunc == NULL) {
@@ -3690,18 +3797,15 @@ JL_DLLEXPORT jl_value_t *jl_apply_generic(jl_value_t *F, jl_value_t **args, uint
return _jl_invoke(F, args, nargs, mfunc, world);
}
-static jl_method_match_t *_gf_invoke_lookup(jl_value_t *types JL_PROPAGATES_ROOT, jl_value_t *mt, size_t world, size_t *min_valid, size_t *max_valid)
+static jl_method_match_t *_gf_invoke_lookup(jl_value_t *types JL_PROPAGATES_ROOT, jl_methtable_t *mt, size_t world, size_t *min_valid, size_t *max_valid)
{
jl_value_t *unw = jl_unwrap_unionall((jl_value_t*)types);
if (!jl_is_tuple_type(unw))
return NULL;
if (jl_tparam0(unw) == jl_bottom_type)
return NULL;
- if (mt == jl_nothing)
- mt = (jl_value_t*)jl_method_table_for(unw);
- if (mt == jl_nothing)
- mt = NULL;
- jl_value_t *matches = ml_matches((jl_methtable_t*)mt, (jl_tupletype_t*)types, 1, 0, 0, world, 1, min_valid, max_valid, NULL);
+ jl_methcache_t *mc = ((jl_methtable_t*)mt)->cache;
+ jl_value_t *matches = ml_matches((jl_methtable_t*)mt, mc, (jl_tupletype_t*)types, 1, 0, 0, world, 1, min_valid, max_valid, NULL);
if (matches == jl_nothing || jl_array_nrows(matches) != 1)
return NULL;
jl_method_match_t *matc = (jl_method_match_t*)jl_array_ptr_ref(matches, 0);
@@ -3713,7 +3817,9 @@ JL_DLLEXPORT jl_value_t *jl_gf_invoke_lookup(jl_value_t *types, jl_value_t *mt,
// Deprecated: Use jl_gf_invoke_lookup_worlds for future development
size_t min_valid = 0;
size_t max_valid = ~(size_t)0;
- jl_method_match_t *matc = _gf_invoke_lookup(types, mt, world, &min_valid, &max_valid);
+ if (mt == jl_nothing)
+ mt = (jl_value_t*)jl_method_table;
+ jl_method_match_t *matc = _gf_invoke_lookup(types, (jl_methtable_t*)mt, world, &min_valid, &max_valid);
if (matc == NULL)
return jl_nothing;
return (jl_value_t*)matc->method;
@@ -3722,7 +3828,9 @@ JL_DLLEXPORT jl_value_t *jl_gf_invoke_lookup(jl_value_t *types, jl_value_t *mt,
JL_DLLEXPORT jl_value_t *jl_gf_invoke_lookup_worlds(jl_value_t *types, jl_value_t *mt, size_t world, size_t *min_world, size_t *max_world)
{
- jl_method_match_t *matc = _gf_invoke_lookup(types, mt, world, min_world, max_world);
+ if (mt == jl_nothing)
+ mt = (jl_value_t*)jl_method_table;
+ jl_method_match_t *matc = _gf_invoke_lookup(types, (jl_methtable_t*)mt, world, min_world, max_world);
if (matc == NULL)
return jl_nothing;
return (jl_value_t*)matc;
@@ -3785,7 +3893,7 @@ jl_value_t *jl_gf_invoke_by_method(jl_method_t *method, jl_value_t *gf, jl_value
assert(sub); (void)sub;
}
- mfunc = cache_method(NULL, &method->invokes, (jl_value_t*)method, tt, method, 1, 1, ~(size_t)0, tpenv);
+ mfunc = cache_method(NULL, NULL, &method->invokes, (jl_value_t*)method, tt, method, 1, 1, ~(size_t)0, tpenv);
}
JL_UNLOCK(&method->writelock);
JL_GC_POP();
@@ -3829,8 +3937,8 @@ jl_function_t *jl_new_generic_function_with_supertype(jl_sym_t *name, jl_module_
0, 0, 0);
assert(jl_is_datatype(ftype));
JL_GC_PUSH1(&ftype);
- ftype->name->mt->name = name;
- jl_gc_wb(ftype->name->mt, name);
+ ftype->name->singletonname = name;
+ jl_gc_wb(ftype->name, name);
jl_declare_constant_val3(NULL, module, tname, (jl_value_t*)ftype, PARTITION_KIND_CONST, new_world);
jl_value_t *f = jl_new_struct(ftype);
ftype->instance = f;
@@ -3931,7 +4039,7 @@ static int ml_matches_visitor(jl_typemap_entry_t *ml, struct typemap_intersectio
static int ml_mtable_visitor(jl_methtable_t *mt, void *closure0)
{
struct typemap_intersection_env* env = (struct typemap_intersection_env*)closure0;
- return jl_typemap_intersection_visitor(jl_atomic_load_relaxed(&mt->defs), jl_cachearg_offset(mt), env);
+ return jl_typemap_intersection_visitor(jl_atomic_load_relaxed(&mt->defs), 0, env);
}
// Visit the candidate methods, starting from t[idx], to determine a possible valid sort ordering,
@@ -4214,7 +4322,7 @@ static int sort_mlmatches(jl_array_t *t, size_t idx, arraylist_t *visited, array
// fully-covers is a Bool indicating subtyping, though temporarily it may be
// tri-values, with `nothing` indicating a match that is not a subtype, but
// which is dominated by one which is (and thus should be excluded unless ambiguous)
-static jl_value_t *ml_matches(jl_methtable_t *mt,
+static jl_value_t *ml_matches(jl_methtable_t *mt, jl_methcache_t *mc,
jl_tupletype_t *type, int lim, int include_ambiguous,
int intersections, size_t world, int cache_result,
size_t *min_valid, size_t *max_valid, int *ambig)
@@ -4243,10 +4351,10 @@ static jl_value_t *ml_matches(jl_methtable_t *mt,
jl_value_t *isect2 = NULL;
JL_GC_PUSH6(&env.t, &env.matc, &env.match.env, &search.env, &env.match.ti, &isect2);
- if (mt) {
+ if (mc) {
// check the leaf cache if this type can be in there
if (((jl_datatype_t*)unw)->isdispatchtuple) {
- jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mt->leafcache);
+ jl_genericmemory_t *leafcache = jl_atomic_load_relaxed(&mc->leafcache);
jl_typemap_entry_t *entry = lookup_leafcache(leafcache, (jl_value_t*)type, world);
if (entry) {
jl_method_instance_t *mi = entry->func.linfo;
@@ -4279,7 +4387,7 @@ static jl_value_t *ml_matches(jl_methtable_t *mt,
}
// then check the full cache if it seems profitable
if (((jl_datatype_t*)unw)->isdispatchtuple) {
- jl_typemap_entry_t *entry = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&mt->cache), &search, jl_cachearg_offset(mt), /*subtype*/1);
+ jl_typemap_entry_t *entry = jl_typemap_assoc_by_type(jl_atomic_load_relaxed(&mc->cache), &search, jl_cachearg_offset(), /*subtype*/1);
if (entry && (((jl_datatype_t*)unw)->isdispatchtuple || entry->guardsigs == jl_emptysvec)) {
jl_method_instance_t *mi = entry->func.linfo;
jl_method_t *meth = mi->def.method;
@@ -4305,23 +4413,14 @@ static jl_value_t *ml_matches(jl_methtable_t *mt,
return env.t;
}
}
- if (!ml_mtable_visitor(mt, &env.match) && env.t == jl_an_empty_vec_any) {
- JL_GC_POP();
- // if we return early without returning methods, set only the min/max valid collected from matching
- *min_valid = env.match.min_valid;
- *max_valid = env.match.max_valid;
- return jl_nothing;
- }
}
- else {
- // else: scan everything
- if (!jl_foreach_reachable_mtable(ml_mtable_visitor, &env.match) && env.t == jl_an_empty_vec_any) {
- JL_GC_POP();
- // if we return early without returning methods, set only the min/max valid collected from matching
- *min_valid = env.match.min_valid;
- *max_valid = env.match.max_valid;
- return jl_nothing;
- }
+ // then scan everything
+ if (!ml_mtable_visitor(mt, &env.match) && env.t == jl_an_empty_vec_any) {
+ JL_GC_POP();
+ // if we return early without returning methods, set only the min/max valid collected from matching
+ *min_valid = env.match.min_valid;
+ *max_valid = env.match.max_valid;
+ return jl_nothing;
}
// if we return early, set only the min/max valid collected from matching
*min_valid = env.match.min_valid;
@@ -4569,14 +4668,14 @@ static jl_value_t *ml_matches(jl_methtable_t *mt,
if (env.match.min_valid < min_world)
env.match.min_valid = min_world;
}
- if (mt && cache_result && ((jl_datatype_t*)unw)->isdispatchtuple) { // cache_result parameter keeps this from being recursive
+ if (mc && cache_result && ((jl_datatype_t*)unw)->isdispatchtuple) { // cache_result parameter keeps this from being recursive
if (len == 1 && !has_ambiguity) {
env.matc = (jl_method_match_t*)jl_array_ptr_ref(env.t, 0);
jl_method_t *meth = env.matc->method;
jl_svec_t *tpenv = env.matc->sparams;
- JL_LOCK(&mt->writelock);
- cache_method(mt, &mt->cache, (jl_value_t*)mt, (jl_tupletype_t*)unw, meth, world, env.match.min_valid, env.match.max_valid, tpenv);
- JL_UNLOCK(&mt->writelock);
+ JL_LOCK(&mc->writelock);
+ cache_method(mt, mc, &mc->cache, (jl_value_t*)mc, (jl_tupletype_t*)unw, meth, world, env.match.min_valid, env.match.max_valid, tpenv);
+ JL_UNLOCK(&mc->writelock);
}
}
*min_valid = env.match.min_valid;
@@ -4654,7 +4753,7 @@ JL_DLLEXPORT void jl_extern_c(jl_value_t *name, jl_value_t *declrt, jl_tupletype
}
// save a record of this so that the alias is generated when we write an object file
- jl_method_t *meth = (jl_method_t*)jl_methtable_lookup(ft->name->mt, (jl_value_t*)sigt, jl_atomic_load_acquire(&jl_world_counter));
+ jl_method_t *meth = (jl_method_t*)jl_methtable_lookup((jl_value_t*)sigt, jl_atomic_load_acquire(&jl_world_counter));
if (!jl_is_method(meth))
jl_error("@ccallable: could not find requested method");
JL_GC_PUSH1(&meth);
diff --git a/src/interpreter.c b/src/interpreter.c
index 2f7f9f8947576..97c37cb99b9c1 100644
--- a/src/interpreter.c
+++ b/src/interpreter.c
@@ -101,9 +101,8 @@ static jl_value_t *eval_methoddef(jl_expr_t *ex, interpreter_state *s)
fname = eval_value(args[0], s);
jl_methtable_t *mt = NULL;
- if (jl_typetagis(fname, jl_methtable_type)) {
+ if (jl_is_mtable(fname))
mt = (jl_methtable_t*)fname;
- }
atypes = eval_value(args[1], s);
meth = eval_value(args[2], s);
jl_method_t *ret = jl_method_def((jl_svec_t*)atypes, mt, (jl_code_info_t*)meth, s->module);
diff --git a/src/ircode.c b/src/ircode.c
index ddd5bb29fdfac..9a94c4c62431a 100644
--- a/src/ircode.c
+++ b/src/ircode.c
@@ -1616,14 +1616,12 @@ void jl_init_serializer(void)
jl_densearray_type, jl_function_type, jl_typename_type,
jl_builtin_type, jl_task_type, jl_uniontype_type,
jl_array_any_type, jl_intrinsic_type,
- jl_methtable_type, jl_typemap_level_type,
jl_voidpointer_type, jl_newvarnode_type, jl_abstractstring_type,
jl_array_symbol_type, jl_anytuple_type, jl_tparam0(jl_anytuple_type),
jl_emptytuple_type, jl_array_uint8_type, jl_array_uint32_type, jl_code_info_type,
jl_typeofbottom_type, jl_typeofbottom_type->super,
jl_namedtuple_type, jl_array_int32_type,
jl_uint32_type, jl_uint64_type,
- jl_type_type_mt, jl_nonfunction_mt,
jl_opaque_closure_type,
jl_memory_any_type,
jl_memory_uint8_type,
diff --git a/src/jl_exported_data.inc b/src/jl_exported_data.inc
index df3b9c121837c..76e8368132424 100644
--- a/src/jl_exported_data.inc
+++ b/src/jl_exported_data.inc
@@ -65,7 +65,7 @@
XX(jl_interconditional_type) \
XX(jl_interrupt_exception) \
XX(jl_intrinsic_type) \
- XX(jl_kwcall_func) \
+ XX(jl_kwcall_type) \
XX(jl_libdl_module) \
XX(jl_libdl_dlopen_func) \
XX(jl_lineinfonode_type) \
@@ -91,13 +91,13 @@
XX(jl_method_match_type) \
XX(jl_method_type) \
XX(jl_methtable_type) \
+ XX(jl_methcache_type) \
XX(jl_missingcodeerror_type) \
XX(jl_module_type) \
XX(jl_n_threads_per_pool) \
XX(jl_namedtuple_type) \
XX(jl_namedtuple_typename) \
XX(jl_newvarnode_type) \
- XX(jl_nonfunction_mt) \
XX(jl_nothing) \
XX(jl_nothing_type) \
XX(jl_number_type) \
@@ -135,7 +135,6 @@
XX(jl_typename_type) \
XX(jl_typeofbottom_type) \
XX(jl_type_type) \
- XX(jl_type_type_mt) \
XX(jl_type_typename) \
XX(jl_uint16_type) \
XX(jl_uint32_type) \
diff --git a/src/jltypes.c b/src/jltypes.c
index a3d10a87b8091..fcb15a1a9a69d 100644
--- a/src/jltypes.c
+++ b/src/jltypes.c
@@ -2954,7 +2954,9 @@ void jl_init_types(void) JL_GC_DISABLED
XX(symbol);
jl_simplevector_type = jl_new_uninitialized_datatype();
XX(simplevector);
+ jl_methcache_type = jl_new_uninitialized_datatype();
jl_methtable_type = jl_new_uninitialized_datatype();
+ jl_method_table = jl_new_method_table(jl_symbol("GlobalMethods"), core);
jl_emptysvec = (jl_svec_t*)jl_gc_permobj(sizeof(void*), jl_simplevector_type, 0);
jl_set_typetagof(jl_emptysvec, jl_simplevector_tag, GC_OLD_MARKED);
@@ -2962,14 +2964,10 @@ void jl_init_types(void) JL_GC_DISABLED
jl_any_type = (jl_datatype_t*)jl_new_abstracttype((jl_value_t*)jl_symbol("Any"), core, NULL, jl_emptysvec);
jl_any_type->super = jl_any_type;
- jl_nonfunction_mt = jl_any_type->name->mt;
- jl_any_type->name->mt = NULL;
jl_datatype_t *type_type = jl_new_abstracttype((jl_value_t*)jl_symbol("Type"), core, jl_any_type, jl_emptysvec);
jl_type_type = (jl_unionall_t*)type_type;
jl_type_typename = type_type->name;
- jl_type_type_mt = jl_new_method_table(jl_type_typename->name, core);
- jl_type_typename->mt = jl_type_type_mt;
// initialize them. lots of cycles.
// NOTE: types are not actually mutable, but we want to ensure they are heap-allocated with stable addresses
@@ -3004,56 +3002,60 @@ void jl_init_types(void) JL_GC_DISABLED
jl_typename_type->name = jl_new_typename_in(jl_symbol("TypeName"), core, 0, 1);
jl_typename_type->name->wrapper = (jl_value_t*)jl_typename_type;
- jl_typename_type->name->mt = jl_nonfunction_mt;
jl_typename_type->super = jl_any_type;
jl_typename_type->parameters = jl_emptysvec;
- jl_typename_type->name->n_uninitialized = 17 - 2;
- jl_typename_type->name->names = jl_perm_symsvec(17, "name", "module",
+ jl_typename_type->name->n_uninitialized = 19 - 2;
+ jl_typename_type->name->names = jl_perm_symsvec(19, "name", "module", "singletonname",
"names", "atomicfields", "constfields",
"wrapper", "Typeofwrapper", "cache", "linearcache",
- "mt", "partial",
- "hash", "n_uninitialized",
+ "backedges", "partial",
+ "hash", "max_args", "n_uninitialized",
"flags", // "abstract", "mutable", "mayinlinealloc",
"cache_entry_count", "max_methods", "constprop_heuristic");
- const static uint32_t typename_constfields[1] = { 0b00011101000100111 }; // TODO: put back atomicfields and constfields in this list
- const static uint32_t typename_atomicfields[1] = { 0b00100000110000000 };
+ const static uint32_t typename_constfields[1] = { 0b0001101000001001011 }; // TODO: put back atomicfields and constfields in this list
+ const static uint32_t typename_atomicfields[1] = { 0b0010010001110000000 };
jl_typename_type->name->constfields = typename_constfields;
jl_typename_type->name->atomicfields = typename_atomicfields;
jl_precompute_memoized_dt(jl_typename_type, 1);
- jl_typename_type->types = jl_svec(17, jl_symbol_type, jl_any_type /*jl_module_type*/,
- jl_simplevector_type, jl_any_type/*jl_voidpointer_type*/, jl_any_type/*jl_voidpointer_type*/,
+ jl_typename_type->types = jl_svec(19, jl_symbol_type, jl_any_type /*jl_module_type*/, jl_symbol_type,
+ jl_simplevector_type,
+ jl_any_type/*jl_voidpointer_type*/, jl_any_type/*jl_voidpointer_type*/,
jl_type_type, jl_type_type, jl_simplevector_type, jl_simplevector_type,
- jl_methtable_type, jl_any_type,
- jl_any_type /*jl_long_type*/, jl_any_type /*jl_int32_type*/,
+ jl_methcache_type, jl_any_type,
+ jl_any_type /*jl_long_type*/,
+ jl_any_type /*jl_int32_type*/,
+ jl_any_type /*jl_int32_type*/,
jl_any_type /*jl_uint8_type*/,
jl_any_type /*jl_uint8_type*/,
jl_any_type /*jl_uint8_type*/,
jl_any_type /*jl_uint8_type*/);
+ jl_methcache_type->name = jl_new_typename_in(jl_symbol("MethodCache"), core, 0, 1);
+ jl_methcache_type->name->wrapper = (jl_value_t*)jl_methcache_type;
+ jl_methcache_type->super = jl_any_type;
+ jl_methcache_type->parameters = jl_emptysvec;
+ jl_methcache_type->name->n_uninitialized = 4 - 2;
+ jl_methcache_type->name->names = jl_perm_symsvec(4, "leafcache", "cache", "", "");
+ const static uint32_t methcache_atomicfields[1] = { 0b1111 };
+ jl_methcache_type->name->atomicfields = methcache_atomicfields;
+ jl_precompute_memoized_dt(jl_methcache_type, 1);
+ jl_methcache_type->types = jl_svec(4, jl_any_type, jl_any_type, jl_any_type/*voidpointer*/, jl_any_type/*int32*/);
+
jl_methtable_type->name = jl_new_typename_in(jl_symbol("MethodTable"), core, 0, 1);
jl_methtable_type->name->wrapper = (jl_value_t*)jl_methtable_type;
- jl_methtable_type->name->mt = jl_nonfunction_mt;
jl_methtable_type->super = jl_any_type;
jl_methtable_type->parameters = jl_emptysvec;
- jl_methtable_type->name->n_uninitialized = 11 - 6;
- jl_methtable_type->name->names = jl_perm_symsvec(11, "name", "defs",
- "leafcache", "cache", "max_args",
- "module", "backedges",
- "", "", "offs", "");
- const static uint32_t methtable_constfields[1] = { 0x00000020 }; // (1<<5);
- const static uint32_t methtable_atomicfields[1] = { 0x0000001e }; // (1<<1)|(1<<2)|(1<<3)|(1<<4);
+ jl_methtable_type->name->n_uninitialized = 0;
+ jl_methtable_type->name->names = jl_perm_symsvec(4, "defs", "cache", "name", "module");
+ const static uint32_t methtable_constfields[1] = { 0b1110 };
+ const static uint32_t methtable_atomicfields[1] = { 0b0001 };
jl_methtable_type->name->constfields = methtable_constfields;
jl_methtable_type->name->atomicfields = methtable_atomicfields;
jl_precompute_memoized_dt(jl_methtable_type, 1);
- jl_methtable_type->types = jl_svec(11, jl_symbol_type, jl_any_type, jl_any_type,
- jl_any_type, jl_any_type/*jl_long*/,
- jl_any_type/*module*/, jl_any_type/*any vector*/,
- jl_any_type/*voidpointer*/, jl_any_type/*int32*/,
- jl_any_type/*uint8*/, jl_any_type/*uint8*/);
+ jl_methtable_type->types = jl_svec(4, jl_any_type, jl_methcache_type, jl_symbol_type, jl_any_type /*jl_module_type*/);
jl_symbol_type->name = jl_new_typename_in(jl_symbol("Symbol"), core, 0, 1);
jl_symbol_type->name->wrapper = (jl_value_t*)jl_symbol_type;
- jl_symbol_type->name->mt = jl_nonfunction_mt;
jl_symbol_type->super = jl_any_type;
jl_symbol_type->parameters = jl_emptysvec;
jl_symbol_type->name->n_uninitialized = 0;
@@ -3063,7 +3065,6 @@ void jl_init_types(void) JL_GC_DISABLED
jl_simplevector_type->name = jl_new_typename_in(jl_symbol("SimpleVector"), core, 0, 1);
jl_simplevector_type->name->wrapper = (jl_value_t*)jl_simplevector_type;
- jl_simplevector_type->name->mt = jl_nonfunction_mt;
jl_simplevector_type->super = jl_any_type;
jl_simplevector_type->parameters = jl_emptysvec;
jl_simplevector_type->name->n_uninitialized = 0;
@@ -3249,8 +3250,6 @@ void jl_init_types(void) JL_GC_DISABLED
jl_function_type = jl_new_abstracttype((jl_value_t*)jl_symbol("Function"), core, jl_any_type, jl_emptysvec);
jl_builtin_type = jl_new_abstracttype((jl_value_t*)jl_symbol("Builtin"), core, jl_function_type, jl_emptysvec);
- jl_function_type->name->mt = NULL; // subtypes of Function have independent method tables
- jl_builtin_type->name->mt = NULL; // so they don't share the Any type table
jl_svec_t *tv;
@@ -3289,10 +3288,7 @@ void jl_init_types(void) JL_GC_DISABLED
jl_svec(3, jl_module_type, jl_symbol_type, jl_binding_type),
jl_emptysvec, 0, 0, 3);
- core = jl_new_module(jl_symbol("Core"), NULL);
- jl_type_typename->mt->module = core;
- jl_core_module = core;
- core = NULL; // not actually ready yet to use
+ jl_core_module = jl_new_module(jl_symbol("Core"), NULL);
tv = jl_svec1(tvar("Backend"));
jl_addrspace_typename =
@@ -3376,11 +3372,11 @@ void jl_init_types(void) JL_GC_DISABLED
jl_array_uint64_type = jl_apply_type2((jl_value_t*)jl_array_type, (jl_value_t*)jl_uint64_type, jl_box_long(1));
jl_an_empty_vec_any = (jl_value_t*)jl_alloc_vec_any(0); // used internally
jl_an_empty_memory_any = (jl_value_t*)jl_alloc_memory_any(0); // used internally
- jl_atomic_store_relaxed(&jl_nonfunction_mt->leafcache, (jl_genericmemory_t*)jl_an_empty_memory_any);
- jl_atomic_store_relaxed(&jl_type_type_mt->leafcache, (jl_genericmemory_t*)jl_an_empty_memory_any);
// finish initializing module Core
core = jl_core_module;
+ jl_method_table->module = core;
+ jl_atomic_store_relaxed(&jl_method_table->cache->leafcache, (jl_genericmemory_t*)jl_an_empty_memory_any);
jl_atomic_store_relaxed(&core->bindingkeyset, (jl_genericmemory_t*)jl_an_empty_memory_any);
// export own name, so "using Foo" makes "Foo" itself visible
jl_set_initial_const(core, core->name, (jl_value_t*)core, 1);
@@ -3718,13 +3714,6 @@ void jl_init_types(void) JL_GC_DISABLED
jl_svec(4, jl_type_type, jl_simplevector_type, jl_method_type, jl_bool_type),
jl_emptysvec, 0, 0, 4);
- // all Kinds share the Type method table (not the nonfunction one)
- jl_unionall_type->name->mt =
- jl_uniontype_type->name->mt =
- jl_datatype_type->name->mt =
- jl_typeofbottom_type->name->mt =
- jl_type_type_mt;
-
jl_intrinsic_type = jl_new_primitivetype((jl_value_t*)jl_symbol("IntrinsicFunction"), core,
jl_builtin_type, jl_emptysvec, 32);
@@ -3848,23 +3837,20 @@ void jl_init_types(void) JL_GC_DISABLED
jl_svecset(jl_datatype_type->types, 6, jl_int32_type);
jl_svecset(jl_datatype_type->types, 7, jl_uint16_type);
jl_svecset(jl_typename_type->types, 1, jl_module_type);
- jl_svecset(jl_typename_type->types, 3, jl_voidpointer_type);
jl_svecset(jl_typename_type->types, 4, jl_voidpointer_type);
- jl_svecset(jl_typename_type->types, 5, jl_type_type);
+ jl_svecset(jl_typename_type->types, 5, jl_voidpointer_type);
jl_svecset(jl_typename_type->types, 6, jl_type_type);
- jl_svecset(jl_typename_type->types, 11, jl_long_type);
- jl_svecset(jl_typename_type->types, 12, jl_int32_type);
- jl_svecset(jl_typename_type->types, 13, jl_uint8_type);
- jl_svecset(jl_typename_type->types, 14, jl_uint8_type);
+ jl_svecset(jl_typename_type->types, 7, jl_type_type);
+ jl_svecset(jl_typename_type->types, 12, jl_long_type);
+ jl_svecset(jl_typename_type->types, 13, jl_int32_type);
+ jl_svecset(jl_typename_type->types, 14, jl_int32_type);
jl_svecset(jl_typename_type->types, 15, jl_uint8_type);
jl_svecset(jl_typename_type->types, 16, jl_uint8_type);
- jl_svecset(jl_methtable_type->types, 4, jl_long_type);
- jl_svecset(jl_methtable_type->types, 5, jl_module_type);
- jl_svecset(jl_methtable_type->types, 6, jl_array_any_type);
- jl_svecset(jl_methtable_type->types, 7, jl_long_type); // voidpointer
- jl_svecset(jl_methtable_type->types, 8, jl_long_type); // uint32_t plus alignment
- jl_svecset(jl_methtable_type->types, 9, jl_uint8_type);
- jl_svecset(jl_methtable_type->types, 10, jl_uint8_type);
+ jl_svecset(jl_typename_type->types, 17, jl_uint8_type);
+ jl_svecset(jl_typename_type->types, 18, jl_uint8_type);
+ jl_svecset(jl_methcache_type->types, 2, jl_long_type); // voidpointer
+ jl_svecset(jl_methcache_type->types, 3, jl_long_type); // uint32_t plus alignment
+ jl_svecset(jl_methtable_type->types, 3, jl_module_type);
jl_svecset(jl_method_type->types, 13, jl_method_instance_type);
//jl_svecset(jl_debuginfo_type->types, 0, jl_method_instance_type); // union(jl_method_instance_type, jl_method_type, jl_symbol_type)
jl_svecset(jl_method_instance_type->types, 4, jl_code_instance_type);
@@ -3879,6 +3865,7 @@ void jl_init_types(void) JL_GC_DISABLED
jl_compute_field_offsets(jl_uniontype_type);
jl_compute_field_offsets(jl_tvar_type);
jl_compute_field_offsets(jl_methtable_type);
+ jl_compute_field_offsets(jl_methcache_type);
jl_compute_field_offsets(jl_method_instance_type);
jl_compute_field_offsets(jl_code_instance_type);
jl_compute_field_offsets(jl_unionall_type);
@@ -3966,9 +3953,9 @@ void post_boot_hooks(void)
jl_trimfailure_type = (jl_datatype_t*)core("TrimFailure");
jl_pair_type = core("Pair");
- jl_kwcall_func = core("kwcall");
- jl_kwcall_mt = ((jl_datatype_t*)jl_typeof(jl_kwcall_func))->name->mt;
- jl_atomic_store_relaxed(&jl_kwcall_mt->max_args, 0);
+ jl_value_t *kwcall_func = core("kwcall");
+ jl_kwcall_type = (jl_datatype_t*)jl_typeof(kwcall_func);
+ jl_atomic_store_relaxed(&jl_kwcall_type->name->max_args, 0);
jl_weakref_type = (jl_datatype_t*)core("WeakRef");
jl_vecelement_typename = ((jl_datatype_t*)jl_unwrap_unionall(core("VecElement")))->name;
@@ -3976,27 +3963,6 @@ void post_boot_hooks(void)
jl_abioverride_type = (jl_datatype_t*)core("ABIOverride");
jl_init_box_caches();
-
- // set module field of primitive types
- jl_svec_t *bindings = jl_atomic_load_relaxed(&jl_core_module->bindings);
- jl_value_t **table = jl_svec_data(bindings);
- for (size_t i = 0; i < jl_svec_len(bindings); i++) {
- if (table[i] != jl_nothing) {
- jl_binding_t *b = (jl_binding_t*)table[i];
- jl_value_t *v = jl_get_binding_value(b);
- if (v) {
- if (jl_is_unionall(v))
- v = jl_unwrap_unionall(v);
- if (jl_is_datatype(v)) {
- jl_datatype_t *tt = (jl_datatype_t*)v;
- tt->name->module = jl_core_module;
- if (tt->name->mt)
- tt->name->mt->module = jl_core_module;
- }
- }
- }
- }
-
export_jl_small_typeof();
}
diff --git a/src/julia.h b/src/julia.h
index c7b19fb8b4530..70eb654d4b1f3 100644
--- a/src/julia.h
+++ b/src/julia.h
@@ -197,6 +197,7 @@ typedef struct _jl_datatype_t jl_tupletype_t;
struct _jl_code_instance_t;
typedef struct _jl_method_instance_t jl_method_instance_t;
typedef struct _jl_globalref_t jl_globalref_t;
+typedef struct _jl_typemap_entry_t jl_typemap_entry_t;
// TypeMap is an implicitly defined type
@@ -509,6 +510,7 @@ typedef struct {
JL_DATA_TYPE
jl_sym_t *name;
struct _jl_module_t *module;
+ jl_sym_t *singletonname; // sometimes used for debug printing
jl_svec_t *names; // field names
const uint32_t *atomicfields; // if any fields are atomic, we record them here
const uint32_t *constfields; // if any fields are const, we record them here
@@ -518,15 +520,16 @@ typedef struct {
_Atomic(jl_value_t*) Typeofwrapper; // cache for Type{wrapper}
_Atomic(jl_svec_t*) cache; // sorted array
_Atomic(jl_svec_t*) linearcache; // unsorted array
- struct _jl_methtable_t *mt;
+ jl_array_t *backedges; // uncovered (sig => caller::CodeInstance) pairs with this type as the function
jl_array_t *partial; // incomplete instantiations of this type
intptr_t hash;
+ _Atomic(int32_t) max_args; // max # of non-vararg arguments in a signature with this type as the function
int32_t n_uninitialized;
// type properties
uint8_t abstract:1;
uint8_t mutabl:1;
uint8_t mayinlinealloc:1;
- uint8_t _reserved:5;
+ uint8_t _unused:5;
_Atomic(uint8_t) cache_entry_count; // (approximate counter of TypeMapEntry for heuristics)
uint8_t max_methods; // override for inference's max_methods setting (0 = no additional limit or relaxation)
uint8_t constprop_heustic; // override for inference's constprop heuristic
@@ -828,7 +831,7 @@ struct _jl_globalref_t {
};
// one Type-to-Value entry
-typedef struct _jl_typemap_entry_t {
+struct _jl_typemap_entry_t {
JL_DATA_TYPE
_Atomic(struct _jl_typemap_entry_t*) next; // invasive linked list
jl_tupletype_t *sig; // the type signature for this entry
@@ -845,7 +848,7 @@ typedef struct _jl_typemap_entry_t {
int8_t isleafsig; // isleaftype(sig) & !any(isType, sig) : unsorted and very fast
int8_t issimplesig; // all(isleaftype | isAny | isType | isVararg, sig) : sorted and fast
int8_t va; // isVararg(sig)
-} jl_typemap_entry_t;
+};
// one level in a TypeMap tree (each level splits on a type at a given offset)
typedef struct _jl_typemap_level_t {
@@ -865,19 +868,20 @@ typedef struct _jl_typemap_level_t {
_Atomic(jl_typemap_t*) any;
} jl_typemap_level_t;
-// contains the TypeMap for one Type
-typedef struct _jl_methtable_t {
+typedef struct _jl_methcache_t {
JL_DATA_TYPE
- jl_sym_t *name; // sometimes used for debug printing
- _Atomic(jl_typemap_t*) defs;
_Atomic(jl_genericmemory_t*) leafcache;
_Atomic(jl_typemap_t*) cache;
- _Atomic(intptr_t) max_args; // max # of non-vararg arguments in a signature
- jl_module_t *module; // sometimes used for debug printing
- jl_array_t *backedges; // (sig, caller::CodeInstance) pairs
jl_mutex_t writelock;
- uint8_t offs; // 0, or 1 to skip splitting typemap on first (function) argument
- uint8_t frozen; // whether this accepts adding new methods
+} jl_methcache_t;
+
+// the global MethodTable: the typemap of method definitions (defs) plus its dispatch MethodCache
+typedef struct _jl_methtable_t {
+ JL_DATA_TYPE
+ _Atomic(jl_typemap_t*) defs;
+ jl_methcache_t *cache;
+ jl_sym_t *name; // sometimes used for debug printing
+ jl_module_t *module; // sometimes used for debug printing
} jl_methtable_t;
typedef struct {
@@ -1100,16 +1104,17 @@ extern JL_DLLIMPORT jl_datatype_t *jl_upsilonnode_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_quotenode_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_newvarnode_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_intrinsic_type JL_GLOBALLY_ROOTED;
+extern JL_DLLIMPORT jl_datatype_t *jl_methcache_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_methtable_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_typemap_level_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_datatype_t *jl_typemap_entry_type JL_GLOBALLY_ROOTED;
+extern JL_DLLIMPORT jl_datatype_t *jl_kwcall_type JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_svec_t *jl_emptysvec JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_value_t *jl_emptytuple JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_value_t *jl_true JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_value_t *jl_false JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_value_t *jl_nothing JL_GLOBALLY_ROOTED;
-extern JL_DLLIMPORT jl_value_t *jl_kwcall_func JL_GLOBALLY_ROOTED;
extern JL_DLLIMPORT jl_value_t *jl_libdl_dlopen_func JL_GLOBALLY_ROOTED;
@@ -1452,9 +1457,7 @@ STATIC_INLINE void jl_array_uint32_set(void *a, size_t i, uint32_t x) JL_NOTSAFE
#define jl_string_data(s) ((char*)s + sizeof(void*))
#define jl_string_len(s) (*(size_t*)s)
-#define jl_gf_ft_mtable(ft) (((jl_datatype_t*)ft)->name->mt)
-#define jl_gf_mtable(f) (jl_gf_ft_mtable(jl_typeof(f)))
-#define jl_gf_name(f) (jl_gf_mtable(f)->name)
+#define jl_gf_name(f) (((jl_datatype_t*)jl_typeof(f))->name->singletonname)
// struct type info
JL_DLLEXPORT jl_svec_t *jl_compute_fieldtypes(jl_datatype_t *st JL_PROPAGATES_ROOT, void *stack, int cacheable);
@@ -1649,6 +1652,7 @@ static inline int jl_field_isconst(jl_datatype_t *st, int i) JL_NOTSAFEPOINT
#define jl_is_method(v) jl_typetagis(v,jl_method_type)
#define jl_is_module(v) jl_typetagis(v,jl_module_tag<<4)
#define jl_is_mtable(v) jl_typetagis(v,jl_methtable_type)
+#define jl_is_mcache(v) jl_typetagis(v,jl_methcache_type)
#define jl_is_task(v) jl_typetagis(v,jl_task_tag<<4)
#define jl_is_string(v) jl_typetagis(v,jl_string_tag<<4)
#define jl_is_cpointer(v) jl_is_cpointer_type(jl_typeof(v))
diff --git a/src/julia_internal.h b/src/julia_internal.h
index 133a347bf82dc..b94011e17a3e3 100644
--- a/src/julia_internal.h
+++ b/src/julia_internal.h
@@ -388,9 +388,7 @@ static inline void memassign_safe(int hasptr, char *dst, const jl_value_t *src,
#define GC_IN_IMAGE 4
// useful constants
-extern JL_DLLIMPORT jl_methtable_t *jl_type_type_mt JL_GLOBALLY_ROOTED;
-extern JL_DLLIMPORT jl_methtable_t *jl_nonfunction_mt JL_GLOBALLY_ROOTED;
-extern jl_methtable_t *jl_kwcall_mt JL_GLOBALLY_ROOTED;
+extern jl_methtable_t *jl_method_table JL_GLOBALLY_ROOTED;
extern JL_DLLEXPORT jl_method_t *jl_opaque_closure_method JL_GLOBALLY_ROOTED;
extern JL_DLLEXPORT _Atomic(size_t) jl_world_counter;
extern jl_debuginfo_t *jl_nulldebuginfo JL_GLOBALLY_ROOTED;
@@ -806,9 +804,9 @@ int jl_has_concrete_subtype(jl_value_t *typ);
jl_tupletype_t *jl_inst_arg_tuple_type(jl_value_t *arg1, jl_value_t **args, size_t nargs, int leaf);
jl_tupletype_t *jl_lookup_arg_tuple_type(jl_value_t *arg1 JL_PROPAGATES_ROOT, jl_value_t **args, size_t nargs, int leaf);
JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method, jl_tupletype_t *simpletype);
-void jl_method_table_activate(jl_methtable_t *mt, jl_typemap_entry_t *newentry);
+void jl_method_table_activate(jl_typemap_entry_t *newentry);
jl_typemap_entry_t *jl_method_table_add(jl_methtable_t *mt, jl_method_t *method, jl_tupletype_t *simpletype);
-void jl_mk_builtin_func(jl_datatype_t *dt, jl_sym_t *name, jl_fptr_args_t fptr) JL_GC_DISABLED;
+jl_method_t *jl_mk_builtin_func(jl_datatype_t *dt, jl_sym_t *name, jl_fptr_args_t fptr) JL_GC_DISABLED;
int jl_obviously_unequal(jl_value_t *a, jl_value_t *b);
int jl_has_bound_typevars(jl_value_t *v, jl_typeenv_t *env) JL_NOTSAFEPOINT;
JL_DLLEXPORT jl_array_t *jl_find_free_typevars(jl_value_t *v);
@@ -858,7 +856,7 @@ int setonce_bits(jl_datatype_t *rty, char *p, jl_value_t *owner, jl_value_t *rhs
jl_expr_t *jl_exprn(jl_sym_t *head, size_t n);
jl_function_t *jl_new_generic_function(jl_sym_t *name, jl_module_t *module, size_t new_world);
jl_function_t *jl_new_generic_function_with_supertype(jl_sym_t *name, jl_module_t *module, jl_datatype_t *st, size_t new_world);
-int jl_foreach_reachable_mtable(int (*visit)(jl_methtable_t *mt, void *env), void *env);
+int jl_foreach_reachable_mtable(int (*visit)(jl_methtable_t *mt, void *env), jl_array_t *mod_array, void *env);
int foreach_mtable_in_module(jl_module_t *m, int (*visit)(jl_methtable_t *mt, void *env), void *env);
void jl_init_main_module(void);
JL_DLLEXPORT int jl_is_submodule(jl_module_t *child, jl_module_t *parent) JL_NOTSAFEPOINT;
@@ -928,10 +926,17 @@ jl_datatype_t *jl_nth_argument_datatype(jl_value_t *argtypes JL_PROPAGATES_ROOT,
JL_DLLEXPORT jl_value_t *jl_argument_datatype(jl_value_t *argt JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
JL_DLLEXPORT jl_methtable_t *jl_method_table_for(
jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
+JL_DLLEXPORT jl_methcache_t *jl_method_cache_for(
+ jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
jl_methtable_t *jl_kwmethod_table_for(
jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
+jl_methcache_t *jl_kwmethod_cache_for(
+ jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
JL_DLLEXPORT jl_methtable_t *jl_method_get_table(
jl_method_t *method JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
+JL_DLLEXPORT jl_methcache_t *jl_method_get_cache(
+ jl_method_t *method JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT;
+void jl_foreach_top_typename_for(void (*f)(jl_typename_t*, void*), jl_value_t *argtypes JL_PROPAGATES_ROOT, void *env);
JL_DLLEXPORT int jl_pointer_egal(jl_value_t *t);
JL_DLLEXPORT jl_value_t *jl_nth_slot_type(jl_value_t *sig JL_PROPAGATES_ROOT, size_t i) JL_NOTSAFEPOINT;
@@ -1287,17 +1292,18 @@ JL_DLLEXPORT jl_method_t *jl_new_method_uninit(jl_module_t*);
jl_module_t *jl_new_module_(jl_sym_t *name, jl_module_t *parent, uint8_t default_using_core, uint8_t self_name);
jl_module_t *jl_add_standard_imports(jl_module_t *m);
JL_DLLEXPORT jl_methtable_t *jl_new_method_table(jl_sym_t *name, jl_module_t *module);
+JL_DLLEXPORT jl_methcache_t *jl_new_method_cache(void);
JL_DLLEXPORT jl_method_instance_t *jl_get_specialization1(jl_tupletype_t *types JL_PROPAGATES_ROOT, size_t world, int mt_cache);
jl_method_instance_t *jl_get_specialized(jl_method_t *m, jl_value_t *types, jl_svec_t *sp) JL_PROPAGATES_ROOT;
JL_DLLEXPORT jl_value_t *jl_rettype_inferred(jl_value_t *owner, jl_method_instance_t *li JL_PROPAGATES_ROOT, size_t min_world, size_t max_world);
JL_DLLEXPORT jl_value_t *jl_rettype_inferred_native(jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT;
JL_DLLEXPORT jl_code_instance_t *jl_method_compiled(jl_method_instance_t *mi JL_PROPAGATES_ROOT, size_t world) JL_NOTSAFEPOINT;
-JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_methtable_t *mt JL_PROPAGATES_ROOT, jl_value_t *type, size_t world);
+JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_value_t *type, size_t world) JL_GLOBALLY_ROOTED;
JL_DLLEXPORT jl_method_instance_t *jl_specializations_get_linfo(
jl_method_t *m JL_PROPAGATES_ROOT, jl_value_t *type, jl_svec_t *sparams);
jl_method_instance_t *jl_specializations_get_or_insert(jl_method_instance_t *mi_ins JL_PROPAGATES_ROOT);
JL_DLLEXPORT void jl_method_instance_add_backedge(jl_method_instance_t *callee, jl_value_t *invokesig, jl_code_instance_t *caller);
-JL_DLLEXPORT void jl_method_table_add_backedge(jl_methtable_t *mt, jl_value_t *typ, jl_code_instance_t *caller);
+JL_DLLEXPORT void jl_method_table_add_backedge(jl_value_t *typ, jl_code_instance_t *caller);
JL_DLLEXPORT void jl_mi_cache_insert(jl_method_instance_t *mi JL_ROOTING_ARGUMENT,
jl_code_instance_t *ci JL_ROOTED_ARGUMENT JL_MAYBE_UNROOTED);
JL_DLLEXPORT int jl_mi_try_insert(jl_method_instance_t *mi JL_ROOTING_ARGUMENT,
diff --git a/src/method.c b/src/method.c
index f71f09ceb83bc..77863b27e24b6 100644
--- a/src/method.c
+++ b/src/method.c
@@ -16,7 +16,6 @@
extern "C" {
#endif
-jl_methtable_t *jl_kwcall_mt;
jl_method_t *jl_opaque_closure_method;
static void check_c_types(const char *where, jl_value_t *rt, jl_value_t *at)
@@ -803,7 +802,8 @@ JL_DLLEXPORT jl_code_info_t *jl_code_for_staged(jl_method_instance_t *mi, size_t
else if (jl_is_mtable(kind)) {
assert(i < l);
ex = data[i++];
- jl_method_table_add_backedge((jl_methtable_t*)kind, ex, ci);
+ if ((jl_methtable_t*)kind == jl_method_table)
+ jl_method_table_add_backedge(ex, ci);
}
else {
assert(i < l);
@@ -1154,54 +1154,71 @@ JL_DLLEXPORT jl_value_t *jl_declare_const_gf(jl_module_t *mod, jl_sym_t *name)
return gf;
}
-static jl_methtable_t *nth_methtable(jl_value_t *a JL_PROPAGATES_ROOT, int n) JL_NOTSAFEPOINT
+static void foreach_top_nth_typename(void (*f)(jl_typename_t*, void*), jl_value_t *a JL_PROPAGATES_ROOT, int n, void *env)
{
if (jl_is_datatype(a)) {
if (n == 0) {
- jl_methtable_t *mt = ((jl_datatype_t*)a)->name->mt;
- if (mt != NULL)
- return mt;
+ jl_datatype_t *dt = ((jl_datatype_t*)a);
+ jl_typename_t *tn = NULL;
+ while (1) {
+ if (dt != jl_any_type && dt != jl_function_type)
+ tn = dt->name;
+ if (dt->super == dt)
+ break;
+ dt = dt->super;
+ }
+ if (tn)
+ f(tn, env);
}
else if (jl_is_tuple_type(a)) {
if (jl_nparams(a) >= n)
- return nth_methtable(jl_tparam(a, n - 1), 0);
+ foreach_top_nth_typename(f, jl_tparam(a, n - 1), 0, env);
}
}
else if (jl_is_typevar(a)) {
- return nth_methtable(((jl_tvar_t*)a)->ub, n);
+ foreach_top_nth_typename(f, ((jl_tvar_t*)a)->ub, n, env);
}
else if (jl_is_unionall(a)) {
- return nth_methtable(((jl_unionall_t*)a)->body, n);
+ foreach_top_nth_typename(f, ((jl_unionall_t*)a)->body, n, env);
}
else if (jl_is_uniontype(a)) {
jl_uniontype_t *u = (jl_uniontype_t*)a;
- jl_methtable_t *m1 = nth_methtable(u->a, n);
- if ((jl_value_t*)m1 != jl_nothing) {
- jl_methtable_t *m2 = nth_methtable(u->b, n);
- if (m1 == m2)
- return m1;
- }
+ foreach_top_nth_typename(f, u->a, n, env);
+ foreach_top_nth_typename(f, u->b, n, env);
}
- return (jl_methtable_t*)jl_nothing;
}
// get the MethodTable for dispatch, or `nothing` if cannot be determined
JL_DLLEXPORT jl_methtable_t *jl_method_table_for(jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
{
- return nth_methtable(argtypes, 1);
+ return jl_method_table;
+}
+
+// get a MethodCache for dispatch
+JL_DLLEXPORT jl_methcache_t *jl_method_cache_for(jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
+{
+ return jl_method_table->cache;
+}
+
+void jl_foreach_top_typename_for(void (*f)(jl_typename_t*, void*), jl_value_t *argtypes JL_PROPAGATES_ROOT, void *env)
+{
+ foreach_top_nth_typename(f, argtypes, 1, env);
}
-jl_methtable_t *jl_kwmethod_table_for(jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
+jl_methcache_t *jl_kwmethod_cache_for(jl_value_t *argtypes JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
{
- jl_methtable_t *kwmt = nth_methtable(argtypes, 3);
- if ((jl_value_t*)kwmt == jl_nothing)
- return NULL;
- return kwmt;
+ return jl_method_table->cache;
}
JL_DLLEXPORT jl_methtable_t *jl_method_get_table(jl_method_t *method JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
{
- return method->external_mt ? (jl_methtable_t*)method->external_mt : jl_method_table_for(method->sig);
+ return method->external_mt ? (jl_methtable_t*)method->external_mt : jl_method_table;
+}
+
+// get an arbitrary MethodCache for dispatch optimizations of method
+JL_DLLEXPORT jl_methcache_t *jl_method_get_cache(jl_method_t *method JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
+{
+ return jl_method_get_table(method)->cache;
}
JL_DLLEXPORT jl_method_t* jl_method_def(jl_svec_t *argdata,
@@ -1218,25 +1235,29 @@ JL_DLLEXPORT jl_method_t* jl_method_def(jl_svec_t *argdata,
size_t nargs = jl_svec_len(atypes);
assert(nargs > 0);
int isva = jl_is_vararg(jl_svecref(atypes, nargs - 1));
- if (!jl_is_type(jl_svecref(atypes, 0)) || (isva && nargs == 1))
+ jl_value_t *ft = jl_svecref(atypes, 0);
+ if (!jl_is_type(ft) || (isva && nargs == 1))
jl_error("function type in method definition is not a type");
jl_sym_t *name;
jl_method_t *m = NULL;
jl_value_t *argtype = NULL;
- JL_GC_PUSH3(&f, &m, &argtype);
+ JL_GC_PUSH4(&ft, &f, &m, &argtype);
size_t i, na = jl_svec_len(atypes);
argtype = jl_apply_tuple_type(atypes, 1);
if (!jl_is_datatype(argtype))
jl_error("invalid type in method definition (Union{})");
- jl_methtable_t *external_mt = mt;
if (!mt)
- mt = jl_method_table_for(argtype);
- if ((jl_value_t*)mt == jl_nothing)
- jl_error("Method dispatch is unimplemented currently for this method signature");
- if (mt->frozen)
- jl_error("cannot add methods to a builtin function");
+ mt = jl_method_table;
+ jl_methtable_t *external_mt = mt == jl_method_table ? NULL : mt;
+
+ //if (!external_mt) {
+ // jl_value_t **ttypes = { jl_builtin_type, jl_tparam0(jl_anytuple_type) };
+ // jl_value_t *invalidt = jl_apply_tuple_type_v(ttypes, 2); // Tuple{Union{Builtin,OpaqueClosure}, Vararg}
+ // if (!jl_has_empty_intersection(argtype, invalidt))
+ // jl_error("cannot add methods to a builtin function");
+ //}
assert(jl_is_linenode(functionloc));
jl_sym_t *file = (jl_sym_t*)jl_linenode_file(functionloc);
@@ -1245,21 +1266,13 @@ JL_DLLEXPORT jl_method_t* jl_method_def(jl_svec_t *argdata,
int32_t line = jl_linenode_line(functionloc);
// TODO: derive our debug name from the syntax instead of the type
- jl_methtable_t *kwmt = mt == jl_kwcall_mt ? jl_kwmethod_table_for(argtype) : mt;
// if we have a kwcall, try to derive the name from the callee argument method table
- name = (kwmt ? kwmt : mt)->name;
- if (kwmt == jl_type_type_mt || kwmt == jl_nonfunction_mt || external_mt) {
- // our value for `name` is bad, try to guess what the syntax might have had,
- // like `jl_static_show_func_sig` might have come up with
- jl_datatype_t *dt = jl_nth_argument_datatype(argtype, mt == jl_kwcall_mt ? 3 : 1);
- if (dt != NULL) {
- name = dt->name->name;
- if (jl_is_type_type((jl_value_t*)dt)) {
- dt = (jl_datatype_t*)jl_argument_datatype(jl_tparam0(dt));
- if ((jl_value_t*)dt != jl_nothing) {
- name = dt->name->name;
- }
- }
+ jl_datatype_t *dtname = (jl_datatype_t*)jl_argument_datatype(jl_kwcall_type && ft == (jl_value_t*)jl_kwcall_type && nargs >= 3 ? jl_svecref(atypes, 2) : ft);
+ name = (jl_value_t*)dtname != jl_nothing ? dtname->name->singletonname : jl_any_type->name->singletonname;
+ if (jl_is_type_type((jl_value_t*)dtname)) {
+ dtname = (jl_datatype_t*)jl_argument_datatype(jl_tparam0(dtname));
+ if ((jl_value_t*)dtname != jl_nothing) {
+ name = dtname->name->singletonname;
}
}
@@ -1320,6 +1333,9 @@ JL_DLLEXPORT jl_method_t* jl_method_def(jl_svec_t *argdata,
jl_symbol_name(file),
line);
}
+ ft = jl_rewrap_unionall(ft, argtype);
+ if (!external_mt && !jl_has_empty_intersection(ft, (jl_value_t*)jl_builtin_type)) // disallow adding methods to Any, Function, Builtin, and subtypes, or Unions of those
+ jl_error("cannot add methods to a builtin function");
m = jl_new_method_uninit(module);
m->external_mt = (jl_value_t*)external_mt;
diff --git a/src/module.c b/src/module.c
index 272748edbadb2..9c1a93d08373a 100644
--- a/src/module.c
+++ b/src/module.c
@@ -1157,14 +1157,14 @@ static void jl_binding_dep_message(jl_binding_t *b)
jl_printf(JL_STDERR, " instead.");
}
else {
- jl_methtable_t *mt = jl_gf_mtable(v);
- if (mt != NULL) {
+ jl_typename_t *tn = ((jl_datatype_t*)jl_typeof(v))->name;
+ if (tn != NULL) {
jl_printf(JL_STDERR, ", use ");
- if (mt->module != jl_core_module) {
- jl_static_show(JL_STDERR, (jl_value_t*)mt->module);
+ if (tn->module != jl_core_module) {
+ jl_static_show(JL_STDERR, (jl_value_t*)tn->module);
jl_printf(JL_STDERR, ".");
}
- jl_printf(JL_STDERR, "%s", jl_symbol_name(mt->name));
+ jl_printf(JL_STDERR, "%s", jl_symbol_name(tn->singletonname));
jl_printf(JL_STDERR, " instead.");
}
}
diff --git a/src/precompile_utils.c b/src/precompile_utils.c
index 295f91ad31e67..86bb723443925 100644
--- a/src/precompile_utils.c
+++ b/src/precompile_utils.c
@@ -159,12 +159,12 @@ static int compile_all_collect_(jl_methtable_t *mt, void *env)
return 1;
}
-static void jl_compile_all_defs(jl_array_t *mis, int all)
+static void jl_compile_all_defs(jl_array_t *mis, int all, jl_array_t *mod_array)
{
jl_array_t *allmeths = jl_alloc_vec_any(0);
JL_GC_PUSH1(&allmeths);
- jl_foreach_reachable_mtable(compile_all_collect_, allmeths);
+ jl_foreach_reachable_mtable(compile_all_collect_, mod_array, allmeths);
size_t world = jl_atomic_load_acquire(&jl_world_counter);
size_t i, l = jl_array_nrows(allmeths);
@@ -224,38 +224,53 @@ static int precompile_enq_specialization_(jl_method_instance_t *mi, void *closur
return 1;
}
-static int precompile_enq_all_specializations__(jl_typemap_entry_t *def, void *closure)
+struct precompile_enq_all_specializations_env {
+ jl_array_t *worklist;
+ jl_array_t *m;
+};
+
+static int precompile_enq_all_specializations__(jl_typemap_entry_t *def, void *env)
{
jl_method_t *m = def->func.method;
- if (m->external_mt)
- return 1;
+ assert(!m->external_mt);
+ struct precompile_enq_all_specializations_env *closure = (struct precompile_enq_all_specializations_env*)env;
+ if (closure->worklist) {
+ size_t i, l = jl_array_nrows(closure->worklist);
+ for (i = 0; i < l; i++) {
+ if (m->module == (jl_module_t*)jl_array_ptr_ref(closure->worklist, i))
+ break;
+ }
+ if (i == l)
+ return 1;
+ }
if ((m->name == jl_symbol("__init__") || m->ccallable) && jl_is_dispatch_tupletype(m->sig)) {
// ensure `__init__()` and @ccallables get strongly-hinted, specialized, and compiled
jl_method_instance_t *mi = jl_specializations_get_linfo(m, m->sig, jl_emptysvec);
- jl_array_ptr_1d_push((jl_array_t*)closure, (jl_value_t*)mi);
+ jl_array_ptr_1d_push(closure->m, (jl_value_t*)mi);
}
else {
jl_value_t *specializations = jl_atomic_load_relaxed(&def->func.method->specializations);
if (!jl_is_svec(specializations)) {
- precompile_enq_specialization_((jl_method_instance_t*)specializations, closure);
+ precompile_enq_specialization_((jl_method_instance_t*)specializations, closure->m);
}
else {
size_t i, l = jl_svec_len(specializations);
for (i = 0; i < l; i++) {
jl_value_t *mi = jl_svecref(specializations, i);
if (mi != jl_nothing)
- precompile_enq_specialization_((jl_method_instance_t*)mi, closure);
+ precompile_enq_specialization_((jl_method_instance_t*)mi, closure->m);
}
}
}
if (m->ccallable)
- jl_array_ptr_1d_push((jl_array_t*)closure, (jl_value_t*)m->ccallable);
+ jl_array_ptr_1d_push(closure->m, (jl_value_t*)m->ccallable);
return 1;
}
-static int precompile_enq_all_specializations_(jl_methtable_t *mt, void *env)
+static int precompile_enq_all_specializations_(jl_array_t *worklist, jl_array_t *env)
{
- return jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), precompile_enq_all_specializations__, env);
+ struct precompile_enq_all_specializations_env closure = {worklist, env};
+ return jl_typemap_visitor(jl_atomic_load_relaxed(&jl_method_table->defs), precompile_enq_all_specializations__, &closure);
}
static void *jl_precompile_(jl_array_t *m, int external_linkage)
@@ -284,13 +299,13 @@ static void *jl_precompile_(jl_array_t *m, int external_linkage)
return native_code;
}
-static void *jl_precompile(int all)
+static void *jl_precompile(int all, jl_array_t *mod_array)
{
// array of MethodInstances and ccallable aliases to include in the output
jl_array_t *m = jl_alloc_vec_any(0);
JL_GC_PUSH1(&m);
- jl_compile_all_defs(m, all);
- jl_foreach_reachable_mtable(precompile_enq_all_specializations_, m);
+ jl_compile_all_defs(m, all, mod_array);
+ precompile_enq_all_specializations_(NULL, m);
void *native_code = jl_precompile_(m, 0);
JL_GC_POP();
return native_code;
@@ -311,13 +326,8 @@ static void *jl_precompile_worklist(jl_array_t *worklist, jl_array_t *extext_met
jl_array_t *m = jl_alloc_vec_any(0);
JL_GC_PUSH1(&m);
if (!suppress_precompile) {
- size_t i, n = jl_array_nrows(worklist);
- for (i = 0; i < n; i++) {
- jl_module_t *mod = (jl_module_t*)jl_array_ptr_ref(worklist, i);
- assert(jl_is_module(mod));
- foreach_mtable_in_module(mod, precompile_enq_all_specializations_, m);
- }
- n = jl_array_nrows(extext_methods);
+ precompile_enq_all_specializations_(worklist, m);
+ size_t i, n = jl_array_nrows(extext_methods);
for (i = 0; i < n; i++) {
jl_method_t *method = (jl_method_t*)jl_array_ptr_ref(extext_methods, i);
assert(jl_is_method(method));
@@ -350,21 +360,15 @@ static void *jl_precompile_worklist(jl_array_t *worklist, jl_array_t *extext_met
static int enq_ccallable_entrypoints_(jl_typemap_entry_t *def, void *closure)
{
jl_method_t *m = def->func.method;
- if (m->external_mt)
- return 1;
+ assert(!m->external_mt);
if (m->ccallable)
jl_add_entrypoint((jl_tupletype_t*)jl_svecref(m->ccallable, 1));
return 1;
}
-static int enq_ccallable_entrypoints(jl_methtable_t *mt, void *env)
-{
- return jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), enq_ccallable_entrypoints_, env);
-}
-
JL_DLLEXPORT void jl_add_ccallable_entrypoints(void)
{
- jl_foreach_reachable_mtable(enq_ccallable_entrypoints, NULL);
+ jl_typemap_visitor(jl_atomic_load_relaxed(&jl_method_table->defs), enq_ccallable_entrypoints_, NULL);
}
static void *jl_precompile_trimmed(size_t world)
@@ -402,35 +406,39 @@ static void *jl_precompile_trimmed(size_t world)
return native_code;
}
-static void jl_rebuild_methtables(arraylist_t* MIs, htable_t* mtables)
+static void jl_rebuild_methtables(arraylist_t *MIs, htable_t *mtables) JL_GC_DISABLED
{
- size_t i;
- for (i = 0; i < MIs->len; i++) {
+ // Rebuild MethodTable to contain only those methods for which we compiled code.
+ // This can have significant soundness problems if there previously existed
+ // any ambiguous methods, but it would probably be pretty hard to do this
+ // fully correctly (with the necessary inserted guard entries).
+ htable_t ms;
+ htable_new(&ms, 0);
+ for (size_t i = 0; i < MIs->len; i++) {
jl_method_instance_t *mi = (jl_method_instance_t*)MIs->items[i];
jl_method_t *m = mi->def.method;
+        // Skip methods we have already processed (dedup via the `ms` hashtable), otherwise record and insert
+ void **inserted = ptrhash_bp(&ms, m);
+ if (*inserted != HT_NOTFOUND)
+ continue;
+ *inserted = (void*)m;
jl_methtable_t *old_mt = jl_method_get_table(m);
if ((jl_value_t *)old_mt == jl_nothing)
continue;
- jl_sym_t *name = old_mt->name;
if (!ptrhash_has(mtables, old_mt))
- ptrhash_put(mtables, old_mt, jl_new_method_table(name, m->module));
+ ptrhash_put(mtables, old_mt, jl_new_method_table(old_mt->name, old_mt->module));
jl_methtable_t *mt = (jl_methtable_t*)ptrhash_get(mtables, old_mt);
- size_t world = jl_atomic_load_acquire(&jl_world_counter);
- jl_value_t *lookup = jl_methtable_lookup(mt, m->sig, world);
- // Check if the method is already in the new table, if not then insert it there
- if (lookup == jl_nothing || (jl_method_t*)lookup != m) {
- //TODO: should this be a function like unsafe_insert_method?
- size_t min_world = jl_atomic_load_relaxed(&m->primary_world);
- size_t max_world = ~(size_t)0;
- assert(min_world == jl_atomic_load_relaxed(&m->primary_world));
- int dispatch_status = jl_atomic_load_relaxed(&m->dispatch_status);
- jl_atomic_store_relaxed(&m->primary_world, ~(size_t)0);
- jl_atomic_store_relaxed(&m->dispatch_status, 0);
- jl_typemap_entry_t *newentry = jl_method_table_add(mt, m, NULL);
- jl_atomic_store_relaxed(&m->primary_world, min_world);
- jl_atomic_store_relaxed(&m->dispatch_status, dispatch_status);
- jl_atomic_store_relaxed(&newentry->min_world, min_world);
- jl_atomic_store_relaxed(&newentry->max_world, max_world); // short-circuit jl_method_table_insert
- }
+ //TODO: should this be a function like unsafe_insert_method, since all that is wanted is the jl_typemap_insert on a copy of the existing entry
+ size_t min_world = jl_atomic_load_relaxed(&m->primary_world);
+ size_t max_world = ~(size_t)0;
+ int dispatch_status = jl_atomic_load_relaxed(&m->dispatch_status);
+ jl_atomic_store_relaxed(&m->primary_world, ~(size_t)0);
+ jl_atomic_store_relaxed(&m->dispatch_status, 0);
+ jl_typemap_entry_t *newentry = jl_method_table_add(mt, m, NULL);
+ jl_atomic_store_relaxed(&m->primary_world, min_world);
+ jl_atomic_store_relaxed(&m->dispatch_status, dispatch_status);
+ jl_atomic_store_relaxed(&newentry->min_world, min_world);
+ jl_atomic_store_relaxed(&newentry->max_world, max_world); // short-circuit jl_method_table_insert
}
+ htable_free(&ms);
}
diff --git a/src/rtutils.c b/src/rtutils.c
index 4baf0ee5e6e9c..2f3bd4e8a0074 100644
--- a/src/rtutils.c
+++ b/src/rtutils.c
@@ -671,7 +671,7 @@ JL_DLLEXPORT jl_value_t *jl_argument_datatype(jl_value_t *argt JL_PROPAGATES_ROO
static int is_globname_binding(jl_value_t *v, jl_datatype_t *dv) JL_NOTSAFEPOINT
{
- jl_sym_t *globname = dv->name->mt != NULL ? dv->name->mt->name : NULL;
+ jl_sym_t *globname = dv->name->singletonname;
if (globname && dv->name->module) {
jl_binding_t *b = jl_get_module_binding(dv->name->module, globname, 0);
jl_value_t *bv = jl_get_latest_binding_value_if_resolved_and_const_debug_only(b);
@@ -683,7 +683,7 @@ static int is_globname_binding(jl_value_t *v, jl_datatype_t *dv) JL_NOTSAFEPOINT
static int is_globfunction(jl_value_t *v, jl_datatype_t *dv, jl_sym_t **globname_out) JL_NOTSAFEPOINT
{
- jl_sym_t *globname = dv->name->mt != NULL ? dv->name->mt->name : NULL;
+ jl_sym_t *globname = dv->name->singletonname;
*globname_out = globname;
if (globname && !strchr(jl_symbol_name(globname), '#') && !strchr(jl_symbol_name(globname), '@')) {
return 1;
@@ -814,6 +814,9 @@ static size_t jl_static_show_x_(JL_STREAM *out, jl_value_t *v, jl_datatype_t *vt
else if (v == (jl_value_t*)jl_methtable_type) {
n += jl_printf(out, "Core.MethodTable");
}
+ else if (v == (jl_value_t*)jl_methcache_type) {
+ n += jl_printf(out, "Core.MethodCache");
+ }
else if (v == (jl_value_t*)jl_any_type) {
n += jl_printf(out, "Any");
}
@@ -997,6 +1000,9 @@ static size_t jl_static_show_x_(JL_STREAM *out, jl_value_t *v, jl_datatype_t *vt
else if (v == jl_nothing || (jl_nothing && (jl_value_t*)vt == jl_typeof(jl_nothing))) {
n += jl_printf(out, "nothing");
}
+ else if (v == (jl_value_t*)jl_method_table) {
+ n += jl_printf(out, "Core.GlobalMethods");
+ }
else if (vt == jl_string_type) {
n += jl_static_show_string(out, jl_string_data(v), jl_string_len(v), 1);
}
@@ -1426,10 +1432,8 @@ size_t jl_static_show_func_sig_(JL_STREAM *s, jl_value_t *type, jl_static_show_c
return n;
}
if ((jl_nparams(ftype) == 0 || ftype == ((jl_datatype_t*)ftype)->name->wrapper) &&
- ((jl_datatype_t*)ftype)->name->mt &&
- ((jl_datatype_t*)ftype)->name->mt != jl_type_type_mt &&
- ((jl_datatype_t*)ftype)->name->mt != jl_nonfunction_mt) {
- n += jl_static_show_symbol(s, ((jl_datatype_t*)ftype)->name->mt->name);
+ !jl_is_type_type(ftype) && !jl_is_type_type((jl_value_t*)((jl_datatype_t*)ftype)->super)) { // aka !iskind
+ n += jl_static_show_symbol(s, ((jl_datatype_t*)ftype)->name->singletonname);
}
else {
n += jl_printf(s, "(::");
diff --git a/src/staticdata.c b/src/staticdata.c
index 5465cb1da0218..eb503fe0ffa78 100644
--- a/src/staticdata.c
+++ b/src/staticdata.c
@@ -116,7 +116,7 @@ extern "C" {
// TODO: put WeakRefs on the weak_refs list during deserialization
// TODO: handle finalizers
-#define NUM_TAGS 152
+#define NUM_TAGS 151
// An array of references that need to be restored from the sysimg
// This is a manually constructed dual of the gvars array, which would be produced by codegen for Julia code, for C.
@@ -182,6 +182,7 @@ static void get_tags(jl_value_t **tags[NUM_TAGS])
INSERT_TAG(jl_array_any_type);
INSERT_TAG(jl_intrinsic_type);
INSERT_TAG(jl_methtable_type);
+ INSERT_TAG(jl_methcache_type);
INSERT_TAG(jl_typemap_level_type);
INSERT_TAG(jl_typemap_entry_type);
INSERT_TAG(jl_voidpointer_type);
@@ -229,6 +230,7 @@ static void get_tags(jl_value_t **tags[NUM_TAGS])
INSERT_TAG(jl_addrspacecore_type);
INSERT_TAG(jl_debuginfo_type);
INSERT_TAG(jl_abioverride_type);
+ INSERT_TAG(jl_kwcall_type);
// special typenames
INSERT_TAG(jl_tuple_typename);
@@ -277,12 +279,9 @@ static void get_tags(jl_value_t **tags[NUM_TAGS])
INSERT_TAG(jl_main_module);
INSERT_TAG(jl_top_module);
INSERT_TAG(jl_typeinf_func);
- INSERT_TAG(jl_type_type_mt);
- INSERT_TAG(jl_nonfunction_mt);
- INSERT_TAG(jl_kwcall_mt);
- INSERT_TAG(jl_kwcall_func);
INSERT_TAG(jl_opaque_closure_method);
INSERT_TAG(jl_nulldebuginfo);
+ INSERT_TAG(jl_method_table);
// n.b. must update NUM_TAGS when you add something here
#undef INSERT_TAG
assert(i == NUM_TAGS - 1);
@@ -785,12 +784,6 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
}
goto done_fields; // for now
}
- if (s->incremental && jl_is_mtable(v)) {
- jl_methtable_t *mt = (jl_methtable_t *)v;
- // Any back-edges will be re-validated and added by staticdata.jl, so
- // drop them from the image here
- record_field_change((jl_value_t**)&mt->backedges, NULL);
- }
if (jl_is_method_instance(v)) {
jl_method_instance_t *mi = (jl_method_instance_t*)v;
if (s->incremental) {
@@ -836,24 +829,6 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
}
}
}
- if (jl_is_mtable(v)) {
- jl_methtable_t *mt = (jl_methtable_t*)v;
- if (jl_options.trim || jl_options.strip_ir) {
- record_field_change((jl_value_t**)&mt->backedges, NULL);
- }
- else {
- // don't recurse into all backedges memory (yet)
- jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mt->backedges, 1);
- if (backedges) {
- jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
- for (size_t i = 0, n = jl_array_nrows(backedges); i < n; i += 2) {
- jl_value_t *t = jl_array_ptr_ref(backedges, i);
- assert(!jl_is_code_instance(t));
- jl_queue_for_serialization(s, t);
- }
- }
- }
- }
if (jl_is_binding(v)) {
jl_binding_t *b = (jl_binding_t*)v;
if (s->incremental && needs_uniquing(v, s->query_cache)) {
@@ -892,6 +867,23 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
assert(!jl_object_in_image((jl_value_t*)tn->module));
assert(!jl_object_in_image((jl_value_t*)tn->wrapper));
}
+ // Any back-edges will be re-validated and added by staticdata.jl, so
+ // drop them from the image here
+ if (s->incremental || jl_options.trim || jl_options.strip_ir) {
+ record_field_change((jl_value_t**)&tn->backedges, NULL);
+ }
+ else {
+ // don't recurse into all backedges memory (yet)
+ jl_value_t *backedges = get_replaceable_field((jl_value_t**)&tn->backedges, 1);
+ if (backedges) {
+ jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
+ for (size_t i = 0, n = jl_array_nrows(backedges); i < n; i += 2) {
+ jl_value_t *t = jl_array_ptr_ref(backedges, i);
+ assert(!jl_is_code_instance(t));
+ jl_queue_for_serialization(s, t);
+ }
+ }
+ }
}
if (jl_is_code_instance(v)) {
jl_code_instance_t *ci = (jl_code_instance_t*)v;
@@ -1015,7 +1007,7 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
}
}
}
- else if (jl_typetagis(v, jl_module_tag << 4)) {
+ else if (jl_is_module(v)) {
jl_queue_module_for_serialization(s, (jl_module_t*)v);
}
else if (layout->nfields > 0) {
@@ -1025,15 +1017,21 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
if (jl_is_svec(jl_atomic_load_relaxed(&m->specializations)))
jl_queue_for_serialization_(s, (jl_value_t*)jl_atomic_load_relaxed(&m->specializations), 0, 1);
}
- else if (jl_typetagis(v, jl_typename_type)) {
- jl_typename_t *tn = (jl_typename_t*)v;
- if (tn->mt != NULL && !tn->mt->frozen) {
- jl_methtable_t * new_methtable = (jl_methtable_t *)ptrhash_get(&new_methtables, tn->mt);
- if (new_methtable != HT_NOTFOUND)
- record_field_change((jl_value_t **)&tn->mt, (jl_value_t*)new_methtable);
- else
- record_field_change((jl_value_t **)&tn->mt, NULL);
+ else if (jl_is_mtable(v)) {
+ jl_methtable_t *mt = (jl_methtable_t*)v;
+ jl_methtable_t *newmt = (jl_methtable_t*)ptrhash_get(&new_methtables, mt);
+ if (newmt != HT_NOTFOUND)
+ record_field_change((jl_value_t **)&mt->defs, (jl_value_t*)jl_atomic_load_relaxed(&newmt->defs));
+ else
+ record_field_change((jl_value_t **)&mt->defs, jl_nothing);
+ }
+ else if (jl_is_mcache(v)) {
+ jl_methcache_t *mc = (jl_methcache_t*)v;
+ jl_value_t *cache = jl_atomic_load_relaxed(&mc->cache);
+ if (!jl_typetagis(cache, jl_typemap_entry_type) || ((jl_typemap_entry_t*)cache)->sig != jl_tuple_type) { // aka Builtins (maybe sometimes OpaqueClosure too)
+ record_field_change((jl_value_t **)&mc->cache, jl_nothing);
}
+ record_field_change((jl_value_t **)&mc->leafcache, jl_an_empty_memory_any);
}
// TODO: prune any partitions and partition data that has been deleted in the current world
//else if (jl_is_binding(v)) {
@@ -1098,7 +1096,20 @@ static void jl_queue_for_serialization_(jl_serializer_state *s, jl_value_t *v, i
if (!jl_needs_serialization(s, v))
return;
- jl_value_t *t = jl_typeof(v);
+ jl_datatype_t *t = (jl_datatype_t*)jl_typeof(v);
+    // check early for errors, so we have a little bit of contextual state for debugging them
+ if (t == jl_task_type) {
+ jl_error("Task cannot be serialized");
+ }
+ if (s->incremental && needs_uniquing(v, s->query_cache) && t == jl_binding_type) {
+ jl_binding_t *b = (jl_binding_t*)v;
+ if (b->globalref == NULL)
+ jl_error("Binding cannot be serialized"); // no way (currently) to recover its identity
+ }
+ if (jl_is_foreign_type(t) == 1) {
+ jl_error("Cannot serialize instances of foreign datatypes");
+ }
+
// Items that require postorder traversal must visit their children prior to insertion into
// the worklist/serialization_order (and also before their first use)
if (s->incremental && !immediate) {
@@ -1506,8 +1517,6 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
if (needs_uniquing(v, s->query_cache)) {
if (jl_is_binding(v)) {
jl_binding_t *b = (jl_binding_t*)v;
- if (b->globalref == NULL)
- jl_error("Binding cannot be serialized"); // no way (currently) to recover its identity
write_pointerfield(s, (jl_value_t*)b->globalref->mod);
write_pointerfield(s, (jl_value_t*)b->globalref->name);
continue;
@@ -1672,7 +1681,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
jl_write_module(s, item, (jl_module_t*)v);
}
else if (jl_typetagis(v, jl_task_tag << 4)) {
- jl_error("Task cannot be serialized");
+ abort(); // unreachable
}
else if (jl_is_svec(v)) {
assert(f == s->s);
@@ -1688,7 +1697,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
write_uint8(f, '\0'); // null-terminated strings for easier C-compatibility
}
else if (jl_is_foreign_type(t) == 1) {
- jl_error("Cannot serialize instances of foreign datatypes");
+ abort(); // unreachable
}
else if (jl_datatype_nfields(t) == 0) {
// The object has no fields, so we just snapshot its byte representation
@@ -2562,7 +2571,7 @@ static void jl_prune_mi_backedges(jl_array_t *backedges)
jl_array_del_end(backedges, n - ins);
}
-static void jl_prune_mt_backedges(jl_array_t *backedges)
+static void jl_prune_tn_backedges(jl_array_t *backedges)
{
if (backedges == NULL)
return;
@@ -2738,7 +2747,7 @@ static int strip_all_codeinfos__(jl_typemap_entry_t *def, void *_env)
JL_GC_PUSH1(&slotnames);
int tostrip = jl_array_len(slotnames);
// for keyword methods, strip only nargs to keep the keyword names at the end for reflection
- if (jl_tparam0(jl_unwrap_unionall(m->sig)) == jl_typeof(jl_kwcall_func))
+ if (jl_tparam0(jl_unwrap_unionall(m->sig)) == (jl_value_t*)jl_kwcall_type)
tostrip = m->nargs;
strip_slotnames(slotnames, tostrip);
m->slot_syms = jl_compress_argnames(slotnames);
@@ -2770,14 +2779,14 @@ static int strip_all_codeinfos__(jl_typemap_entry_t *def, void *_env)
return 1;
}
-static int strip_all_codeinfos_(jl_methtable_t *mt, void *_env)
+static int strip_all_codeinfos_mt(jl_methtable_t *mt, void *_env)
{
return jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), strip_all_codeinfos__, NULL);
}
-static void jl_strip_all_codeinfos(void)
+static void jl_strip_all_codeinfos(jl_array_t *mod_array)
{
- jl_foreach_reachable_mtable(strip_all_codeinfos_, NULL);
+ jl_foreach_reachable_mtable(strip_all_codeinfos_mt, mod_array, NULL);
}
static int strip_module(jl_module_t *m, jl_sym_t *docmeta_sym)
@@ -2982,15 +2991,7 @@ static void jl_prepare_serialization_data(jl_array_t *mod_array, jl_array_t *new
*extext_methods = jl_alloc_vec_any(0);
internal_methods = jl_alloc_vec_any(0);
JL_GC_PUSH1(&internal_methods);
- jl_collect_methtable_from_mod(jl_type_type_mt, *extext_methods);
- jl_collect_methtable_from_mod(jl_nonfunction_mt, *extext_methods);
- size_t i, len = jl_array_len(mod_array);
- for (i = 0; i < len; i++) {
- jl_module_t *m = (jl_module_t*)jl_array_ptr_ref(mod_array, i);
- assert(jl_is_module(m));
- if (m->parent == m) // some toplevel modules (really just Base) aren't actually
- jl_collect_extext_methods_from_mod(*extext_methods, m);
- }
+ jl_collect_extext_methods(*extext_methods, mod_array);
if (edges) {
// Extract `edges` now (from info prepared by jl_collect_methcache_from_mod)
@@ -3013,7 +3014,7 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
htable_new(&bits_replace, 0);
// strip metadata and IR when requested
if (jl_options.strip_metadata || jl_options.strip_ir) {
- jl_strip_all_codeinfos();
+ jl_strip_all_codeinfos(mod_array);
jl_strip_all_docmeta(mod_array);
}
// collect needed methods and replace method tables that are in the tags array
@@ -3070,31 +3071,11 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
size_t num_mis;
jl_get_llvm_mis(native_functions, &num_mis, NULL);
arraylist_grow(&MIs, num_mis);
- jl_get_llvm_mis(native_functions, &num_mis, (jl_method_instance_t *)MIs.items);
+ jl_get_llvm_mis(native_functions, &num_mis, (jl_method_instance_t*)MIs.items);
}
}
if (jl_options.trim) {
jl_rebuild_methtables(&MIs, &new_methtables);
- jl_methtable_t *mt = (jl_methtable_t *)ptrhash_get(&new_methtables, jl_type_type_mt);
- JL_GC_PROMISE_ROOTED(mt);
- if (mt != HT_NOTFOUND)
- jl_type_type_mt = mt;
- else
- jl_type_type_mt = jl_new_method_table(jl_type_type_mt->name, jl_type_type_mt->module);
-
- mt = (jl_methtable_t *)ptrhash_get(&new_methtables, jl_kwcall_mt);
- JL_GC_PROMISE_ROOTED(mt);
- if (mt != HT_NOTFOUND)
- jl_kwcall_mt = mt;
- else
- jl_kwcall_mt = jl_new_method_table(jl_kwcall_mt->name, jl_kwcall_mt->module);
-
- mt = (jl_methtable_t *)ptrhash_get(&new_methtables, jl_nonfunction_mt);
- JL_GC_PROMISE_ROOTED(mt);
- if (mt != HT_NOTFOUND)
- jl_nonfunction_mt = mt;
- else
- jl_nonfunction_mt = jl_new_method_table(jl_nonfunction_mt->name, jl_nonfunction_mt->module);
}
nsym_tag = 0;
@@ -3259,17 +3240,14 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
jl_prune_type_cache_hash(jl_atomic_load_relaxed(&tn->cache)));
jl_gc_wb(tn, jl_atomic_load_relaxed(&tn->cache));
jl_prune_type_cache_linear(jl_atomic_load_relaxed(&tn->linearcache));
+ jl_value_t *backedges = get_replaceable_field((jl_value_t**)&tn->backedges, 1);
+ jl_prune_tn_backedges((jl_array_t*)backedges);
}
else if (jl_is_method_instance(v)) {
jl_method_instance_t *mi = (jl_method_instance_t*)v;
jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
jl_prune_mi_backedges((jl_array_t*)backedges);
}
- else if (jl_is_mtable(v)) {
- jl_methtable_t *mt = (jl_methtable_t*)v;
- jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mt->backedges, 1);
- jl_prune_mt_backedges((jl_array_t*)backedges);
- }
else if (jl_is_binding(v)) {
jl_binding_t *b = (jl_binding_t*)v;
jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
@@ -3522,7 +3500,7 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
if (jl_options.trim)
*_native_data = jl_precompile_trimmed(precompilation_world);
else
- *_native_data = jl_precompile(jl_options.compile_enabled == JL_OPTIONS_COMPILE_ALL);
+ *_native_data = jl_precompile(jl_options.compile_enabled == JL_OPTIONS_COMPILE_ALL, mod_array);
}
// Make sure we don't run any Julia code concurrently after this point
diff --git a/src/staticdata_utils.c b/src/staticdata_utils.c
index 65f7dc59d9397..d699e0c262d26 100644
--- a/src/staticdata_utils.c
+++ b/src/staticdata_utils.c
@@ -332,9 +332,9 @@ static int jl_collect_methtable_from_mod(jl_methtable_t *mt, void *env)
// Collect methods of external functions defined by modules in the worklist
// "extext" = "extending external"
// Also collect relevant backedges
-static void jl_collect_extext_methods_from_mod(jl_array_t *s, jl_module_t *m)
+static void jl_collect_extext_methods(jl_array_t *s, jl_array_t *mod_array)
{
- foreach_mtable_in_module(m, jl_collect_methtable_from_mod, s);
+ jl_foreach_reachable_mtable(jl_collect_methtable_from_mod, mod_array, s);
}
static void jl_record_edges(jl_method_instance_t *caller, jl_array_t *edges)
@@ -727,9 +727,7 @@ static void jl_activate_methods(jl_array_t *external, jl_array_t *internal, size
}
for (i = 0; i < l; i++) {
jl_typemap_entry_t *entry = (jl_typemap_entry_t*)jl_array_ptr_ref(external, i);
- jl_methtable_t *mt = jl_method_get_table(entry->func.method);
- assert((jl_value_t*)mt != jl_nothing);
- jl_method_table_activate(mt, entry);
+ jl_method_table_activate(entry);
}
}
}
diff --git a/stdlib/Serialization/src/Serialization.jl b/stdlib/Serialization/src/Serialization.jl
index 3362c9439d385..9437fedf649ec 100644
--- a/stdlib/Serialization/src/Serialization.jl
+++ b/stdlib/Serialization/src/Serialization.jl
@@ -467,6 +467,20 @@ function serialize(s::AbstractSerializer, meth::Method)
nothing
end
+function serialize(s::AbstractSerializer, mt::Core.MethodTable)
+ serialize_type(s, typeof(mt))
+ serialize(s, mt.cache)
+ nothing
+end
+
+function serialize(s::AbstractSerializer, mc::Core.MethodCache)
+ serialize_type(s, typeof(mc))
+ serialize(s, mc.name)
+ serialize(s, mc.module)
+ nothing
+end
+
+
function serialize(s::AbstractSerializer, linfo::Core.MethodInstance)
serialize_cycle(s, linfo) && return
writetag(s.io, METHODINSTANCE_TAG)
@@ -536,11 +550,12 @@ function serialize_typename(s::AbstractSerializer, t::Core.TypeName)
serialize(s, t.flags & 0x2 == 0x2) # .mutable
serialize(s, Int32(length(primary.types) - t.n_uninitialized))
serialize(s, t.max_methods)
- if isdefined(t, :mt) && t.mt !== Symbol.name.mt
- serialize(s, t.mt.name)
- serialize(s, collect(Base.MethodList(t.mt)))
- serialize(s, t.mt.max_args)
- kws = collect(methods(Core.kwcall, (Any, t.wrapper, Vararg)))
+ ms = Base.matches_to_methods(Base._methods_by_ftype(Tuple{t.wrapper, Vararg}, -1, Base.get_world_counter()), t, nothing).ms
+ if t.singletonname !== t.name || !isempty(ms)
+ serialize(s, t.singletonname)
+ serialize(s, ms)
+ serialize(s, t.max_args)
+ kws = Base.matches_to_methods(Base._methods_by_ftype(Tuple{typeof(Core.kwcall), Any, t.wrapper, Vararg}, -1, Base.get_world_counter()), t, nothing).ms
if isempty(kws)
writetag(s.io, UNDEFREF_TAG)
else
@@ -555,21 +570,17 @@ end
# decide whether to send all data for a type (instead of just its name)
function should_send_whole_type(s, t::DataType)
tn = t.name
- if isdefined(tn, :mt)
- # TODO improve somehow
- # send whole type for anonymous functions in Main
- name = tn.mt.name
- mod = tn.module
- isanonfunction = mod === Main && # only Main
- t.super === Function && # only Functions
- unsafe_load(unsafe_convert(Ptr{UInt8}, tn.name)) == UInt8('#') && # hidden type
- (!isdefined(mod, name) || t != typeof(getglobal(mod, name))) # XXX: 95% accurate test for this being an inner function
- # TODO: more accurate test? (tn.name !== "#" name)
- #TODO: iskw = startswith(tn.name, "#kw#") && ???
- #TODO: iskw && return send-as-kwftype
- return mod === __deserialized_types__ || isanonfunction
- end
- return false
+ # TODO improve somehow?
+ # send whole type for anonymous functions in Main
+ name = tn.singletonname
+ mod = tn.module
+ mod === __deserialized_types__ && return true
+ isanonfunction = mod === Main && # only Main
+ t.super === Function && # only Functions
+ unsafe_load(unsafe_convert(Ptr{UInt8}, tn.name)) == UInt8('#') && # hidden type
+ (!isdefined(mod, name) || t != typeof(getglobal(mod, name))) # XXX: 95% accurate test for this being an inner function
+ # TODO: more accurate test? (tn.name !== "#" name)
+ return isanonfunction
end
function serialize_type_data(s, @nospecialize(t::DataType))
@@ -1112,8 +1123,8 @@ function deserialize(s::AbstractSerializer, ::Type{Method})
meth.recursion_relation = recursion_relation
end
if !is_for_opaque_closure
- mt = ccall(:jl_method_table_for, Any, (Any,), sig)
- if mt !== nothing && nothing === ccall(:jl_methtable_lookup, Any, (Any, Any, UInt), mt, sig, Base.get_world_counter())
+ mt = Core.GlobalMethods
+ if nothing === ccall(:jl_methtable_lookup, Any, (Any, UInt), sig, Base.get_world_counter()) # XXX: quite sketchy?
ccall(:jl_method_table_insert, Cvoid, (Any, Any, Ptr{Cvoid}), mt, meth, C_NULL)
end
end
@@ -1122,6 +1133,19 @@ function deserialize(s::AbstractSerializer, ::Type{Method})
return meth
end
+function deserialize(s::AbstractSerializer, ::Type{Core.MethodTable})
+ mc = deserialize(s)::Core.MethodCache
+ mc === Core.GlobalMethods.cache && return Core.GlobalMethods
+    return getglobal(mc.module, mc.name)::Core.MethodTable
+end
+
+function deserialize(s::AbstractSerializer, ::Type{Core.MethodCache})
+ name = deserialize(s)::Symbol
+ mod = deserialize(s)::Module
+ f = Base.unwrap_unionall(getglobal(mod, name))
+ return (f::Core.MethodTable).cache
+end
+
function deserialize(s::AbstractSerializer, ::Type{Core.MethodInstance})
linfo = ccall(:jl_new_method_instance_uninit, Ref{Core.MethodInstance}, (Ptr{Cvoid},), C_NULL)
deserialize_cycle(s, linfo)
@@ -1471,20 +1495,10 @@ function deserialize_typename(s::AbstractSerializer, number)
if tag != UNDEFREF_TAG
mtname = handle_deserialize(s, tag)
defs = deserialize(s)
- maxa = deserialize(s)::Int
+ maxa = deserialize(s)::Union{Int,Int32}
if makenew
- mt = ccall(:jl_new_method_table, Any, (Any, Any), name, tn.module)
- if !isempty(parameters)
- mt.offs = 0
- end
- mt.name = mtname
- setfield!(mt, :max_args, maxa, :monotonic)
- ccall(:jl_set_nth_field, Cvoid, (Any, Csize_t, Any), tn, Base.fieldindex(Core.TypeName, :mt)-1, mt)
- for def in defs
- if isdefined(def, :sig)
- ccall(:jl_method_table_insert, Cvoid, (Any, Any, Ptr{Cvoid}), mt, def, C_NULL)
- end
- end
+ tn.singletonname = mtname
+ setfield!(tn, :max_args, Int32(maxa), :monotonic)
end
tag = Int32(read(s.io, UInt8)::UInt8)
if tag != UNDEFREF_TAG
@@ -1494,9 +1508,6 @@ function deserialize_typename(s::AbstractSerializer, number)
@eval Core.kwcall(kwargs::NamedTuple, f::$ty, args...) = $kws(kwargs, f, args...)
end
end
- elseif makenew
- mt = Symbol.name.mt
- ccall(:jl_set_nth_field, Cvoid, (Any, Csize_t, Any), tn, Base.fieldindex(Core.TypeName, :mt)-1, mt)
end
return tn
end
diff --git a/stdlib/Test/src/Test.jl b/stdlib/Test/src/Test.jl
index 738da88197c1c..8e15d10512029 100644
--- a/stdlib/Test/src/Test.jl
+++ b/stdlib/Test/src/Test.jl
@@ -2215,30 +2215,7 @@ function detect_ambiguities(mods::Module...;
end
end
end
- work = Base.loaded_modules_array()
- filter!(mod -> mod === parentmodule(mod), work) # some items in loaded_modules_array are not top modules (really just Base)
- while !isempty(work)
- mod = pop!(work)
- for n in names(mod, all = true)
- Base.isdeprecated(mod, n) && continue
- if !isdefined(mod, n)
- if is_in_mods(mod, recursive, mods)
- if allowed_undefineds === nothing || GlobalRef(mod, n) ∉ allowed_undefineds
- println("Skipping ", mod, '.', n) # typically stale exports
- end
- end
- continue
- end
- f = Base.unwrap_unionall(getfield(mod, n))
- if isa(f, Module) && f !== mod && parentmodule(f) === mod && nameof(f) === n
- push!(work, f)
- elseif isa(f, DataType) && isdefined(f.name, :mt) && parentmodule(f) === mod && nameof(f) === n && f.name.mt !== Symbol.name.mt && f.name.mt !== DataType.name.mt
- examine(f.name.mt)
- end
- end
- end
- examine(Symbol.name.mt)
- examine(DataType.name.mt)
+ examine(Core.GlobalMethods)
return collect(ambs)
end
@@ -2286,30 +2263,7 @@ function detect_unbound_args(mods...;
push!(ambs, m)
end
end
- work = Base.loaded_modules_array()
- filter!(mod -> mod === parentmodule(mod), work) # some items in loaded_modules_array are not top modules (really just Base)
- while !isempty(work)
- mod = pop!(work)
- for n in names(mod, all = true)
- Base.isdeprecated(mod, n) && continue
- if !isdefined(mod, n)
- if is_in_mods(mod, recursive, mods)
- if allowed_undefineds === nothing || GlobalRef(mod, n) ∉ allowed_undefineds
- println("Skipping ", mod, '.', n) # typically stale exports
- end
- end
- continue
- end
- f = Base.unwrap_unionall(getfield(mod, n))
- if isa(f, Module) && f !== mod && parentmodule(f) === mod && nameof(f) === n
- push!(work, f)
- elseif isa(f, DataType) && isdefined(f.name, :mt) && parentmodule(f) === mod && nameof(f) === n && f.name.mt !== Symbol.name.mt && f.name.mt !== DataType.name.mt
- examine(f.name.mt)
- end
- end
- end
- examine(Symbol.name.mt)
- examine(DataType.name.mt)
+ examine(Core.GlobalMethods)
return collect(ambs)
end
diff --git a/test/clangsa/MissingRoots.c b/test/clangsa/MissingRoots.c
index 0a0d5369eba44..84341f9410e1e 100644
--- a/test/clangsa/MissingRoots.c
+++ b/test/clangsa/MissingRoots.c
@@ -277,20 +277,6 @@ void nonconst_loads2()
static inline void look_at_value2(jl_value_t *v) {
look_at_value(v);
}
-void mtable(jl_value_t *f) {
- look_at_value2((jl_value_t*)jl_gf_mtable(f));
- jl_value_t *val = NULL;
- JL_GC_PUSH1(&val);
- val = (jl_value_t*)jl_gf_mtable(f);
- JL_GC_POP();
-}
-
-void mtable2(jl_value_t **v) {
- jl_value_t *val = NULL;
- JL_GC_PUSH1(&val);
- val = (jl_value_t*)jl_gf_mtable(v[2]);
- JL_GC_POP();
-}
void tparam0(jl_value_t *atype) {
look_at_value(jl_tparam0(atype));
diff --git a/test/core.jl b/test/core.jl
index c65a2a821e275..0c4133d949346 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -17,10 +17,11 @@ for (T, c) in (
(Core.CodeInstance, [:def, :owner, :rettype, :exctype, :rettype_const, :analysis_results, :time_infer_total, :time_infer_cache_saved, :time_infer_self]),
(Core.Method, [#=:name, :module, :file, :line, :primary_world, :sig, :slot_syms, :external_mt, :nargs, :called, :nospecialize, :nkw, :isva, :is_for_opaque_closure, :constprop=#]),
(Core.MethodInstance, [#=:def, :specTypes, :sparam_vals=#]),
- (Core.MethodTable, [:module]),
+ (Core.MethodTable, [:cache, :module, :name]),
+ (Core.MethodCache, []),
(Core.TypeMapEntry, [:sig, :simplesig, :guardsigs, :func, :isleafsig, :issimplesig, :va]),
(Core.TypeMapLevel, []),
- (Core.TypeName, [:name, :module, :names, :wrapper, :mt, :hash, :n_uninitialized, :flags]),
+ (Core.TypeName, [:name, :module, :names, :wrapper, :hash, :n_uninitialized, :flags]),
(DataType, [:name, :super, :parameters, :instance, :hash]),
(TypeVar, [:name, :ub, :lb]),
(Core.Memory, [:length, :ptr]),
@@ -37,10 +38,11 @@ for (T, c) in (
(Core.CodeInstance, [:next, :min_world, :max_world, :inferred, :edges, :debuginfo, :ipo_purity_bits, :invoke, :specptr, :specsigflags, :precompile, :time_compile]),
(Core.Method, [:primary_world, :dispatch_status]),
(Core.MethodInstance, [:cache, :flags]),
- (Core.MethodTable, [:defs, :leafcache, :cache, :max_args]),
+ (Core.MethodTable, [:defs]),
+ (Core.MethodCache, [:leafcache, :cache, :var""]),
(Core.TypeMapEntry, [:next, :min_world, :max_world]),
(Core.TypeMapLevel, [:arg1, :targ, :name1, :tname, :list, :any]),
- (Core.TypeName, [:cache, :linearcache, :cache_entry_count]),
+ (Core.TypeName, [:cache, :linearcache, :Typeofwrapper, :max_args, :cache_entry_count]),
(DataType, [:types, :layout]),
(Core.Memory, []),
(Core.GenericMemoryRef, []),
@@ -2647,11 +2649,14 @@ struct D14919 <: Function; end
@test B14919()() == "It's a brand new world"
@test C14919()() == D14919()() == "Boo."
-for f in (:Any, :Function, :(Core.Builtin), :(Union{Nothing, Type}), :(Union{typeof(+), Type}), :(Union{typeof(+), typeof(-)}), :(Base.Callable))
- @test_throws ErrorException("Method dispatch is unimplemented currently for this method signature") @eval (::$f)() = 1
-end
-for f in (:(Core.getfield), :((::typeof(Core.getfield))), :((::Core.IntrinsicFunction)))
- @test_throws ErrorException("cannot add methods to a builtin function") @eval $f() = 1
+let ex = ErrorException("cannot add methods to a builtin function")
+ for f in (:(Core.Any), :(Core.Function), :(Core.Builtin), :(Base.Callable), :(Union{Nothing,F} where F), :(typeof(Core.getfield)), :(Core.IntrinsicFunction))
+ @test_throws ex @eval (::$f)() = 1
+ end
+ @test_throws ex @eval (::Union{Nothing,F})() where {F<:Function} = 1
+ for f in (:(Core.getfield),)
+ @test_throws ex @eval $f() = 1
+ end
end
# issue #33370
@@ -5092,7 +5097,7 @@ function f16340(x::T) where T
return g
end
let g = f16340(1)
- @test isa(typeof(g).name.mt.defs.sig, UnionAll)
+ @test isa(only(methods(g)).sig, UnionAll)
end
# issue #16793
diff --git a/test/misc.jl b/test/misc.jl
index 64b057d1ec4fc..d72ad5b58ad12 100644
--- a/test/misc.jl
+++ b/test/misc.jl
@@ -1632,10 +1632,10 @@ end
let errs = IOBuffer()
run(`$(Base.julia_cmd()) -e '
using Test
- @test isdefined(DataType.name.mt, :backedges)
+ @test isdefined(Type.body.name, :backedges)
Base.Experimental.disable_new_worlds()
@test_throws "disable_new_worlds" @eval f() = 1
- @test !isdefined(DataType.name.mt, :backedges)
+ @test !isdefined(Type.body.name, :backedges)
@test_throws "disable_new_worlds" Base.delete_method(which(+, (Int, Int)))
@test 1+1 == 2
using Dates
diff --git a/test/precompile.jl b/test/precompile.jl
index 2ee24a71dc62e..2555086214c77 100644
--- a/test/precompile.jl
+++ b/test/precompile.jl
@@ -1958,8 +1958,12 @@ precompile_test_harness("PkgCacheInspector") do load_path
end
modules, init_order, edges, new_ext_cis, external_methods, new_method_roots, cache_sizes = sv
- m = only(external_methods).func::Method
- @test m.name == :repl_cmd && m.nargs < 2
+ for m in external_methods
+ m = m.func::Method
+ if m.name !== :f
+ @test m.name == :repl_cmd && m.nargs == 1
+ end
+ end
@test new_ext_cis === nothing || any(new_ext_cis) do ci
mi = ci.def::Core.MethodInstance
mi.specTypes == Tuple{typeof(Base.repl_cmd), Int, String}
diff --git a/test/reflection.jl b/test/reflection.jl
index 6da64ae7d7031..f7c81df32f41e 100644
--- a/test/reflection.jl
+++ b/test/reflection.jl
@@ -528,13 +528,13 @@ test_typed_ir_printing(g15714, Tuple{Vector{Float32}},
#@test used_dup_var_tested15715
@test used_unique_var_tested15714
-let li = typeof(fieldtype).name.mt.cache.func::Core.MethodInstance,
+let li = only(methods(fieldtype)).unspecialized,
lrepr = string(li),
mrepr = string(li.def),
lmime = repr("text/plain", li),
mmime = repr("text/plain", li.def)
- @test lrepr == lmime == "MethodInstance for fieldtype(...)"
+ @test lrepr == lmime == "MethodInstance for fieldtype(::Vararg{Any})"
@test mrepr == "fieldtype(...) @ Core none:0" # simple print
@test mmime == "fieldtype(...)\n @ Core none:0" # verbose print
end
diff --git a/test/show.jl b/test/show.jl
index fa5989d6cd91d..89560d0e908d1 100644
--- a/test/show.jl
+++ b/test/show.jl
@@ -858,7 +858,7 @@ struct S45879{P} end
let ms = methods(S45879)
@test ms isa Base.MethodList
@test length(ms) == 0
- @test sprint(show, Base.MethodList(Method[], typeof(S45879).name.mt)) isa String
+ @test sprint(show, Base.MethodList(Method[], typeof(S45879).name)) isa String
end
function f49475(a=12.0; b) end
@@ -1598,7 +1598,7 @@ struct f_with_params{t} <: Function end
end
let io = IOBuffer()
- show(io, MIME"text/html"(), ModFWithParams.f_with_params.body.name.mt)
+ show(io, MIME"text/html"(), methods(ModFWithParams.f_with_params{Int}()))
@test occursin("ModFWithParams.f_with_params", String(take!(io)))
end
@@ -1780,10 +1780,10 @@ end
anonfn_type_repr = "$modname.var\"$(typeof(anonfn).name.name)\""
@test repr(typeof(anonfn)) == anonfn_type_repr
@test repr(anonfn) == anonfn_type_repr * "()"
- @test repr("text/plain", anonfn) == "$(typeof(anonfn).name.mt.name) (generic function with 1 method)"
+ @test repr("text/plain", anonfn) == "$(typeof(anonfn).name.singletonname) (generic function with 1 method)"
mkclosure = x->y->x+y
clo = mkclosure(10)
- @test repr("text/plain", clo) == "$(typeof(clo).name.mt.name) (generic function with 1 method)"
+ @test repr("text/plain", clo) == "$(typeof(clo).name.singletonname) (generic function with 1 method)"
@test repr(UnionAll) == "UnionAll"
end
diff --git a/test/stacktraces.jl b/test/stacktraces.jl
index ca553c2a2e801..3df0998fe88f6 100644
--- a/test/stacktraces.jl
+++ b/test/stacktraces.jl
@@ -90,7 +90,7 @@ f(x) = (y = h(x); y)
trace = (try; f(3); catch; stacktrace(catch_backtrace()); end)[1:3]
can_inline = Bool(Base.JLOptions().can_inline)
for (frame, func, inlined) in zip(trace, [g,h,f], (can_inline, can_inline, false))
- @test frame.func === typeof(func).name.mt.name
+ @test frame.func === typeof(func).name.singletonname
# broken until #50082 can be addressed
mi = isa(frame.linfo, Core.CodeInstance) ? frame.linfo.def : frame.linfo
@test mi.def.module === which(func, (Any,)).module broken=inlined
@@ -109,10 +109,10 @@ let src = Meta.lower(Main, quote let x = 1 end end).args[1]::Core.CodeInfo
repr = string(sf)
@test repr == "Toplevel MethodInstance thunk at b:3"
end
-let li = typeof(fieldtype).name.mt.cache.func::Core.MethodInstance,
+let li = only(methods(fieldtype)).unspecialized,
sf = StackFrame(:a, :b, 3, li, false, false, 0),
repr = string(sf)
- @test repr == "fieldtype(...) at b:3"
+ @test repr == "fieldtype(::Vararg{Any}) at b:3"
end
let ctestptr = cglobal((:ctest, "libccalltest")),
diff --git a/test/syntax.jl b/test/syntax.jl
index 0c56b6b74b167..01b48918b5fde 100644
--- a/test/syntax.jl
+++ b/test/syntax.jl
@@ -596,10 +596,9 @@ let thismodule = @__MODULE__,
@test !isdefined(M16096, :foo16096)
@test !isdefined(M16096, :it)
@test typeof(local_foo16096).name.module === thismodule
- @test typeof(local_foo16096).name.mt.module === thismodule
- @test getfield(thismodule, typeof(local_foo16096).name.mt.name) === local_foo16096
+ @test getfield(thismodule, typeof(local_foo16096).name.singletonname) === local_foo16096
@test getfield(thismodule, typeof(local_foo16096).name.name) === typeof(local_foo16096)
- @test !isdefined(M16096, typeof(local_foo16096).name.mt.name)
+ @test !isdefined(M16096, typeof(local_foo16096).name.singletonname)
@test !isdefined(M16096, typeof(local_foo16096).name.name)
end
diff --git a/test/worlds.jl b/test/worlds.jl
index 542bbaa440362..4b0ef208f9c7e 100644
--- a/test/worlds.jl
+++ b/test/worlds.jl
@@ -194,31 +194,26 @@ f_gen265(x::Type{Int}) = 3
# would have capped those specializations if they were still valid
f26506(@nospecialize(x)) = 1
g26506(x) = Base.inferencebarrier(f26506)(x[1])
-z = Any["ABC"]
+z26506 = Any["ABC"]
f26506(x::Int) = 2
-g26506(z) # Places an entry for f26506(::String) in mt.name.cache
+g26506(z26506) # Places an entry for f26506(::String) in MethodTable cache
+w26506 = Base.get_world_counter()
+cache26506 = ccall(:jl_mt_find_cache_entry, Any, (Any, Any, UInt), Core.GlobalMethods.cache, Tuple{typeof(f26506),String}, w26506)::Core.TypeMapEntry
+@test cache26506.max_world === typemax(UInt)
+w26506 = Base.get_world_counter()
f26506(x::String) = 3
-let cache = typeof(f26506).name.mt.cache
- # The entry we created above should have been truncated
- @test cache.min_world == cache.max_world
-end
-c26506_1, c26506_2 = Condition(), Condition()
-# Captures the world age
-result26506 = Any[]
-t = Task(()->begin
- wait(c26506_1)
- push!(result26506, g26506(z))
- notify(c26506_2)
-end)
-yield(t)
+@test w26506+1 === Base.get_world_counter()
+# The entry we created above should have been truncated
+@test cache26506.max_world == w26506
+# Captures the world age on creation
+t26506 = @task g26506(z26506)
f26506(x::Float64) = 4
-let cache = typeof(f26506).name.mt.cache
- # The entry we created above should have been truncated
- @test cache.min_world == cache.max_world
-end
-notify(c26506_1)
-wait(c26506_2)
-@test result26506[1] == 3
+@test cache26506.max_world == w26506
+f26506(x::String) = 5
+# The entry we created above should not have been changed
+@test cache26506.max_world == w26506
+@test fetch(schedule(t26506)) === 3
+@test g26506(z26506) === 5
# issue #38435
f38435(::Int, ::Any) = 1