diff --git a/Compiler/src/typeinfer.jl b/Compiler/src/typeinfer.jl
index e2821f79cca0c..49cc711dc72df 100644
--- a/Compiler/src/typeinfer.jl
+++ b/Compiler/src/typeinfer.jl
@@ -120,7 +120,6 @@ function finish!(interp::AbstractInterpreter, caller::InferenceState)
     end
     inferred_result = nothing
     uncompressed = inferred_result
-    relocatability = 0x1
     const_flag = is_result_constabi_eligible(result)
     discard_src = caller.cache_mode === CACHE_MODE_NULL || const_flag
     if !discard_src
@@ -139,22 +138,14 @@ function finish!(interp::AbstractInterpreter, caller::InferenceState)
         elseif ci.owner === nothing
             # The global cache can only handle objects that codegen understands
             inferred_result = nothing
-        else
-            relocatability = 0x0
-        end
-        if isa(inferred_result, String)
-            t = @_gc_preserve_begin inferred_result
-            relocatability = unsafe_load(unsafe_convert(Ptr{UInt8}, inferred_result), Core.sizeof(inferred_result))
-            @_gc_preserve_end t
         end
     end
-    # n.b. relocatability = !isa(inferred_result, String) || inferred_result[end]
     if !@isdefined di
         di = DebugInfo(result.linfo)
     end
-    ccall(:jl_update_codeinst, Cvoid, (Any, Any, Int32, UInt, UInt, UInt32, Any, UInt8, Any, Any),
+    ccall(:jl_update_codeinst, Cvoid, (Any, Any, Int32, UInt, UInt, UInt32, Any, Any, Any),
         ci, inferred_result, const_flag, first(result.valid_worlds), last(result.valid_worlds), encode_effects(result.ipo_effects),
-        result.analysis_results, relocatability, di, edges)
+        result.analysis_results, di, edges)
     engine_reject(interp, ci)
     if !discard_src && isdefined(interp, :codegen) && uncompressed isa CodeInfo
         # record that the caller could use this result to generate code when required, if desired, to avoid repeating n^2 work
@@ -176,7 +167,6 @@ end
 function finish!(interp::AbstractInterpreter, mi::MethodInstance, ci::CodeInstance, src::CodeInfo)
     user_edges = src.edges
     edges = user_edges isa SimpleVector ? user_edges : user_edges === nothing ? Core.svec() : Core.svec(user_edges...)
-    relocatability = 0x1
     const_flag = false
     di = src.debuginfo
     rettype = Any
@@ -196,8 +186,8 @@ function finish!(interp::AbstractInterpreter, mi::MethodInstance, ci::CodeInstan
     end
     ccall(:jl_fill_codeinst, Cvoid, (Any, Any, Any, Any, Int32, UInt, UInt, UInt32, Any, Any, Any),
         ci, rettype, exctype, nothing, const_flags, min_world, max_world, ipo_effects, nothing, di, edges)
-    ccall(:jl_update_codeinst, Cvoid, (Any, Any, Int32, UInt, UInt, UInt32, Any, UInt8, Any, Any),
-        ci, nothing, const_flag, min_world, max_world, ipo_effects, nothing, relocatability, di, edges)
+    ccall(:jl_update_codeinst, Cvoid, (Any, Any, Int32, UInt, UInt, UInt32, Any, Any, Any),
+        ci, nothing, const_flag, min_world, max_world, ipo_effects, nothing, di, edges)
     code_cache(interp)[mi] = ci
     if isdefined(interp, :codegen)
         interp.codegen[ci] = src
@@ -523,7 +513,6 @@ function finishinfer!(me::InferenceState, interp::AbstractInterpreter)
         rettype_const = nothing
         const_flags = 0x0
     end
-    relocatability = 0x1
     di = nothing
     edges = empty_edges # `edges` will be updated within `finish!`
     ci = result.ci
@@ -827,7 +816,7 @@ function codeinst_as_edge(interp::AbstractInterpreter, sv::InferenceState, @nosp
         end
     end
     ci = CodeInstance(mi, cache_owner(interp), Any, Any, nothing, nothing, zero(Int32),
-        min_world, max_world, zero(UInt32), nothing, zero(UInt8), nothing, edges)
+        min_world, max_world, zero(UInt32), nothing, nothing, edges)
     if max_world == typemax(UInt)
         # if we can record all of the backedges in the global reverse-cache,
         # we can now widen our applicability in the global cache too
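Note on the hunks above: the deleted branch obtained the relocatability bit by reading the trailing byte of the compressed `inferred` string (see the removed `# n.b. relocatability = ...` comment), so the same information remains recoverable from the cache without the dedicated field. A minimal sketch of that recovery, not part of this patch (the helper name is illustrative); it mirrors the check that `src/staticdata.c` and `test/precompile.jl` perform further down:

    # Illustrative helper: recover the relocatability bit from a cached CodeInstance
    # now that the dedicated field is gone.
    function inferred_is_relocatable(ci::Core.CodeInstance)
        inferred = ci.inferred
        inferred isa Core.CodeInfo && return true   # uncompressed IR is treated as relocatable
        inferred isa String || return false         # no cached blob to inspect
        # compressed IR stores the flag in its final byte
        return !isempty(inferred) && codeunits(inferred)[end] != 0x00
    end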
diff --git a/Compiler/test/abioverride.jl b/Compiler/test/abioverride.jl
index e257074852099..da9b1f92786e5 100644
--- a/Compiler/test/abioverride.jl
+++ b/Compiler/test/abioverride.jl
@@ -46,7 +46,7 @@ let world = Base.tls_world_age()
         #=owner=#SecondArgConstOverride(1), new_source.rettype, Any#=new_source.exctype is missing=#,
         #=inferred_const=#nothing, #=code=#nothing, #=const_flags=#Int32(0),
         new_source.min_world, new_source.max_world, #=new_source.ipo_purity_bits is missing=#UInt32(0),
-        #=analysis_results=#nothing, #=not relocatable?=#UInt8(0), new_source.debuginfo, new_source.edges)
+        #=analysis_results=#nothing, new_source.debuginfo, new_source.edges)

     # Poke the CI into the global cache
     # This isn't necessary, but does conveniently give it the mandatory permanent GC-root before calling `invoke`
diff --git a/base/boot.jl b/base/boot.jl
index 4652524530703..05788844fde66 100644
--- a/base/boot.jl
+++ b/base/boot.jl
@@ -561,11 +561,11 @@ function CodeInstance(
         mi::Union{MethodInstance, ABIOverride}, owner, @nospecialize(rettype), @nospecialize(exctype), @nospecialize(inferred_const),
         @nospecialize(inferred), const_flags::Int32, min_world::UInt, max_world::UInt,
         effects::UInt32, @nospecialize(analysis_results),
-        relocatability::UInt8, di::Union{DebugInfo,Nothing}, edges::SimpleVector)
+        di::Union{DebugInfo,Nothing}, edges::SimpleVector)
     return ccall(:jl_new_codeinst, Ref{CodeInstance},
-        (Any, Any, Any, Any, Any, Any, Int32, UInt, UInt, UInt32, Any, UInt8, Any, Any),
+        (Any, Any, Any, Any, Any, Any, Int32, UInt, UInt, UInt32, Any, Any, Any),
         mi, owner, rettype, exctype, inferred_const, inferred, const_flags, min_world, max_world,
-        effects, analysis_results, relocatability, di, edges)
+        effects, analysis_results, di, edges)
 end
 GlobalRef(m::Module, s::Symbol) = ccall(:jl_module_globalref, Ref{GlobalRef}, (Any, Any), m, s)
 Module(name::Symbol=:anonymous, std_imports::Bool=true, default_names::Bool=true) = ccall(:jl_f_new_module, Ref{Module}, (Any, Bool, Bool), name, std_imports, default_names)
diff --git a/src/gf.c b/src/gf.c
index d5fbcfc7dcd6a..080d1ebd52ba8 100644
--- a/src/gf.c
+++ b/src/gf.c
@@ -323,7 +323,7 @@ jl_datatype_t *jl_mk_builtin_func(jl_datatype_t *dt, const char *name, jl_fptr_a
     jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
         (jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, jl_nothing, jl_nothing,
-        0, 1, ~(size_t)0, 0, jl_nothing, 0, NULL, NULL);
+        0, 1, ~(size_t)0, 0, jl_nothing, NULL, NULL);
     jl_mi_cache_insert(mi, codeinst);
     jl_atomic_store_relaxed(&codeinst->specptr.fptr1, fptr);
     jl_atomic_store_relaxed(&codeinst->invoke, jl_fptr_args);
@@ -564,7 +564,7 @@ JL_DLLEXPORT jl_code_instance_t *jl_get_method_inferred(
     }
     codeinst = jl_new_codeinst(
         mi, owner, rettype, (jl_value_t*)jl_any_type, NULL, NULL,
-        0, min_world, max_world, 0, jl_nothing, 0, di, edges);
+        0, min_world, max_world, 0, jl_nothing, di, edges);
     jl_mi_cache_insert(mi, codeinst);
     return codeinst;
 }
@@ -614,7 +614,6 @@ JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst(
         jl_value_t *inferred_const, jl_value_t *inferred,
         int32_t const_flags, size_t min_world, size_t max_world,
         uint32_t effects, jl_value_t *analysis_results,
-        uint8_t relocatability,
         jl_debuginfo_t *di, jl_svec_t *edges /*, int absolute_max*/)
 {
     assert(min_world <= max_world && "attempting to set invalid world constraints");
@@ -645,7 +644,6 @@ JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst(
     jl_atomic_store_relaxed(&codeinst->next, NULL);
     jl_atomic_store_relaxed(&codeinst->ipo_purity_bits, effects);
     codeinst->analysis_results = analysis_results;
-    codeinst->relocatability = relocatability;
     return codeinst;
 }

@@ -653,11 +651,10 @@ JL_DLLEXPORT void jl_update_codeinst(
         jl_code_instance_t *codeinst, jl_value_t *inferred,
         int32_t const_flags, size_t min_world, size_t max_world,
         uint32_t effects, jl_value_t *analysis_results,
-        uint8_t relocatability, jl_debuginfo_t *di, jl_svec_t *edges /* , int absolute_max*/)
+        jl_debuginfo_t *di, jl_svec_t *edges /* , int absolute_max*/)
 {
     assert(min_world <= max_world && "attempting to set invalid world constraints");
     //assert((!jl_is_method(codeinst->def->def.value) || max_world != ~(size_t)0 || min_world <= 1 || jl_svec_len(edges) != 0) && "missing edges");
-    codeinst->relocatability = relocatability;
     codeinst->analysis_results = analysis_results;
     jl_gc_wb(codeinst, analysis_results);
     jl_atomic_store_relaxed(&codeinst->ipo_purity_bits, effects);
@@ -715,7 +712,7 @@ JL_DLLEXPORT void jl_fill_codeinst(

 JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst_uninit(jl_method_instance_t *mi, jl_value_t *owner)
 {
-    jl_code_instance_t *codeinst = jl_new_codeinst(mi, owner, NULL, NULL, NULL, NULL, 0, 0, 0, 0, NULL, 0, NULL, NULL);
+    jl_code_instance_t *codeinst = jl_new_codeinst(mi, owner, NULL, NULL, NULL, NULL, 0, 0, 0, 0, NULL, NULL, NULL);
     jl_atomic_store_relaxed(&codeinst->min_world, 1); // make temporarily invalid before returning, so that jl_fill_codeinst is valid later
     return codeinst;
 }
@@ -2908,7 +2905,7 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
             jl_read_codeinst_invoke(unspec, &specsigflags, &invoke, &fptr, 1);
             jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
                 (jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, NULL, NULL,
-                0, 1, ~(size_t)0, 0, jl_nothing, 0, NULL, NULL);
+                0, 1, ~(size_t)0, 0, jl_nothing, NULL, NULL);
             codeinst->rettype_const = unspec->rettype_const;
             jl_atomic_store_relaxed(&codeinst->specptr.fptr, fptr);
             jl_atomic_store_relaxed(&codeinst->invoke, invoke);
@@ -2929,7 +2926,7 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
         if (!jl_code_requires_compiler(src, 0)) {
             jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
                 (jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, NULL, NULL,
-                0, 1, ~(size_t)0, 0, jl_nothing, 0, NULL, NULL);
+                0, 1, ~(size_t)0, 0, jl_nothing, NULL, NULL);
             jl_atomic_store_release(&codeinst->invoke, jl_fptr_interpret_call);
             jl_mi_cache_insert(mi, codeinst);
             record_precompile_statement(mi, 0, 0);
@@ -3019,7 +3016,7 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
         jl_read_codeinst_invoke(ucache, &specsigflags, &invoke, &fptr, 1);
         codeinst = jl_new_codeinst(mi, jl_nothing,
             (jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, NULL, NULL,
-            0, 1, ~(size_t)0, 0, jl_nothing, 0, NULL, NULL);
+            0, 1, ~(size_t)0, 0, jl_nothing, NULL, NULL);
         codeinst->rettype_const = ucache->rettype_const;
         // unspec is always not specsig, but might use specptr
         jl_atomic_store_relaxed(&codeinst->specptr.fptr, fptr);
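For code that constructs `CodeInstance`s directly (custom `AbstractInterpreter` packages, tests like `Compiler/test/abioverride.jl` above), the visible change is one fewer positional argument: the `relocatability::UInt8` slot between `analysis_results` and the debug info is gone. A sketch of the new 13-argument call with placeholder values (`mi` is assumed to be a `Core.MethodInstance`; the values shown are illustrative, not required):

    ci = Core.CodeInstance(mi, #=owner=#nothing, #=rettype=#Any, #=exctype=#Any,
                           #=inferred_const=#nothing, #=inferred=#nothing, #=const_flags=#Int32(0),
                           #=min_world=#typemin(UInt), #=max_world=#typemax(UInt),
                           #=effects=#zero(UInt32), #=analysis_results=#nothing,
                           #=debuginfo=#Core.DebugInfo(mi), #=edges=#Core.svec())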
diff --git a/src/jltypes.c b/src/jltypes.c
index fbdfe497ea312..b478ce7ea98fd 100644
--- a/src/jltypes.c
+++ b/src/jltypes.c
@@ -3638,7 +3638,7 @@ void jl_init_types(void) JL_GC_DISABLED
     jl_code_instance_type =
         jl_new_datatype(jl_symbol("CodeInstance"), core,
             jl_any_type, jl_emptysvec,
-            jl_perm_symsvec(18,
+            jl_perm_symsvec(17,
                 "def",
                 "owner",
                 "next",
@@ -3653,9 +3653,9 @@ void jl_init_types(void) JL_GC_DISABLED
                 //"absolute_max",
                 "ipo_purity_bits",
                 "analysis_results",
-                "specsigflags", "precompile", "relocatability",
+                "specsigflags", "precompile",
                 "invoke", "specptr"), // function object decls
-            jl_svec(18,
+            jl_svec(17,
                 jl_any_type,
                 jl_any_type,
                 jl_any_type,
@@ -3672,13 +3672,12 @@ void jl_init_types(void) JL_GC_DISABLED
                 jl_any_type,
                 jl_bool_type,
                 jl_bool_type,
-                jl_uint8_type,
                 jl_any_type, jl_any_type), // fptrs
             jl_emptysvec, 0, 1, 1);
     jl_svecset(jl_code_instance_type->types, 2, jl_code_instance_type);
-    const static uint32_t code_instance_constfields[1] = { 0b000001000011100011 }; // Set fields 1, 2, 6-8, 13 as const
-    const static uint32_t code_instance_atomicfields[1] = { 0b110110111100011100 }; // Set fields 3-5, 9-12, 14-15, 17-18 as atomic
+    const static uint32_t code_instance_constfields[1] = { 0b00001000011100011 }; // Set fields 1, 2, 6-8, 13 as const
+    const static uint32_t code_instance_atomicfields[1] = { 0b11110111100011100 }; // Set fields 3-5, 9-12, 14-17 as atomic
     // Fields 4-5 are only operated on by construction and deserialization, so are effectively const at runtime
     // Fields ipo_purity_bits and analysis_results are not currently threadsafe or reliable, as they get mutated after optimization, but are not declared atomic
     // and there is no way to tell (during inference) if their value is finalized yet (to wait for them to be narrowed if applicable)
@@ -3854,8 +3853,8 @@ void jl_init_types(void) JL_GC_DISABLED
     jl_svecset(jl_method_type->types, 13, jl_method_instance_type);
     //jl_svecset(jl_debuginfo_type->types, 0, jl_method_instance_type); // union(jl_method_instance_type, jl_method_type, jl_symbol_type)
     jl_svecset(jl_method_instance_type->types, 4, jl_code_instance_type);
+    jl_svecset(jl_code_instance_type->types, 15, jl_voidpointer_type);
     jl_svecset(jl_code_instance_type->types, 16, jl_voidpointer_type);
-    jl_svecset(jl_code_instance_type->types, 17, jl_voidpointer_type);
     jl_svecset(jl_binding_type->types, 0, jl_globalref_type);
     jl_svecset(jl_binding_partition_type->types, 3, jl_binding_partition_type);

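With the field removed, the struct shrinks from 18 to 17 slots, which is why the `jl_svecset` indices for the `invoke`/`specptr` pointers and the const/atomic bit masks above all shift by one. A quick, illustrative sanity check (assumes a Julia build that includes this change):

    @assert fieldcount(Core.CodeInstance) == 17
    @assert :relocatability ∉ fieldnames(Core.CodeInstance)
    @assert fieldname(Core.CodeInstance, 16) === :invoke
    @assert fieldname(Core.CodeInstance, 17) === :specptr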
diff --git a/src/julia.h b/src/julia.h
index 4084b340d91e5..b5416568b7ae9 100644
--- a/src/julia.h
+++ b/src/julia.h
@@ -467,7 +467,6 @@ typedef struct _jl_code_instance_t {
                                   //  & 0b010 == invokeptr matches specptr
                                   //  & 0b100 == From image
     _Atomic(uint8_t) precompile;  // if set, this will be added to the output system image
-    uint8_t relocatability;  // nonzero if all roots are built into sysimg or tagged by module key
     _Atomic(jl_callptr_t) invoke; // jlcall entry point usually, but if this codeinst belongs to an OC Method, then this is an jl_fptr_args_t fptr1 instead, unless it is not, because it is a special token object instead
     union _jl_generic_specptr_t {
         _Atomic(void*) fptr;
diff --git a/src/julia_internal.h b/src/julia_internal.h
index b1c32f6e2de6e..f8dd49eab65a3 100644
--- a/src/julia_internal.h
+++ b/src/julia_internal.h
@@ -691,7 +691,7 @@ JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst(
         jl_value_t *inferred_const, jl_value_t *inferred,
         int32_t const_flags, size_t min_world, size_t max_world,
         uint32_t effects, jl_value_t *analysis_results,
-        uint8_t relocatability, jl_debuginfo_t *di, jl_svec_t *edges /* , int absolute_max*/);
+        jl_debuginfo_t *di, jl_svec_t *edges /* , int absolute_max*/);
 JL_DLLEXPORT jl_code_instance_t *jl_get_ci_equiv(jl_code_instance_t *ci JL_PROPAGATES_ROOT, int compiled) JL_NOTSAFEPOINT;

 STATIC_INLINE jl_method_instance_t *jl_get_ci_mi(jl_code_instance_t *ci JL_PROPAGATES_ROOT) JL_NOTSAFEPOINT
diff --git a/src/method.c b/src/method.c
index 8e3bb7d0060b7..0d173d960c90f 100644
--- a/src/method.c
+++ b/src/method.c
@@ -1384,10 +1384,6 @@ JL_DLLEXPORT jl_method_t* jl_method_def(jl_svec_t *argdata,
 //   at the time of writing the system image (such occur first in the list of
 //   roots). These are the cases with `key = 0` that do not prevent
 //   serialization.
-// - CodeInstances have a `relocatability` field which when 1 indicates that
-//   every root is "safe," meaning it was either added at sysimg creation or is
-//   tagged with a non-zero `key`. Even a single unsafe root will cause this to
-//   have value 0.

 // Get the key of the current (final) block of roots
 static uint64_t current_root_id(jl_array_t *root_blocks)
diff --git a/src/opaque_closure.c b/src/opaque_closure.c
index 2d11d763be662..a10b5c617753c 100644
--- a/src/opaque_closure.c
+++ b/src/opaque_closure.c
@@ -169,7 +169,7 @@ JL_DLLEXPORT jl_opaque_closure_t *jl_new_opaque_closure_from_code_info(jl_tuplet
         if (!jl_is_svec(edges))
             edges = jl_emptysvec; // OC doesn't really have edges, so just drop them for now
         inst = jl_new_codeinst(mi, jl_nothing, rt_ub, (jl_value_t*)jl_any_type, NULL, (jl_value_t*)ci,
-            0, world, world, 0, jl_nothing, 0, ci->debuginfo, edges);
+            0, world, world, 0, jl_nothing, ci->debuginfo, edges);
         jl_mi_cache_insert(mi, inst);
     }

diff --git a/src/staticdata.c b/src/staticdata.c
index 95d7ee94e15dc..bdeecb911e3eb 100644
--- a/src/staticdata.c
+++ b/src/staticdata.c
@@ -499,7 +499,6 @@ void *native_functions;   // opaque jl_native_code_desc_t blob used for fetching
 // table of struct field addresses to rewrite during saving
 static htable_t field_replace;
 static htable_t bits_replace;
-static htable_t relocatable_ext_cis;

 // array of definitions for the predefined function pointers
 // (reverse of fptr_to_id)
@@ -924,11 +923,17 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
         }
         jl_value_t *inferred = jl_atomic_load_relaxed(&ci->inferred);
         if (inferred && inferred != jl_nothing) { // disregard if there is nothing here to delete (e.g. builtins, unspecialized)
-            if (!is_relocatable_ci(&relocatable_ext_cis, ci))
-                record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
-            else if (jl_is_method(mi->def.method) && // don't delete toplevel code
-                     mi->def.method->source) { // don't delete code from optimized opaque closures that can't be reconstructed (and builtins)
-                if (jl_atomic_load_relaxed(&ci->max_world) != ~(size_t)0 || // delete all code that cannot run
+            jl_method_t *def = mi->def.method;
+            if (jl_is_method(def)) { // don't delete toplevel code
+                int is_relocatable = jl_is_code_info(inferred) ||
+                    (jl_is_string(inferred) && jl_string_len(inferred) > 0 && jl_string_data(inferred)[jl_string_len(inferred) - 1]);
+                if (!is_relocatable) {
+                    record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
+                }
+                else if (def->source == NULL) {
+                    // don't delete code from optimized opaque closures that can't be reconstructed (and builtins)
+                }
+                else if (jl_atomic_load_relaxed(&ci->max_world) != ~(size_t)0 || // delete all code that cannot run
                     jl_atomic_load_relaxed(&ci->invoke) == jl_fptr_const_return) { // delete all code that just returns a constant
                     record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
                 }
@@ -1801,7 +1806,6 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
                     jl_atomic_store_release(&newci->min_world, 1);
                     jl_atomic_store_release(&newci->max_world, 0);
                 }
-                newci->relocatability = 0;
             }
             jl_atomic_store_relaxed(&newci->invoke, NULL);
             jl_atomic_store_relaxed(&newci->specsigflags, 0);
@@ -2865,7 +2869,7 @@ static void jl_prepare_serialization_data(jl_array_t *mod_array, jl_array_t *new
         // Extract `new_ext_cis` and `edges` now (from info prepared by jl_collect_methcache_from_mod)
         *method_roots_list = jl_alloc_vec_any(0);
         // Collect the new method roots for external specializations
-        jl_collect_new_roots(&relocatable_ext_cis, *method_roots_list, *new_ext_cis, worklist_key);
+        jl_collect_new_roots(*method_roots_list, *new_ext_cis, worklist_key);
         *edges = jl_alloc_vec_any(0);
         jl_collect_internal_cis(*edges, world);
     }
@@ -3368,7 +3372,6 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
     assert((ct->reentrant_timing & 0b1110) == 0);
     ct->reentrant_timing |= 0b1000;
     if (worklist) {
-        htable_new(&relocatable_ext_cis, 0);
         jl_prepare_serialization_data(mod_array, newly_inferred, jl_worklist_key(worklist),
                                       &extext_methods, &new_ext_cis, &method_roots_list, &edges);
         if (!emit_split) {
@@ -3385,8 +3388,6 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
         jl_save_system_image_to_stream(ff, mod_array, worklist, extext_methods, new_ext_cis, method_roots_list, edges);
         if (_native_data != NULL)
             native_functions = NULL;
-        if (worklist)
-            htable_free(&relocatable_ext_cis);
         // make sure we don't run any Julia code concurrently before this point
         // Re-enable running julia code for postoutput hooks, atexit, etc.
         jl_gc_enable_finalizers(ct, 1);
diff --git a/src/staticdata_utils.c b/src/staticdata_utils.c
index 5ff7a7b1b1fc0..5de85a8d6ec77 100644
--- a/src/staticdata_utils.c
+++ b/src/staticdata_utils.c
@@ -215,17 +215,6 @@ static int has_backedge_to_worklist(jl_method_instance_t *mi, htable_t *visited,
     return found;
 }

-static int is_relocatable_ci(htable_t *relocatable_ext_cis, jl_code_instance_t *ci)
-{
-    if (!ci->relocatability)
-        return 0;
-    jl_method_instance_t *mi = jl_get_ci_mi(ci);
-    jl_method_t *m = mi->def.method;
-    if (!ptrhash_has(relocatable_ext_cis, ci) && jl_object_in_image((jl_value_t*)m) && (!jl_is_method(m) || jl_object_in_image((jl_value_t*)m->module)))
-        return 0;
-    return 1;
-}
-
 // Given the list of CodeInstances that were inferred during the build, select
 // those that are (1) external, (2) still valid, (3) are inferred to be called
 // from the worklist or explicitly added by a `precompile` statement, and
@@ -247,8 +236,6 @@ static jl_array_t *queue_external_cis(jl_array_t *list)
     for (i = n0; i-- > 0; ) {
         jl_code_instance_t *ci = (jl_code_instance_t*)jl_array_ptr_ref(list, i);
         assert(jl_is_code_instance(ci));
-        if (!ci->relocatability)
-            continue;
         jl_method_instance_t *mi = jl_get_ci_mi(ci);
         jl_method_t *m = mi->def.method;
         if (ci->owner == jl_nothing && jl_atomic_load_relaxed(&ci->inferred) && jl_is_method(m) && jl_object_in_image((jl_value_t*)m->module)) {
@@ -275,7 +262,7 @@ static jl_array_t *queue_external_cis(jl_array_t *list)
 }

 // New roots for external methods
-static void jl_collect_new_roots(htable_t *relocatable_ext_cis, jl_array_t *roots, jl_array_t *new_ext_cis, uint64_t key)
+static void jl_collect_new_roots(jl_array_t *roots, jl_array_t *new_ext_cis, uint64_t key)
 {
     htable_t mset;
     htable_new(&mset, 0);
@@ -286,7 +273,6 @@ static void jl_collect_new_roots(htable_t *relocatable_ext_cis, jl_array_t *root
         jl_method_t *m = jl_get_ci_mi(ci)->def.method;
         assert(jl_is_method(m));
         ptrhash_put(&mset, (void*)m, (void*)m);
-        ptrhash_put(relocatable_ext_cis, (void*)ci, (void*)ci);
     }
     int nwithkey;
     void *const *table = mset.table;
diff --git a/src/toplevel.c b/src/toplevel.c
index 0f8fae8939faf..95a053f6135f6 100644
--- a/src/toplevel.c
+++ b/src/toplevel.c
@@ -640,7 +640,7 @@ JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst_for_uninferred(jl_method_instan
     jl_code_instance_t *ci = jl_new_codeinst(mi, (jl_value_t*)jl_uninferred_sym,
         (jl_value_t*)jl_any_type, (jl_value_t*)jl_any_type, jl_nothing,
         (jl_value_t*)src, 0, src->min_world, src->max_world,
-        0, NULL, 1, NULL, NULL);
+        0, NULL, NULL, NULL);
     return ci;
 }

diff --git a/test/precompile.jl b/test/precompile.jl
index 2d4a4d310b4e0..3b193e1facd7c 100644
--- a/test/precompile.jl
+++ b/test/precompile.jl
@@ -801,7 +801,7 @@ precompile_test_harness("code caching") do dir
         mi = minternal.specializations::Core.MethodInstance
         @test mi.specTypes == Tuple{typeof(M.getelsize),Vector{Int32}}
         ci = mi.cache
-        @test ci.relocatability == 1
+        @test (codeunits(ci.inferred::String)[end]) === 0x01
         @test ci.inferred !== nothing
         # ...and that we can add "untracked" roots & non-relocatable CodeInstances to them too
         Base.invokelatest() do
@@ -812,7 +812,7 @@ precompile_test_harness("code caching") do dir
         mi = mispecs[2]::Core.MethodInstance
         mi.specTypes == Tuple{typeof(M.getelsize),Vector{M.X2}}
         ci = mi.cache
-        @test ci.relocatability == 0
+        @test (codeunits(ci.inferred::String)[end]) == 0x00
         # PkgA loads PkgB, and both add roots to the same `push!` method (both before and after loading B)
         Cache_module2 = :Cachea1544c83560f0c99
         write(joinpath(dir, "$Cache_module2.jl"),
@@ -1721,8 +1721,7 @@ precompile_test_harness("issue #46296") do load_path
         mi = first(Base.specializations(first(methods(identity))))
         ci = Core.CodeInstance(mi, nothing, Any, Any, nothing, nothing, zero(Int32), typemin(UInt),
-                               typemax(UInt), zero(UInt32), nothing, 0x00,
-                               Core.DebugInfo(mi), Core.svec())
+                               typemax(UInt), zero(UInt32), nothing, Core.DebugInfo(mi), Core.svec())
         __init__() = @assert ci isa Core.CodeInstance