diff --git a/src/staticdata.c b/src/staticdata.c
index ed11bcfc15171..76bb488731a92 100644
--- a/src/staticdata.c
+++ b/src/staticdata.c
@@ -461,6 +461,7 @@ void *native_functions; // opaque jl_native_code_desc_t blob used for fetching
 
 // table of struct field addresses to rewrite during saving
 static htable_t field_replace;
+static htable_t relocatable_ext_cis;
 
 // array of definitions for the predefined function pointers
 // (reverse of fptr_to_id)
@@ -693,7 +694,8 @@ static int needs_uniquing(jl_value_t *v) JL_NOTSAFEPOINT
 
 static void record_field_change(jl_value_t **addr, jl_value_t *newval) JL_NOTSAFEPOINT
 {
-    ptrhash_put(&field_replace, (void*)addr, newval);
+    if (*addr != newval)
+        ptrhash_put(&field_replace, (void*)addr, newval);
 }
 
 static jl_value_t *get_replaceable_field(jl_value_t **addr, int mutabl) JL_GC_DISABLED
@@ -836,6 +838,8 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
                 // TODO: if (ci in ci->defs->cache)
                 record_field_change((jl_value_t**)&ci->next, NULL);
             }
+            if (jl_atomic_load_relaxed(&ci->inferred) && !is_relocatable_ci(&relocatable_ext_cis, ci))
+                record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
         }
     }
     if (immediate) // must be things that can be recursively handled, and valid as type parameters
@@ -1631,6 +1635,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
                     jl_atomic_store_release(&newci->min_world, 1);
                     jl_atomic_store_release(&newci->max_world, 0);
                 }
+                newci->relocatability = 0;
             }
             jl_atomic_store_relaxed(&newci->invoke, NULL);
             jl_atomic_store_relaxed(&newci->specsigflags, 0);
@@ -2573,7 +2578,7 @@ static void jl_prepare_serialization_data(jl_array_t *mod_array, jl_array_t *new
         *edges = jl_alloc_vec_any(0);
         *method_roots_list = jl_alloc_vec_any(0);
         // Collect the new method roots for external specializations
-        jl_collect_new_roots(*method_roots_list, *new_ext_cis, worklist_key);
+        jl_collect_new_roots(&relocatable_ext_cis, *method_roots_list, *new_ext_cis, worklist_key);
         jl_collect_edges(*edges, *ext_targets, *new_ext_cis, world);
     }
     assert(edges_map == NULL); // jl_collect_edges clears this when done
@@ -2974,6 +2979,7 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
     assert((ct->reentrant_timing & 0b1110) == 0);
     ct->reentrant_timing |= 0b1000;
     if (worklist) {
+        htable_new(&relocatable_ext_cis, 0);
         jl_prepare_serialization_data(mod_array, newly_inferred, jl_worklist_key(worklist),
                                       &extext_methods, &new_ext_cis, &method_roots_list, &ext_targets, &edges);
         if (!emit_split) {
@@ -2990,6 +2996,8 @@ JL_DLLEXPORT void jl_create_system_image(void **_native_data, jl_array_t *workli
     jl_save_system_image_to_stream(ff, mod_array, worklist, extext_methods, new_ext_cis, method_roots_list, ext_targets, edges);
     if (_native_data != NULL)
         native_functions = NULL;
+    if (worklist)
+        htable_free(&relocatable_ext_cis);
     // make sure we don't run any Julia code concurrently before this point
     // Re-enable running julia code for postoutput hooks, atexit, etc.
     jl_gc_enable_finalizers(ct, 1);
diff --git a/src/staticdata_utils.c b/src/staticdata_utils.c
index 05fce7cfc4630..8d3d071054083 100644
--- a/src/staticdata_utils.c
+++ b/src/staticdata_utils.c
@@ -209,6 +209,17 @@ static int has_backedge_to_worklist(jl_method_instance_t *mi, htable_t *visited,
     return found;
 }
 
+static int is_relocatable_ci(htable_t *relocatable_ext_cis, jl_code_instance_t *ci)
+{
+    if (!ci->relocatability)
+        return 0;
+    jl_method_instance_t *mi = ci->def;
+    jl_method_t *m = mi->def.method;
+    if (!ptrhash_has(relocatable_ext_cis, ci) && jl_object_in_image((jl_value_t*)m) && (!jl_is_method(m) || jl_object_in_image((jl_value_t*)m->module)))
+        return 0;
+    return 1;
+}
+
 // Given the list of CodeInstances that were inferred during the build, select
 // those that are (1) external, (2) still valid, (3) are inferred to be called
 // from the worklist or explicitly added by a `precompile` statement, and
@@ -258,7 +269,7 @@ static jl_array_t *queue_external_cis(jl_array_t *list)
 }
 
 // New roots for external methods
-static void jl_collect_new_roots(jl_array_t *roots, jl_array_t *new_ext_cis, uint64_t key)
+static void jl_collect_new_roots(htable_t *relocatable_ext_cis, jl_array_t *roots, jl_array_t *new_ext_cis, uint64_t key)
 {
     htable_t mset;
     htable_new(&mset, 0);
@@ -269,6 +280,7 @@ static void jl_collect_new_roots(jl_array_t *roots, jl_array_t *new_ext_cis, uin
         jl_method_t *m = ci->def->def.method;
         assert(jl_is_method(m));
         ptrhash_put(&mset, (void*)m, (void*)m);
+        ptrhash_put(relocatable_ext_cis, (void*)ci, (void*)ci);
     }
     int nwithkey;
     void *const *table = mset.table;
diff --git a/test/precompile.jl b/test/precompile.jl
index 2570a425a03b7..bc6a791627616 100644
--- a/test/precompile.jl
+++ b/test/precompile.jl
@@ -806,7 +806,7 @@ precompile_test_harness("code caching") do dir
     mi = minternal.specializations::Core.MethodInstance
     @test mi.specTypes == Tuple{typeof(M.getelsize),Vector{Int32}}
     ci = mi.cache
-    @test ci.relocatability == 1
+    @test ci.relocatability == 0
     @test ci.inferred !== nothing
     # ...and that we can add "untracked" roots & non-relocatable CodeInstances to them too
     Base.invokelatest() do
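
For reviewers of the `relocatability` expectation change: below is a minimal sketch (not part of this diff) of the reflection path the updated test walks. The standalone module is a hypothetical stand-in for the test's `M`; the `Core.MethodInstance`/`Core.CodeInstance` field accesses are internals that vary across Julia versions. Run in a plain session this inspects an in-process CodeInstance, whereas the `== 0` expectation applies to the CodeInstance written to and reloaded from the cache file by `precompile_test_harness`.

```julia
# Hypothetical stand-in for the test's module M (not part of this diff).
module M
getelsize(x::Vector) = sizeof(eltype(x))
end

# Force inference/caching of one specialization, then walk to its
# CodeInstance the same way test/precompile.jl does above.
precompile(M.getelsize, (Vector{Int32},))
minternal = first(methods(M.getelsize))              # the Method object
mi = minternal.specializations::Core.MethodInstance  # its sole specialization
ci = mi.cache                                        # the cached CodeInstance
@show ci.relocatability  # the test now expects 0 for the CI reloaded from the cache file
@show ci.inferred        # this change drops `inferred` to `nothing` for non-relocatable external CIs
```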