@@ -863,40 +863,60 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
         }
         goto done_fields; // for now
     }
-    if (s->incremental && jl_is_method_instance(v)) {
+    if (jl_is_method_instance(v)) {
         jl_method_instance_t *mi = (jl_method_instance_t*)v;
-        jl_value_t *def = mi->def.value;
-        if (needs_uniquing(v, s->query_cache)) {
-            // we only need 3 specific fields of this (the rest are not used)
-            jl_queue_for_serialization(s, mi->def.value);
-            jl_queue_for_serialization(s, mi->specTypes);
-            jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
-            goto done_fields;
-        }
-        else if (jl_is_method(def) && jl_object_in_image(def)) {
-            // we only need 3 specific fields of this (the rest are restored afterward, if valid)
-            // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
-            // so must not be present here
-            record_field_change((jl_value_t**)&mi->backedges, NULL);
-            record_field_change((jl_value_t**)&mi->cache, NULL);
+        if (s->incremental) {
+            jl_value_t *def = mi->def.value;
+            if (needs_uniquing(v, s->query_cache)) {
+                // we only need 3 specific fields of this (the rest are not used)
+                jl_queue_for_serialization(s, mi->def.value);
+                jl_queue_for_serialization(s, mi->specTypes);
+                jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
+                goto done_fields;
+            }
+            else if (jl_is_method(def) && jl_object_in_image(def)) {
+                // we only need 3 specific fields of this (the rest are restored afterward, if valid)
+                // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
+                // so must not be present here
+                record_field_change((jl_value_t**)&mi->backedges, NULL);
+                record_field_change((jl_value_t**)&mi->cache, NULL);
+            }
+            else {
+                assert(!needs_recaching(v, s->query_cache));
+            }
+            // n.b. opaque closures cannot be inspected and relied upon like a
+            // normal method since they can get improperly introduced by generated
+            // functions, so if they appeared at all, we will probably serialize
+            // them wrong and segfault. The jl_code_for_staged function should
+            // prevent this from happening, so we do not need to detect that user
+            // error now.
         }
-        else {
-            assert(!needs_recaching(v, s->query_cache));
+        // don't recurse into all backedges memory (yet)
+        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
+        if (backedges) {
+            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
+            size_t i = 0, n = jl_array_nrows(backedges);
+            while (i < n) {
+                jl_value_t *invokeTypes;
+                jl_code_instance_t *caller;
+                i = get_next_edge((jl_array_t*)backedges, i, &invokeTypes, &caller);
+                if (invokeTypes)
+                    jl_queue_for_serialization(s, invokeTypes);
+            }
         }
-        // n.b. opaque closures cannot be inspected and relied upon like a
-        // normal method since they can get improperly introduced by generated
-        // functions, so if they appeared at all, we will probably serialize
-        // them wrong and segfault. The jl_code_for_staged function should
-        // prevent this from happening, so we do not need to detect that user
-        // error now.
-    }
-    if (s->incremental && jl_is_binding(v)) {
-        if (needs_uniquing(v, s->query_cache)) {
-            jl_binding_t *b = (jl_binding_t*)v;
+    }
+    if (jl_is_binding(v)) {
+        jl_binding_t *b = (jl_binding_t*)v;
+        if (s->incremental && needs_uniquing(v, s->query_cache)) {
            jl_queue_for_serialization(s, b->globalref->mod);
            jl_queue_for_serialization(s, b->globalref->name);
            goto done_fields;
        }
+        // don't recurse into backedges memory (yet)
+        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
+        if (backedges) {
+            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
+        }
     }
     if (s->incremental && jl_is_globalref(v)) {
         jl_globalref_t *gr = (jl_globalref_t*)v;
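For context, mi->backedges is a flat edge list in which each entry is either a bare caller or an (invokeTypes, caller) pair, which is why the queueing loop above advances with get_next_edge rather than a fixed stride. A minimal sketch of the same walk, assuming only the get_next_edge signature visible in this hunk (the counting helper itself is hypothetical):

```c
// Hypothetical helper: count the edges in a MethodInstance backedge list
// that carry an explicit invoke-signature, using the same iteration
// pattern as the serialization loop above.
static size_t count_invoke_edges(jl_array_t *backedges)
{
    size_t i = 0, count = 0, n = jl_array_nrows(backedges);
    while (i < n) {
        jl_value_t *invokeTypes;
        jl_code_instance_t *caller;
        // get_next_edge decodes one edge starting at slot i and returns the
        // index of the next edge (an edge occupies one or two slots)
        i = get_next_edge(backedges, i, &invokeTypes, &caller);
        if (invokeTypes)
            count++;
    }
    return count;
}
```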
@@ -914,18 +934,20 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
             assert(!jl_object_in_image((jl_value_t*)tn->wrapper));
         }
     }
-    if (s->incremental && jl_is_code_instance(v)) {
+    if (jl_is_code_instance(v)) {
         jl_code_instance_t *ci = (jl_code_instance_t*)v;
         jl_method_instance_t *mi = jl_get_ci_mi(ci);
-        // make sure we don't serialize other reachable cache entries of foreign methods
-        // Should this now be:
-        // if (ci !in ci->defs->cache)
-        //     record_field_change((jl_value_t**)&ci->next, NULL);
-        // Why are we checking that the method/module this originates from is in_image?
-        // and then disconnect this CI?
-        if (jl_object_in_image((jl_value_t*)mi->def.value)) {
-            // TODO: if (ci in ci->defs->cache)
-            record_field_change((jl_value_t**)&ci->next, NULL);
+        if (s->incremental) {
+            // make sure we don't serialize other reachable cache entries of foreign methods
+            // Should this now be:
+            // if (ci !in ci->defs->cache)
+            //     record_field_change((jl_value_t**)&ci->next, NULL);
+            // Why are we checking that the method/module this originates from is in_image?
+            // and then disconnect this CI?
+            if (jl_object_in_image((jl_value_t*)mi->def.value)) {
+                // TODO: if (ci in ci->defs->cache)
+                record_field_change((jl_value_t**)&ci->next, NULL);
+            }
         }
         jl_value_t *inferred = jl_atomic_load_relaxed(&ci->inferred);
         if (inferred && inferred != jl_nothing) { // disregard if there is nothing here to delete (e.g. builtins, unspecialized)
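The ci->next field links the remaining cache entries hanging off the same method instance, so clearing it with record_field_change cuts the whole chain at this entry rather than dropping a single node. A sketch of the list shape this relies on; the walker is hypothetical, and the relaxed atomic load mirrors how the chain is read elsewhere in this file:

```c
// Hypothetical: count how many cache entries would be detached if the
// chain were cut at ci (everything downstream of ci->next).
static size_t count_downstream_entries(jl_code_instance_t *ci)
{
    size_t n = 0;
    jl_code_instance_t *next = jl_atomic_load_relaxed(&ci->next);
    while (next != NULL) {
        n++;
        next = jl_atomic_load_relaxed(&next->next);
    }
    return n;
}
```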
@@ -953,7 +975,7 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
             if (inferred == jl_nothing) {
                 record_field_change((jl_value_t**)&ci->inferred, jl_nothing);
             }
-            else if (jl_is_string(inferred)) {
+            else if (s->incremental && jl_is_string(inferred)) {
                 // New roots for external methods
                 if (jl_object_in_image((jl_value_t*)def)) {
                     void **pfound = ptrhash_bp(&s->method_roots_index, def);
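ptrhash_bp returns the address of the hash-table slot for def, creating the slot on first probe, so the surrounding code can test and fill it in one step. The usual insert-or-reuse idiom with this API, for illustration (value here is a placeholder for whatever is being recorded):

```c
// Standard ptrhash_bp idiom: one probe serves as both lookup and insert.
void **pfound = ptrhash_bp(&s->method_roots_index, def);
if (*pfound == HT_NOTFOUND)
    *pfound = value; // first time this key is seen; claim the slot
```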
@@ -2572,6 +2594,35 @@ static void jl_prune_type_cache_linear(jl_svec_t *cache)
         jl_svecset(cache, ins++, jl_nothing);
 }
 
+static void jl_prune_mi_backedges(jl_array_t *backedges)
+{
+    if (backedges == NULL)
+        return;
+    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
+    while (i < n) {
+        jl_value_t *invokeTypes;
+        jl_code_instance_t *caller;
+        i = get_next_edge(backedges, i, &invokeTypes, &caller);
+        if (ptrhash_get(&serialization_order, caller) != HT_NOTFOUND)
+            ins = set_next_edge(backedges, ins, invokeTypes, caller);
+    }
+    jl_array_del_end(backedges, n - ins);
+}
+
+static void jl_prune_binding_backedges(jl_array_t *backedges)
+{
+    if (backedges == NULL)
+        return;
+    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
+    for (i = 0; i < n; i++) {
+        jl_value_t *b = jl_array_ptr_ref(backedges, i);
+        if (ptrhash_get(&serialization_order, b) != HT_NOTFOUND)
+            jl_array_ptr_set(backedges, ins++, b);
+    }
+    jl_array_del_end(backedges, n - ins);
+}
+
+
 uint_t bindingkey_hash(size_t idx, jl_value_t *data);
 
 static void jl_prune_module_bindings(jl_module_t *m) JL_GC_DISABLED
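Both new pruning helpers follow the same in-place compaction idiom: a read cursor i scans every entry, a write cursor ins keeps only the survivors (callers still present in serialization_order), and jl_array_del_end trims the n - ins stale slots left at the tail. Note that jl_prune_binding_backedges must advance ins on every kept entry, just as set_next_edge advances it in the MethodInstance case. The same idiom on a plain C array, for illustration (keep() stands in for the serialization_order membership test):

```c
// Generic read/write-cursor compaction, mirroring the pruning helpers above.
static size_t compact(void **items, size_t n, int (*keep)(void *item))
{
    size_t i, ins = 0;
    for (i = 0; i < n; i++) {
        if (keep(items[i]))
            items[ins++] = items[i]; // survivors slide toward the front
    }
    return ins; // new logical length; slots [ins, n) are now dead
}
```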
@@ -3145,12 +3196,11 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
         jl_queue_for_serialization(&s, global_roots_keyset);
         jl_serialize_reachable(&s);
     }
-    // step 1.5: prune (garbage collect) some special weak references from
-    // built-in type caches too
+    // step 1.5: prune (garbage collect) some special weak references from known caches
    for (i = 0; i < serialization_queue.len; i++) {
        jl_value_t *v = (jl_value_t*)serialization_queue.items[i];
        if (jl_options.trim) {
-            if (jl_is_method(v)){
+            if (jl_is_method(v)) {
                jl_method_t *m = (jl_method_t*)v;
                jl_value_t *specializations_ = jl_atomic_load_relaxed(&m->specializations);
                if (!jl_is_svec(specializations_))
@@ -3178,6 +3228,16 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
                jl_gc_wb(tn, jl_atomic_load_relaxed(&tn->cache));
                jl_prune_type_cache_linear(jl_atomic_load_relaxed(&tn->linearcache));
            }
+            else if (jl_is_method_instance(v)) {
+                jl_method_instance_t *mi = (jl_method_instance_t*)v;
+                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
+                jl_prune_mi_backedges((jl_array_t*)backedges);
+            }
+            else if (jl_is_binding(v)) {
+                jl_binding_t *b = (jl_binding_t*)v;
+                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
+                jl_prune_binding_backedges((jl_array_t*)backedges);
+            }
        }
    }
 
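Taken together with the helpers added earlier in this commit, this hunk means every MethodInstance and Binding that reaches the image under jl_options.trim gets its backedge list rewritten to mention only callers that are themselves being serialized, so the image never references dropped code. A condensed sketch of that dispatch, with the field accesses exactly as in the diff (the wrapper function itself is illustrative):

```c
// Illustrative wrapper combining the two pruning paths in this commit.
static void prune_backedges_for(jl_value_t *v)
{
    if (jl_is_method_instance(v)) {
        jl_method_instance_t *mi = (jl_method_instance_t*)v;
        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
        jl_prune_mi_backedges((jl_array_t*)backedges);      // (invokeTypes, caller) edge list
    }
    else if (jl_is_binding(v)) {
        jl_binding_t *b = (jl_binding_t*)v;
        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
        jl_prune_binding_backedges((jl_array_t*)backedges); // plain list of callers
    }
}
```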