@@ -863,40 +863,60 @@ static void jl_insert_into_serialization_queue(jl_serializer_state *s, jl_value_
        }
        goto done_fields; // for now
    }
-    if (s->incremental && jl_is_method_instance(v)) {
+    if (jl_is_method_instance(v)) {
        jl_method_instance_t *mi = (jl_method_instance_t*)v;
-        jl_value_t *def = mi->def.value;
-        if (needs_uniquing(v, s->query_cache)) {
-            // we only need 3 specific fields of this (the rest are not used)
-            jl_queue_for_serialization(s, mi->def.value);
-            jl_queue_for_serialization(s, mi->specTypes);
-            jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
-            goto done_fields;
-        }
-        else if (jl_is_method(def) && jl_object_in_image(def)) {
-            // we only need 3 specific fields of this (the rest are restored afterward, if valid)
-            // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
-            // so must not be present here
-            record_field_change((jl_value_t**)&mi->backedges, NULL);
-            record_field_change((jl_value_t**)&mi->cache, NULL);
+        if (s->incremental) {
+            jl_value_t *def = mi->def.value;
+            if (needs_uniquing(v, s->query_cache)) {
+                // we only need 3 specific fields of this (the rest are not used)
+                jl_queue_for_serialization(s, mi->def.value);
+                jl_queue_for_serialization(s, mi->specTypes);
+                jl_queue_for_serialization(s, (jl_value_t*)mi->sparam_vals);
+                goto done_fields;
+            }
+            else if (jl_is_method(def) && jl_object_in_image(def)) {
+                // we only need 3 specific fields of this (the rest are restored afterward, if valid)
+                // in particular, cache is repopulated by jl_mi_cache_insert for all foreign function,
+                // so must not be present here
+                record_field_change((jl_value_t**)&mi->backedges, NULL);
+                record_field_change((jl_value_t**)&mi->cache, NULL);
+            }
+            else {
+                assert(!needs_recaching(v, s->query_cache));
+            }
+            // n.b. opaque closures cannot be inspected and relied upon like a
+            // normal method since they can get improperly introduced by generated
+            // functions, so if they appeared at all, we will probably serialize
+            // them wrong and segfault. The jl_code_for_staged function should
+            // prevent this from happening, so we do not need to detect that user
+            // error now.
        }
-        else {
-            assert(!needs_recaching(v, s->query_cache));
+        // don't recurse into all backedges memory (yet)
+        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
+        if (backedges) {
+            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
+            size_t i = 0, n = jl_array_nrows(backedges);
+            while (i < n) {
+                jl_value_t *invokeTypes;
+                jl_code_instance_t *caller;
+                i = get_next_edge((jl_array_t*)backedges, i, &invokeTypes, &caller);
+                if (invokeTypes)
+                    jl_queue_for_serialization(s, invokeTypes);
+            }
        }
-        // n.b. opaque closures cannot be inspected and relied upon like a
-        // normal method since they can get improperly introduced by generated
-        // functions, so if they appeared at all, we will probably serialize
-        // them wrong and segfault. The jl_code_for_staged function should
-        // prevent this from happening, so we do not need to detect that user
-        // error now.
-    }
-    if (s->incremental && jl_is_binding(v)) {
-        if (needs_uniquing(v, s->query_cache)) {
-            jl_binding_t *b = (jl_binding_t*)v;
+    }
+    if (jl_is_binding(v)) {
+        jl_binding_t *b = (jl_binding_t*)v;
+        if (s->incremental && needs_uniquing(v, s->query_cache)) {
            jl_queue_for_serialization(s, b->globalref->mod);
            jl_queue_for_serialization(s, b->globalref->name);
            goto done_fields;
        }
+        // don't recurse into backedges memory (yet)
+        jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
+        if (backedges) {
+            jl_queue_for_serialization_(s, (jl_value_t*)((jl_array_t*)backedges)->ref.mem, 0, 1);
+        }
    }
    if (s->incremental && jl_is_globalref(v)) {
        jl_globalref_t *gr = (jl_globalref_t*)v;
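
Note on the backedge queueing above: the array's underlying memory is queued non-recursively (the trailing 0, 1 arguments), and only each edge's invokeTypes is queued eagerly, because the callers may still be dropped by the pruning pass later in the save; get_next_edge decodes one edge per step. Below is a minimal, self-contained model of the packed layout that walk implies, assuming each edge is either a lone caller slot or an invoke-signature slot immediately followed by its caller (slot_t and model_get_next_edge are hypothetical stand-ins, not Julia's types or API):

#include <stdio.h>
#include <stddef.h>

/* Hypothetical stand-in for the jl_value_t slots in mi->backedges: an edge is
 * either a single caller slot, or a signature slot followed by its caller. */
typedef struct { const char *name; int is_caller; } slot_t;

/* Mirrors the get_next_edge contract assumed above: decode one edge starting
 * at index i and return the index just past it. */
static size_t model_get_next_edge(const slot_t *list, size_t i,
                                  const char **invokeTypes, const char **caller)
{
    *invokeTypes = NULL;
    if (!list[i].is_caller) {   /* a signature slot precedes its caller */
        *invokeTypes = list[i].name;
        i++;
    }
    *caller = list[i].name;
    return i + 1;
}

int main(void)
{
    /* two edges: a plain caller, then an invoke edge (signature + caller) */
    const slot_t backedges[] = {
        {"caller_A", 1},
        {"Tuple{typeof(f),Int}", 0}, {"caller_B", 1},
    };
    size_t i = 0, n = sizeof(backedges) / sizeof(backedges[0]);
    while (i < n) {
        const char *invokeTypes, *caller;
        i = model_get_next_edge(backedges, i, &invokeTypes, &caller);
        printf("caller=%s invokeTypes=%s\n", caller,
               invokeTypes ? invokeTypes : "(none)");
    }
    return 0;
}

Under that assumption the cursor advances by one or two slots per edge, which is why the loop re-reads i from the return value instead of incrementing it.
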
@@ -2572,6 +2592,35 @@ static void jl_prune_type_cache_linear(jl_svec_t *cache)
        jl_svecset(cache, ins++, jl_nothing);
}

+static void jl_prune_mi_backedges(jl_array_t *backedges)
+{
+    if (backedges == NULL)
+        return;
+    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
+    while (i < n) {
+        jl_value_t *invokeTypes;
+        jl_code_instance_t *caller;
+        i = get_next_edge(backedges, i, &invokeTypes, &caller);
+        if (ptrhash_get(&serialization_order, caller) != HT_NOTFOUND)
+            ins = set_next_edge(backedges, ins, invokeTypes, caller);
+    }
+    jl_array_del_end(backedges, n - ins);
+}
+
+static void jl_prune_binding_backedges(jl_array_t *backedges)
+{
+    if (backedges == NULL)
+        return;
+    size_t i = 0, ins = 0, n = jl_array_nrows(backedges);
+    for (i = 0; i < n; i++) {
+        jl_value_t *b = jl_array_ptr_ref(backedges, i);
+        if (ptrhash_get(&serialization_order, b) != HT_NOTFOUND)
+            jl_array_ptr_set(backedges, ins++, b);
+    }
+    jl_array_del_end(backedges, n - ins);
+}
+
+
uint_t bindingkey_hash(size_t idx, jl_value_t *data);

static void jl_prune_module_bindings(jl_module_t *m) JL_GC_DISABLED
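
Both new pruners reuse the two-cursor, in-place compaction already used by jl_prune_type_cache_linear above: a read cursor i visits every entry, a write cursor ins keeps only entries still present in serialization_order, and jl_array_del_end truncates the stale tail. A minimal sketch of the idiom on a plain C array, with a hypothetical keep() predicate standing in for the ptrhash_get membership test:

#include <stdio.h>
#include <stddef.h>

/* Hypothetical predicate standing in for
 * ptrhash_get(&serialization_order, x) != HT_NOTFOUND */
static int keep(int v) { return v % 2 == 0; }

/* Two-cursor in-place compaction: returns the new length. */
static size_t prune(int *a, size_t n)
{
    size_t i, ins = 0;
    for (i = 0; i < n; i++) {
        if (keep(a[i]))
            a[ins++] = a[i];   /* write cursor trails the read cursor */
    }
    /* the real code truncates here with jl_array_del_end(a, n - ins) */
    return ins;
}

int main(void)
{
    int edges[] = {1, 2, 3, 4, 5, 6};
    size_t n = prune(edges, 6), i;
    for (i = 0; i < n; i++)
        printf("%d ", edges[i]);   /* prints: 2 4 6 */
    printf("\n");
    return 0;
}

Because ins can never pass i, the copy is safe in place; the real functions operate on jl_array_t storage, and the method-instance variant moves whole (invokeTypes, caller) pairs at once via set_next_edge.
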
@@ -3145,12 +3194,11 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
        jl_queue_for_serialization(&s, global_roots_keyset);
        jl_serialize_reachable(&s);
    }
-    // step 1.5: prune (garbage collect) some special weak references from
-    // built-in type caches too
+    // step 1.5: prune (garbage collect) some special weak references from known caches
    for (i = 0; i < serialization_queue.len; i++) {
        jl_value_t *v = (jl_value_t*)serialization_queue.items[i];
        if (jl_options.trim) {
-            if (jl_is_method(v)){
+            if (jl_is_method(v)) {
                jl_method_t *m = (jl_method_t*)v;
                jl_value_t *specializations_ = jl_atomic_load_relaxed(&m->specializations);
                if (!jl_is_svec(specializations_))
@@ -3178,6 +3226,16 @@ static void jl_save_system_image_to_stream(ios_t *f, jl_array_t *mod_array,
                jl_gc_wb(tn, jl_atomic_load_relaxed(&tn->cache));
                jl_prune_type_cache_linear(jl_atomic_load_relaxed(&tn->linearcache));
            }
+            else if (jl_is_method_instance(v)) {
+                jl_method_instance_t *mi = (jl_method_instance_t*)v;
+                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&mi->backedges, 1);
+                jl_prune_mi_backedges((jl_array_t*)backedges);
+            }
+            else if (jl_is_binding(v)) {
+                jl_binding_t *b = (jl_binding_t*)v;
+                jl_value_t *backedges = get_replaceable_field((jl_value_t**)&b->backedges, 1);
+                jl_prune_binding_backedges((jl_array_t*)backedges);
+            }
        }
    }

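Both call sites read backedges through get_replaceable_field before pruning, so a field that was already redirected with record_field_change is pruned in its recorded form rather than its live one. A minimal model of that pair, assuming, as the usage in this file suggests, a side table keyed by the field's address (the model_* names are illustrative stand-ins, not the staticdata.c implementation):

#include <stdio.h>

/* Hypothetical side table mapping a field's address to the value that should
 * be serialized in its place, so the live object is never mutated. */
#define MAX_OVERRIDES 8
static void **override_loc[MAX_OVERRIDES];
static void  *override_val[MAX_OVERRIDES];
static int    n_overrides = 0;

static void model_record_field_change(void **loc, void *newval)
{
    override_loc[n_overrides] = loc;
    override_val[n_overrides++] = newval;
}

static void *model_get_replaceable_field(void **loc)
{
    for (int i = 0; i < n_overrides; i++)
        if (override_loc[i] == loc)
            return override_val[i];   /* serialize the recorded replacement */
    return *loc;                      /* otherwise the live value */
}

int main(void)
{
    static int cache_value = 42;
    void *cache_field = &cache_value;            /* stands in for mi->cache */
    model_record_field_change(&cache_field, NULL);
    /* the live field is untouched, but the image will see NULL */
    printf("live=%p image=%p\n", cache_field,
           model_get_replaceable_field(&cache_field));
    return 0;
}

The ordering also matters here: pruning runs only after jl_serialize_reachable has fully populated serialization_order, so the membership tests in the pruners see the final set of objects that will land in the image.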