@@ -102,6 +102,16 @@ ShenandoahGC::ShenandoahDegenPoint ShenandoahConcurrentGC::degen_point() const {
   return _degen_point;
 }
 
+void ShenandoahConcurrentGC::entry_concurrent_update_refs_prepare(ShenandoahHeap* const heap) {
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
+  const char* msg = conc_init_update_refs_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_refs_prepare);
+  EventMark em("%s", msg);
+
+  // Evacuation is complete, retire gc labs and change gc state
+  heap->concurrent_prepare_for_update_refs();
+}
+
 bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
 
@@ -192,8 +202,7 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     return false;
   }
 
-  // Evacuation is complete, retire gc labs
-  heap->concurrent_prepare_for_update_refs();
+  entry_concurrent_update_refs_prepare(heap);
 
   // Perform update-refs phase.
   if (ShenandoahVerify || ShenandoahPacing) {
@@ -216,24 +225,14 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     // Update references freed up collection set, kick the cleanup to reclaim the space.
     entry_cleanup_complete();
   } else {
-    // We chose not to evacuate because we found sufficient immediate garbage.
-    // However, there may still be regions to promote in place, so do that now.
-    if (has_in_place_promotions(heap)) {
-      entry_promote_in_place();
-
-      // If the promote-in-place operation was cancelled, we can have the degenerated
-      // cycle complete the operation. It will see that no evacuations are in progress,
-      // and that there are regions wanting promotion. The risk with not handling the
-      // cancellation would be failing to restore top for these regions and leaving
-      // them unable to serve allocations for the old generation.
-      if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
-        return false;
-      }
+    if (!entry_final_roots()) {
+      assert(_degen_point != _degenerated_unset, "Need to know where to start degenerated cycle");
+      return false;
     }
 
-    // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
-    // the control thread will detect it on its next iteration and run a degenerated young cycle.
-    vmop_entry_final_roots();
+    if (VerifyAfterGC) {
+      vmop_entry_verify_final_roots();
+    }
     _abbreviated = true;
   }
 
@@ -251,6 +250,52 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   return true;
 }
 
+bool ShenandoahConcurrentGC::complete_abbreviated_cycle() {
+  shenandoah_assert_generational();
+
+  ShenandoahGenerationalHeap* const heap = ShenandoahGenerationalHeap::heap();
+
+  // We chose not to evacuate because we found sufficient immediate garbage.
+  // However, there may still be regions to promote in place, so do that now.
+  if (heap->old_generation()->has_in_place_promotions()) {
+    entry_promote_in_place();
+
+    // If the promote-in-place operation was cancelled, we can have the degenerated
+    // cycle complete the operation. It will see that no evacuations are in progress,
+    // and that there are regions wanting promotion. The risk with not handling the
+    // cancellation would be failing to restore top for these regions and leaving
+    // them unable to serve allocations for the old generation. This will leave the weak
+    // roots flag set (the degenerated cycle will unset it).
+    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
+      return false;
+    }
+  }
+
+  // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
+  // the control thread will detect it on its next iteration and run a degenerated young cycle.
+  if (!_generation->is_old()) {
+    heap->update_region_ages(_generation->complete_marking_context());
+  }
+
+  if (!heap->is_concurrent_old_mark_in_progress()) {
+    heap->concurrent_final_roots();
+  } else {
+    // Since the cycle was shortened for having enough immediate garbage, this will be
+    // the last phase before concurrent marking of old resumes. We must be sure
+    // that old mark threads don't see any pointers to garbage in the SATB queues. Even
+    // though nothing was evacuated, overwriting unreachable weak roots with null may still
+    // put pointers to regions that become trash in the SATB queues. The following will
+    // piggyback flushing the thread local SATB queues on the same handshake that propagates
+    // the gc state change.
+    ShenandoahSATBMarkQueueSet& satb_queues = ShenandoahBarrierSet::satb_mark_queue_set();
+    ShenandoahFlushSATBHandshakeClosure complete_thread_local_satb_buffers(satb_queues);
+    heap->concurrent_final_roots(&complete_thread_local_satb_buffers);
+    heap->old_generation()->concurrent_transfer_pointers_from_satb();
+  }
+  return true;
+}
+
+
 void ShenandoahConcurrentGC::vmop_entry_init_mark() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
@@ -291,7 +336,7 @@ void ShenandoahConcurrentGC::vmop_entry_final_update_refs() {
   VMThread::execute(&op);
 }
 
-void ShenandoahConcurrentGC::vmop_entry_final_roots() {
+void ShenandoahConcurrentGC::vmop_entry_verify_final_roots() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
   ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_gross);
@@ -347,12 +392,12 @@ void ShenandoahConcurrentGC::entry_final_update_refs() {
   op_final_update_refs();
 }
 
-void ShenandoahConcurrentGC::entry_final_roots() {
-  const char* msg = final_roots_event_message();
+void ShenandoahConcurrentGC::entry_verify_final_roots() {
+  const char* msg = verify_final_roots_event_message();
   ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_roots);
   EventMark em("%s", msg);
 
-  op_final_roots();
+  op_verify_final_roots();
 }
 
 void ShenandoahConcurrentGC::entry_reset() {
@@ -526,19 +571,12 @@ void ShenandoahConcurrentGC::entry_evacuate() {
   op_evacuate();
 }
 
-void ShenandoahConcurrentGC::entry_promote_in_place() {
+void ShenandoahConcurrentGC::entry_promote_in_place() const {
   shenandoah_assert_generational();
 
-  ShenandoahHeap* const heap = ShenandoahHeap::heap();
-  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
-
-  static const char* msg = "Promote in place";
-  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::promote_in_place);
-  EventMark em("%s", msg);
-
-  ShenandoahWorkerScope scope(heap->workers(),
-                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
-                              "promote in place");
+  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::promote_in_place);
+  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::promote_in_place);
+  EventMark em("%s", "Promote in place");
 
   ShenandoahGenerationalHeap::heap()->promote_regions_in_place(true);
 }
@@ -663,6 +701,7 @@ void ShenandoahConcurrentGC::op_init_mark() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_mark_verify);
     heap->verifier()->verify_before_concmark();
   }
 
@@ -751,6 +790,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
       }
 
       if (ShenandoahVerify) {
+        ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
         heap->verifier()->verify_before_evacuation();
       }
 
@@ -767,6 +807,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
       }
     } else {
       if (ShenandoahVerify) {
+        ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
         if (has_in_place_promotions(heap)) {
           heap->verifier()->verify_after_concmark_with_promotions();
         } else {
@@ -1088,6 +1129,7 @@ void ShenandoahConcurrentGC::op_evacuate() {
 void ShenandoahConcurrentGC::op_init_update_refs() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_update_refs_verify);
     heap->verifier()->verify_before_update_refs();
   }
   if (ShenandoahPacing) {
@@ -1175,6 +1217,7 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
     heap->verifier()->verify_after_update_refs();
   }
 
@@ -1190,33 +1233,32 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 }
 
-void ShenandoahConcurrentGC::op_final_roots() {
+bool ShenandoahConcurrentGC::entry_final_roots() {
+  ShenandoahHeap* const heap = ShenandoahHeap::heap();
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
 
-  ShenandoahHeap *heap = ShenandoahHeap::heap();
-  heap->set_concurrent_weak_root_in_progress(false);
-  heap->set_evacuation_in_progress(false);
 
-  if (heap->mode()->is_generational()) {
-    // If the cycle was shortened for having enough immediate garbage, this could be
-    // the last GC safepoint before concurrent marking of old resumes. We must be sure
-    // that old mark threads don't see any pointers to garbage in the SATB buffers.
-    if (heap->is_concurrent_old_mark_in_progress()) {
-      heap->old_generation()->transfer_pointers_from_satb();
-    }
+  const char* msg = conc_final_roots_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_final_roots);
+  EventMark em("%s", msg);
+  ShenandoahWorkerScope scope(heap->workers(),
+                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
+                              msg);
 
-    if (!_generation->is_old()) {
-      ShenandoahGenerationalHeap::heap()->update_region_ages(_generation->complete_marking_context());
+  if (!heap->mode()->is_generational()) {
+    heap->concurrent_final_roots();
+  } else {
+    if (!complete_abbreviated_cycle()) {
+      return false;
     }
   }
+  return true;
+}
 
+void ShenandoahConcurrentGC::op_verify_final_roots() {
   if (VerifyAfterGC) {
     Universe::verify();
   }
-
-  {
-    ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_propagate_gc_state);
-    heap->propagate_gc_state_to_all_threads();
-  }
 }
 
 void ShenandoahConcurrentGC::op_cleanup_complete() {
@@ -1301,11 +1343,19 @@ const char* ShenandoahConcurrentGC::conc_reset_after_collect_event_message() con
   }
 }
 
-const char* ShenandoahConcurrentGC::final_roots_event_message() const {
+const char* ShenandoahConcurrentGC::verify_final_roots_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", "");
+  }
+}
+
+const char* ShenandoahConcurrentGC::conc_final_roots_event_message() const {
   if (ShenandoahHeap::heap()->unload_classes()) {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", " (unload classes)");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", " (unload classes)");
   } else {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", "");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", "");
   }
 }
 
@@ -1332,3 +1382,11 @@ const char* ShenandoahConcurrentGC::conc_cleanup_event_message() const {
     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent cleanup", "");
   }
 }
+
+const char* ShenandoahConcurrentGC::conc_init_update_refs_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", "");
+  }
+}