@@ -238,8 +238,7 @@ void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stole
238238}
239239
240240MAYBE_INLINE_EVACUATION
241- void G1ParScanThreadState::start_partial_objarray (G1HeapRegionAttr dest_attr,
242- oop from_obj,
241+ void G1ParScanThreadState::start_partial_objarray (oop from_obj,
243242 oop to_obj) {
244243 assert (from_obj->is_forwarded (), " precondition" );
245244 assert (from_obj->forwardee () == to_obj, " precondition" );
@@ -251,12 +250,7 @@ void G1ParScanThreadState::start_partial_objarray(G1HeapRegionAttr dest_attr,
251250 // The source array is unused when processing states.
252251 _partial_array_splitter.start (_task_queue, nullptr , to_array, array_length);
253252
254- // Skip the card enqueue iff the object (to_array) is in survivor region.
255- // However, G1HeapRegion::is_survivor() is too expensive here.
256- // Instead, we use dest_attr.is_young() because the two values are always
257- // equal: successfully allocated young regions must be survivor regions.
258- assert (dest_attr.is_young () == _g1h->heap_region_containing (to_array)->is_survivor (), " must be" );
259- G1SkipCardEnqueueSetter x (&_scanner, dest_attr.is_young ());
253+ assert (_scanner.skip_card_enqueue_set (), " must be" );
260254 // Process the initial chunk. No need to process the type in the
261255 // klass, as it will already be handled by processing the built-in
262256 // module.
@@ -422,6 +416,45 @@ void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
422416 region->update_bot_for_block (obj_start, obj_start + word_sz);
423417}
424418
419+ ALWAYSINLINE
420+ void G1ParScanThreadState::do_iterate_object (oop const obj,
421+ oop const old,
422+ Klass* const klass,
423+ G1HeapRegionAttr const region_attr,
424+ G1HeapRegionAttr const dest_attr,
425+ uint age) {
426+ // Most objects are not arrays, so do one array check rather than
427+ // checking for each array category for each object.
428+ if (klass->is_array_klass ()) {
429+ assert (!klass->is_stack_chunk_instance_klass (), " must be" );
430+
431+ if (klass->is_objArray_klass ()) {
432+ start_partial_objarray (old, obj);
433+ } else {
434+ // Nothing needs to be done for typeArrays. Body doesn't contain
435+ // any oops to scan, and the type in the klass will already be handled
436+ // by processing the built-in module.
437+ assert (klass->is_typeArray_klass (), " invariant" );
438+ }
439+ return ;
440+ }
441+
442+ ContinuationGCSupport::transform_stack_chunk (obj);
443+
444+ // Check for deduplicating young Strings.
445+ if (G1StringDedup::is_candidate_from_evacuation (klass,
446+ region_attr,
447+ dest_attr,
448+ age)) {
449+ // Record old; request adds a new weak reference, which reference
450+ // processing expects to refer to a from-space object.
451+ _string_dedup_requests.add (old);
452+ }
453+
454+ assert (_scanner.skip_card_enqueue_set (), " must be" );
455+ obj->oop_iterate_backwards (&_scanner, klass);
456+ }
457+
425458// Private inline function, for direct internal use and providing the
426459// implementation of the public not-inline function.
427460MAYBE_INLINE_EVACUATION
@@ -446,7 +479,7 @@ oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const regio
446479
447480 // JNI only allows pinning of typeArrays, so we only need to keep those in place.
448481 if (region_attr.is_pinned () && klass->is_typeArray_klass ()) {
449- return handle_evacuation_failure_par (old, old_mark, word_sz, true /* cause_pinned */ );
482+ return handle_evacuation_failure_par (old, old_mark, klass, region_attr, word_sz, true /* cause_pinned */ );
450483 }
451484
452485 uint age = 0 ;
@@ -463,7 +496,7 @@ oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const regio
463496 if (obj_ptr == nullptr ) {
464497 // This will either forward-to-self, or detect that someone else has
465498 // installed a forwarding pointer.
466- return handle_evacuation_failure_par (old, old_mark, word_sz, false /* cause_pinned */ );
499+ return handle_evacuation_failure_par (old, old_mark, klass, region_attr, word_sz, false /* cause_pinned */ );
467500 }
468501 }
469502
@@ -475,7 +508,7 @@ oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const regio
475508 // Doing this after all the allocation attempts also tests the
476509 // undo_allocation() method too.
477510 undo_allocation (dest_attr, obj_ptr, word_sz, node_index);
478- return handle_evacuation_failure_par (old, old_mark, word_sz, false /* cause_pinned */ );
511+ return handle_evacuation_failure_par (old, old_mark, klass, region_attr, word_sz, false /* cause_pinned */ );
479512 }
480513
481514 // We're going to allocate linearly, so might as well prefetch ahead.
@@ -507,39 +540,16 @@ oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const regio
507540 update_bot_after_copying (obj, word_sz);
508541 }
509542
510- // Most objects are not arrays, so do one array check rather than
511- // checking for each array category for each object.
512- if (klass->is_array_klass ()) {
513- if (klass->is_objArray_klass ()) {
514- start_partial_objarray (dest_attr, old, obj);
515- } else {
516- // Nothing needs to be done for typeArrays. Body doesn't contain
517- // any oops to scan, and the type in the klass will already be handled
518- // by processing the built-in module.
519- assert (klass->is_typeArray_klass (), " invariant" );
520- }
521- return obj;
522- }
523-
524- ContinuationGCSupport::transform_stack_chunk (obj);
525-
526- // Check for deduplicating young Strings.
527- if (G1StringDedup::is_candidate_from_evacuation (klass,
528- region_attr,
529- dest_attr,
530- age)) {
531- // Record old; request adds a new weak reference, which reference
532- // processing expects to refer to a from-space object.
533- _string_dedup_requests.add (old);
543+ {
544+ // Skip the card enqueue iff the object (obj) is in survivor region.
545+ // However, G1HeapRegion::is_survivor() is too expensive here.
546+ // Instead, we use dest_attr.is_young() because the two values are always
547+ // equal: successfully allocated young regions must be survivor regions.
548+ assert (dest_attr.is_young () == _g1h->heap_region_containing (obj)->is_survivor (), " must be" );
549+ G1SkipCardEnqueueSetter x (&_scanner, dest_attr.is_young ());
550+ do_iterate_object (obj, old, klass, region_attr, dest_attr, age);
534551 }
535552
536- // Skip the card enqueue iff the object (obj) is in survivor region.
537- // However, G1HeapRegion::is_survivor() is too expensive here.
538- // Instead, we use dest_attr.is_young() because the two values are always
539- // equal: successfully allocated young regions must be survivor regions.
540- assert (dest_attr.is_young () == _g1h->heap_region_containing (obj)->is_survivor (), " must be" );
541- G1SkipCardEnqueueSetter x (&_scanner, dest_attr.is_young ());
542- obj->oop_iterate_backwards (&_scanner, klass);
543553 return obj;
544554 } else {
545555 _plab_allocator->undo_allocation (dest_attr, obj_ptr, word_sz, node_index);
@@ -621,7 +631,7 @@ void G1ParScanThreadState::record_evacuation_failed_region(G1HeapRegion* r, uint
621631}
622632
623633NOINLINE
624- oop G1ParScanThreadState::handle_evacuation_failure_par (oop old, markWord m, size_t word_sz, bool cause_pinned) {
634+ oop G1ParScanThreadState::handle_evacuation_failure_par (oop old, markWord m, Klass* klass, G1HeapRegionAttr attr, size_t word_sz, bool cause_pinned) {
625635 assert (_g1h->is_in_cset (old), " Object " PTR_FORMAT " should be in the CSet" , p2i (old));
626636
627637 oop forward_ptr = old->forward_to_self_atomic (m, memory_order_relaxed);
@@ -635,16 +645,16 @@ oop G1ParScanThreadState::handle_evacuation_failure_par(oop old, markWord m, siz
635645 // evacuation failure recovery.
636646 _g1h->mark_evac_failure_object (_worker_id, old, word_sz);
637647
638- ContinuationGCSupport::transform_stack_chunk (old);
639-
640648 _evacuation_failed_info.register_copy_failure (word_sz);
641649
642- // For iterating objects that failed evacuation currently we can reuse the
643- // existing closure to scan evacuated objects; since we are iterating from a
644- // collection set region (i.e. never a Survivor region), we always need to
645- // gather cards for this case.
646- G1SkipCardEnqueueSetter x (&_scanner, false /* skip_card_enqueue */ );
647- old->oop_iterate_backwards (&_scanner);
650+ {
651+ // For iterating objects that failed evacuation currently we can reuse the
652+ // existing closure to scan evacuated objects; since we are iterating from a
653+ // collection set region (i.e. never a Survivor region), we always need to
654+ // gather cards for this case.
655+ G1SkipCardEnqueueSetter x (&_scanner, false /* skip_card_enqueue */ );
656+ do_iterate_object (old, old, klass, attr, attr, m.age ());
657+ }
648658
649659 return old;
650660 } else {