Commit cdf7632

Author: William Kemper (committed)
8351444: Shenandoah: Class Unloading may encounter recycled oops
Reviewed-by: shade, rkennke
Parent: 930455b

4 files changed: +40, -24 lines

src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp

Lines changed: 2 additions & 3 deletions
@@ -121,10 +121,9 @@ inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators,
     return nullptr;
   }
 
-  // Prevent resurrection of unreachable objects that are visited during
-  // concurrent class-unloading.
+  // Allow runtime to see unreachable objects that are visited during concurrent class-unloading.
   if ((decorators & AS_NO_KEEPALIVE) != 0 &&
-      _heap->is_evacuation_in_progress() &&
+      _heap->is_concurrent_weak_root_in_progress() &&
       !_heap->marking_context()->is_marked(obj)) {
     return obj;
   }

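For orientation, the hunk above only widens the heap-state check on the no-keepalive path. A rough standalone sketch of the resulting decision, using stand-in flags and types in place of the real DecoratorSet, heap and marking context (all names below are hypothetical), might look like this:

// Hypothetical stand-in for the heap state queried by the barrier.
struct HeapStateSketch {
  bool concurrent_weak_roots_in_progress;  // mirrors is_concurrent_weak_root_in_progress()
};

// Sketch of the AS_NO_KEEPALIVE path: while concurrent weak root processing
// (which now also spans concurrent class unloading) is running, an unmarked
// object reached through a no-keepalive load is returned untouched rather
// than being kept alive or evacuated.
const void* load_reference_barrier_sketch(const HeapStateSketch& heap,
                                          bool no_keepalive_load,
                                          bool object_is_marked,
                                          const void* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  if (no_keepalive_load &&
      heap.concurrent_weak_roots_in_progress &&
      !object_is_marked) {
    return obj;  // let the runtime observe the unreachable object as-is
  }
  // ... the normal barrier work (evacuation / forwarding) would follow here.
  return obj;
}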
src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp

Lines changed: 11 additions & 10 deletions
@@ -150,15 +150,22 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     return false;
   }
 
+  assert(heap->is_concurrent_weak_root_in_progress(), "Must be doing weak roots now");
+
   // Concurrent stack processing
   if (heap->is_evacuation_in_progress()) {
     entry_thread_roots();
   }
 
-  // Process weak roots that might still point to regions that would be broken by cleanup
-  if (heap->is_concurrent_weak_root_in_progress()) {
-    entry_weak_refs();
-    entry_weak_roots();
+  // Process weak roots that might still point to regions that would be broken by cleanup.
+  // We cannot recycle regions because weak roots need to know what is marked in trashed regions.
+  entry_weak_refs();
+  entry_weak_roots();
+
+  // Perform concurrent class unloading before any regions get recycled. Class unloading may
+  // need to inspect unmarked objects in trashed regions.
+  if (heap->unload_classes()) {
+    entry_class_unloading();
   }
 
   // Final mark might have reclaimed some immediate garbage, kick cleanup to reclaim
@@ -168,12 +175,6 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
 
   heap->free_set()->log_status_under_lock();
 
-  // Perform concurrent class unloading
-  if (heap->unload_classes() &&
-      heap->is_concurrent_weak_root_in_progress()) {
-    entry_class_unloading();
-  }
-
   // Processing strong roots
   // This may be skipped if there is nothing to update/evacuate.
   // If so, strong_root_in_progress would be unset.

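The reordering above is easier to see without the surrounding driver code. Below is a minimal sketch of the intended phase order; the member functions are placeholders standing in for the real entry_* phases, not the actual implementations:

// Minimal sketch of the reordered concurrent cycle: weak references, weak
// roots and class unloading must all complete before trashed regions are
// recycled for new allocations.
struct ConcurrentCycleSketch {
  bool unload_classes;

  void weak_refs()       { /* process soft/weak/phantom references            */ }
  void weak_roots()      { /* null out handles that point to unmarked objects */ }
  void class_unloading() { /* may still read unmarked oops in trashed regions */ }
  void recycle_trash()   { /* wipe and reuse trashed regions                  */ }

  void run() {
    // Weak root processing needs the mark information of trashed regions
    // intact, so nothing may be recycled yet.
    weak_refs();
    weak_roots();

    // Class unloading is now ordered before recycling as well, instead of
    // running after the cleanup phase.
    if (unload_classes) {
      class_unloading();
    }

    // Only now is it safe to hand trashed regions back to the allocator.
    recycle_trash();
  }
};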
src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp

Lines changed: 4 additions & 1 deletion
@@ -999,7 +999,10 @@ HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, Shenandoah
   assert (has_alloc_capacity(r), "Performance: should avoid full regions on this path: %zu", r->index());
   if (_heap->is_concurrent_weak_root_in_progress() && r->is_trash()) {
     // We cannot use this region for allocation when weak roots are in progress because the collector may need
-    // to reference unmarked oops during concurrent classunloading.
+    // to reference unmarked oops during concurrent classunloading. The collector also needs accurate marking
+    // information to determine which weak handles need to be null'd out. If the region is recycled before weak
+    // roots processing has finished, weak root processing may fail to null out a handle into a trashed region.
+    // This turns the handle into a dangling pointer and will crash or corrupt the heap.
     return nullptr;
   }
   HeapWord* result = nullptr;

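As a rough illustration of the guard the expanded comment describes, with simplified stand-in types rather than the real ShenandoahFreeSet / ShenandoahHeapRegion API:

#include <cstddef>

// Hypothetical, simplified stand-in for a heap region.
struct RegionSketch {
  bool is_trash;
};

// Sketch of the allocation guard: while concurrent weak root processing (and
// the class unloading it now covers) is running, a trashed region must not
// satisfy allocations, because recycling it would wipe the marking data that
// weak root processing still needs in order to null out dead handles.
void* try_allocate_in_sketch(RegionSketch& region,
                             bool weak_roots_in_progress,
                             std::size_t /*size_in_words*/) {
  if (weak_roots_in_progress && region.is_trash) {
    return nullptr;  // refuse: recycling now could leave dangling weak handles
  }
  // ... the normal in-region allocation would follow here; elided in this sketch.
  return nullptr;
}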
src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp

Lines changed: 23 additions & 10 deletions
@@ -818,18 +818,31 @@ size_t ShenandoahHeap::initial_capacity() const {
 }
 
 bool ShenandoahHeap::is_in(const void* p) const {
-  if (is_in_reserved(p)) {
-    if (is_full_gc_move_in_progress()) {
-      // Full GC move is running, we do not have a consistent region
-      // information yet. But we know the pointer is in heap.
-      return true;
-    }
-    // Now check if we point to a live section in active region.
-    ShenandoahHeapRegion* r = heap_region_containing(p);
-    return (r->is_active() && p < r->top());
-  } else {
+  if (!is_in_reserved(p)) {
     return false;
   }
+
+  if (is_full_gc_move_in_progress()) {
+    // Full GC move is running, we do not have a consistent region
+    // information yet. But we know the pointer is in heap.
+    return true;
+  }
+
+  // Now check if we point to a live section in active region.
+  const ShenandoahHeapRegion* r = heap_region_containing(p);
+  if (p >= r->top()) {
+    return false;
+  }
+
+  if (r->is_active()) {
+    return true;
+  }
+
+  // The region is trash, but won't be recycled until after concurrent weak
+  // roots. We also don't allow mutators to allocate from trash regions
+  // during weak roots. Concurrent class unloading may access unmarked oops
+  // in trash regions.
+  return r->is_trash() && is_concurrent_weak_root_in_progress();
 }
 
 void ShenandoahHeap::notify_soft_max_changed() {

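Restated outside of HotSpot, the rewritten is_in() reduces to the following checks. The types are stand-ins: region lookup and top() are simplified to fields, and all names are hypothetical.

// Stand-in for a heap region; fields replace heap_region_containing()/top().
struct RegionViewSketch {
  bool        is_active;
  bool        is_trash;
  const void* top;  // end of the used part of the region
};

// Sketch of the rewritten is_in(): a pointer below top() in a trashed region
// still counts as "in heap" while concurrent weak roots are in progress,
// because concurrent class unloading may legitimately visit unmarked objects
// there before the region is recycled.
bool is_in_sketch(const void* p,
                  bool in_reserved_heap,
                  bool full_gc_move_in_progress,
                  bool weak_roots_in_progress,
                  const RegionViewSketch& r) {
  if (!in_reserved_heap) {
    return false;
  }
  if (full_gc_move_in_progress) {
    return true;  // region data is not consistent, but p is inside the heap
  }
  if (p >= r.top) {
    return false;
  }
  if (r.is_active) {
    return true;
  }
  return r.is_trash && weak_roots_in_progress;
}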