Skip to content

Commit ecfba66

Browse files
8366363: MemBaseline accesses VMT without using lock
Co-authored-by: Casper Norrbin <[email protected]> Reviewed-by: azafari, cnorrbin
1 parent 680bf75 commit ecfba66

File tree

12 files changed

+185
-65
lines changed

12 files changed

+185
-65
lines changed

src/hotspot/share/nmt/memBaseline.cpp

Lines changed: 24 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,7 @@
2727
#include "memory/metaspaceUtils.hpp"
2828
#include "nmt/memBaseline.hpp"
2929
#include "nmt/memTracker.hpp"
30-
#include "runtime/javaThread.hpp"
31-
#include "runtime/safepoint.hpp"
30+
#include "nmt/regionsTree.inline.hpp"
3231

3332
/*
3433
* Sizes are sorted in descending order for reporting
@@ -104,38 +103,6 @@ class MallocAllocationSiteWalker : public MallocSiteWalker {
104103
}
105104
};
106105

107-
// Walk all virtual memory regions for baselining
108-
class VirtualMemoryAllocationWalker : public VirtualMemoryWalker {
109-
private:
110-
typedef LinkedListImpl<ReservedMemoryRegion, AnyObj::C_HEAP, mtNMT,
111-
AllocFailStrategy::RETURN_NULL> EntryList;
112-
EntryList _virtual_memory_regions;
113-
DEBUG_ONLY(address _last_base;)
114-
public:
115-
VirtualMemoryAllocationWalker() {
116-
DEBUG_ONLY(_last_base = nullptr);
117-
}
118-
119-
bool do_allocation_site(const ReservedMemoryRegion* rgn) {
120-
assert(rgn->base() >= _last_base, "region unordered?");
121-
DEBUG_ONLY(_last_base = rgn->base());
122-
if (rgn->size() > 0) {
123-
if (_virtual_memory_regions.add(*rgn) != nullptr) {
124-
return true;
125-
} else {
126-
return false;
127-
}
128-
} else {
129-
// Ignore empty sites.
130-
return true;
131-
}
132-
}
133-
134-
LinkedList<ReservedMemoryRegion>* virtual_memory_allocations() {
135-
return &_virtual_memory_regions;
136-
}
137-
};
138-
139106
void MemBaseline::baseline_summary() {
140107
MallocMemorySummary::snapshot(&_malloc_memory_snapshot);
141108
VirtualMemorySummary::snapshot(&_virtual_memory_snapshot);
@@ -158,14 +125,15 @@ bool MemBaseline::baseline_allocation_sites() {
158125
// The malloc sites are collected in size order
159126
_malloc_sites_order = by_size;
160127

161-
// Virtual memory allocation sites
162-
VirtualMemoryAllocationWalker virtual_memory_walker;
163-
if (!VirtualMemoryTracker::Instance::walk_virtual_memory(&virtual_memory_walker)) {
164-
return false;
165-
}
128+
assert(_vma_allocations == nullptr, "must");
166129

167-
// Virtual memory allocations are collected in call stack order
168-
_virtual_memory_allocations.move(virtual_memory_walker.virtual_memory_allocations());
130+
{
131+
MemTracker::NmtVirtualMemoryLocker locker;
132+
_vma_allocations = new (mtNMT, std::nothrow) RegionsTree(*VirtualMemoryTracker::Instance::tree());
133+
if (_vma_allocations == nullptr) {
134+
return false;
135+
}
136+
}
169137

170138
if (!aggregate_virtual_memory_allocation_sites()) {
171139
return false;
@@ -202,20 +170,28 @@ int compare_allocation_site(const VirtualMemoryAllocationSite& s1,
202170
bool MemBaseline::aggregate_virtual_memory_allocation_sites() {
203171
SortedLinkedList<VirtualMemoryAllocationSite, compare_allocation_site> allocation_sites;
204172

205-
VirtualMemoryAllocationIterator itr = virtual_memory_allocations();
206-
const ReservedMemoryRegion* rgn;
207173
VirtualMemoryAllocationSite* site;
208-
while ((rgn = itr.next()) != nullptr) {
209-
VirtualMemoryAllocationSite tmp(*rgn->call_stack(), rgn->mem_tag());
174+
bool failed_oom = false;
175+
_vma_allocations->visit_reserved_regions([&](ReservedMemoryRegion& rgn) {
176+
VirtualMemoryAllocationSite tmp(*rgn.call_stack(), rgn.mem_tag());
210177
site = allocation_sites.find(tmp);
211178
if (site == nullptr) {
212179
LinkedListNode<VirtualMemoryAllocationSite>* node =
213180
allocation_sites.add(tmp);
214-
if (node == nullptr) return false;
181+
if (node == nullptr) {
182+
failed_oom = true;
183+
return false;
184+
}
215185
site = node->data();
216186
}
217-
site->reserve_memory(rgn->size());
218-
site->commit_memory(VirtualMemoryTracker::Instance::committed_size(rgn));
187+
site->reserve_memory(rgn.size());
188+
189+
site->commit_memory(_vma_allocations->committed_size(rgn));
190+
return true;
191+
});
192+
193+
if (failed_oom) {
194+
return false;
219195
}
220196

221197
_virtual_memory_sites.move(&allocation_sites);

src/hotspot/share/nmt/memBaseline.hpp

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,6 @@
3535

3636
typedef LinkedListIterator<MallocSite> MallocSiteIterator;
3737
typedef LinkedListIterator<VirtualMemoryAllocationSite> VirtualMemorySiteIterator;
38-
typedef LinkedListIterator<ReservedMemoryRegion> VirtualMemoryAllocationIterator;
3938

4039
/*
4140
* Baseline a memory snapshot
@@ -71,7 +70,7 @@ class MemBaseline {
7170
LinkedListImpl<MallocSite> _malloc_sites;
7271

7372
// All virtual memory allocations
74-
LinkedListImpl<ReservedMemoryRegion> _virtual_memory_allocations;
73+
RegionsTree* _vma_allocations;
7574

7675
// Virtual memory allocations by allocation sites, always in by_address
7776
// order
@@ -86,6 +85,7 @@ class MemBaseline {
8685
// create a memory baseline
8786
MemBaseline():
8887
_instance_class_count(0), _array_class_count(0), _thread_count(0),
88+
_vma_allocations(nullptr),
8989
_baseline_type(Not_baselined) {
9090
}
9191

@@ -110,9 +110,9 @@ class MemBaseline {
110110

111111
// Virtual memory allocation iterator always returns in virtual memory
112112
// base address order.
113-
VirtualMemoryAllocationIterator virtual_memory_allocations() {
114-
assert(!_virtual_memory_allocations.is_empty(), "Not detail baseline");
115-
return VirtualMemoryAllocationIterator(_virtual_memory_allocations.head());
113+
RegionsTree* virtual_memory_allocations() {
114+
assert(_vma_allocations != nullptr, "Not detail baseline");
115+
return _vma_allocations;
116116
}
117117

118118
// Total reserved memory = total malloc'd memory + total reserved virtual
@@ -185,7 +185,8 @@ class MemBaseline {
185185

186186
_malloc_sites.clear();
187187
_virtual_memory_sites.clear();
188-
_virtual_memory_allocations.clear();
188+
delete _vma_allocations;
189+
_vma_allocations = nullptr;
189190
}
190191

191192
private:

src/hotspot/share/nmt/memReporter.cpp

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -394,13 +394,11 @@ int MemDetailReporter::report_virtual_memory_allocation_sites() {
394394

395395
void MemDetailReporter::report_virtual_memory_map() {
396396
// Virtual memory map always in base address order
397-
VirtualMemoryAllocationIterator itr = _baseline.virtual_memory_allocations();
398-
const ReservedMemoryRegion* rgn;
399-
400397
output()->print_cr("Virtual memory map:");
401-
while ((rgn = itr.next()) != nullptr) {
402-
report_virtual_memory_region(rgn);
403-
}
398+
_baseline.virtual_memory_allocations()->visit_reserved_regions([&](ReservedMemoryRegion& rgn) {
399+
report_virtual_memory_region(&rgn);
400+
return true;
401+
});
404402
}
405403

406404
void MemDetailReporter::report_virtual_memory_region(const ReservedMemoryRegion* reserved_rgn) {

src/hotspot/share/nmt/nmtNativeCallStackStorage.cpp

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,3 +57,21 @@ NativeCallStackStorage::NativeCallStackStorage(bool is_detailed_mode, int table_
5757
NativeCallStackStorage::~NativeCallStackStorage() {
5858
FREE_C_HEAP_ARRAY(LinkPtr, _table);
5959
}
60+
61+
NativeCallStackStorage::NativeCallStackStorage(const NativeCallStackStorage& other)
62+
: _table_size(other._table_size),
63+
_table(nullptr),
64+
_stacks(),
65+
_is_detailed_mode(other._is_detailed_mode),
66+
_fake_stack(other._fake_stack) {
67+
if (_is_detailed_mode) {
68+
_table = NEW_C_HEAP_ARRAY(TableEntryIndex, _table_size, mtNMT);
69+
for (int i = 0; i < _table_size; i++) {
70+
_table[i] = other._table[i];
71+
}
72+
}
73+
_stacks.reserve(other._stacks.length());
74+
for (int i = 0; i < other._stacks.length(); i++) {
75+
_stacks.at_grow(i) = other._stacks.at(i);
76+
}
77+
}

src/hotspot/share/nmt/nmtNativeCallStackStorage.hpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,8 @@ class NativeCallStackStorage : public CHeapObjBase {
9595
}
9696

9797
NativeCallStackStorage(bool is_detailed_mode, int table_size = default_table_size);
98-
98+
NativeCallStackStorage(const NativeCallStackStorage& other);
99+
NativeCallStackStorage& operator=(const NativeCallStackStorage& other) = delete;
99100
~NativeCallStackStorage();
100101
};
101102

src/hotspot/share/nmt/regionsTree.cpp

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222
*
2323
*/
2424
#include "nmt/regionsTree.hpp"
25+
#include "nmt/regionsTree.inline.hpp"
26+
#include "nmt/virtualMemoryTracker.hpp"
2527

2628
VMATree::SummaryDiff RegionsTree::commit_region(address addr, size_t size, const NativeCallStack& stack) {
2729
return commit_mapping((VMATree::position)addr, size, make_region_data(stack, mtNone), /*use tag inplace*/ true);
@@ -54,4 +56,13 @@ void RegionsTree::print_on(outputStream* st) {
5456
return true;
5557
});
5658
}
57-
#endif
59+
#endif
60+
61+
size_t RegionsTree::committed_size(ReservedMemoryRegion& rgn) {
62+
size_t result = 0;
63+
visit_committed_regions(rgn, [&](CommittedMemoryRegion& crgn) {
64+
result += crgn.size();
65+
return true;
66+
});
67+
return result;
68+
}

src/hotspot/share/nmt/regionsTree.hpp

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,12 @@ class RegionsTree : public VMATree {
4040
public:
4141
RegionsTree(bool with_storage) : VMATree() , _ncs_storage(with_storage), _with_storage(with_storage) { }
4242

43+
RegionsTree(const RegionsTree& other)
44+
: VMATree(other),
45+
_ncs_storage(other._ncs_storage),
46+
_with_storage(other._with_storage) {}
47+
RegionsTree& operator=(const RegionsTree& other) = delete;
48+
4349
ReservedMemoryRegion find_reserved_region(address addr);
4450

4551
SummaryDiff commit_region(address addr, size_t size, const NativeCallStack& stack);
@@ -91,6 +97,8 @@ class RegionsTree : public VMATree {
9197
NativeCallStackStorage::StackIndex si = node.out_stack_index();
9298
return _ncs_storage.get(si);
9399
}
100+
101+
size_t committed_size(ReservedMemoryRegion& rgn);
94102
};
95103

96-
#endif // NMT_REGIONSTREE_HPP
104+
#endif // NMT_REGIONSTREE_HPP

src/hotspot/share/nmt/vmatree.cpp

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -744,3 +744,10 @@ void VMATree::SummaryDiff::print_on(outputStream* out) {
744744
}
745745
}
746746
#endif
747+
748+
void VMATree::clear() {
749+
_tree.remove_all();
750+
};
751+
bool VMATree::is_empty() {
752+
return _tree.size() == 0;
753+
};

src/hotspot/share/nmt/vmatree.hpp

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
#include "nmt/nmtNativeCallStackStorage.hpp"
3131
#include "utilities/globalDefinitions.hpp"
3232
#include "utilities/ostream.hpp"
33+
#include "utilities/rbTree.hpp"
3334
#include "utilities/rbTree.inline.hpp"
3435

3536
#include <cstdint>
@@ -39,7 +40,7 @@
3940
// For example, the state may go from released memory to committed memory,
4041
// or from committed memory of a certain MemTag to committed memory of a different MemTag.
4142
// The set of points is stored in a balanced binary tree for efficient querying and updating.
42-
class VMATree {
43+
class VMATree : public CHeapObjBase {
4344
friend class NMTVMATreeTest;
4445
friend class VMTWithVMATreeTest;
4546
// A position in memory.
@@ -65,7 +66,6 @@ class VMATree {
6566
static const char* statetype_strings[static_cast<uint8_t>(StateType::st_number_of_states)];
6667

6768
public:
68-
NONCOPYABLE(VMATree);
6969

7070
static const char* statetype_to_string(StateType type) {
7171
assert(type < StateType::st_number_of_states, "must be");
@@ -226,6 +226,10 @@ class VMATree {
226226

227227
public:
228228
VMATree() : _tree() {}
229+
VMATree(const VMATree& other) : _tree() {
230+
assert(other._tree.copy_into(_tree), "VMATree dies on OOM");
231+
}
232+
VMATree& operator=(VMATree const&) = delete;
229233

230234
struct SingleDiff {
231235
using delta = int64_t;
@@ -329,5 +333,8 @@ class VMATree {
329333
_tree.visit_range_in_order(from, to, f);
330334
}
331335
VMARBTree& tree() { return _tree; }
336+
337+
void clear();
338+
bool is_empty();
332339
};
333340
#endif

src/hotspot/share/utilities/rbTree.hpp

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -428,6 +428,7 @@ class AbstractRBTree {
428428
template <typename F>
429429
void visit_in_order(F f);
430430

431+
431432
// Visit all RBNodes in ascending order whose keys are in range [from, to], calling f on each node.
432433
// If f returns `true` the iteration continues, otherwise it is stopped at the current node.
433434
template <typename F>
@@ -475,6 +476,7 @@ class RBTree : public AbstractRBTree<K, RBNode<K, V>, COMPARATOR> {
475476

476477
public:
477478
RBTree() : BaseType(), _allocator() {}
479+
NONCOPYABLE(RBTree);
478480
~RBTree() { remove_all(); }
479481
RBTree(const RBTree& other) : BaseType(), _allocator() {
480482
assert(std::is_copy_constructible<V>(), "Value type must be copy-constructible");
@@ -485,6 +487,8 @@ class RBTree : public AbstractRBTree<K, RBNode<K, V>, COMPARATOR> {
485487
}
486488
RBTree& operator=(const RBTree& other) = delete;
487489

490+
bool copy_into(RBTree& other) const;
491+
488492
typedef typename BaseType::Cursor Cursor;
489493
using BaseType::cursor;
490494
using BaseType::insert_at_cursor;

0 commit comments

Comments
 (0)