A few memory-related utilities:

Uses the caller's provided allocator, so none of these involve heap allocations
"outside of" the caller-provided allocator.

* A `str` class, inheriting from `std::string`, ensuring allocations happen via `arena`
* A `fixed_str` class that carries its own fixed-size character buffer, so it
  performs no [de]allocations at all.

A small amount of glue / hacks are done here to allow the rest of the stacktrace-related
code to use familiar string etc. operations, while encapsulating away the details of where
memory might come from, since we need to be careful about unexpected allocations.
*/

/** A single bump-allocated span of bytes.
 *
 * Hands out aligned sub-ranges of `[ptr_, end_)` via `operator()` and never
 * frees individual allocations; the whole span is released at once by
 * invoking `destroy_`. Pools form a singly-linked chain through `link_`.
 */
struct byte_pool final {
  byte* ptr_;                 // current bump position (next unallocated byte)
  function<void ()> destroy_; // releases this pool's backing bytes (no-op for stack-backed pools)
  byte_pool* link_;           // previously-active pool in the chain, or nullptr
  byte* end_;                 // one past the last usable byte

  // NOTE: __destroy is a sink parameter — move it into the member instead of
  // copying (a std::function copy may itself allocate).
  byte_pool (
      byte* __bytes, size_t __size, function<void ()> __destroy = [] {}, byte_pool* __link = nullptr ) noexcept
      : ptr_(__bytes), destroy_(std::move(__destroy)), link_(__link), end_(__bytes + __size) {}

  /** Bump-allocate `__sz` bytes at alignment `__align` (must be nonzero);
   *  returns nullptr if the pool lacks space. */
  byte* operator ()(size_t __sz, size_t __align) noexcept {
    auto __ptr = uintptr_t (ptr_);      // convert curr ptr to integer, to do math
    auto __misalign = __ptr % __align;  // if current ptr not aligned,
    if (__misalign) {
      __ptr += (__align - __misalign);  // waste a few bytes to ensure alignment
    }
    auto __ret = __ptr;                 // we would return this aligned position
    __ptr += __sz;                      // next object will start here
    if (__ptr > uintptr_t (end_)) {
      return nullptr;                   // if this exceeds our space, then fail
    }
    ptr_ = (byte*)__ptr;                // otherwise update current position
    return (byte*)__ret;                // return aligned position as byte ptr
  }
};
75
72
76
73
template <size_t _Sz>
77
74
struct stack_bytes final {
78
75
byte bytes_[_Sz];
79
76
80
- ~stack_bytes () = default ;
81
- stack_bytes () noexcept = default ;
77
+ ~stack_bytes () = default ;
78
+ stack_bytes () noexcept = default ;
82
79
stack_bytes (const stack_bytes&) = delete ;
83
- stack_bytes (stack_bytes&&) = delete ;
80
+ stack_bytes (stack_bytes&&) = delete ;
84
81
85
- byte_pool pool () { return {bytes_, _Sz, []{}, nullptr }; }
82
+ byte_pool pool () {
83
+ return {bytes_, _Sz, [] {}, nullptr };
84
+ }
86
85
};
87
86
88
87
struct arena {
89
- function<byte*(size_t )> new_bytes_; // new byte-array factory
90
- function<void (void *, size_t )> del_bytes_; // byte-array destroyer
91
- byte_pool* curr_pool_; // byte pool currently "in effect"
92
- byte_pool* next_pool_; // allocated (from curr_pool_) but not initialized
93
- size_t allocs_ {}; // number of successful allocations
94
- size_t deallocs_ {}; // incremented on each dealloc; dtor ensures these are equal!
88
+ function<byte*(size_t )> new_bytes_; // new byte-array factory
89
+ function<void (void *, size_t )> del_bytes_; // byte-array destroyer
90
+ byte_pool* curr_pool_; // byte pool currently "in effect"
91
+ byte_pool* next_pool_; // allocated (from curr_pool_) but not initialized
92
+ size_t allocs_{}; // number of successful allocations
93
+ size_t deallocs_{}; // incremented on each dealloc; dtor ensures these are equal!
95
94
96
95
// An arena is scoped to a `basic_stacktrace::current` invocation, so this is usable by only one thread.
97
96
// Additionally, it's used internally throughout many function calls, so for convenience, store it here.
@@ -108,11 +107,14 @@ struct arena {
108
107
_LIBCPP_ASSERT (active_arena_ptr_ == this , " different arena unexpectively set as the active one" );
109
108
active_arena_ptr_ = nullptr ;
110
109
_LIBCPP_ASSERT (deallocs_ == allocs_, " destructed arena still has live objects" );
111
- while (curr_pool_) { curr_pool_->destroy_ (); curr_pool_ = curr_pool_->link_ ; }
110
+ while (curr_pool_) {
111
+ curr_pool_->destroy_ ();
112
+ curr_pool_ = curr_pool_->link_ ;
113
+ }
112
114
}
113
115
114
116
arena (auto && __new_bytes, auto && __del_bytes, byte_pool& __initial_pool) noexcept
115
- : new_bytes_(__new_bytes), del_bytes_(__del_bytes), curr_pool_(&__initial_pool) {
117
+ : new_bytes_(__new_bytes), del_bytes_(__del_bytes), curr_pool_(&__initial_pool) {
116
118
prep_next_pool ();
117
119
_LIBCPP_ASSERT (!active_arena_ptr_, " already an active arena" );
118
120
active_arena_ptr_ = this ;
@@ -125,24 +127,25 @@ struct arena {
125
127
126
128
template <class _UA >
127
129
arena (byte_pool& __initial_pool, _UA const & __user_alloc)
128
- : arena(
129
- [&__user_alloc] (size_t __sz) { return as_byte_alloc (__user_alloc).allocate (__sz); },
130
- [&__user_alloc] (void * __ptr, size_t __sz) { return as_byte_alloc (__user_alloc).deallocate ((byte*)__ptr, __sz); },
131
- __initial_pool) {}
130
+ : arena([&__user_alloc](size_t __sz) { return as_byte_alloc (__user_alloc).allocate (__sz); },
131
+ [&__user_alloc](void * __ptr, size_t __sz) {
132
+ return as_byte_alloc (__user_alloc).deallocate ((byte*)__ptr, __sz);
133
+ },
134
+ __initial_pool) {}
132
135
133
136
arena (arena const &) = delete ;
134
137
arena& operator =(arena const &) = delete ;
135
138
136
139
void prep_next_pool () noexcept {
137
140
// Allocate (via current pool) a new byte_pool record, while we have enough space.
138
141
// When the current pool runs out of space, this one will be ready to use.
139
- next_pool_ = (byte_pool*) (*curr_pool_)(sizeof (byte_pool), alignof (byte_pool));
142
+ next_pool_ = (byte_pool*)(*curr_pool_)(sizeof (byte_pool), alignof (byte_pool));
140
143
_LIBCPP_ASSERT (next_pool_, " could not allocate next pool" );
141
144
}
142
145
143
146
void expand (size_t __atleast) noexcept {
144
147
constexpr static size_t __k_default_new_pool = 1 << 12 ;
145
- auto __size = max (__atleast, __k_default_new_pool);
148
+ auto __size = max (__atleast, __k_default_new_pool);
146
149
// "next_pool_" was already allocated, just need to initialize it
147
150
auto * __bytes = new_bytes_ (__size);
148
151
_LIBCPP_ASSERT (__bytes, " could not allocate more bytes for arena" );
@@ -155,12 +158,14 @@ struct arena {
155
158
156
159
std::byte* alloc (size_t __size, size_t __align) noexcept {
157
160
auto * __ret = (*curr_pool_)(__size, __align);
158
- if (__ret) [[likely]] { goto success; }
161
+ if (__ret) [[likely]] {
162
+ goto success;
163
+ }
159
164
// Need a new pool to accommodate this request + internal structs
160
165
expand (__size + __align + sizeof (byte_pool) + alignof (byte_pool)); // upper bound
161
166
__ret = (*curr_pool_)(__size, __align);
162
167
_LIBCPP_ASSERT (__ret, " arena failed to allocate" );
163
- success:
168
+ success:
164
169
++allocs_;
165
170
return __ret;
166
171
}
@@ -181,31 +186,6 @@ struct alloc {
181
186
}
182
187
};
183
188
184
- template <typename _Tp, size_t _Sz>
185
- struct fixed_buf {
186
- using value_type = _Tp;
187
- template <typename _Up> struct rebind { using other = fixed_buf<_Up, _Sz>; };
188
-
189
- _Tp __buf_[_Sz];
190
- size_t __size_;
191
- void deallocate (_Tp*, size_t ) {}
192
- _Tp* allocate (size_t ) { return __buf_; }
193
- };
194
-
195
- template <size_t _Sz>
196
- struct fixed_str : std::basic_string<char , std::char_traits<char >, fixed_buf<char , _Sz>> {
197
- using _Base _LIBCPP_NODEBUG = std::basic_string<char , std::char_traits<char >, fixed_buf<char , _Sz>>;
198
- using _Base::operator =;
199
-
200
- fixed_buf<char , _Sz> __fb_;
201
- fixed_str () : _Base(__fb_) {
202
- this ->resize (_Sz - 1 );
203
- this ->resize (0 );
204
- }
205
- fixed_str (fixed_str const & __rhs) : fixed_str() { _Base::operator =(__rhs); }
206
- fixed_str& operator =(fixed_str const & __rhs) = default ;
207
- };
208
-
209
189
struct str : std::basic_string<char , std::char_traits<char >, alloc<char >> {
210
190
using _Base _LIBCPP_NODEBUG = std::basic_string<char , std::char_traits<char >, alloc<char >>;
211
191
using _Base::basic_string;
@@ -236,7 +216,47 @@ struct str : std::basic_string<char, std::char_traits<char>, alloc<char>> {
236
216
}
237
217
};
238
218
239
- // clang-format on
219
/** A string that contains its own fixed-size, fixed-location buffer.
 *
 * Performs no [de]allocations: contents live in the embedded `__buf_` array,
 * which is always NUL-terminated. Assignments longer than the capacity are
 * truncated to `_Sz - 1` characters (one byte is reserved for the terminator).
 */
template <size_t _Sz>
struct fixed_str final {
  static_assert(_Sz > 0, "fixed_str needs room for at least the NUL terminator");

  size_t __size_{0};    // current length, excluding the NUL terminator
  char __buf_[_Sz]{0};  // invariant: __buf_[__size_] == 0 and __size_ < _Sz

  ~fixed_str () = default;
  fixed_str () = default;

  size_t size () const { return __size_; }
  bool empty () const { return !size (); }
  char* data () { return __buf_; }
  const char* data () const { return __buf_; }
  operator std::string_view () const { return {__buf_, __size_}; }

  fixed_str& operator =(std::string_view __sv) {
    // Clamp to capacity. (The previous version did an unchecked
    // `strncpy(..., min(_Sz, size + 1))` followed by `__buf_[__sv.size()] = 0`,
    // which wrote out of bounds whenever __sv.size() >= _Sz, and could also
    // read one byte past the view's end — string_view data is not
    // NUL-terminated.)
    __size_ = std::min (_Sz - 1, __sv.size ());
    if (__size_) {
      memcpy (__buf_, __sv.data (), __size_);
    }
    __buf_[__size_] = 0;
    return *this;
  }

  template <typename _Tp>
  fixed_str (_Tp const& __rhs) : fixed_str() { *this = __rhs; }
  template <typename _Tp>
  fixed_str& operator =(_Tp const& __rhs) { return (*this = std::string_view (__rhs)); }

  // Cross-capacity copies: the old `requires requires { _S2 <= _Sz; }` only
  // checked that the comparison was a valid expression (always true) — it
  // never enforced it. Enforce the intended size relationship for real.
  template <size_t _S2>
  fixed_str& operator =(fixed_str<_S2> const& __rhs) {
    static_assert(_S2 <= _Sz, "source fixed_str must not exceed destination capacity");
    return (*this = std::string_view (__rhs));
  }

  template <size_t _S2>
  fixed_str (fixed_str<_S2> const& __rhs) {
    static_assert(_S2 <= _Sz, "source fixed_str must not exceed destination capacity");
    *this = std::string_view (__rhs);
  }

  fixed_str (fixed_str const& __rhs) { *this = std::string_view (__rhs); }
  fixed_str& operator =(fixed_str const& __rhs) { return (*this = std::string_view (__rhs)); }

  friend std::ostream& operator <<(std::ostream& __os, fixed_str const& __f) { return __os << std::string_view (__f); }
};
240
260
241
261
} // namespace __stacktrace
242
262
_LIBCPP_END_NAMESPACE_STD
0 commit comments