@@ -2,13 +2,14 @@ const std = @import("../std.zig");
 const Alignment = std.mem.Alignment;
 const Allocator = std.mem.Allocator;

-base: usize,
-limit: usize,
+unused: usize,
+buffer_end: [*]u8,

 pub fn init(buffer: []u8) @This() {
-    const base: usize = @intFromPtr(buffer.ptr);
-    const limit: usize = base + buffer.len;
-    return .{ .base = base, .limit = limit };
+    return .{
+        .unused = buffer.len,
+        .buffer_end = buffer.ptr + buffer.len,
+    };
 }

 pub fn allocator(self: *@This()) Allocator {
@@ -25,12 +26,12 @@ pub fn allocator(self: *@This()) Allocator {

 /// Save the current state of the allocator
 pub fn savestate(self: *@This()) usize {
-    return self.base;
+    return self.unused;
 }

 /// Restore a previously saved allocator state
 pub fn restore(self: *@This(), state: usize) void {
-    self.base = state;
+    self.unused = state;
 }

 pub fn alloc(
@@ -41,13 +42,17 @@ pub fn alloc(
 ) ?[*]u8 {
     const self: *@This() = @alignCast(@ptrCast(ctx));

+    const buffer_base = self.buffer_end - self.unused;
+    const align_bytes = alignment.toByteUnits();
+    const ptr_adjust = std.mem.alignPointerOffset(buffer_base, align_bytes);
+    const align_overhead = ptr_adjust orelse return null;
+
     // Only allocate if we have enough space
-    const aligned = alignment.forward(self.base);
-    const end_addr = @addWithOverflow(aligned, length);
-    if ((end_addr[1] == 1) | (end_addr[0] > self.limit)) return null;
+    const allocated_length = length + align_overhead;
+    if (allocated_length > self.unused) return null;

-    self.base = end_addr[0];
-    return @ptrFromInt(aligned);
+    self.unused = self.unused - allocated_length;
+    return buffer_base + align_overhead;
 }

 pub fn resize(
@@ -59,19 +64,17 @@ pub fn resize(
 ) bool {
     const self: *@This() = @alignCast(@ptrCast(ctx));

-    const alloc_base = @intFromPtr(memory.ptr);
-    const next_alloc = alloc_base + memory.len;
-
     // Prior allocations can be shrunk, but not grown
+    const next_alloc = memory.ptr + memory.len;
+    const buffer_base = self.buffer_end - self.unused;
     const shrinking = memory.len >= new_length;
-    if (next_alloc != self.base) return shrinking;
+    if (next_alloc != buffer_base) return shrinking;

     // Grow allocations only if we have enough space
-    const end_addr = @addWithOverflow(alloc_base, new_length);
-    const overflow = (end_addr[1] == 1) | (end_addr[0] > self.limit);
+    const overflow = new_length > self.unused + memory.len;
     if (!shrinking and overflow) return false;

-    self.base = end_addr[0];
+    self.unused = (self.unused + memory.len) - new_length;
     return true;
 }

@@ -98,11 +101,11 @@ pub fn free(
     const self: *@This() = @alignCast(@ptrCast(ctx));

     // Only free the immediate last allocation
-    const alloc_base = @intFromPtr(memory.ptr);
-    const next_alloc = alloc_base + memory.len;
-    if (next_alloc != self.base) return;
+    const next_alloc = memory.ptr + memory.len;
+    const buffer_base = self.buffer_end - self.unused;
+    if (next_alloc != buffer_base) return;

-    self.base = self.base - memory.len;
+    self.unused = self.unused + memory.len;
 }

 test "BumpAllocator" {
@@ -159,6 +162,20 @@ test "avoid integer overflow for obscene allocations" {
     try std.testing.expectError(error.OutOfMemory, problem);
 }

+test "works at comptime" {
+    comptime {
+        var buffer: [256]u8 = undefined;
+        var bump_allocator: @This() = .init(&buffer);
+        const gpa = bump_allocator.allocator();
+
+        var list: std.ArrayList(u8) = .empty;
+        defer list.deinit(gpa);
+        for ("Hello, World!\n") |byte| {
+            try list.append(gpa, byte);
+        }
+    }
+}
+
 /// Deprecated; to be removed after 0.16.0 is tagged.
 /// Provides a lock free thread safe `Allocator` interface to the underlying `FixedBufferAllocator`
 /// Using this at the same time as the interface returned by `allocator` is not thread safe.
@@ -182,20 +199,25 @@ fn threadSafeAlloc(
     _: usize,
 ) ?[*]u8 {
     const self: *@This() = @alignCast(@ptrCast(ctx));
+    const align_bytes = alignment.toByteUnits();
+
+    var old_unused = @atomicLoad(usize, &self.unused, .seq_cst);

-    var old_base = @atomicLoad(usize, &self.base, .seq_cst);
     while (true) {
-        // Only allocate if we have enough space
-        const aligned = alignment.forward(old_base);
-        const end_addr = @addWithOverflow(aligned, length);
-        if ((end_addr[1] == 1) | (end_addr[0] > self.limit)) return null;
+        const buffer_base = self.buffer_end - old_unused;
+        const align_overhead = std.mem.alignPointerOffset(buffer_base, align_bytes) orelse return null;
+
+        const allocated_length = length + align_overhead;
+        if (allocated_length > old_unused) return null;
+
+        const new_unused = old_unused - allocated_length;

-        if (@cmpxchgWeak(usize, &self.base, old_base, @intCast(end_addr[0]), .seq_cst, .seq_cst)) |prev| {
-            old_base = prev;
+        if (@cmpxchgWeak(usize, &self.unused, old_unused, new_unused, .seq_cst, .seq_cst)) |prev| {
+            old_unused = prev;
             continue;
         }

-        return @ptrFromInt(aligned);
+        return buffer_base + align_overhead;
     }
 }
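
The commit replaces the integer pair `base`/`limit` with `unused` (bytes remaining) and `buffer_end` (a one-past-the-end pointer), so all bookkeeping becomes pointer arithmetic. That is presumably what makes the new "works at comptime" test possible: the old code's `@intFromPtr`/`@ptrFromInt` round trip cannot be applied to comptime-allocated memory. Below is a minimal sketch of the invariant the new fields maintain, namely that the next allocation always begins at `buffer_end - unused`; the names and the 64-byte buffer are illustrative, not from the commit:

const std = @import("std");

test "bump bookkeeping sketch" {
    var buffer: [64]u8 = undefined;
    const slice: []u8 = &buffer;

    // State right after init(): the whole buffer is unused.
    var unused: usize = slice.len;
    const buffer_end: [*]u8 = slice.ptr + slice.len;

    // Allocating n bytes consumes n from `unused`; the allocation
    // begins at the pre-decrement value of buffer_end - unused.
    const n: usize = 16;
    const base = buffer_end - unused;
    unused -= n;

    try std.testing.expectEqual(@intFromPtr(slice.ptr), @intFromPtr(base));
    // The next allocation begins exactly n bytes further along.
    try std.testing.expectEqual(@intFromPtr(base + n), @intFromPtr(buffer_end - unused));
}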
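
One consequence of the change: `savestate` and `restore` now trade in a remaining-byte count rather than an absolute address, so a saved state no longer encodes where the buffer lives. A hypothetical scratch-arena usage sketch follows; the `BumpAllocator.zig` import name is assumed, and the pattern itself is not shown in the commit:

const std = @import("std");
const BumpAllocator = @import("BumpAllocator.zig"); // assumed file name

test "savestate/restore scratch sketch" {
    var buffer: [256]u8 = undefined;
    var bump: BumpAllocator = .init(&buffer);
    const gpa = bump.allocator();

    // `savestate` returns self.unused, the bytes still available.
    const mark = bump.savestate();
    const scratch = try gpa.alloc(u8, 32); // temporary working memory
    _ = scratch;
    bump.restore(mark); // the 32 scratch bytes are available again

    try std.testing.expectEqual(mark, bump.savestate());
}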