
Commit c2166ee

Rewrite FixedBufferAllocator and rename to BumpAllocator.
Fix overflow for certain allocation sizes. Reduce struct size by 1 usize. Add a few more tests. Simplify logic.
1 parent e0dc2e4

10 files changed: +257 -261 lines

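The caller-facing change in the diffs below is that `reset()` is gone: call sites now save the allocator's state up front and restore it instead. A minimal sketch of the new pattern, as an illustration rather than code from this commit (the buffer size and loop are made up):

const std = @import("std");

pub fn main() !void {
    var buf: [256]u8 = undefined;
    // The call sites in this commit still use the deprecated
    // `FixedBufferAllocator` alias; the type is the same.
    var fba: std.heap.BumpAllocator = .init(&buf);
    const initial_state = fba.savestate();

    for (0..3) |_| {
        // Previously: `defer fba.reset();`
        defer fba.restore(initial_state);
        const scratch = try fba.allocator().alloc(u8, 64);
        _ = scratch; // per-iteration scratch memory lives here
    }
}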
lib/compiler/aro/aro/Driver/Filesystem.zig

Lines changed: 4 additions & 2 deletions
@@ -23,10 +23,11 @@ fn findProgramByNameFake(entries: []const Filesystem.Entry, name: []const u8, pa
     }
     const path_env = path orelse return null;
     var fib = std.heap.FixedBufferAllocator.init(buf);
+    const fib_initial_state = fib.savestate();

     var it = mem.tokenizeScalar(u8, path_env, std.fs.path.delimiter);
     while (it.next()) |path_dir| {
-        defer fib.reset();
+        defer fib.restore(fib_initial_state);
         const full_path = std.fs.path.join(fib.allocator(), &.{ path_dir, name }) catch continue;
         if (canExecuteFake(entries, full_path)) return full_path;
     }
@@ -84,10 +85,11 @@ fn findProgramByNamePosix(name: []const u8, path: ?[]const u8, buf: []u8) ?[]con
     }
     const path_env = path orelse return null;
     var fib = std.heap.FixedBufferAllocator.init(buf);
+    const fib_initial_state = fib.savestate();

     var it = mem.tokenizeScalar(u8, path_env, std.fs.path.delimiter);
     while (it.next()) |path_dir| {
-        defer fib.reset();
+        defer fib.restore(fib_initial_state);
         const full_path = std.fs.path.join(fib.allocator(), &.{ path_dir, name }) catch continue;
         if (canExecutePosix(full_path)) return full_path;
     }

lib/compiler/aro/aro/Toolchain.zig

Lines changed: 2 additions & 1 deletion
@@ -216,14 +216,15 @@ pub fn addFilePathLibArgs(tc: *const Toolchain, argv: *std.ArrayList([]const u8)
 fn getProgramPath(tc: *const Toolchain, name: []const u8, buf: []u8) []const u8 {
     var path_buf: [std.fs.max_path_bytes]u8 = undefined;
     var fib = std.heap.FixedBufferAllocator.init(&path_buf);
+    const fib_initial_state = fib.savestate();

     var tool_specific_buf: [64]u8 = undefined;
     var possible_name_buf: [2][]const u8 = undefined;
     const possible_names = possibleProgramNames(tc.driver.raw_target_triple, name, &tool_specific_buf, &possible_name_buf);

     for (possible_names) |tool_name| {
         for (tc.program_paths.items) |program_path| {
-            defer fib.reset();
+            defer fib.restore(fib_initial_state);

             const candidate = std.fs.path.join(fib.allocator(), &.{ program_path, tool_name }) catch continue;
lib/compiler/test_runner.zig

Lines changed: 2 additions & 1 deletion
@@ -37,6 +37,7 @@ pub fn main() void {
         return mainSimple() catch @panic("test failure\n");
     }

+    const fba_initial_state = fba.savestate();
     const args = std.process.argsAlloc(fba.allocator()) catch
         @panic("unable to parse command line args");

@@ -61,7 +62,7 @@ pub fn main() void {
         fuzz_abi.fuzzer_init(.fromSlice(cache_dir));
     }

-    fba.reset();
+    fba.restore(fba_initial_state);

     if (listen) {
         return mainServer() catch @panic("internal test runner failure");

lib/std/heap.zig

Lines changed: 22 additions & 22 deletions
@@ -10,8 +10,10 @@ const windows = std.os.windows;
 const Alignment = std.mem.Alignment;

 pub const ArenaAllocator = @import("heap/arena_allocator.zig").ArenaAllocator;
+pub const BumpAllocator = @import("heap/BumpAllocator.zig");
 pub const SmpAllocator = @import("heap/SmpAllocator.zig");
-pub const FixedBufferAllocator = @import("heap/FixedBufferAllocator.zig");
+/// Deprecated; to be removed after 0.16.0 is tagged.
+pub const FixedBufferAllocator = BumpAllocator;
 pub const PageAllocator = @import("heap/PageAllocator.zig");
 pub const SbrkAllocator = @import("heap/sbrk_allocator.zig").SbrkAllocator;
 pub const ThreadSafeAllocator = @import("heap/ThreadSafeAllocator.zig");
@@ -374,38 +376,36 @@ pub const wasm_allocator: Allocator = .{
 };

 /// Returns a `StackFallbackAllocator` allocating using either a
-/// `FixedBufferAllocator` on an array of size `size` and falling back to
+/// `BumpAllocator` on an array of size `size` and falling back to
 /// `fallback_allocator` if that fails.
 pub fn stackFallback(comptime size: usize, fallback_allocator: Allocator) StackFallbackAllocator(size) {
     return StackFallbackAllocator(size){
         .buffer = undefined,
         .fallback_allocator = fallback_allocator,
-        .fixed_buffer_allocator = undefined,
+        .bump_allocator = undefined,
     };
 }

 /// An allocator that attempts to allocate using a
-/// `FixedBufferAllocator` using an array of size `size`. If the
+/// `BumpAllocator` using an array of size `size`. If the
 /// allocation fails, it will fall back to using
 /// `fallback_allocator`. Easily created with `stackFallback`.
 pub fn StackFallbackAllocator(comptime size: usize) type {
     return struct {
-        const Self = @This();
-
         buffer: [size]u8,
         fallback_allocator: Allocator,
-        fixed_buffer_allocator: FixedBufferAllocator,
+        bump_allocator: BumpAllocator,
         get_called: if (std.debug.runtime_safety) bool else void =
             if (std.debug.runtime_safety) false else {},

         /// This function both fetches a `Allocator` interface to this
         /// allocator *and* resets the internal buffer allocator.
-        pub fn get(self: *Self) Allocator {
+        pub fn get(self: *@This()) Allocator {
             if (std.debug.runtime_safety) {
                 assert(!self.get_called); // `get` called multiple times; instead use `const allocator = stackFallback(N).get();`
                 self.get_called = true;
             }
-            self.fixed_buffer_allocator = FixedBufferAllocator.init(self.buffer[0..]);
+            self.bump_allocator = .init(self.buffer[0..]);
             return .{
                 .ptr = self,
                 .vtable = &.{
@@ -429,8 +429,8 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
             alignment: Alignment,
             ra: usize,
         ) ?[*]u8 {
-            const self: *Self = @ptrCast(@alignCast(ctx));
-            return FixedBufferAllocator.alloc(&self.fixed_buffer_allocator, len, alignment, ra) orelse
+            const self: *@This() = @ptrCast(@alignCast(ctx));
+            return BumpAllocator.alloc(&self.bump_allocator, len, alignment, ra) orelse
                 return self.fallback_allocator.rawAlloc(len, alignment, ra);
         }

@@ -441,9 +441,9 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
             new_len: usize,
             ra: usize,
         ) bool {
-            const self: *Self = @ptrCast(@alignCast(ctx));
-            if (self.fixed_buffer_allocator.ownsPtr(buf.ptr)) {
-                return FixedBufferAllocator.resize(&self.fixed_buffer_allocator, buf, alignment, new_len, ra);
+            const self: *@This() = @ptrCast(@alignCast(ctx));
+            if (mem.sliceOwnsPtr(u8, &self.buffer, @ptrCast(buf.ptr))) {
+                return BumpAllocator.resize(&self.bump_allocator, buf, alignment, new_len, ra);
             } else {
                 return self.fallback_allocator.rawResize(buf, alignment, new_len, ra);
             }
@@ -456,9 +456,9 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
             new_len: usize,
             return_address: usize,
         ) ?[*]u8 {
-            const self: *Self = @ptrCast(@alignCast(context));
-            if (self.fixed_buffer_allocator.ownsPtr(memory.ptr)) {
-                return FixedBufferAllocator.remap(&self.fixed_buffer_allocator, memory, alignment, new_len, return_address);
+            const self: *@This() = @ptrCast(@alignCast(context));
+            if (mem.sliceOwnsPtr(u8, &self.buffer, @ptrCast(memory.ptr))) {
+                return BumpAllocator.remap(&self.bump_allocator, memory, alignment, new_len, return_address);
             } else {
                 return self.fallback_allocator.rawRemap(memory, alignment, new_len, return_address);
             }
@@ -470,9 +470,9 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
             alignment: Alignment,
             ra: usize,
         ) void {
-            const self: *Self = @ptrCast(@alignCast(ctx));
-            if (self.fixed_buffer_allocator.ownsPtr(buf.ptr)) {
-                return FixedBufferAllocator.free(&self.fixed_buffer_allocator, buf, alignment, ra);
+            const self: *@This() = @ptrCast(@alignCast(ctx));
+            if (mem.sliceOwnsPtr(u8, &self.buffer, @ptrCast(buf.ptr))) {
+                return BumpAllocator.free(&self.bump_allocator, buf, alignment, ra);
             } else {
                 return self.fallback_allocator.rawFree(buf, alignment, ra);
             }
@@ -666,7 +666,7 @@ pub fn testAllocatorAlignedShrink(base_allocator: mem.Allocator) !void {
     const allocator = validationAllocator.allocator();

     var debug_buffer: [1000]u8 = undefined;
-    var fib = FixedBufferAllocator.init(&debug_buffer);
+    var fib: BumpAllocator = .init(&debug_buffer);
     const debug_allocator = fib.allocator();

     const alloc_size = pageSize() * 2 + 50;
@@ -990,7 +990,7 @@ test {
     _ = @import("heap/memory_pool.zig");
     _ = ArenaAllocator;
     _ = GeneralPurposeAllocator;
-    _ = FixedBufferAllocator;
+    _ = BumpAllocator;
     _ = ThreadSafeAllocator;
     _ = SbrkAllocator;
     if (builtin.target.cpu.arch.isWasm()) {

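The `StackFallbackAllocator` edits above are mechanical: the embedded `FixedBufferAllocator` field becomes a `BumpAllocator`, and ownership checks move from the allocator's `ownsPtr` to `mem.sliceOwnsPtr` against the stack buffer itself (the rewritten allocator tracks only two addresses, not a slice). Usage is unchanged; a minimal sketch, with an illustrative size and `std.testing.allocator` as the fallback:

const std = @import("std");

test "stackFallback sketch" {
    // Up to 64 bytes come from a stack buffer; larger requests
    // fall through to the fallback allocator.
    var sfa = std.heap.stackFallback(64, std.testing.allocator);
    const gpa = sfa.get(); // fetches the interface and resets the buffer

    const small = try gpa.alloc(u8, 32); // served from the stack buffer
    defer gpa.free(small);

    const big = try gpa.alloc(u8, 4096); // spills to std.testing.allocator
    defer gpa.free(big);
}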
lib/std/heap/BumpAllocator.zig

Lines changed: 212 additions & 0 deletions
@@ -0,0 +1,212 @@
+const std = @import("../std.zig");
+const Alignment = std.mem.Alignment;
+const Allocator = std.mem.Allocator;
+
+base: usize,
+limit: usize,
+
+pub fn init(buffer: []u8) @This() {
+    const base: usize = @intFromPtr(buffer.ptr);
+    const limit: usize = base + buffer.len;
+    return .{ .base = base, .limit = limit };
+}
+
+pub fn allocator(self: *@This()) Allocator {
+    return .{
+        .ptr = self,
+        .vtable = &.{
+            .alloc = alloc,
+            .resize = resize,
+            .remap = remap,
+            .free = free,
+        },
+    };
+}
+
+/// Save the current state of the allocator
+pub fn savestate(self: *@This()) usize {
+    return self.base;
+}
+
+/// Restore a previously saved allocator state
+pub fn restore(self: *@This(), state: usize) void {
+    self.base = state;
+}
+
+pub fn alloc(
+    ctx: *anyopaque,
+    length: usize,
+    alignment: Alignment,
+    _: usize,
+) ?[*]u8 {
+    const self: *@This() = @alignCast(@ptrCast(ctx));
+
+    // Only allocate if we have enough space
+    const aligned = alignment.forward(self.base);
+    const end_addr = @addWithOverflow(aligned, length);
+    if ((end_addr[1] == 1) | (end_addr[0] > self.limit)) return null;
+
+    self.base = end_addr[0];
+    return @ptrFromInt(aligned);
+}
+
+pub fn resize(
+    ctx: *anyopaque,
+    memory: []u8,
+    _: Alignment,
+    new_length: usize,
+    _: usize,
+) bool {
+    const self: *@This() = @alignCast(@ptrCast(ctx));
+
+    const alloc_base = @intFromPtr(memory.ptr);
+    const next_alloc = alloc_base + memory.len;
+
+    // Prior allocations can be shrunk, but not grown
+    const shrinking = memory.len >= new_length;
+    if (next_alloc != self.base) return shrinking;
+
+    // Grow allocations only if we have enough space
+    const end_addr = @addWithOverflow(alloc_base, new_length);
+    const overflow = (end_addr[1] == 1) | (end_addr[0] > self.limit);
+    if (!shrinking and overflow) return false;
+
+    self.base = end_addr[0];
+    return true;
+}
+
+pub fn remap(
+    ctx: *anyopaque,
+    memory: []u8,
+    _: Alignment,
+    new_length: usize,
+    _: usize,
+) ?[*]u8 {
+    if (resize(ctx, memory, undefined, new_length, undefined)) {
+        return memory.ptr;
+    } else {
+        return null;
+    }
+}
+
+pub fn free(
+    ctx: *anyopaque,
+    memory: []u8,
+    _: Alignment,
+    _: usize,
+) void {
+    const self: *@This() = @alignCast(@ptrCast(ctx));
+
+    // Only free the immediate last allocation
+    const alloc_base = @intFromPtr(memory.ptr);
+    const next_alloc = alloc_base + memory.len;
+    if (next_alloc != self.base) return;
+
+    self.base = self.base - memory.len;
+}
+
+test "BumpAllocator" {
+    var buffer: [1 << 20]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.allocator();
+
+    try std.heap.testAllocator(gpa);
+    try std.heap.testAllocatorAligned(gpa);
+    try std.heap.testAllocatorAlignedShrink(gpa);
+    try std.heap.testAllocatorLargeAlignment(gpa);
+}
+
+test "savestate and restore" {
+    var buffer: [256]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.allocator();
+
+    const state_before = bump_allocator.savestate();
+    _ = try gpa.alloc(u8, buffer.len);
+
+    bump_allocator.restore(state_before);
+    _ = try gpa.alloc(u8, buffer.len);
+}
+
+test "reuse memory on realloc" {
+    var buffer: [10]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.allocator();
+
+    const slice_0 = try gpa.alloc(u8, 5);
+    const slice_1 = try gpa.realloc(slice_0, 10);
+    try std.testing.expect(slice_1.ptr == slice_0.ptr);
+}
+
+test "don't grow one allocation into another" {
+    var buffer: [10]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.allocator();
+
+    const slice_0 = try gpa.alloc(u8, 3);
+    const slice_1 = try gpa.alloc(u8, 3);
+    const slice_2 = try gpa.realloc(slice_0, 4);
+    try std.testing.expect(slice_2.ptr == slice_1.ptr + 3);
+}
+
+test "avoid integer overflow for obscene allocations" {
+    var buffer: [10]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.allocator();
+
+    _ = try gpa.alloc(u8, 5);
+    const problem = gpa.alloc(u8, std.math.maxInt(usize));
+    try std.testing.expectError(error.OutOfMemory, problem);
+}
+
+/// Deprecated; to be removed after 0.16.0 is tagged.
+/// Provides a lock free thread safe `Allocator` interface to the underlying `FixedBufferAllocator`
+/// Using this at the same time as the interface returned by `allocator` is not thread safe.
+pub fn threadSafeAllocator(self: *@This()) Allocator {
+    return .{
+        .ptr = self,
+        .vtable = &.{
+            .alloc = threadSafeAlloc,
+            .resize = Allocator.noResize,
+            .remap = Allocator.noRemap,
+            .free = Allocator.noFree,
+        },
+    };
+}
+
+// Remove after 0.16.0 is tagged.
+fn threadSafeAlloc(
+    ctx: *anyopaque,
+    length: usize,
+    alignment: Alignment,
+    _: usize,
+) ?[*]u8 {
+    const self: *@This() = @alignCast(@ptrCast(ctx));
+
+    var old_base = @atomicLoad(usize, &self.base, .seq_cst);
+    while (true) {
+        // Only allocate if we have enough space
+        const aligned = alignment.forward(old_base);
+        const end_addr = @addWithOverflow(aligned, length);
+        if ((end_addr[1] == 1) | (end_addr[0] > self.limit)) return null;
+
+        if (@cmpxchgWeak(usize, &self.base, old_base, @intCast(end_addr[0]), .seq_cst, .seq_cst)) |prev| {
+            old_base = prev;
+            continue;
+        }
+
+        return @ptrFromInt(aligned);
+    }
+}
+
+// Remove after 0.16.0 is tagged.
+test "thread safe version" {
+    var buffer: [1 << 20]u8 = undefined;
+    var bump_allocator: @This() = .init(&buffer);
+    const gpa = bump_allocator.threadSafeAllocator();
+
+    try std.heap.testAllocator(gpa);
+    try std.heap.testAllocatorAligned(gpa);
+    try std.heap.testAllocatorAlignedShrink(gpa);
+    try std.heap.testAllocatorLargeAlignment(gpa);
+}
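The overflow fix named in the commit message is the `@addWithOverflow` in `alloc` (and `resize`): computed naively, `aligned + length` can wrap for a huge `length` and land below `limit`, making an impossible request look satisfiable. A standalone sketch of the failure mode, with illustrative values that are not part of the commit:

const std = @import("std");

test "why the end address needs a checked add" {
    const aligned: usize = 0x1000;
    const length: usize = std.math.maxInt(usize);

    // Wrapping `+%` models an unchecked add: the sum wraps to 0xFFF,
    // which would slip past a naive `end <= limit` bounds check.
    const wrapped = aligned +% length;
    try std.testing.expect(wrapped < aligned);

    // @addWithOverflow surfaces the carry bit so the allocator can bail out.
    const sum = @addWithOverflow(aligned, length);
    try std.testing.expect(sum[1] == 1);
}

The deprecated `threadSafeAlloc` applies the same checked computation inside a `@cmpxchgWeak` retry loop, so concurrent bumps of `base` neither race nor skip the bounds test.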
