Commit 196e36b

std: remove BoundedArray
This use case is handled by ArrayListUnmanaged via the "...Bounded" method variants, and sharing machine code is more efficient than generating a separate version of each function for every array length.
1 parent c47ec4f commit 196e36b
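
As a quick illustration of the migration this commit applies (a minimal sketch modeled on the langref change below; the element type, capacity, and test values here are illustrative, not part of the diff), the BoundedArray pattern becomes a caller-owned buffer plus the allocator-free "...Bounded" methods of ArrayListUnmanaged:

const std = @import("std");

test "fixed-capacity list without BoundedArray" {
    // A caller-owned array provides the storage that BoundedArray's
    // comptime capacity used to provide.
    var buffer: [8]i32 = undefined;
    var stack = std.ArrayListUnmanaged(i32).initBuffer(&buffer);

    // The "...Bounded" variants never allocate; they return an error
    // instead of growing past the backing buffer.
    try stack.appendSliceBounded(&[_]i32{ 1, 2, 3 });
    try stack.appendBounded(4);

    try std.testing.expectEqual(@as(usize, 4), stack.items.len);
    try std.testing.expectEqual(@as(i32, 4), stack.pop().?);
}

Because every capacity reuses the same ArrayListUnmanaged machine code, differing buffer lengths no longer generate separate copies of each method.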

File tree: 12 files changed, +82 -526 lines changed


doc/langref/test_switch_dispatch_loop.zig

Lines changed: 5 additions & 3 deletions
@@ -8,20 +8,22 @@ const Instruction = enum {
 };
 
 fn evaluate(initial_stack: []const i32, code: []const Instruction) !i32 {
-    var stack = try std.BoundedArray(i32, 8).fromSlice(initial_stack);
+    var buffer: [8]i32 = undefined;
+    var stack = std.ArrayListUnmanaged(i32).initBuffer(&buffer);
+    try stack.appendSliceBounded(initial_stack);
     var ip: usize = 0;
 
     return vm: switch (code[ip]) {
        // Because all code after `continue` is unreachable, this branch does
        // not provide a result.
        .add => {
-            try stack.append(stack.pop().? + stack.pop().?);
+            try stack.appendBounded(stack.pop().? + stack.pop().?);
 
            ip += 1;
            continue :vm code[ip];
        },
        .mul => {
-            try stack.append(stack.pop().? * stack.pop().?);
+            try stack.appendBounded(stack.pop().? * stack.pop().?);
 
            ip += 1;
            continue :vm code[ip];

lib/docs/wasm/markdown/Parser.zig

Lines changed: 40 additions & 26 deletions
@@ -29,13 +29,14 @@ const Node = Document.Node;
 const ExtraIndex = Document.ExtraIndex;
 const ExtraData = Document.ExtraData;
 const StringIndex = Document.StringIndex;
+const ArrayList = std.ArrayListUnmanaged;
 
 nodes: Node.List = .{},
-extra: std.ArrayListUnmanaged(u32) = .empty,
-scratch_extra: std.ArrayListUnmanaged(u32) = .empty,
-string_bytes: std.ArrayListUnmanaged(u8) = .empty,
-scratch_string: std.ArrayListUnmanaged(u8) = .empty,
-pending_blocks: std.ArrayListUnmanaged(Block) = .empty,
+extra: ArrayList(u32) = .empty,
+scratch_extra: ArrayList(u32) = .empty,
+string_bytes: ArrayList(u8) = .empty,
+scratch_string: ArrayList(u8) = .empty,
+pending_blocks: ArrayList(Block) = .empty,
 allocator: Allocator,
 
 const Parser = @This();
@@ -86,7 +87,8 @@ const Block = struct {
         continuation_indent: usize,
     },
     table: struct {
-        column_alignments: std.BoundedArray(Node.TableCellAlignment, max_table_columns) = .{},
+        column_alignments_buffer: [max_table_columns]Node.TableCellAlignment,
+        column_alignments_len: usize,
     },
     heading: struct {
        /// Between 1 and 6, inclusive.
@@ -354,7 +356,8 @@ const BlockStart = struct {
         continuation_indent: usize,
     },
     table_row: struct {
-        cells: std.BoundedArray([]const u8, max_table_columns),
+        cells_buffer: [max_table_columns][]const u8,
+        cells_len: usize,
     },
     heading: struct {
        /// Between 1 and 6, inclusive.
@@ -422,7 +425,8 @@ fn appendBlockStart(p: *Parser, block_start: BlockStart) !void {
     try p.pending_blocks.append(p.allocator, .{
        .tag = .table,
        .data = .{ .table = .{
-            .column_alignments = .{},
+            .column_alignments_buffer = undefined,
+            .column_alignments_len = 0,
        } },
        .string_start = p.scratch_string.items.len,
        .extra_start = p.scratch_extra.items.len,
@@ -431,15 +435,19 @@ fn appendBlockStart(p: *Parser, block_start: BlockStart) !void {
 
     const current_row = p.scratch_extra.items.len - p.pending_blocks.getLast().extra_start;
     if (current_row <= 1) {
-        if (parseTableHeaderDelimiter(block_start.data.table_row.cells)) |alignments| {
-            p.pending_blocks.items[p.pending_blocks.items.len - 1].data.table.column_alignments = alignments;
+        var buffer: [max_table_columns]Node.TableCellAlignment = undefined;
+        const table_row = &block_start.data.table_row;
+        if (parseTableHeaderDelimiter(table_row.cells_buffer[0..table_row.cells_len], &buffer)) |alignments| {
+            const table = &p.pending_blocks.items[p.pending_blocks.items.len - 1].data.table;
+            @memcpy(table.column_alignments_buffer[0..alignments.len], alignments);
+            table.column_alignments_len = alignments.len;
            if (current_row == 1) {
                // We need to go back and mark the header row and its column
                // alignments.
                const datas = p.nodes.items(.data);
                const header_data = datas[p.scratch_extra.getLast()];
                for (p.extraChildren(header_data.container.children), 0..) |header_cell, i| {
-                    const alignment = if (i < alignments.len) alignments.buffer[i] else .unset;
+                    const alignment = if (i < alignments.len) alignments[i] else .unset;
                    const cell_data = &datas[@intFromEnum(header_cell)].table_cell;
                    cell_data.info.alignment = alignment;
                    cell_data.info.header = true;
@@ -480,8 +488,10 @@ fn appendBlockStart(p: *Parser, block_start: BlockStart) !void {
     // available in the BlockStart. We can immediately parse and append
     // these children now.
     const containing_table = p.pending_blocks.items[p.pending_blocks.items.len - 2];
-    const column_alignments = containing_table.data.table.column_alignments.slice();
-    for (block_start.data.table_row.cells.slice(), 0..) |cell_content, i| {
+    const table = &containing_table.data.table;
+    const column_alignments = table.column_alignments_buffer[0..table.column_alignments_len];
+    const table_row = &block_start.data.table_row;
+    for (table_row.cells_buffer[0..table_row.cells_len], 0..) |cell_content, i| {
        const cell_children = try p.parseInlines(cell_content);
        const alignment = if (i < column_alignments.len) column_alignments[i] else .unset;
        const cell = try p.addNode(.{
@@ -523,7 +533,8 @@ fn startBlock(p: *Parser, line: []const u8) !?BlockStart {
     return .{
        .tag = .table_row,
        .data = .{ .table_row = .{
-            .cells = table_row.cells,
+            .cells_buffer = table_row.cells_buffer,
+            .cells_len = table_row.cells_len,
        } },
        .rest = "",
     };
@@ -606,7 +617,8 @@ fn startListItem(unindented_line: []const u8) ?ListItemStart {
 }
 
 const TableRowStart = struct {
-    cells: std.BoundedArray([]const u8, max_table_columns),
+    cells_buffer: [max_table_columns][]const u8,
+    cells_len: usize,
 };
 
 fn startTableRow(unindented_line: []const u8) ?TableRowStart {
@@ -615,15 +627,16 @@ fn startTableRow(unindented_line: []const u8) ?TableRowStart {
     mem.endsWith(u8, unindented_line, "\\|") or
     !mem.endsWith(u8, unindented_line, "|")) return null;
 
-    var cells: std.BoundedArray([]const u8, max_table_columns) = .{};
+    var cells_buffer: [max_table_columns][]const u8 = undefined;
+    var cells: ArrayList([]const u8) = .initBuffer(&cells_buffer);
     const table_row_content = unindented_line[1 .. unindented_line.len - 1];
     var cell_start: usize = 0;
     var i: usize = 0;
     while (i < table_row_content.len) : (i += 1) {
        switch (table_row_content[i]) {
            '\\' => i += 1,
            '|' => {
-                cells.append(table_row_content[cell_start..i]) catch return null;
+                cells.appendBounded(table_row_content[cell_start..i]) catch return null;
                cell_start = i + 1;
            },
            '`' => {
@@ -641,20 +654,21 @@ fn startTableRow(unindented_line: []const u8) ?TableRowStart {
            else => {},
        }
     }
-    cells.append(table_row_content[cell_start..]) catch return null;
+    cells.appendBounded(table_row_content[cell_start..]) catch return null;
 
-    return .{ .cells = cells };
+    return .{ .cells_buffer = cells_buffer, .cells_len = cells.items.len };
 }
 
 fn parseTableHeaderDelimiter(
-    row_cells: std.BoundedArray([]const u8, max_table_columns),
-) ?std.BoundedArray(Node.TableCellAlignment, max_table_columns) {
-    var alignments: std.BoundedArray(Node.TableCellAlignment, max_table_columns) = .{};
-    for (row_cells.slice()) |content| {
+    row_cells: []const []const u8,
+    buffer: []Node.TableCellAlignment,
+) ?[]Node.TableCellAlignment {
+    var alignments: ArrayList(Node.TableCellAlignment) = .initBuffer(buffer);
+    for (row_cells) |content| {
        const alignment = parseTableHeaderDelimiterCell(content) orelse return null;
        alignments.appendAssumeCapacity(alignment);
     }
-    return alignments;
+    return alignments.items;
 }
 
 fn parseTableHeaderDelimiterCell(content: []const u8) ?Node.TableCellAlignment {
@@ -928,8 +942,8 @@ const InlineParser = struct {
     parent: *Parser,
     content: []const u8,
     pos: usize = 0,
-    pending_inlines: std.ArrayListUnmanaged(PendingInline) = .empty,
-    completed_inlines: std.ArrayListUnmanaged(CompletedInline) = .empty,
+    pending_inlines: ArrayList(PendingInline) = .empty,
+    completed_inlines: ArrayList(CompletedInline) = .empty,
 
     const PendingInline = struct {
        tag: Tag,

lib/std/Io.zig

Lines changed: 0 additions & 15 deletions
@@ -231,21 +231,6 @@ pub fn GenericReader(
         return @errorCast(self.any().readBytesNoEof(num_bytes));
     }
 
-    pub inline fn readIntoBoundedBytes(
-        self: Self,
-        comptime num_bytes: usize,
-        bounded: *std.BoundedArray(u8, num_bytes),
-    ) Error!void {
-        return @errorCast(self.any().readIntoBoundedBytes(num_bytes, bounded));
-    }
-
-    pub inline fn readBoundedBytes(
-        self: Self,
-        comptime num_bytes: usize,
-    ) Error!std.BoundedArray(u8, num_bytes) {
-        return @errorCast(self.any().readBoundedBytes(num_bytes));
-    }
-
     pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) NoEofError!T {
         return @errorCast(self.any().readInt(T, endian));
     }

lib/std/Io/DeprecatedReader.zig

Lines changed: 0 additions & 27 deletions
@@ -249,33 +249,6 @@ pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) anyerror![num_bytes
     return bytes;
 }
 
-/// Reads bytes until `bounded.len` is equal to `num_bytes`,
-/// or the stream ends.
-///
-/// * it is assumed that `num_bytes` will not exceed `bounded.capacity()`
-pub fn readIntoBoundedBytes(
-    self: Self,
-    comptime num_bytes: usize,
-    bounded: *std.BoundedArray(u8, num_bytes),
-) anyerror!void {
-    while (bounded.len < num_bytes) {
-        // get at most the number of bytes free in the bounded array
-        const bytes_read = try self.read(bounded.unusedCapacitySlice());
-        if (bytes_read == 0) return;
-
-        // bytes_read will never be larger than @TypeOf(bounded.len)
-        // due to `self.read` being bounded by `bounded.unusedCapacitySlice()`
-        bounded.len += @as(@TypeOf(bounded.len), @intCast(bytes_read));
-    }
-}
-
-/// Reads at most `num_bytes` and returns as a bounded array.
-pub fn readBoundedBytes(self: Self, comptime num_bytes: usize) anyerror!std.BoundedArray(u8, num_bytes) {
-    var result = std.BoundedArray(u8, num_bytes){};
-    try self.readIntoBoundedBytes(num_bytes, &result);
-    return result;
-}
-
 pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T {
     const bytes = try self.readBytesNoEof(@divExact(@typeInfo(T).int.bits, 8));
     return mem.readInt(T, &bytes, endian);

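With readBoundedBytes and readIntoBoundedBytes gone, a caller that wants the same behavior can pair a plain array with the reader's existing readAll and track the length separately. A hedged sketch, not part of this commit; the buffer size and names are illustrative:

const std = @import("std");

test "fill a fixed-size buffer without BoundedArray" {
    const test_string = "abcdefg";
    var fis = std.io.fixedBufferStream(test_string);
    const reader = fis.reader();

    // readAll reads until the buffer is full or the stream ends, so a short
    // stream is not an error; `len` records how much was actually read.
    var buffer: [16]u8 = undefined;
    const len = try reader.readAll(&buffer);
    try std.testing.expectEqualStrings(test_string, buffer[0..len]);
}
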
lib/std/Io/Reader/test.zig

Lines changed: 0 additions & 21 deletions
@@ -349,24 +349,3 @@ test "streamUntilDelimiter writes all bytes without delimiter to the output" {
 
     try std.testing.expectError(error.StreamTooLong, reader.streamUntilDelimiter(writer, '!', 5));
 }
-
-test "readBoundedBytes correctly reads into a new bounded array" {
-    const test_string = "abcdefg";
-    var fis = std.io.fixedBufferStream(test_string);
-    const reader = fis.reader();
-
-    var array = try reader.readBoundedBytes(10000);
-    try testing.expectEqualStrings(array.slice(), test_string);
-}
-
-test "readIntoBoundedBytes correctly reads into a provided bounded array" {
-    const test_string = "abcdefg";
-    var fis = std.io.fixedBufferStream(test_string);
-    const reader = fis.reader();
-
-    var bounded_array = std.BoundedArray(u8, 10000){};
-
-    // compile time error if the size is not the same at the provided `bounded.capacity()`
-    try reader.readIntoBoundedBytes(10000, &bounded_array);
-    try testing.expectEqualStrings(bounded_array.slice(), test_string);
-}

lib/std/array_list.zig

Lines changed: 1 addition & 0 deletions
@@ -657,6 +657,7 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?mem.Alig
 
     /// Initialize with externally-managed memory. The buffer determines the
     /// capacity, and the length is set to zero.
+    ///
     /// When initialized this way, all functions that accept an Allocator
     /// argument cause illegal behavior.
     pub fn initBuffer(buffer: Slice) Self {

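A brief usage sketch of the documented contract (illustrative only, not from this commit): once a list is created with initBuffer, only the allocator-free methods may be called on it.

const std = @import("std");

test "initBuffer uses only externally-managed memory" {
    var storage: [4]u8 = undefined;
    var list = std.ArrayListUnmanaged(u8).initBuffer(&storage);

    // Capacity comes entirely from `storage`; the length starts at zero.
    try std.testing.expectEqual(@as(usize, 4), list.capacity);
    try std.testing.expectEqual(@as(usize, 0), list.items.len);

    // Allocator-free methods are fine:
    list.appendAssumeCapacity('a');
    try list.appendBounded('b');

    // Calling anything that takes an Allocator (e.g. append or deinit) on a
    // list initialized this way is illegal behavior, per the doc comment.
}
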
lib/std/base64.zig

Lines changed: 13 additions & 9 deletions
@@ -517,17 +517,21 @@ fn testAllApis(codecs: Codecs, expected_decoded: []const u8, expected_encoded: [
         var buffer: [0x100]u8 = undefined;
         const encoded = codecs.Encoder.encode(&buffer, expected_decoded);
         try testing.expectEqualSlices(u8, expected_encoded, encoded);
-
+    }
+    {
         // stream encode
-        var list = try std.BoundedArray(u8, 0x100).init(0);
-        try codecs.Encoder.encodeWriter(list.writer(), expected_decoded);
-        try testing.expectEqualSlices(u8, expected_encoded, list.slice());
-
+        var buffer: [0x100]u8 = undefined;
+        var writer: std.Io.Writer = .fixed(&buffer);
+        try codecs.Encoder.encodeWriter(&writer, expected_decoded);
+        try testing.expectEqualSlices(u8, expected_encoded, writer.buffered());
+    }
+    {
         // reader to writer encode
-        var stream = std.io.fixedBufferStream(expected_decoded);
-        list = try std.BoundedArray(u8, 0x100).init(0);
-        try codecs.Encoder.encodeFromReaderToWriter(list.writer(), stream.reader());
-        try testing.expectEqualSlices(u8, expected_encoded, list.slice());
+        var stream: std.Io.Reader = .fixed(expected_decoded);
+        var buffer: [0x100]u8 = undefined;
+        var writer: std.Io.Writer = .fixed(&buffer);
+        try codecs.Encoder.encodeFromReaderToWriter(&writer, &stream);
+        try testing.expectEqualSlices(u8, expected_encoded, writer.buffered());
     }
 
     // Base64Decoder
