use ArrayList instead of slices
The API is more ergonomic.
parent 87b4b81a48
commit b02edc7190
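The gist of the ergonomics argument, condensed into a hypothetical before/after pair (packSlice and packList are illustrative names, not code from this commit; they sketch what the old and new packTo bodies below boil down to):

    const std = @import("std");

    // Slice-based (old): the caller hands in a pre-sized slice; the packer bumps
    // its length and copies at a manually tracked offset.
    fn packSlice(buf: *[]u8, field: []const u8) void {
        const pos = buf.*.len;
        buf.*.len += field.len; // the backing memory must already be large enough
        std.mem.copy(u8, buf.*[pos .. pos + field.len], field);
    }

    // ArrayList-based (new): the list owns its length and capacity, so appending
    // a field is a single call that can report allocation failure.
    fn packList(arr: *std.ArrayList(u8), field: []const u8) std.mem.Allocator.Error!void {
        try arr.appendSlice(field);
    }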
@@ -1,5 +1,7 @@
 const std = @import("std");
 const assert = std.debug.assert;
+const Allocator = std.mem.Allocator;
+const ArrayList = std.ArrayList;
 
 // rounds up an int to the nearest factor of nbits.
 pub fn roundUp(comptime T: type, comptime nbits: u8, n: T) T {
@@ -14,6 +16,12 @@ pub fn roundUpPadding(comptime T: type, comptime nbits: u8, n: T) T {
     return roundUp(T, nbits, n) - n;
 }
 
+// arrayList adds padding to an ArrayList(u8) for a given number of nbits
+pub fn arrayList(arr: *ArrayList(u8), comptime nbits: u8) Allocator.Error!void {
+    const padding = roundUpPadding(u64, nbits, arr.items.len);
+    try arr.*.appendNTimes(0, padding);
+}
+
 const testing = std.testing;
 
 test "padding" {
@@ -30,3 +38,16 @@ test "padding" {
     try testing.expectEqual(roundUpPadding(u12, 2, 4091), 1);
     try testing.expectEqual(roundUpPadding(u12, 2, 4092), 0);
 }
+
+test "arrayList" {
+    var buf = try ArrayList(u8).initCapacity(testing.allocator, 16);
+    defer buf.deinit();
+
+    buf.appendAssumeCapacity(1);
+    try arrayList(&buf, 3);
+    try testing.expectEqual(buf.items.len, 8);
+
+    buf.appendAssumeCapacity(2);
+    try arrayList(&buf, 10);
+    try testing.expectEqual(buf.items.len, 1024);
+}
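Judging by the tests above, nbits is a power-of-two exponent: padding with nbits = 3 grows the list to the next multiple of 8, and nbits = 10 to the next multiple of 1024. A hypothetical use from another file, mirroring how src/user.zig calls the helper below (the import path padding.zig is assumed; this diff does not name the file):

    const std = @import("std");
    const pad = @import("padding.zig"); // assumed path for the file in this hunk

    test "pad to a multiple of 2^nbits" {
        var out = std.ArrayList(u8).init(std.testing.allocator);
        defer out.deinit();

        try out.appendSlice("abcde"); // items.len == 5
        try pad.arrayList(&out, 4); // round up to the next multiple of 2^4 = 16
        try std.testing.expectEqual(@as(usize, 16), out.items.len);
    }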
src/user.zig (37 changed lines)
@@ -141,7 +141,12 @@ pub const PackedUser = struct {
     // packTo packs the User record and copies it to the given byte slice. The
     // slice must have at least maxRecordSize() bytes available.
     // The slice is passed as a pointer, so it can be mutated.
-    pub fn packTo(buf: *[]u8, user: User, idxFn: shellIndexFn) errInvalid!void {
+    const packErr = errInvalid || Allocator.Error;
+    pub fn packTo(
+        arr: *ArrayList(u8),
+        user: User,
+        idxFn: shellIndexFn,
+    ) packErr!void {
         // function arguments are consts. We need to mutate the underlying
         // slice, so passing it via pointer instead.
         const home_len = try downCast(u6, user.home.len - 1);
@@ -167,35 +172,21 @@ pub const PackedUser = struct {
         };
         const innerBytes = mem.asBytes(&inner);
 
-        var pos: usize = buf.*.len;
-        buf.*.len += InnerSize +
-            user.home.len +
-            user.gecos.len;
-
         // innerBytes.len is longer than InnerSize. We want to copy
         // only the InnerSize-number of bytes.
-        mem.copy(u8, buf.*[pos .. pos + InnerSize], innerBytes[0..InnerSize]);
-        pos += InnerSize;
-        mem.copy(u8, buf.*[pos .. pos + user.home.len], user.home);
-        pos += user.home.len;
+        try arr.*.appendSlice(innerBytes[0..InnerSize]);
+        try arr.*.appendSlice(user.home);
 
         if (!inner.name_is_a_suffix) {
-            buf.*.len += user.name.len;
-            mem.copy(u8, buf.*[pos .. pos + user.name.len], user.name);
-            pos += user.name.len;
+            try arr.*.appendSlice(user.name);
         }
-        mem.copy(u8, buf.*[pos .. pos + user.gecos.len], user.gecos);
-        pos += user.gecos.len;
+        try arr.*.appendSlice(user.gecos);
 
         if (inner.shell_here) {
-            buf.*.len += user.shell.len;
-            mem.copy(u8, buf.*[pos .. pos + user.shell.len], user.shell);
-            pos += user.shell.len;
+            try arr.*.appendSlice(user.shell);
         }
 
-        const padding = pad.roundUpPadding(u64, AlignmentBits, pos);
-        buf.*.len += padding;
-        mem.set(u8, buf.*[pos .. pos + padding], 0);
+        try pad.arrayList(arr, AlignmentBits);
     }
 
     // maxSize is the maximum number of records a PackedUser can take
@@ -331,7 +322,7 @@ test "construct PackedUser section" {
     } };
     for (users) |user| {
         try buf.ensureUnusedCapacity(PackedUser.maxSize());
-        try PackedUser.packTo(&buf.items, user, testShellIndex);
+        try PackedUser.packTo(&buf, user, testShellIndex);
     }
 
     var it = userIterator(buf.items, testShell);
@@ -361,6 +352,6 @@ test "PackedUser.maxSize()" {
         .home = "Home" ** 16, // 64
         .shell = "She.LllL" ** 8, // 64
     };
-    try PackedUser.packTo(&buf.items, largeUser, testShellIndex);
+    try PackedUser.packTo(&buf, largeUser, testShellIndex);
     try testing.expectEqual(PackedUser.maxSize(), buf.items.len);
 }
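A note on the signature change: appendSlice allocates, so packTo can now fail with OutOfMemory in addition to the existing validation errors; the new packErr declaration merges the two sets with the || error-set operator. A minimal sketch of that pattern (ParseError, PackError and packByte are hypothetical names, not code from this repository):

    const std = @import("std");

    const ParseError = error{Invalid};
    // Error sets merge with ||, as the new packErr declaration does.
    const PackError = ParseError || std.mem.Allocator.Error;

    fn packByte(arr: *std.ArrayList(u8), byte: u8) PackError!void {
        if (byte == 0) return error.Invalid; // from ParseError
        try arr.append(byte); // may fail with error.OutOfMemory
    }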