bump to zig 0.14

2025-04-13 22:19:42 +03:00
parent 15abfb585f
commit d22667f79a
2 changed files with 24 additions and 20 deletions

@@ -139,27 +139,31 @@ pub fn zigToken(token: c_uint) Token.Tag {
 // Copy-pasted from lib/std/zig/tokenizer.zig
 fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
     // Do the C thing
-    var ctokenizer = c.tokenizerInit(source.ptr, @intCast(source.len));
-    for (expected_token_tags) |expected_token_tag| {
-        const token = c.tokenizerNext(&ctokenizer);
-        try std.testing.expectEqual(expected_token_tag, zigToken(token.tag));
+    {
+        var ctokenizer = c.tokenizerInit(source.ptr, @intCast(source.len));
+        for (expected_token_tags) |expected_token_tag| {
+            const token = c.tokenizerNext(&ctokenizer);
+            try std.testing.expectEqual(expected_token_tag, zigToken(token.tag));
+        }
+        const last_token = c.tokenizerNext(&ctokenizer);
+        try std.testing.expectEqual(Token.Tag.eof, zigToken(last_token.tag));
     }
-    const last_token = c.tokenizerNext(&ctokenizer);
-    try std.testing.expectEqual(Token.Tag.eof, zigToken(last_token.tag));
 
-    // uncomment when Zig source and compiler get in sync (e.g. with 0.14)
-    //var tokenizer = Tokenizer.init(source);
-    //for (expected_token_tags) |expected_token_tag| {
-    //    const token = tokenizer.next();
-    //    try std.testing.expectEqual(expected_token_tag, token.tag);
-    //}
-    //// Last token should always be eof, even when the last token was invalid,
-    //// in which case the tokenizer is in an invalid state, which can only be
-    //// recovered by opinionated means outside the scope of this implementation.
-    //const last_token = tokenizer.next();
-    //try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
-    try std.testing.expectEqual(source.len, last_token.loc.start);
-    try std.testing.expectEqual(source.len, last_token.loc.end);
+    {
+        // uncomment when Zig source and compiler get in sync (e.g. with 0.14)
+        var tokenizer = Tokenizer.init(source);
+        for (expected_token_tags) |expected_token_tag| {
+            const token = tokenizer.next();
+            try std.testing.expectEqual(expected_token_tag, token.tag);
+        }
+        // Last token should always be eof, even when the last token was invalid,
+        // in which case the tokenizer is in an invalid state, which can only be
+        // recovered by opinionated means outside the scope of this implementation.
+        const last_token = tokenizer.next();
+        try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
+        try std.testing.expectEqual(source.len, last_token.loc.start);
+        try std.testing.expectEqual(source.len, last_token.loc.end);
+    }
 }
 
 test "my function" {