bump to zig 0.14
@@ -1 +1 @@
-zig0 aspires to be an interpreter of zig 0.13.0-2578-gec60156f187a C backend.
+zig0 aspires to be an interpreter of zig 0.14.0 C backend.

@@ -139,27 +139,31 @@ pub fn zigToken(token: c_uint) Token.Tag {
 // Copy-pasted from lib/std/zig/tokenizer.zig
 fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
     // Do the C thing
-    var ctokenizer = c.tokenizerInit(source.ptr, @intCast(source.len));
-    for (expected_token_tags) |expected_token_tag| {
-        const token = c.tokenizerNext(&ctokenizer);
-        try std.testing.expectEqual(expected_token_tag, zigToken(token.tag));
+    {
+        var ctokenizer = c.tokenizerInit(source.ptr, @intCast(source.len));
+        for (expected_token_tags) |expected_token_tag| {
+            const token = c.tokenizerNext(&ctokenizer);
+            try std.testing.expectEqual(expected_token_tag, zigToken(token.tag));
+        }
+        const last_token = c.tokenizerNext(&ctokenizer);
+        try std.testing.expectEqual(Token.Tag.eof, zigToken(last_token.tag));
     }
-    const last_token = c.tokenizerNext(&ctokenizer);
-    try std.testing.expectEqual(Token.Tag.eof, zigToken(last_token.tag));
 
-    // uncomment when Zig source and compiler get in sync (e.g. with 0.14)
-    //var tokenizer = Tokenizer.init(source);
-    //for (expected_token_tags) |expected_token_tag| {
-    //    const token = tokenizer.next();
-    //    try std.testing.expectEqual(expected_token_tag, token.tag);
-    //}
-    //// Last token should always be eof, even when the last token was invalid,
-    //// in which case the tokenizer is in an invalid state, which can only be
-    //// recovered by opinionated means outside the scope of this implementation.
-    //const last_token = tokenizer.next();
-    //try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
-    try std.testing.expectEqual(source.len, last_token.loc.start);
-    try std.testing.expectEqual(source.len, last_token.loc.end);
+    {
+        // uncomment when Zig source and compiler get in sync (e.g. with 0.14)
+        var tokenizer = Tokenizer.init(source);
+        for (expected_token_tags) |expected_token_tag| {
+            const token = tokenizer.next();
+            try std.testing.expectEqual(expected_token_tag, token.tag);
+        }
+        // Last token should always be eof, even when the last token was invalid,
+        // in which case the tokenizer is in an invalid state, which can only be
+        // recovered by opinionated means outside the scope of this implementation.
+        const last_token = tokenizer.next();
+        try std.testing.expectEqual(Token.Tag.eof, last_token.tag);
+        try std.testing.expectEqual(source.len, last_token.loc.start);
+        try std.testing.expectEqual(source.len, last_token.loc.end);
+    }
 }
 
 test "my function" {
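
For context: testTokenize mirrors the helper of the same name in lib/std/zig/tokenizer.zig, so it is driven by test blocks that pass a source string plus the expected token tags. The sketch below is illustrative only and not part of this commit; the test name and inputs are made up, and the tag names (.keyword_fn, .identifier) are assumed to match std.zig.Token.Tag.

test "testTokenize usage (illustrative sketch, not part of this commit)" {
    // "fn foo" should tokenize to the fn keyword followed by an identifier,
    // with the C tokenizer and the Zig tokenizer agreeing on the tags.
    try testTokenize("fn foo", &.{ .keyword_fn, .identifier });
    // A plain line comment produces no tokens before eof.
    try testTokenize("//", &.{});
}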