tokenizer: Fix index-out-of-bounds on string_literal_backslash right before EOF

This commit is contained in:
Ryan Liptak
2021-09-20 17:10:32 -07:00
committed by Andrew Kelley
parent 380ca26855
commit 2a728f6e5f

View File

@@ -700,7 +700,7 @@ pub const Tokenizer = struct {
},
.string_literal_backslash => switch (c) {
- '\n' => {
+ 0, '\n' => {
result.tag = .invalid;
break;
},
@@ -1919,6 +1919,10 @@ test "tokenizer - invalid builtin identifiers" {
try testTokenize("@0()", &.{ .invalid, .integer_literal, .l_paren, .r_paren });
}
// Regression test: a backslash as the very last byte before the null
// sentinel (source "\"\\") previously indexed past the end of the buffer in
// the .string_literal_backslash state; it must now terminate as .invalid.
test "tokenizer - backslash before eof in string literal" {
    try testTokenize("\"\\", &.{.invalid});
}
fn testTokenize(source: [:0]const u8, expected_tokens: []const Token.Tag) !void {
var tokenizer = Tokenizer.init(source);
for (expected_tokens) |expected_token_id| {