Merge pull request #17156 from mlugg/destructure

compiler: implement destructuring syntax
This commit is contained in:
Andrew Kelley
2023-09-15 14:51:52 -07:00
committed by GitHub
19 changed files with 1228 additions and 253 deletions

View File

@@ -204,7 +204,9 @@ pub fn build(b: *std.Build) !void {
"Request creation of '.note.gnu.build-id' section",
);
if (!no_bin) {
if (no_bin) {
b.getInstallStep().dependOn(&exe.step);
} else {
const install_exe = b.addInstallArtifact(exe, .{
.dest_dir = if (flat) .{ .override = .prefix } else .default,
});

View File

@@ -12382,21 +12382,22 @@ ComptimeDecl <- KEYWORD_comptime Block
Decl
<- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/ (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/ (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? GlobalVarDecl
/ KEYWORD_usingnamespace Expr SEMICOLON
FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? AddrSpace? LinkSection? CallConv? EXCLAMATIONMARK? TypeExpr
VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection? (EQUAL Expr)? SEMICOLON
VarDeclProto <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection?
GlobalVarDecl <- VarDeclProto (EQUAL Expr)? SEMICOLON
ContainerField
<- doc_comment? KEYWORD_comptime? IDENTIFIER (COLON TypeExpr)? ByteAlign? (EQUAL Expr)?
/ doc_comment? KEYWORD_comptime? (IDENTIFIER COLON)? !KEYWORD_fn TypeExpr ByteAlign? (EQUAL Expr)?
<- doc_comment? KEYWORD_comptime? IDENTIFIER (COLON TypeExpr)? ByteAlign? (EQUAL Expr)?
/ doc_comment? KEYWORD_comptime? (IDENTIFIER COLON)? !KEYWORD_fn TypeExpr ByteAlign? (EQUAL Expr)?
# *** Block Level ***
Statement
<- KEYWORD_comptime? VarDecl
/ KEYWORD_comptime BlockExprStatement
<- KEYWORD_comptime ComptimeStatement
/ KEYWORD_nosuspend BlockExprStatement
/ KEYWORD_suspend BlockExprStatement
/ KEYWORD_defer BlockExprStatement
@@ -12404,7 +12405,11 @@ Statement
/ IfStatement
/ LabeledStatement
/ SwitchExpr
/ AssignExpr SEMICOLON
/ VarDeclExprStatement
ComptimeStatement
<- BlockExpr
/ VarDeclExprStatement
IfStatement
<- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
@@ -12428,8 +12433,17 @@ BlockExprStatement
BlockExpr <- BlockLabel? Block
# An expression, assignment, or any destructure, as a statement.
VarDeclExprStatement
<- VarDeclProto (COMMA (VarDeclProto / Expr))* EQUAL Expr SEMICOLON
/ Expr (AssignOp Expr / (COMMA (VarDeclProto / Expr))+ EQUAL Expr)? SEMICOLON
# *** Expression Level ***
AssignExpr <- Expr (AssignOp Expr)?
# An assignment or a destructure whose LHS are all lvalue expressions.
AssignExpr <- Expr (AssignOp Expr / (COMMA Expr)+ EQUAL Expr)?
SingleAssignExpr <- Expr (AssignOp Expr)?
Expr <- BoolOrExpr
@@ -12570,7 +12584,7 @@ IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
ForPrefix <- KEYWORD_for LPAREN ForArgumentsList RPAREN PtrListPayload
# Payloads
Payload <- PIPE IDENTIFIER PIPE
@@ -12579,9 +12593,10 @@ PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
PtrListPayload <- PIPE ASTERISK? IDENTIFIER (COMMA ASTERISK? IDENTIFIER)* COMMA? PIPE
# Switch specific
SwitchProng <- KEYWORD_inline? SwitchCase EQUALRARROW PtrIndexPayload? AssignExpr
SwitchProng <- KEYWORD_inline? SwitchCase EQUALRARROW PtrIndexPayload? SingleAssignExpr
SwitchCase
<- SwitchItem (COMMA SwitchItem)* COMMA?
@@ -12589,6 +12604,11 @@ SwitchCase
SwitchItem <- Expr (DOT3 Expr)?
# For specific
ForArgumentsList <- ForItem (COMMA ForItem)* COMMA?
ForItem <- Expr (DOT2 Expr?)?
# Operators
AssignOp
<- ASTERISKEQUAL
@@ -12799,7 +12819,7 @@ STRINGLITERAL
/ (line_string skip)+
IDENTIFIER
<- !keyword [A-Za-z_] [A-Za-z0-9_]* skip
/ "@\"" string_char* "\"" skip
/ "@" STRINGLITERALSINGLE
BUILTINIDENTIFIER <- "@"[A-Za-z_][A-Za-z0-9_]* skip
@@ -12895,7 +12915,6 @@ KEYWORD_fn <- 'fn' end_of_word
KEYWORD_for <- 'for' end_of_word
KEYWORD_if <- 'if' end_of_word
KEYWORD_inline <- 'inline' end_of_word
KEYWORD_linksection <- 'linksection' end_of_word
KEYWORD_noalias <- 'noalias' end_of_word
KEYWORD_nosuspend <- 'nosuspend' end_of_word
KEYWORD_noinline <- 'noinline' end_of_word
@@ -12906,6 +12925,7 @@ KEYWORD_packed <- 'packed' end_of_word
KEYWORD_pub <- 'pub' end_of_word
KEYWORD_resume <- 'resume' end_of_word
KEYWORD_return <- 'return' end_of_word
KEYWORD_linksection <- 'linksection' end_of_word
KEYWORD_struct <- 'struct' end_of_word
KEYWORD_suspend <- 'suspend' end_of_word
KEYWORD_switch <- 'switch' end_of_word
@@ -12925,9 +12945,9 @@ keyword <- KEYWORD_addrspace / KEYWORD_align / KEYWORD_allowzero / KEYWORD_and
/ KEYWORD_comptime / KEYWORD_const / KEYWORD_continue / KEYWORD_defer
/ KEYWORD_else / KEYWORD_enum / KEYWORD_errdefer / KEYWORD_error / KEYWORD_export
/ KEYWORD_extern / KEYWORD_fn / KEYWORD_for / KEYWORD_if
/ KEYWORD_inline / KEYWORD_linksection / KEYWORD_noalias / KEYWORD_noinline
/ KEYWORD_nosuspend / KEYWORD_opaque / KEYWORD_or / KEYWORD_orelse
/ KEYWORD_packed / KEYWORD_pub / KEYWORD_resume / KEYWORD_return
/ KEYWORD_inline / KEYWORD_noalias / KEYWORD_nosuspend / KEYWORD_noinline
/ KEYWORD_opaque / KEYWORD_or / KEYWORD_orelse / KEYWORD_packed
/ KEYWORD_pub / KEYWORD_resume / KEYWORD_return / KEYWORD_linksection
/ KEYWORD_struct / KEYWORD_suspend / KEYWORD_switch / KEYWORD_test
/ KEYWORD_threadlocal / KEYWORD_try / KEYWORD_union / KEYWORD_unreachable
/ KEYWORD_usingnamespace / KEYWORD_var / KEYWORD_volatile / KEYWORD_while

View File

@@ -241,6 +241,11 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
});
},
.expected_expr_or_var_decl => {
return stream.print("expected expression or var decl, found '{s}'", .{
token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
});
},
.expected_fn => {
return stream.print("expected function, found '{s}'", .{
token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
@@ -584,6 +589,13 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.error_union,
=> n = datas[n].lhs,
.assign_destructure => {
const extra_idx = datas[n].lhs;
const lhs_len = tree.extra_data[extra_idx];
assert(lhs_len > 0);
n = tree.extra_data[extra_idx + 1];
},
.fn_decl,
.fn_proto_simple,
.fn_proto_multi,
@@ -816,6 +828,7 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.assign_add_sat,
.assign_sub_sat,
.assign,
.assign_destructure,
.merge_error_sets,
.mul,
.div,
@@ -2846,6 +2859,7 @@ pub const Error = struct {
expected_container_members,
expected_expr,
expected_expr_or_assignment,
expected_expr_or_var_decl,
expected_fn,
expected_inlinable,
expected_labelable,
@@ -3006,6 +3020,20 @@ pub const Node = struct {
assign_sub_sat,
/// `lhs = rhs`. main_token is op.
assign,
/// `a, b, ... = rhs`. main_token is op. lhs is index into `extra_data`
/// of an lhs elem count followed by an array of that many `Node.Index`,
/// with each node having one of the following types:
/// * `global_var_decl`
/// * `local_var_decl`
/// * `simple_var_decl`
/// * `aligned_var_decl`
/// * Any expression node
/// The first 3 types correspond to a `var` or `const` lhs node (note
/// that their `rhs` is always 0). An expression node corresponds to a
/// standard assignment LHS (which must be evaluated as an lvalue).
/// There may be a preceding `comptime` token, which does not create a
/// corresponding `comptime` node so must be manually detected.
assign_destructure,
/// `lhs || rhs`. main_token is the `||`.
merge_error_sets,
/// `lhs * rhs`. main_token is the `*`.

View File

@@ -658,9 +658,8 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
}
const thread_local_token = p.eatToken(.keyword_threadlocal);
const var_decl = try p.parseVarDecl();
const var_decl = try p.parseGlobalVarDecl();
if (var_decl != 0) {
try p.expectSemicolon(.expected_semi_after_decl, false);
return var_decl;
}
if (thread_local_token != null) {
@@ -792,8 +791,9 @@ fn parseFnProto(p: *Parse) !Node.Index {
}
}
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection? (EQUAL Expr)? SEMICOLON
fn parseVarDecl(p: *Parse) !Node.Index {
/// VarDeclProto <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection?
/// Returns a `*_var_decl` node with its rhs (init expression) initialized to 0.
fn parseVarDeclProto(p: *Parse) !Node.Index {
const mut_token = p.eatToken(.keyword_const) orelse
p.eatToken(.keyword_var) orelse
return null_node;
@@ -803,6 +803,65 @@ fn parseVarDecl(p: *Parse) !Node.Index {
const align_node = try p.parseByteAlign();
const addrspace_node = try p.parseAddrSpace();
const section_node = try p.parseLinkSection();
if (section_node == 0 and addrspace_node == 0) {
if (align_node == 0) {
return p.addNode(.{
.tag = .simple_var_decl,
.main_token = mut_token,
.data = .{
.lhs = type_node,
.rhs = 0,
},
});
}
if (type_node == 0) {
return p.addNode(.{
.tag = .aligned_var_decl,
.main_token = mut_token,
.data = .{
.lhs = align_node,
.rhs = 0,
},
});
}
return p.addNode(.{
.tag = .local_var_decl,
.main_token = mut_token,
.data = .{
.lhs = try p.addExtra(Node.LocalVarDecl{
.type_node = type_node,
.align_node = align_node,
}),
.rhs = 0,
},
});
} else {
return p.addNode(.{
.tag = .global_var_decl,
.main_token = mut_token,
.data = .{
.lhs = try p.addExtra(Node.GlobalVarDecl{
.type_node = type_node,
.align_node = align_node,
.addrspace_node = addrspace_node,
.section_node = section_node,
}),
.rhs = 0,
},
});
}
}
/// GlobalVarDecl <- VarDeclProto (EQUAL Expr?) SEMICOLON
fn parseGlobalVarDecl(p: *Parse) !Node.Index {
const var_decl = try p.parseVarDeclProto();
if (var_decl == 0) {
return null_node;
}
const init_node: Node.Index = switch (p.token_tags[p.tok_i]) {
.equal_equal => blk: {
try p.warn(.wrong_equal_var_decl);
@@ -815,53 +874,11 @@ fn parseVarDecl(p: *Parse) !Node.Index {
},
else => 0,
};
if (section_node == 0 and addrspace_node == 0) {
if (align_node == 0) {
return p.addNode(.{
.tag = .simple_var_decl,
.main_token = mut_token,
.data = .{
.lhs = type_node,
.rhs = init_node,
},
});
} else if (type_node == 0) {
return p.addNode(.{
.tag = .aligned_var_decl,
.main_token = mut_token,
.data = .{
.lhs = align_node,
.rhs = init_node,
},
});
} else {
return p.addNode(.{
.tag = .local_var_decl,
.main_token = mut_token,
.data = .{
.lhs = try p.addExtra(Node.LocalVarDecl{
.type_node = type_node,
.align_node = align_node,
}),
.rhs = init_node,
},
});
}
} else {
return p.addNode(.{
.tag = .global_var_decl,
.main_token = mut_token,
.data = .{
.lhs = try p.addExtra(Node.GlobalVarDecl{
.type_node = type_node,
.align_node = align_node,
.addrspace_node = addrspace_node,
.section_node = section_node,
}),
.rhs = init_node,
},
});
}
p.nodes.items(.data)[var_decl].rhs = init_node;
try p.expectSemicolon(.expected_semi_after_decl, false);
return var_decl;
}
/// ContainerField
@@ -918,8 +935,7 @@ fn expectContainerField(p: *Parse) !Node.Index {
}
/// Statement
/// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement
/// <- KEYWORD_comptime ComptimeStatement
/// / KEYWORD_nosuspend BlockExprStatement
/// / KEYWORD_suspend BlockExprStatement
/// / KEYWORD_defer BlockExprStatement
@@ -927,27 +943,28 @@ fn expectContainerField(p: *Parse) !Node.Index {
/// / IfStatement
/// / LabeledStatement
/// / SwitchExpr
/// / AssignExpr SEMICOLON
fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
const comptime_token = p.eatToken(.keyword_comptime);
if (allow_defer_var) {
const var_decl = try p.parseVarDecl();
if (var_decl != 0) {
try p.expectSemicolon(.expected_semi_after_decl, true);
return var_decl;
/// / VarDeclExprStatement
fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
if (p.eatToken(.keyword_comptime)) |comptime_token| {
const block_expr = try p.parseBlockExpr();
if (block_expr != 0) {
return p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
.data = .{
.lhs = block_expr,
.rhs = undefined,
},
});
}
}
if (comptime_token) |token| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = token,
.data = .{
.lhs = try p.expectBlockExprStatement(),
.rhs = undefined,
},
});
if (allow_defer_var) {
return p.expectVarDeclExprStatement(comptime_token);
} else {
const assign = try p.expectAssignExpr();
try p.expectSemicolon(.expected_semi_after_stmt, true);
return assign;
}
}
switch (p.token_tags[p.tok_i]) {
@@ -1011,21 +1028,145 @@ fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
const labeled_statement = try p.parseLabeledStatement();
if (labeled_statement != 0) return labeled_statement;
const assign_expr = try p.parseAssignExpr();
if (assign_expr != 0) {
if (allow_defer_var) {
return p.expectVarDeclExprStatement(null);
} else {
const assign = try p.expectAssignExpr();
try p.expectSemicolon(.expected_semi_after_stmt, true);
return assign_expr;
return assign;
}
return null_node;
}
fn expectStatement(p: *Parse, allow_defer_var: bool) !Node.Index {
const statement = try p.parseStatement(allow_defer_var);
if (statement == 0) {
return p.fail(.expected_statement);
/// ComptimeStatement
/// <- BlockExpr
/// / VarDeclExprStatement
fn expectComptimeStatement(p: *Parse, comptime_token: TokenIndex) !Node.Index {
const block_expr = try p.parseBlockExpr();
if (block_expr != 0) {
return p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
.data = .{ .lhs = block_expr, .rhs = undefined },
});
}
return statement;
return p.expectVarDeclExprStatement(comptime_token);
}
/// VarDeclExprStatement
/// <- VarDeclProto (COMMA (VarDeclProto / Expr))* EQUAL Expr SEMICOLON
/// / Expr (AssignOp Expr / (COMMA (VarDeclProto / Expr))+ EQUAL Expr)? SEMICOLON
fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Index {
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
while (true) {
const var_decl_proto = try p.parseVarDeclProto();
if (var_decl_proto != 0) {
try p.scratch.append(p.gpa, var_decl_proto);
} else {
const expr = try p.parseExpr();
if (expr == 0) {
if (p.scratch.items.len == scratch_top) {
// We parsed nothing
return p.fail(.expected_statement);
} else {
// We've had at least one LHS, but had a bad comma
return p.fail(.expected_expr_or_var_decl);
}
}
try p.scratch.append(p.gpa, expr);
}
_ = p.eatToken(.comma) orelse break;
}
const lhs_count = p.scratch.items.len - scratch_top;
assert(lhs_count > 0);
const equal_token = p.eatToken(.equal) orelse eql: {
if (lhs_count > 1) {
// Definitely a destructure, so allow recovering from ==
if (p.eatToken(.equal_equal)) |tok| {
try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok });
break :eql tok;
}
return p.failExpected(.equal);
}
const lhs = p.scratch.items[scratch_top];
switch (p.nodes.items(.tag)[lhs]) {
.global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
// Definitely a var decl, so allow recovering from ==
if (p.eatToken(.equal_equal)) |tok| {
try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok });
break :eql tok;
}
return p.failExpected(.equal);
},
else => {},
}
const expr = try p.finishAssignExpr(lhs);
try p.expectSemicolon(.expected_semi_after_stmt, true);
if (comptime_token) |t| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = t,
.data = .{
.lhs = expr,
.rhs = undefined,
},
});
} else {
return expr;
}
};
const rhs = try p.expectExpr();
try p.expectSemicolon(.expected_semi_after_stmt, true);
if (lhs_count == 1) {
const lhs = p.scratch.items[scratch_top];
switch (p.nodes.items(.tag)[lhs]) {
.global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
p.nodes.items(.data)[lhs].rhs = rhs;
// Don't need to wrap in comptime
return lhs;
},
else => {},
}
const expr = try p.addNode(.{
.tag = .assign,
.main_token = equal_token,
.data = .{ .lhs = lhs, .rhs = rhs },
});
if (comptime_token) |t| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = t,
.data = .{
.lhs = expr,
.rhs = undefined,
},
});
} else {
return expr;
}
}
// An actual destructure! No need for any `comptime` wrapper here.
const extra_start = p.extra_data.items.len;
try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
return p.addNode(.{
.tag = .assign_destructure,
.main_token = equal_token,
.data = .{
.lhs = @intCast(extra_start),
.rhs = rhs,
},
});
}
/// If a parse error occurs, reports an error, but then finds the next statement
@@ -1345,7 +1486,7 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index {
}
}
/// AssignExpr <- Expr (AssignOp Expr)?
/// AssignExpr <- Expr (AssignOp Expr / (COMMA Expr)+ EQUAL Expr)?
///
/// AssignOp
/// <- ASTERISKEQUAL
@@ -1369,8 +1510,40 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index {
fn parseAssignExpr(p: *Parse) !Node.Index {
const expr = try p.parseExpr();
if (expr == 0) return null_node;
return p.finishAssignExpr(expr);
}
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
/// SingleAssignExpr <- Expr (AssignOp Expr)?
fn parseSingleAssignExpr(p: *Parse) !Node.Index {
const lhs = try p.parseExpr();
if (lhs == 0) return null_node;
const tag = assignOpNode(p.token_tags[p.tok_i]) orelse return lhs;
return p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
.data = .{
.lhs = lhs,
.rhs = try p.expectExpr(),
},
});
}
fn finishAssignExpr(p: *Parse, lhs: Node.Index) !Node.Index {
const tok = p.token_tags[p.tok_i];
if (tok == .comma) return p.finishAssignDestructureExpr(lhs);
const tag = assignOpNode(tok) orelse return lhs;
return p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
.data = .{
.lhs = lhs,
.rhs = try p.expectExpr(),
},
});
}
fn assignOpNode(tok: Token.Tag) ?Node.Tag {
return switch (tok) {
.asterisk_equal => .assign_mul,
.slash_equal => .assign_div,
.percent_equal => .assign_mod,
@@ -1389,18 +1562,51 @@ fn parseAssignExpr(p: *Parse) !Node.Index {
.plus_pipe_equal => .assign_add_sat,
.minus_pipe_equal => .assign_sub_sat,
.equal => .assign,
else => return expr,
else => null,
};
}
fn finishAssignDestructureExpr(p: *Parse, first_lhs: Node.Index) !Node.Index {
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
try p.scratch.append(p.gpa, first_lhs);
while (p.eatToken(.comma)) |_| {
const expr = try p.expectExpr();
try p.scratch.append(p.gpa, expr);
}
const equal_token = try p.expectToken(.equal);
const rhs = try p.expectExpr();
const lhs_count = p.scratch.items.len - scratch_top;
assert(lhs_count > 1); // we already had first_lhs, and must have at least one more lvalue
const extra_start = p.extra_data.items.len;
try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
return p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
.tag = .assign_destructure,
.main_token = equal_token,
.data = .{
.lhs = expr,
.rhs = try p.expectExpr(),
.lhs = @intCast(extra_start),
.rhs = rhs,
},
});
}
fn expectSingleAssignExpr(p: *Parse) !Node.Index {
const expr = try p.parseSingleAssignExpr();
if (expr == 0) {
return p.fail(.expected_expr_or_assignment);
}
return expr;
}
fn expectAssignExpr(p: *Parse) !Node.Index {
const expr = try p.parseAssignExpr();
if (expr == 0) {
@@ -3260,7 +3466,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = 0,
.rhs = try p.expectAssignExpr(),
.rhs = try p.expectSingleAssignExpr(),
},
}),
1 => return p.addNode(.{
@@ -3268,7 +3474,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = items[0],
.rhs = try p.expectAssignExpr(),
.rhs = try p.expectSingleAssignExpr(),
},
}),
else => return p.addNode(.{
@@ -3276,7 +3482,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = try p.addExtra(try p.listToSpan(items)),
.rhs = try p.expectAssignExpr(),
.rhs = try p.expectSingleAssignExpr(),
},
}),
}

View File

@@ -4348,12 +4348,12 @@ test "zig fmt: invalid else branch statement" {
\\ for ("") |_| {} else defer {}
\\}
, &[_]Error{
.expected_statement,
.expected_statement,
.expected_statement,
.expected_statement,
.expected_statement,
.expected_statement,
.expected_expr_or_assignment,
.expected_expr_or_assignment,
.expected_expr_or_assignment,
.expected_expr_or_assignment,
.expected_expr_or_assignment,
.expected_expr_or_assignment,
});
}
@@ -6078,7 +6078,7 @@ test "recovery: missing for payload" {
try testError(
\\comptime {
\\ const a = for(a) {};
\\ const a: for(a) blk: {};
\\ const a: for(a) blk: {} = {};
\\ for(a) {}
\\}
, &[_]Error{

View File

@@ -164,7 +164,7 @@ fn renderMember(
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?),
=> return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?, false, .semicolon),
.test_decl => {
const test_token = main_tokens[decl];
@@ -427,6 +427,42 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
return renderExpression(gpa, ais, tree, infix.rhs, space);
},
.assign_destructure => {
const lhs_count = tree.extra_data[datas[node].lhs];
assert(lhs_count > 1);
const lhs_exprs = tree.extra_data[datas[node].lhs + 1 ..][0..lhs_count];
const rhs = datas[node].rhs;
const maybe_comptime_token = tree.firstToken(node) - 1;
if (token_tags[maybe_comptime_token] == .keyword_comptime) {
try renderToken(ais, tree, maybe_comptime_token, .space);
}
for (lhs_exprs, 0..) |lhs_node, i| {
const lhs_space: Space = if (i == lhs_exprs.len - 1) .space else .comma_space;
switch (node_tags[lhs_node]) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(lhs_node).?, true, lhs_space);
},
else => try renderExpression(gpa, ais, tree, lhs_node, lhs_space),
}
}
const equal_token = main_tokens[node];
if (tree.tokensOnSameLine(equal_token, equal_token + 1)) {
try renderToken(ais, tree, equal_token, .space);
} else {
ais.pushIndent();
try renderToken(ais, tree, equal_token, .newline);
ais.popIndent();
}
ais.pushIndentOneShot();
return renderExpression(gpa, ais, tree, rhs, space);
},
.bit_not,
.bool_not,
.negation,
@@ -943,7 +979,16 @@ fn renderAsmInput(
return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen
}
fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDecl) Error!void {
fn renderVarDecl(
gpa: Allocator,
ais: *Ais,
tree: Ast,
var_decl: Ast.full.VarDecl,
/// Destructures intentionally ignore leading `comptime` tokens.
ignore_comptime_token: bool,
/// `comma_space` and `space` are used for destructure LHS decls.
space: Space,
) Error!void {
if (var_decl.visib_token) |visib_token| {
try renderToken(ais, tree, visib_token, Space.space); // pub
}
@@ -960,21 +1005,31 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
try renderToken(ais, tree, thread_local_token, Space.space); // threadlocal
}
if (var_decl.comptime_token) |comptime_token| {
try renderToken(ais, tree, comptime_token, Space.space); // comptime
if (!ignore_comptime_token) {
if (var_decl.comptime_token) |comptime_token| {
try renderToken(ais, tree, comptime_token, Space.space); // comptime
}
}
try renderToken(ais, tree, var_decl.ast.mut_token, .space); // var
const name_space = if (var_decl.ast.type_node == 0 and
(var_decl.ast.align_node != 0 or
var_decl.ast.addrspace_node != 0 or
var_decl.ast.section_node != 0 or
var_decl.ast.init_node != 0))
Space.space
else
Space.none;
try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name
if (var_decl.ast.type_node != 0 or var_decl.ast.align_node != 0 or
var_decl.ast.addrspace_node != 0 or var_decl.ast.section_node != 0 or
var_decl.ast.init_node != 0)
{
const name_space = if (var_decl.ast.type_node == 0 and
(var_decl.ast.align_node != 0 or
var_decl.ast.addrspace_node != 0 or
var_decl.ast.section_node != 0 or
var_decl.ast.init_node != 0))
Space.space
else
Space.none;
try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name
} else {
return renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, space, .preserve_when_shadowing); // name
}
if (var_decl.ast.type_node != 0) {
try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // :
@@ -983,9 +1038,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
{
try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .space);
} else {
try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .none);
const semicolon = tree.lastToken(var_decl.ast.type_node) + 1;
return renderToken(ais, tree, semicolon, Space.newline); // ;
return renderExpression(gpa, ais, tree, var_decl.ast.type_node, space);
}
}
@@ -1001,8 +1054,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
{
try renderToken(ais, tree, rparen, .space); // )
} else {
try renderToken(ais, tree, rparen, .none); // )
return renderToken(ais, tree, rparen + 1, Space.newline); // ;
return renderToken(ais, tree, rparen, space); // )
}
}
@@ -1031,23 +1083,21 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
if (var_decl.ast.init_node != 0) {
try renderToken(ais, tree, rparen, .space); // )
} else {
try renderToken(ais, tree, rparen, .none); // )
return renderToken(ais, tree, rparen + 1, Space.newline); // ;
return renderToken(ais, tree, rparen, space); // )
}
}
if (var_decl.ast.init_node != 0) {
const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
{
ais.pushIndent();
try renderToken(ais, tree, eq_token, eq_space); // =
ais.popIndent();
}
ais.pushIndentOneShot();
return renderExpression(gpa, ais, tree, var_decl.ast.init_node, .semicolon); // ;
assert(var_decl.ast.init_node != 0);
const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
{
ais.pushIndent();
try renderToken(ais, tree, eq_token, eq_space); // =
ais.popIndent();
}
return renderToken(ais, tree, var_decl.ast.mut_token + 2, .newline); // ;
ais.pushIndentOneShot();
return renderExpression(gpa, ais, tree, var_decl.ast.init_node, space); // ;
}
fn renderIf(gpa: Allocator, ais: *Ais, tree: Ast, if_node: Ast.full.If, space: Space) Error!void {
@@ -1825,7 +1875,7 @@ fn renderBlock(
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?),
=> try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?, false, .semicolon),
else => try renderExpression(gpa, ais, tree, stmt, .semicolon),
}
}

View File

@@ -280,6 +280,20 @@ const ResultInfo = struct {
/// The result instruction from the expression must be ignored.
/// Always an instruction with tag `alloc_inferred`.
inferred_ptr: Zir.Inst.Ref,
/// The expression has a sequence of pointers to store its results into due to a destructure
/// operation. Each of these pointers may or may not have an inferred type.
destructure: struct {
/// The AST node of the destructure operation itself.
src_node: Ast.Node.Index,
/// The pointers to store results into.
components: []const DestructureComponent,
},
const DestructureComponent = union(enum) {
typed_ptr: PtrResultLoc,
inferred_ptr: Zir.Inst.Ref,
discard,
};
const PtrResultLoc = struct {
inst: Zir.Inst.Ref,
@@ -298,6 +312,12 @@ const ResultInfo = struct {
const ptr_ty = try gz.addUnNode(.typeof, ptr.inst, node);
return gz.addUnNode(.elem_type, ptr_ty, node);
},
.destructure => |destructure| {
return astgen.failNodeNotes(node, "{s} must have a known result type", .{builtin_name}, &.{
try astgen.errNoteNode(destructure.src_node, "destructure expressions do not provide a single result type", .{}),
try astgen.errNoteNode(node, "use @as to provide explicit result type", .{}),
});
},
}
return astgen.failNodeNotes(node, "{s} must have a known result type", .{builtin_name}, &.{
@@ -399,6 +419,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Ins
.asm_input => unreachable,
.assign,
.assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -621,6 +642,13 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return rvalue(gz, ri, .void_value, node);
},
.assign_destructure => {
// Note that this variant does not declare any new var/const: that
// variant is handled by `blockExprStmts`.
try assignDestructure(gz, scope, node);
return rvalue(gz, ri, .void_value, node);
},
.assign_shl => {
try assignShift(gz, scope, node, .shl);
return rvalue(gz, ri, .void_value, node);
@@ -1364,14 +1392,8 @@ fn arrayInitExpr(
assert(array_init.ast.elements.len != 0); // Otherwise it would be struct init.
const types: struct {
array: Zir.Inst.Ref,
elem: Zir.Inst.Ref,
} = inst: {
if (array_init.ast.type_expr == 0) break :inst .{
.array = .none,
.elem = .none,
};
const array_ty: Zir.Inst.Ref, const elem_ty: Zir.Inst.Ref = inst: {
if (array_init.ast.type_expr == 0) break :inst .{ .none, .none };
infer: {
const array_type: Ast.full.ArrayType = tree.fullArrayType(array_init.ast.type_expr) orelse break :infer;
@@ -1386,10 +1408,7 @@ fn arrayInitExpr(
.lhs = len_inst,
.rhs = elem_type,
});
break :inst .{
.array = array_type_inst,
.elem = elem_type,
};
break :inst .{ array_type_inst, elem_type };
} else {
const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, array_type.ast.sentinel);
const array_type_inst = try gz.addPlNode(
@@ -1401,10 +1420,7 @@ fn arrayInitExpr(
.sentinel = sentinel,
},
);
break :inst .{
.array = array_type_inst,
.elem = elem_type,
};
break :inst .{ array_type_inst, elem_type };
}
}
}
@@ -1413,29 +1429,26 @@ fn arrayInitExpr(
.ty = array_type_inst,
.init_count = @intCast(array_init.ast.elements.len),
});
break :inst .{
.array = array_type_inst,
.elem = .none,
};
break :inst .{ array_type_inst, .none };
};
switch (ri.rl) {
.discard => {
if (types.elem != .none) {
const elem_ri: ResultInfo = .{ .rl = .{ .ty = types.elem } };
if (elem_ty != .none) {
const elem_ri: ResultInfo = .{ .rl = .{ .ty = elem_ty } };
for (array_init.ast.elements) |elem_init| {
_ = try expr(gz, scope, elem_ri, elem_init);
}
} else if (types.array != .none) {
} else if (array_ty != .none) {
for (array_init.ast.elements, 0..) |elem_init, i| {
const elem_ty = try gz.add(.{
const this_elem_ty = try gz.add(.{
.tag = .elem_type_index,
.data = .{ .bin = .{
.lhs = types.array,
.lhs = array_ty,
.rhs = @enumFromInt(i),
} },
});
_ = try expr(gz, scope, .{ .rl = .{ .ty = elem_ty } }, elem_init);
_ = try expr(gz, scope, .{ .rl = .{ .ty = this_elem_ty } }, elem_init);
}
} else {
for (array_init.ast.elements) |elem_init| {
@@ -1445,15 +1458,15 @@ fn arrayInitExpr(
return Zir.Inst.Ref.void_value;
},
.ref => {
const tag: Zir.Inst.Tag = if (types.array != .none) .array_init_ref else .array_init_anon_ref;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag);
const tag: Zir.Inst.Tag = if (array_ty != .none) .array_init_ref else .array_init_anon_ref;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, elem_ty, tag);
},
.none => {
const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag);
const tag: Zir.Inst.Tag = if (array_ty != .none) .array_init else .array_init_anon;
return arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, elem_ty, tag);
},
.ty, .coerced_ty => |ty_inst| {
const arr_ty = if (types.array != .none) types.array else blk: {
const arr_ty = if (array_ty != .none) array_ty else blk: {
const arr_ty = try gz.addUnNode(.opt_eu_base_ty, ty_inst, node);
_ = try gz.addPlNode(.validate_array_init_ty, node, Zir.Inst.ArrayInit{
.ty = arr_ty,
@@ -1461,23 +1474,50 @@ fn arrayInitExpr(
});
break :blk arr_ty;
};
const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, arr_ty, types.elem, .array_init);
const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, arr_ty, elem_ty, .array_init);
return rvalue(gz, ri, result, node);
},
.ptr => |ptr_res| {
return arrayInitExprRlPtr(gz, scope, node, ptr_res.inst, array_init.ast.elements, types.array);
return arrayInitExprRlPtr(gz, scope, node, ptr_res.inst, array_init.ast.elements, array_ty);
},
.inferred_ptr => |ptr_inst| {
if (types.array == .none) {
if (array_ty == .none) {
// We treat this case differently so that we don't get a crash when
// analyzing array_base_ptr against an alloc_inferred_mut.
// See corresponding logic in structInitExpr.
const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
return rvalue(gz, ri, result, node);
} else {
return arrayInitExprRlPtr(gz, scope, node, ptr_inst, array_init.ast.elements, types.array);
return arrayInitExprRlPtr(gz, scope, node, ptr_inst, array_init.ast.elements, array_ty);
}
},
.destructure => |destructure| {
if (array_ty != .none) {
// We have a specific type, so there may be things like default
// field values messing with us. Do this as a standard typed
// init followed by an rvalue destructure.
const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, array_ty, elem_ty, .array_init);
return rvalue(gz, ri, result, node);
}
// Untyped init - destructure directly into result pointers
if (array_init.ast.elements.len != destructure.components.len) {
return astgen.failNodeNotes(node, "expected {} elements for destructure, found {}", .{
destructure.components.len,
array_init.ast.elements.len,
}, &.{
try astgen.errNoteNode(destructure.src_node, "result destructured here", .{}),
});
}
for (array_init.ast.elements, destructure.components) |elem_init, ds_comp| {
const elem_ri: ResultInfo = .{ .rl = switch (ds_comp) {
.typed_ptr => |ptr_rl| .{ .ptr = ptr_rl },
.inferred_ptr => |ptr_inst| .{ .inferred_ptr = ptr_inst },
.discard => .discard,
} };
_ = try expr(gz, scope, elem_ri, elem_init);
}
return .void_value;
},
}
}
@@ -1707,6 +1747,23 @@ fn structInitExpr(
return structInitExprRlPtr(gz, scope, node, struct_init, ptr_inst);
}
},
.destructure => |destructure| {
if (struct_init.ast.type_expr == 0) {
// This is an untyped init, so is an actual struct, which does
// not support destructuring.
return astgen.failNodeNotes(node, "struct value cannot be destructured", .{}, &.{
try astgen.errNoteNode(destructure.src_node, "result destructured here", .{}),
});
}
// You can init tuples using struct init syntax and numeric field
// names, but as with array inits, we could be bitten by default
// fields. Therefore, we do a normal typed init then an rvalue
// destructure.
const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
_ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node);
const result = try structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init);
return rvalue(gz, ri, result, node);
},
}
}
@@ -1968,6 +2025,7 @@ fn restoreErrRetIndex(
// TODO: Update this to do a proper load from the rl_ptr, once Sema can support it.
break :blk .none;
},
.destructure => return, // value must be a tuple or array, so never restore/pop
else => result,
},
else => .none, // always restore/pop
@@ -2340,6 +2398,8 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
.simple_var_decl,
.aligned_var_decl, => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.fullVarDecl(statement).?),
.assign_destructure => scope = try assignDestructureMaybeDecls(gz, scope, statement, block_arena_allocator),
.@"defer" => scope = try deferStmt(gz, scope, statement, block_arena_allocator, .defer_normal),
.@"errdefer" => scope = try deferStmt(gz, scope, statement, block_arena_allocator, .defer_error),
@@ -2481,6 +2541,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
.elem_val_imm,
.field_ptr,
.field_ptr_init,
.field_val,
@@ -2686,6 +2747,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.validate_array_init_ty,
.validate_struct_init_ty,
.validate_deref,
.validate_destructure,
.save_err_ret_index,
.restore_err_ret_index,
=> break :b true,
@@ -3100,10 +3162,7 @@ fn varDecl(
.keyword_var => {
const is_comptime = var_decl.comptime_token != null or gz.is_comptime;
var resolve_inferred_alloc: Zir.Inst.Ref = .none;
const var_data: struct {
result_info: ResultInfo,
alloc: Zir.Inst.Ref,
} = if (var_decl.ast.type_node != 0) a: {
const alloc: Zir.Inst.Ref, const result_info: ResultInfo = if (var_decl.ast.type_node != 0) a: {
const type_inst = try typeExpr(gz, scope, var_decl.ast.type_node);
const alloc = alloc: {
if (align_inst == .none) {
@@ -3122,7 +3181,7 @@ fn varDecl(
});
}
};
break :a .{ .alloc = alloc, .result_info = .{ .rl = .{ .ptr = .{ .inst = alloc } } } };
break :a .{ alloc, .{ .rl = .{ .ptr = .{ .inst = alloc } } } };
} else a: {
const alloc = alloc: {
if (align_inst == .none) {
@@ -3142,24 +3201,24 @@ fn varDecl(
}
};
resolve_inferred_alloc = alloc;
break :a .{ .alloc = alloc, .result_info = .{ .rl = .{ .inferred_ptr = alloc } } };
break :a .{ alloc, .{ .rl = .{ .inferred_ptr = alloc } } };
};
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
_ = try reachableExprComptime(gz, scope, var_data.result_info, var_decl.ast.init_node, node, is_comptime);
_ = try reachableExprComptime(gz, scope, result_info, var_decl.ast.init_node, node, is_comptime);
gz.anon_name_strategy = prev_anon_name_strategy;
if (resolve_inferred_alloc != .none) {
_ = try gz.addUnNode(.resolve_inferred_alloc, resolve_inferred_alloc, node);
}
try gz.addDbgVar(.dbg_var_ptr, ident_name, var_data.alloc);
try gz.addDbgVar(.dbg_var_ptr, ident_name, alloc);
const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
.gen_zir = gz,
.name = ident_name,
.ptr = var_data.alloc,
.ptr = alloc,
.token_src = name_token,
.maybe_comptime = is_comptime,
.id_cat = .@"local variable",
@@ -3227,6 +3286,301 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!voi
} } }, rhs);
}
/// Handles destructure assignments where no LHS is a `const` or `var` decl.
/// Emits ZIR for `a, b, _ = rhs` style assignments: every LHS is either the
/// discard identifier `_` or an existing lvalue expression. The RHS is
/// evaluated with a `.destructure` result location so its elements are
/// written directly through the LHS result pointers.
/// Asserts via AstGen layout that `node` is an `assign_destructure` node
/// whose `lhs` indexes `extra_data` (count followed by the LHS nodes).
fn assignDestructure(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!void {
    try emitDbgNode(gz, node);
    const astgen = gz.astgen;
    const tree = astgen.tree;
    const token_tags = tree.tokens.items(.tag);
    const node_datas = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);
    const node_tags = tree.nodes.items(.tag);

    // extra_data layout: [lhs_count, lhs_node_0, lhs_node_1, ...]
    const extra_index = node_datas[node].lhs;
    const lhs_count = tree.extra_data[extra_index];
    const lhs_nodes: []const Ast.Node.Index = @ptrCast(tree.extra_data[extra_index + 1 ..][0..lhs_count]);
    const rhs = node_datas[node].rhs;

    // The token immediately before the expression may be a `comptime` keyword
    // (the parser guarantees firstToken(node) - 1 is valid here — TODO confirm
    // for statements at the very start of a file).
    const maybe_comptime_token = tree.firstToken(node) - 1;
    const declared_comptime = token_tags[maybe_comptime_token] == .keyword_comptime;
    if (declared_comptime and gz.is_comptime) {
        return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
    }

    // If this expression is marked comptime, we must wrap the whole thing in a comptime block.
    var gz_buf: GenZir = undefined;
    const inner_gz = if (declared_comptime) bs: {
        gz_buf = gz.makeSubBlock(scope);
        gz_buf.is_comptime = true;
        break :bs &gz_buf;
    } else gz;
    defer if (declared_comptime) inner_gz.unstack();

    // Build one destructure component per LHS: `.discard` for `_`,
    // otherwise a typed result pointer produced by evaluating the lvalue.
    const rl_components = try astgen.arena.alloc(ResultInfo.Loc.DestructureComponent, lhs_nodes.len);
    for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
        if (node_tags[lhs_node] == .identifier) {
            // This intentionally does not support `@"_"` syntax.
            const ident_name = tree.tokenSlice(main_tokens[lhs_node]);
            if (mem.eql(u8, ident_name, "_")) {
                lhs_rl.* = .discard;
                continue;
            }
        }
        lhs_rl.* = .{ .typed_ptr = .{
            .inst = try lvalExpr(inner_gz, scope, lhs_node),
            .src_node = lhs_node,
        } };
    }
    // Evaluate the RHS with the destructure result location; the expr
    // machinery is responsible for storing each element through the
    // corresponding component (or validating via rvalue destructure).
    const ri: ResultInfo = .{ .rl = .{ .destructure = .{
        .src_node = node,
        .components = rl_components,
    } } };
    _ = try expr(inner_gz, scope, ri, rhs);
    if (declared_comptime) {
        // Close the wrapping comptime block and splice it into the parent.
        const comptime_block_inst = try gz.makeBlockInst(.block_comptime, node);
        _ = try inner_gz.addBreak(.@"break", comptime_block_inst, .void_value);
        try inner_gz.setBlockBody(comptime_block_inst);
        try gz.instructions.append(gz.astgen.gpa, comptime_block_inst);
    }
}
/// Handles destructure assignments where the LHS may contain `const` or `var` decls.
/// This is the statement form `const a, var b, c, _ = rhs;`. Each LHS is one of:
/// - `_` (discard),
/// - a `const`/`var` declaration (a new alloc is created for it), or
/// - an existing lvalue expression (mutated in place).
/// Returns the innermost scope, which includes one `Scope.LocalPtr` per newly
/// declared name, so callers can continue the enclosing block with the new
/// declarations in scope.
fn assignDestructureMaybeDecls(
    gz: *GenZir,
    scope: *Scope,
    node: Ast.Node.Index,
    block_arena: Allocator,
) InnerError!*Scope {
    try emitDbgNode(gz, node);
    const astgen = gz.astgen;
    const tree = astgen.tree;
    const token_tags = tree.tokens.items(.tag);
    const node_datas = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);
    const node_tags = tree.nodes.items(.tag);

    // extra_data layout: [lhs_count, lhs_node_0, lhs_node_1, ...]
    const extra_index = node_datas[node].lhs;
    const lhs_count = tree.extra_data[extra_index];
    const lhs_nodes: []const Ast.Node.Index = @ptrCast(tree.extra_data[extra_index + 1 ..][0..lhs_count]);
    const rhs = node_datas[node].rhs;

    // A leading `comptime` keyword marks the entire destructure as comptime.
    const maybe_comptime_token = tree.firstToken(node) - 1;
    const declared_comptime = token_tags[maybe_comptime_token] == .keyword_comptime;
    if (declared_comptime and gz.is_comptime) {
        return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
    }

    const is_comptime = declared_comptime or gz.is_comptime;
    const rhs_is_comptime = tree.nodes.items(.tag)[rhs] == .@"comptime";

    // When declaring consts via a destructure, we always use a result pointer.
    // This avoids the need to create tuple types, and is also likely easier to
    // optimize, since it's a bit tricky for the optimizer to "split up" the
    // value into individual pointer writes down the line.

    // We know this rl information won't live past the evaluation of this
    // expression, so it may as well go in the block arena.
    const rl_components = try block_arena.alloc(ResultInfo.Loc.DestructureComponent, lhs_nodes.len);
    var any_non_const_lhs = false;
    var any_lvalue_expr = false;
    // First pass: classify each LHS and create allocs for declarations.
    // Lvalue expressions are only noted here (inst left undefined) so they can
    // be evaluated later inside the comptime block, if one is needed.
    for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
        switch (node_tags[lhs_node]) {
            .identifier => {
                // This intentionally does not support `@"_"` syntax.
                const ident_name = tree.tokenSlice(main_tokens[lhs_node]);
                if (mem.eql(u8, ident_name, "_")) {
                    any_non_const_lhs = true;
                    lhs_rl.* = .discard;
                    continue;
                }
            },
            .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
                const full = tree.fullVarDecl(lhs_node).?;

                const name_token = full.ast.mut_token + 1;
                const ident_name_raw = tree.tokenSlice(name_token);
                if (mem.eql(u8, ident_name_raw, "_")) {
                    return astgen.failTok(name_token, "'_' used as an identifier without @\"_\" syntax", .{});
                }

                // We detect shadowing in the second pass over these, while we're creating scopes.

                // Locals may not carry addrspace or linksection attributes.
                if (full.ast.addrspace_node != 0) {
                    return astgen.failTok(main_tokens[full.ast.addrspace_node], "cannot set address space of local variable '{s}'", .{ident_name_raw});
                }
                if (full.ast.section_node != 0) {
                    return astgen.failTok(main_tokens[full.ast.section_node], "cannot set section of local variable '{s}'", .{ident_name_raw});
                }

                const is_const = switch (token_tags[full.ast.mut_token]) {
                    .keyword_var => false,
                    .keyword_const => true,
                    else => unreachable,
                };
                if (!is_const) any_non_const_lhs = true;

                // We also mark `const`s as comptime if the RHS is definitely comptime-known.
                const this_lhs_comptime = is_comptime or (is_const and rhs_is_comptime);

                const align_inst: Zir.Inst.Ref = if (full.ast.align_node != 0)
                    try expr(gz, scope, align_ri, full.ast.align_node)
                else
                    .none;

                if (full.ast.type_node != 0) {
                    // Typed alloc
                    const type_inst = try typeExpr(gz, scope, full.ast.type_node);
                    const ptr = if (align_inst == .none) ptr: {
                        const tag: Zir.Inst.Tag = if (is_const)
                            .alloc
                        else if (this_lhs_comptime)
                            .alloc_comptime_mut
                        else
                            .alloc_mut;
                        break :ptr try gz.addUnNode(tag, type_inst, node);
                    } else try gz.addAllocExtended(.{
                        .node = node,
                        .type_inst = type_inst,
                        .align_inst = align_inst,
                        .is_const = is_const,
                        .is_comptime = this_lhs_comptime,
                    });
                    lhs_rl.* = .{ .typed_ptr = .{ .inst = ptr } };
                } else {
                    // Inferred alloc
                    const ptr = if (align_inst == .none) ptr: {
                        const tag: Zir.Inst.Tag = if (is_const) tag: {
                            break :tag if (this_lhs_comptime) .alloc_inferred_comptime else .alloc_inferred;
                        } else tag: {
                            break :tag if (this_lhs_comptime) .alloc_inferred_comptime_mut else .alloc_inferred_mut;
                        };
                        break :ptr try gz.addNode(tag, node);
                    } else try gz.addAllocExtended(.{
                        .node = node,
                        .type_inst = .none,
                        .align_inst = align_inst,
                        .is_const = is_const,
                        .is_comptime = this_lhs_comptime,
                    });
                    lhs_rl.* = .{ .inferred_ptr = ptr };
                }

                continue;
            },
            else => {},
        }
        // This LHS is just an lvalue expression.
        // We will fill in its result pointer later, inside a comptime block.
        any_non_const_lhs = true;
        any_lvalue_expr = true;
        lhs_rl.* = .{ .typed_ptr = .{
            .inst = undefined,
            .src_node = lhs_node,
        } };
    }

    if (declared_comptime and !any_non_const_lhs) {
        try astgen.appendErrorTok(maybe_comptime_token, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
    }

    // If this expression is marked comptime, we must wrap it in a comptime block.
    var gz_buf: GenZir = undefined;
    const inner_gz = if (declared_comptime) bs: {
        gz_buf = gz.makeSubBlock(scope);
        gz_buf.is_comptime = true;
        break :bs &gz_buf;
    } else gz;
    defer if (declared_comptime) inner_gz.unstack();

    if (any_lvalue_expr) {
        // At least one LHS was an lvalue expr. Iterate again in order to
        // evaluate the lvalues from within the possible block_comptime.
        for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
            if (lhs_rl.* != .typed_ptr) continue;
            switch (node_tags[lhs_node]) {
                .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => continue,
                else => {},
            }
            lhs_rl.typed_ptr.inst = try lvalExpr(inner_gz, scope, lhs_node);
        }
    }

    // We can't give a reasonable anon name strategy for destructured inits, so
    // leave it at its default of `.anon`.
    _ = try reachableExpr(inner_gz, scope, .{ .rl = .{ .destructure = .{
        .src_node = node,
        .components = rl_components,
    } } }, rhs, node);

    if (declared_comptime) {
        // Finish the block_comptime. Inferred alloc resolution etc will occur
        // in the parent block.
        const comptime_block_inst = try gz.makeBlockInst(.block_comptime, node);
        _ = try inner_gz.addBreak(.@"break", comptime_block_inst, .void_value);
        try inner_gz.setBlockBody(comptime_block_inst);
        try gz.instructions.append(gz.astgen.gpa, comptime_block_inst);
    }

    // Now, iterate over the LHS exprs to construct any new scopes.
    // If there were any inferred allocations, resolve them.
    // If there were any `const` decls, make the pointer constant.
    var cur_scope = scope;
    for (rl_components, lhs_nodes) |lhs_rl, lhs_node| {
        switch (node_tags[lhs_node]) {
            .local_var_decl, .simple_var_decl, .aligned_var_decl => {},
            else => continue, // We were mutating an existing lvalue - nothing to do
        }
        const full = tree.fullVarDecl(lhs_node).?;
        const raw_ptr = switch (lhs_rl) {
            .discard => unreachable,
            .typed_ptr => |typed_ptr| typed_ptr.inst,
            .inferred_ptr => |ptr_inst| ptr_inst,
        };
        // If the alloc was inferred, resolve it.
        if (full.ast.type_node == 0) {
            _ = try gz.addUnNode(.resolve_inferred_alloc, raw_ptr, lhs_node);
        }
        const is_const = switch (token_tags[full.ast.mut_token]) {
            .keyword_var => false,
            .keyword_const => true,
            else => unreachable,
        };
        // If the alloc was const, make it const.
        const var_ptr = if (is_const) make_const: {
            break :make_const try gz.addUnNode(.make_ptr_const, raw_ptr, node);
        } else raw_ptr;
        const name_token = full.ast.mut_token + 1;
        const ident_name_raw = tree.tokenSlice(name_token);
        const ident_name = try astgen.identAsString(name_token);
        try astgen.detectLocalShadowing(
            cur_scope,
            ident_name,
            name_token,
            ident_name_raw,
            if (is_const) .@"local constant" else .@"local variable",
        );
        try gz.addDbgVar(.dbg_var_ptr, ident_name, var_ptr);
        // Finally, create the scope.
        const sub_scope = try block_arena.create(Scope.LocalPtr);
        sub_scope.* = .{
            .parent = cur_scope,
            .gen_zir = gz,
            .name = ident_name,
            .ptr = var_ptr,
            .token_src = name_token,
            .maybe_comptime = is_const or is_comptime,
            .id_cat = if (is_const) .@"local constant" else .@"local variable",
        };
        cur_scope = &sub_scope.base;
    }
    return cur_scope;
}
fn assignOp(
gz: *GenZir,
scope: *Scope,
@@ -9059,6 +9413,7 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
.array_cat,
.array_mult,
.assign,
.assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9237,6 +9592,7 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
.array_cat,
.array_mult,
.assign,
.assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9483,6 +9839,7 @@ fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
.array_cat,
.array_mult,
.assign,
.assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9830,6 +10187,37 @@ fn rvalue(
_ = try gz.addBin(.store_to_inferred_ptr, alloc, result);
return .void_value;
},
.destructure => |destructure| {
const components = destructure.components;
_ = try gz.addPlNode(.validate_destructure, src_node, Zir.Inst.ValidateDestructure{
.operand = result,
.destructure_node = gz.nodeIndexToRelative(destructure.src_node),
.expect_len = @intCast(components.len),
});
for (components, 0..) |component, i| {
if (component == .discard) continue;
const elem_val = try gz.add(.{
.tag = .elem_val_imm,
.data = .{ .elem_val_imm = .{
.operand = result,
.idx = @intCast(i),
} },
});
switch (component) {
.typed_ptr => |ptr_res| {
_ = try gz.addPlNode(.store_node, ptr_res.src_node orelse src_node, Zir.Inst.Bin{
.lhs = ptr_res.inst,
.rhs = elem_val,
});
},
.inferred_ptr => |ptr_inst| {
_ = try gz.addBin(.store_to_inferred_ptr, ptr_inst, elem_val);
},
.discard => unreachable,
}
}
return .void_value;
},
}
}

View File

@@ -203,6 +203,16 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
else => unreachable,
}
},
.assign_destructure => {
const lhs_count = tree.extra_data[node_datas[node].lhs];
const all_lhs = tree.extra_data[node_datas[node].lhs + 1 ..][0..lhs_count];
for (all_lhs) |lhs| {
_ = try astrl.expr(lhs, block, ResultInfo.none);
}
// We don't need to gather any meaningful data here, because destructures always use RLS
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
return false;
},
.assign => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.typed_ptr);

View File

@@ -1018,6 +1018,7 @@ fn analyzeBodyInner(
.elem_ptr_imm => try sema.zirElemPtrImm(block, inst),
.elem_val => try sema.zirElemVal(block, inst),
.elem_val_node => try sema.zirElemValNode(block, inst),
.elem_val_imm => try sema.zirElemValImm(block, inst),
.elem_type_index => try sema.zirElemTypeIndex(block, inst),
.elem_type => try sema.zirElemType(block, inst),
.indexable_ptr_elem_type => try sema.zirIndexablePtrElemType(block, inst),
@@ -1379,6 +1380,11 @@ fn analyzeBodyInner(
i += 1;
continue;
},
.validate_destructure => {
try sema.zirValidateDestructure(block, inst);
i += 1;
continue;
},
.@"export" => {
try sema.zirExport(block, inst);
i += 1;
@@ -3780,6 +3786,21 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
return sema.analyzeDeclRef(try anon_decl.finish(elem_ty, store_val, ptr_info.flags.alignment));
}
// If this is already a comptime-mutable allocation, we don't want to emit an error - the stores
// were already performed at comptime! Just make the pointer constant as normal.
implicit_ct: {
const ptr_val = try sema.resolveMaybeUndefVal(alloc) orelse break :implicit_ct;
if (ptr_val.isComptimeMutablePtr(mod)) break :implicit_ct;
return sema.makePtrConst(block, alloc);
}
if (try sema.typeRequiresComptime(elem_ty)) {
// The value was initialized through RLS, so we didn't detect the runtime condition earlier.
// TODO: source location of runtime control flow
const init_src: LazySrcLoc = .{ .node_offset_bin_rhs = inst_data.src_node };
return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(mod)});
}
return sema.makePtrConst(block, alloc);
}
@@ -5163,6 +5184,43 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
}
}
/// Validates a `validate_destructure` ZIR instruction: the operand must be an
/// array or a tuple whose length matches the number of destructure components.
/// Emits a compile error (with a note pointing at the destructure site)
/// otherwise; on success this is a no-op.
fn zirValidateDestructure(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void {
    const zcu = sema.mod;
    const pl_node = sema.code.instructions.items(.data)[inst].pl_node;
    const payload = sema.code.extraData(Zir.Inst.ValidateDestructure, pl_node.payload_index).data;
    const src = pl_node.src();
    const destructure_src = LazySrcLoc.nodeOffset(payload.destructure_node);

    const operand = try sema.resolveInst(payload.operand);
    const operand_ty = sema.typeOf(operand);

    // Only arrays and tuples may be destructured.
    const destructurable = switch (operand_ty.zigTypeTag(zcu)) {
        .Array => true,
        .Struct => operand_ty.isTuple(zcu),
        else => false,
    };

    if (!destructurable) {
        const msg = msg: {
            const msg = try sema.errMsg(block, src, "type '{}' cannot be destructured", .{operand_ty.fmt(zcu)});
            errdefer msg.destroy(sema.gpa);
            try sema.errNote(block, destructure_src, msg, "result destructured here", .{});
            break :msg msg;
        };
        return sema.failWithOwnedErrorMsg(block, msg);
    }

    // The element count must match the number of LHS components exactly.
    if (operand_ty.arrayLen(zcu) != payload.expect_len) {
        const msg = msg: {
            const msg = try sema.errMsg(block, src, "expected {} elements for destructure, found {}", .{
                payload.expect_len,
                operand_ty.arrayLen(zcu),
            });
            errdefer msg.destroy(sema.gpa);
            try sema.errNote(block, destructure_src, msg, "result destructured here", .{});
            break :msg msg;
        };
        return sema.failWithOwnedErrorMsg(block, msg);
    }
}
fn failWithBadMemberAccess(
sema: *Sema,
block: *Block,
@@ -10289,6 +10347,17 @@ fn zirElemValNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
return sema.elemVal(block, src, array, elem_index, elem_index_src, true);
}
/// Analyzes an `elem_val_imm` ZIR instruction: loads the element of `operand`
/// at the immediate index `idx`. No OOB safety check is emitted here; AstGen
/// guarantees a prior instruction (e.g. `validate_destructure`) has already
/// validated the access.
fn zirElemValImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
    const tracy = trace(@src());
    defer tracy.end();

    const data = sema.code.instructions.items(.data)[inst].elem_val_imm;
    const aggregate = try sema.resolveInst(data.operand);
    // Materialize the comptime-known immediate index as a usize ref.
    const index_ref = try sema.mod.intRef(Type.usize, data.idx);
    return sema.elemVal(block, .unneeded, aggregate, index_ref, .unneeded, false);
}
fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();
@@ -11023,17 +11092,17 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
const special_prong_src: LazySrcLoc = .{ .node_offset_switch_special_prong = src_node_offset };
const extra = sema.code.extraData(Zir.Inst.SwitchBlock, inst_data.payload_index);
const raw_operand: struct { val: Air.Inst.Ref, ptr: Air.Inst.Ref } = blk: {
const raw_operand_val: Air.Inst.Ref, const raw_operand_ptr: Air.Inst.Ref = blk: {
const maybe_ptr = try sema.resolveInst(extra.data.operand);
if (operand_is_ref) {
const val = try sema.analyzeLoad(block, src, maybe_ptr, operand_src);
break :blk .{ .val = val, .ptr = maybe_ptr };
break :blk .{ val, maybe_ptr };
} else {
break :blk .{ .val = maybe_ptr, .ptr = undefined };
break :blk .{ maybe_ptr, undefined };
}
};
const operand = try sema.switchCond(block, operand_src, raw_operand.val);
const operand = try sema.switchCond(block, operand_src, raw_operand_val);
// AstGen guarantees that the instruction immediately preceding
// switch_block(_ref) is a dbg_stmt
@@ -11091,7 +11160,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
},
};
const maybe_union_ty = sema.typeOf(raw_operand.val);
const maybe_union_ty = sema.typeOf(raw_operand_val);
const union_originally = maybe_union_ty.zigTypeTag(mod) == .Union;
// Duplicate checking variables later also used for `inline else`.
@@ -11642,8 +11711,8 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
const spa: SwitchProngAnalysis = .{
.sema = sema,
.parent_block = block,
.operand = raw_operand.val,
.operand_ptr = raw_operand.ptr,
.operand = raw_operand_val,
.operand_ptr = raw_operand_ptr,
.cond = operand,
.else_error_ty = else_error_ty,
.switch_block_inst = inst,
@@ -15431,11 +15500,7 @@ fn analyzeArithmetic(
const maybe_lhs_val = try sema.resolveMaybeUndefValIntable(casted_lhs);
const maybe_rhs_val = try sema.resolveMaybeUndefValIntable(casted_rhs);
const rs: struct {
src: LazySrcLoc,
air_tag: Air.Inst.Tag,
air_tag_safe: Air.Inst.Tag,
} = rs: {
const runtime_src: LazySrcLoc, const air_tag: Air.Inst.Tag, const air_tag_safe: Air.Inst.Tag = rs: {
switch (zir_tag) {
.add, .add_unsafe => {
// For integers:intAddSat
@@ -15482,8 +15547,8 @@ fn analyzeArithmetic(
} else {
return Air.internedToRef((try Value.floatAdd(lhs_val, rhs_val, resolved_type, sema.arena, mod)).toIntern());
}
} else break :rs .{ .src = rhs_src, .air_tag = air_tag, .air_tag_safe = .add_safe };
} else break :rs .{ .src = lhs_src, .air_tag = air_tag, .air_tag_safe = .add_safe };
} else break :rs .{ rhs_src, air_tag, .add_safe };
} else break :rs .{ lhs_src, air_tag, .add_safe };
},
.addwrap => {
// Integers only; floats are checked above.
@@ -15503,8 +15568,8 @@ fn analyzeArithmetic(
}
if (maybe_lhs_val) |lhs_val| {
return Air.internedToRef((try sema.numberAddWrapScalar(lhs_val, rhs_val, resolved_type)).toIntern());
} else break :rs .{ .src = lhs_src, .air_tag = .add_wrap, .air_tag_safe = .add_wrap };
} else break :rs .{ .src = rhs_src, .air_tag = .add_wrap, .air_tag_safe = .add_wrap };
} else break :rs .{ lhs_src, .add_wrap, .add_wrap };
} else break :rs .{ rhs_src, .add_wrap, .add_wrap };
},
.add_sat => {
// Integers only; floats are checked above.
@@ -15530,14 +15595,14 @@ fn analyzeArithmetic(
return Air.internedToRef(val.toIntern());
} else break :rs .{
.src = lhs_src,
.air_tag = .add_sat,
.air_tag_safe = .add_sat,
lhs_src,
.add_sat,
.add_sat,
};
} else break :rs .{
.src = rhs_src,
.air_tag = .add_sat,
.air_tag_safe = .add_sat,
rhs_src,
.add_sat,
.add_sat,
};
},
.sub => {
@@ -15580,8 +15645,8 @@ fn analyzeArithmetic(
} else {
return Air.internedToRef((try Value.floatSub(lhs_val, rhs_val, resolved_type, sema.arena, mod)).toIntern());
}
} else break :rs .{ .src = rhs_src, .air_tag = air_tag, .air_tag_safe = .sub_safe };
} else break :rs .{ .src = lhs_src, .air_tag = air_tag, .air_tag_safe = .sub_safe };
} else break :rs .{ rhs_src, air_tag, .sub_safe };
} else break :rs .{ lhs_src, air_tag, .sub_safe };
},
.subwrap => {
// Integers only; floats are checked above.
@@ -15601,8 +15666,8 @@ fn analyzeArithmetic(
}
if (maybe_rhs_val) |rhs_val| {
return Air.internedToRef((try sema.numberSubWrapScalar(lhs_val, rhs_val, resolved_type)).toIntern());
} else break :rs .{ .src = rhs_src, .air_tag = .sub_wrap, .air_tag_safe = .sub_wrap };
} else break :rs .{ .src = lhs_src, .air_tag = .sub_wrap, .air_tag_safe = .sub_wrap };
} else break :rs .{ rhs_src, .sub_wrap, .sub_wrap };
} else break :rs .{ lhs_src, .sub_wrap, .sub_wrap };
},
.sub_sat => {
// Integers only; floats are checked above.
@@ -15627,8 +15692,8 @@ fn analyzeArithmetic(
try lhs_val.intSubSat(rhs_val, resolved_type, sema.arena, mod);
return Air.internedToRef(val.toIntern());
} else break :rs .{ .src = rhs_src, .air_tag = .sub_sat, .air_tag_safe = .sub_sat };
} else break :rs .{ .src = lhs_src, .air_tag = .sub_sat, .air_tag_safe = .sub_sat };
} else break :rs .{ rhs_src, .sub_sat, .sub_sat };
} else break :rs .{ lhs_src, .sub_sat, .sub_sat };
},
.mul => {
// For integers:
@@ -15720,8 +15785,8 @@ fn analyzeArithmetic(
} else {
return Air.internedToRef((try lhs_val.floatMul(rhs_val, resolved_type, sema.arena, mod)).toIntern());
}
} else break :rs .{ .src = lhs_src, .air_tag = air_tag, .air_tag_safe = .mul_safe };
} else break :rs .{ .src = rhs_src, .air_tag = air_tag, .air_tag_safe = .mul_safe };
} else break :rs .{ lhs_src, air_tag, .mul_safe };
} else break :rs .{ rhs_src, air_tag, .mul_safe };
},
.mulwrap => {
// Integers only; floats are handled above.
@@ -15765,8 +15830,8 @@ fn analyzeArithmetic(
return mod.undefRef(resolved_type);
}
return Air.internedToRef((try lhs_val.numberMulWrap(rhs_val, resolved_type, sema.arena, mod)).toIntern());
} else break :rs .{ .src = lhs_src, .air_tag = .mul_wrap, .air_tag_safe = .mul_wrap };
} else break :rs .{ .src = rhs_src, .air_tag = .mul_wrap, .air_tag_safe = .mul_wrap };
} else break :rs .{ lhs_src, .mul_wrap, .mul_wrap };
} else break :rs .{ rhs_src, .mul_wrap, .mul_wrap };
},
.mul_sat => {
// Integers only; floats are checked above.
@@ -15816,20 +15881,20 @@ fn analyzeArithmetic(
try lhs_val.intMulSat(rhs_val, resolved_type, sema.arena, mod);
return Air.internedToRef(val.toIntern());
} else break :rs .{ .src = lhs_src, .air_tag = .mul_sat, .air_tag_safe = .mul_sat };
} else break :rs .{ .src = rhs_src, .air_tag = .mul_sat, .air_tag_safe = .mul_sat };
} else break :rs .{ lhs_src, .mul_sat, .mul_sat };
} else break :rs .{ rhs_src, .mul_sat, .mul_sat };
},
else => unreachable,
}
};
try sema.requireRuntimeBlock(block, src, rs.src);
try sema.requireRuntimeBlock(block, src, runtime_src);
if (block.wantSafety() and want_safety and scalar_tag == .Int) {
if (mod.backendSupportsFeature(.safety_checked_instructions)) {
_ = try sema.preparePanicId(block, .integer_overflow);
return block.addBinOp(rs.air_tag_safe, casted_lhs, casted_rhs);
return block.addBinOp(air_tag_safe, casted_lhs, casted_rhs);
} else {
const maybe_op_ov: ?Air.Inst.Tag = switch (rs.air_tag) {
const maybe_op_ov: ?Air.Inst.Tag = switch (air_tag) {
.add => .add_with_overflow,
.sub => .sub_with_overflow,
.mul => .mul_with_overflow,
@@ -15866,7 +15931,7 @@ fn analyzeArithmetic(
}
}
}
return block.addBinOp(rs.air_tag, casted_lhs, casted_rhs);
return block.addBinOp(air_tag, casted_lhs, casted_rhs);
}
fn analyzePtrArithmetic(
@@ -32276,16 +32341,10 @@ fn compareIntsOnlyPossibleResult(
// For any other comparison, we need to know if the LHS value is
// equal to the maximum or minimum possible value of the RHS type.
const edge: struct { min: bool, max: bool } = edge: {
if (is_zero and rhs_info.signedness == .unsigned) break :edge .{
.min = true,
.max = false,
};
const is_min, const is_max = edge: {
if (is_zero and rhs_info.signedness == .unsigned) break :edge .{ true, false };
if (req_bits != rhs_info.bits) break :edge .{
.min = false,
.max = false,
};
if (req_bits != rhs_info.bits) break :edge .{ false, false };
const ty = try mod.intType(
if (is_negative) .signed else .unsigned,
@@ -32294,24 +32353,18 @@ fn compareIntsOnlyPossibleResult(
const pop_count = lhs_val.popCount(ty, mod);
if (is_negative) {
break :edge .{
.min = pop_count == 1,
.max = false,
};
break :edge .{ pop_count == 1, false };
} else {
break :edge .{
.min = false,
.max = pop_count == req_bits - sign_adj,
};
break :edge .{ false, pop_count == req_bits - sign_adj };
}
};
assert(fits);
return switch (op) {
.lt => if (edge.max) false else null,
.lte => if (edge.min) true else null,
.gt => if (edge.min) false else null,
.gte => if (edge.max) true else null,
.lt => if (is_max) false else null,
.lte => if (is_min) true else null,
.gt => if (is_min) false else null,
.gte => if (is_max) true else null,
.eq, .neq => unreachable,
};
}
@@ -32548,7 +32601,7 @@ const PeerResolveStrategy = enum {
either,
};
const res: struct { ReasonMethod, PeerResolveStrategy } = switch (s0) {
const reason_method: ReasonMethod, const strat: PeerResolveStrategy = switch (s0) {
.unknown => .{ .all_s1, s1 },
.error_set => switch (s1) {
.error_set => .{ .either, .error_set },
@@ -32616,7 +32669,7 @@ const PeerResolveStrategy = enum {
.exact => .{ .all_s0, .exact },
};
switch (res[0]) {
switch (reason_method) {
.all_s0 => {
if (!s0_is_a) {
reason_peer.* = b_peer_idx;
@@ -32633,7 +32686,7 @@ const PeerResolveStrategy = enum {
},
}
return res[1];
return strat;
}
fn select(ty: Type, mod: *Module) PeerResolveStrategy {

View File

@@ -434,6 +434,10 @@ pub const Inst = struct {
/// Payload is `Bin`.
/// No OOB safety check is emitted.
elem_val,
/// Same as `elem_val` but takes the index as an immediate value.
/// No OOB safety check is emitted. A prior instruction must validate this operation.
/// Uses the `elem_val_imm` union field.
elem_val_imm,
/// Emits a compile error if the operand is not `void`.
/// Uses the `un_node` field.
ensure_result_used,
@@ -725,6 +729,9 @@ pub const Inst = struct {
/// Check that operand type supports the dereference operand (.*).
/// Uses the `un_node` field.
validate_deref,
/// Check that the operand's type is an array or tuple with the given number of elements.
/// Uses the `pl_node` field. Payload is `ValidateDestructure`.
validate_destructure,
/// A struct literal with a specified type, with no fields.
/// Uses the `un_node` field.
struct_init_empty,
@@ -1069,6 +1076,7 @@ pub const Inst = struct {
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
.elem_val_imm,
.ensure_result_used,
.ensure_result_non_error,
.ensure_err_union_payload_void,
@@ -1145,6 +1153,7 @@ pub const Inst = struct {
.validate_struct_init,
.validate_array_init,
.validate_deref,
.validate_destructure,
.struct_init_empty,
.struct_init,
.struct_init_ref,
@@ -1295,6 +1304,7 @@ pub const Inst = struct {
.validate_struct_init,
.validate_array_init,
.validate_deref,
.validate_destructure,
.@"export",
.export_value,
.set_runtime_safety,
@@ -1369,6 +1379,7 @@ pub const Inst = struct {
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
.elem_val_imm,
.field_ptr,
.field_ptr_init,
.field_val,
@@ -1615,6 +1626,7 @@ pub const Inst = struct {
.elem_ptr_imm = .pl_node,
.elem_val = .pl_node,
.elem_val_node = .pl_node,
.elem_val_imm = .elem_val_imm,
.ensure_result_used = .un_node,
.ensure_result_non_error = .un_node,
.ensure_err_union_payload_void = .un_node,
@@ -1689,6 +1701,7 @@ pub const Inst = struct {
.validate_struct_init = .pl_node,
.validate_array_init = .pl_node,
.validate_deref = .un_node,
.validate_destructure = .pl_node,
.struct_init_empty = .un_node,
.field_type = .pl_node,
.field_type_ref = .pl_node,
@@ -2295,6 +2308,12 @@ pub const Inst = struct {
block: Ref, // If restored, the index is from this block's entrypoint
operand: Ref, // If non-error (or .none), then restore the index
},
elem_val_imm: struct {
/// The indexable value being accessed.
operand: Ref,
/// The index being accessed.
idx: u32,
},
// Make sure we don't accidentally add a field to make this union
// bigger than expected. Note that in Debug builds, Zig is allowed
@@ -2334,6 +2353,7 @@ pub const Inst = struct {
defer_err_code,
save_err_ret_index,
restore_err_ret_index,
elem_val_imm,
};
};
@@ -3233,6 +3253,15 @@ pub const Inst = struct {
index: u32,
len: u32,
};
pub const ValidateDestructure = struct {
/// The value being destructured.
operand: Ref,
/// The `destructure_assign` node.
destructure_node: i32,
/// The expected field count.
expect_len: u32,
};
};
pub const SpecialProng = enum { none, @"else", under };

View File

@@ -242,6 +242,7 @@ const Writer = struct {
.bool_br_or,
=> try self.writeBoolBr(stream, inst),
.validate_destructure => try self.writeValidateDestructure(stream, inst),
.validate_array_init_ty => try self.writeValidateArrayInitTy(stream, inst),
.array_type_sentinel => try self.writeArrayTypeSentinel(stream, inst),
.ptr_type => try self.writePtrType(stream, inst),
@@ -357,6 +358,8 @@ const Writer = struct {
.for_len => try self.writePlNodeMultiOp(stream, inst),
.elem_val_imm => try self.writeElemValImm(stream, inst),
.elem_ptr_imm => try self.writeElemPtrImm(stream, inst),
.@"export" => try self.writePlNodeExport(stream, inst),
@@ -585,6 +588,20 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
/// Pretty-prints a `validate_destructure` instruction: the destructured
/// operand, the expected element count, and the source locations of both
/// the `destructure_assign` node and the instruction itself.
fn writeValidateDestructure(
self: *Writer,
stream: anytype,
inst: Zir.Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const pl_node = self.code.instructions.items(.data)[inst].pl_node;
const payload = self.code.extraData(Zir.Inst.ValidateDestructure, pl_node.payload_index).data;
try self.writeInstRef(stream, payload.operand);
try stream.print(", {d}) (destructure=", .{payload.expect_len});
try self.writeSrc(stream, LazySrcLoc.nodeOffset(payload.destructure_node));
try stream.writeAll(") ");
try self.writeSrc(stream, pl_node.src());
}
fn writeValidateArrayInitTy(
self: *Writer,
stream: anytype,
@@ -892,6 +909,12 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
/// Pretty-prints an `elem_val_imm` instruction: the indexable operand
/// followed by the immediate element index.
fn writeElemValImm(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const data = self.code.instructions.items(.data)[inst].elem_val_imm;
try self.writeInstRef(stream, data.operand);
try stream.print(", {d})", .{data.idx});
}
fn writeElemPtrImm(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data;

Binary file not shown.

View File

@@ -157,6 +157,7 @@ test {
_ = @import("behavior/decltest.zig");
_ = @import("behavior/duplicated_test_names.zig");
_ = @import("behavior/defer.zig");
_ = @import("behavior/destructure.zig");
_ = @import("behavior/empty_tuple_fields.zig");
_ = @import("behavior/empty_union.zig");
_ = @import("behavior/enum.zig");

View File

@@ -0,0 +1,100 @@
const std = @import("std");
const assert = std.debug.assert;
const expect = std.testing.expect;
// Basic destructuring assignment: mixed targets (an existing var, a new
// inferred-type const, a new explicitly-typed var) filled from a tuple
// literal, exercised at both runtime and comptime.
test "simple destructure" {
const S = struct {
fn doTheTest() !void {
var x: u32 = undefined;
// Targets: existing `x`, inferred const `y`, typed var `z`.
x, const y, var z: u64 = .{ 1, @as(u16, 2), 3 };
// `y` takes its type from the tuple element (`u16`).
comptime assert(@TypeOf(y) == u16);
try expect(x == 1);
try expect(y == 2);
try expect(z == 3);
}
};
try S.doTheTest();
try comptime S.doTheTest();
}
// The `comptime` prefix on a destructure makes the whole assignment
// comptime: `y`/`z` get `comptime_int`, and `z` is a comptime var even
// though it is declared with `var`.
test "destructure with comptime syntax" {
const S = struct {
fn doTheTest() void {
comptime var x: f32 = undefined;
comptime x, const y, var z = .{ 0.5, 123, 456 }; // z is a comptime var
comptime assert(@TypeOf(y) == comptime_int);
comptime assert(@TypeOf(z) == comptime_int);
comptime assert(x == 0.5);
comptime assert(y == 123);
comptime assert(z == 456);
}
};
S.doTheTest();
comptime S.doTheTest();
}
// Destructuring the result of a labeled block: each `break :blk`
// supplies a tuple whose elements coerce to the annotated target types
// (u32/u8/i64). Only the `rt_true` branch is taken here.
test "destructure from labeled block" {
const S = struct {
fn doTheTest(rt_true: bool) !void {
const x: u32, const y: u8, const z: i64 = blk: {
if (rt_true) break :blk .{ 1, 2, 3 };
break :blk .{ 4, 5, 6 };
};
try expect(x == 1);
try expect(y == 2);
try expect(z == 3);
}
};
try S.doTheTest(true);
try comptime S.doTheTest(true);
}
// Destructuring a previously constructed tuple value: each target takes
// the type of the corresponding tuple field.
test "destructure tuple value" {
const tup: struct { f32, u32, i64 } = .{ 10.0, 20, 30 };
const x, const y, const z = tup;
comptime assert(@TypeOf(x) == f32);
comptime assert(@TypeOf(y) == u32);
comptime assert(@TypeOf(z) == i64);
try expect(x == 10.0);
try expect(y == 20);
try expect(z == 30);
}
// Destructuring an array value: every target gets the array's element
// type (u32), one element per target.
test "destructure array value" {
const arr: [3]u32 = .{ 10, 20, 30 };
const x, const y, const z = arr;
comptime assert(@TypeOf(x) == u32);
comptime assert(@TypeOf(y) == u32);
comptime assert(@TypeOf(z) == u32);
try expect(x == 10);
try expect(y == 20);
try expect(z == 30);
}
// Destructuring a tuple built via struct-init syntax with explicit
// numeric field names (`.@"0"` etc.); targets still take the declared
// field types in order.
test "destructure from struct init with named tuple fields" {
const Tuple = struct { u8, u16, u32 };
const x, const y, const z = Tuple{
.@"0" = 100,
.@"1" = 200,
.@"2" = 300,
};
comptime assert(@TypeOf(x) == u8);
comptime assert(@TypeOf(y) == u16);
comptime assert(@TypeOf(z) == u32);
try expect(x == 100);
try expect(y == 200);
try expect(z == 300);
}

View File

@@ -13,6 +13,10 @@ export fn d() void {
var x: f32 = 0;
_ = x + @floatFromInt(123);
}
export fn e() void { // error fixture: destructure gives @intCast no single result type (see expected ":17:..." diagnostics below)
const x: u32, const y: u64 = @intCast(123); // two different target types -> @intCast result type is unknown
_ = x + y;
}
// error
// backend=stage2
@@ -26,3 +30,6 @@ export fn d() void {
// :9:10: note: use @as to provide explicit result type
// :14:13: error: @floatFromInt must have a known result type
// :14:13: note: use @as to provide explicit result type
// :17:34: error: @intCast must have a known result type
// :17:32: note: destructure expressions do not provide a single result type
// :17:34: note: use @as to provide explicit result type

View File

@@ -0,0 +1,10 @@
export fn foo() void { // parse-error fixture; expected diagnostics below encode exact line:col, so no lines may be added
const x, const y, = .{ 1, 2 }; // the trailing ',' before '=' is the intentional parse error (":2:23")
_ = .{ x, y };
}
// error
// backend=stage2
// target=native
//
// :2:23: error: expected expression or var decl, found '='

View File

@@ -0,0 +1,22 @@
export fn foo() void { // error fixture: 2 destructure targets vs 3 tuple elements (":2:25")
const x, const y = .{ 1, 2, 3 };
_ = .{ x, y };
}
export fn bar() void { // error fixture: a struct (non-tuple) value cannot be destructured (":10:21")
var x: u32 = undefined;
x, const y: u64 = blk: {
if (true) break :blk .{ 1, 2 };
break :blk .{ .x = 123, .y = 456 }; // named-field struct literal is the offending result
};
_ = y;
}
// error
// backend=stage2
// target=native
//
// :2:25: error: expected 2 elements for destructure, found 3
// :2:22: note: result destructured here
// :10:21: error: struct value cannot be destructured
// :8:21: note: result destructured here

View File

@@ -0,0 +1,23 @@
export fn foo() void { // error fixture: a scalar 'comptime_int' cannot be destructured (":2:24")
const x, const y = 123;
_ = .{ x, y };
}
export fn bar() void { // error fixture: 3-element tuple vs 2 destructure targets (":11:20")
var x: u32 = undefined;
x, const y: u64 = blk: {
if (false) break :blk .{ 1, 2 };
const val = .{ 3, 4, 5 }; // wrong-arity tuple reaches the destructure via this break
break :blk val;
};
_ = y;
}
// error
// backend=stage2
// target=native
//
// :2:24: error: type 'comptime_int' cannot be destructured
// :2:22: note: result destructured here
// :11:20: error: expected 2 elements for destructure, found 3
// :8:21: note: result destructured here

View File

@@ -1,7 +1,10 @@
pub fn main() void { // error fixture: destructured locals are checked for use like ordinary locals
const x = 1;
const y, var z = .{ 2, 3 }; // none of x, y, z is used -> unused-local errors (see trailer)
}
// error
//
// :3:18: error: unused local variable
// :3:11: error: unused local constant
// :2:11: error: unused local constant