use * for pointer type instead of &

See #770

To help automatically translate code, see the
zig-fmt-pointer-reform-2 branch.

This will convert all & into *. Due to the syntax
ambiguity (which is why we are making this change),
even address-of & will turn into *, so you'll have
to manually fix these instances. You will be guaranteed
to get compile errors for them - expected 'type', found 'foo'
This commit is contained in:
Andrew Kelley
2018-05-31 10:56:59 -04:00
parent 717ac85a5a
commit fcbb7426fa
150 changed files with 2160 additions and 2141 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -24,15 +24,15 @@ pub fn main() !void {
const mb_per_sec = bytes_per_sec / (1024 * 1024);
var stdout_file = try std.io.getStdOut();
const stdout = &std.io.FileOutStream.init(&stdout_file).stream;
const stdout = *std.io.FileOutStream.init(*stdout_file).stream;
try stdout.print("{.3} MB/s, {} KB used \n", mb_per_sec, memory_used / 1024);
}
fn testOnce() usize {
var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var allocator = &fixed_buf_alloc.allocator;
var allocator = *fixed_buf_alloc.allocator;
var tokenizer = Tokenizer.init(source);
var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
var parser = Parser.init(*tokenizer, allocator, "(memory buffer)");
_ = parser.parse() catch @panic("parse failure");
return fixed_buf_alloc.end_index;
}

View File

@@ -9,7 +9,7 @@ const Error = ast.Error;
/// Result should be freed with tree.deinit() when there are
/// no more references to any of the tokens or nodes.
pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
pub fn parse(allocator: *mem.Allocator, source: []const u8) !ast.Tree {
var tree_arena = std.heap.ArenaAllocator.init(allocator);
errdefer tree_arena.deinit();
@@ -2754,16 +2754,16 @@ pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
}
const AnnotatedToken = struct {
ptr: &Token,
ptr: *Token,
index: TokenIndex,
};
const TopLevelDeclCtx = struct {
decls: &ast.Node.Root.DeclList,
decls: *ast.Node.Root.DeclList,
visib_token: ?TokenIndex,
extern_export_inline_token: ?AnnotatedToken,
lib_name: ?&ast.Node,
comments: ?&ast.Node.DocComment,
lib_name: ?*ast.Node,
comments: ?*ast.Node.DocComment,
};
const VarDeclCtx = struct {
@@ -2771,21 +2771,21 @@ const VarDeclCtx = struct {
visib_token: ?TokenIndex,
comptime_token: ?TokenIndex,
extern_export_token: ?TokenIndex,
lib_name: ?&ast.Node,
list: &ast.Node.Root.DeclList,
comments: ?&ast.Node.DocComment,
lib_name: ?*ast.Node,
list: *ast.Node.Root.DeclList,
comments: ?*ast.Node.DocComment,
};
const TopLevelExternOrFieldCtx = struct {
visib_token: TokenIndex,
container_decl: &ast.Node.ContainerDecl,
comments: ?&ast.Node.DocComment,
container_decl: *ast.Node.ContainerDecl,
comments: ?*ast.Node.DocComment,
};
const ExternTypeCtx = struct {
opt_ctx: OptionalCtx,
extern_token: TokenIndex,
comments: ?&ast.Node.DocComment,
comments: ?*ast.Node.DocComment,
};
const ContainerKindCtx = struct {
@@ -2795,24 +2795,24 @@ const ContainerKindCtx = struct {
const ExpectTokenSave = struct {
id: @TagType(Token.Id),
ptr: &TokenIndex,
ptr: *TokenIndex,
};
const OptionalTokenSave = struct {
id: @TagType(Token.Id),
ptr: &?TokenIndex,
ptr: *?TokenIndex,
};
const ExprListCtx = struct {
list: &ast.Node.SuffixOp.Op.InitList,
list: *ast.Node.SuffixOp.Op.InitList,
end: Token.Id,
ptr: &TokenIndex,
ptr: *TokenIndex,
};
fn ListSave(comptime List: type) type {
return struct {
list: &List,
ptr: &TokenIndex,
list: *List,
ptr: *TokenIndex,
};
}
@@ -2841,7 +2841,7 @@ const LoopCtx = struct {
const AsyncEndCtx = struct {
ctx: OptionalCtx,
attribute: &ast.Node.AsyncAttribute,
attribute: *ast.Node.AsyncAttribute,
};
const ErrorTypeOrSetDeclCtx = struct {
@@ -2850,21 +2850,21 @@ const ErrorTypeOrSetDeclCtx = struct {
};
const ParamDeclEndCtx = struct {
fn_proto: &ast.Node.FnProto,
param_decl: &ast.Node.ParamDecl,
fn_proto: *ast.Node.FnProto,
param_decl: *ast.Node.ParamDecl,
};
const ComptimeStatementCtx = struct {
comptime_token: TokenIndex,
block: &ast.Node.Block,
block: *ast.Node.Block,
};
const OptionalCtx = union(enum) {
Optional: &?&ast.Node,
RequiredNull: &?&ast.Node,
Required: &&ast.Node,
Optional: *?*ast.Node,
RequiredNull: *?*ast.Node,
Required: **ast.Node,
pub fn store(self: &const OptionalCtx, value: &ast.Node) void {
pub fn store(self: *const OptionalCtx, value: *ast.Node) void {
switch (self.*) {
OptionalCtx.Optional => |ptr| ptr.* = value,
OptionalCtx.RequiredNull => |ptr| ptr.* = value,
@@ -2872,7 +2872,7 @@ const OptionalCtx = union(enum) {
}
}
pub fn get(self: &const OptionalCtx) ?&ast.Node {
pub fn get(self: *const OptionalCtx) ?*ast.Node {
switch (self.*) {
OptionalCtx.Optional => |ptr| return ptr.*,
OptionalCtx.RequiredNull => |ptr| return ??ptr.*,
@@ -2880,7 +2880,7 @@ const OptionalCtx = union(enum) {
}
}
pub fn toRequired(self: &const OptionalCtx) OptionalCtx {
pub fn toRequired(self: *const OptionalCtx) OptionalCtx {
switch (self.*) {
OptionalCtx.Optional => |ptr| {
return OptionalCtx{ .RequiredNull = ptr };
@@ -2892,8 +2892,8 @@ const OptionalCtx = union(enum) {
};
const AddCommentsCtx = struct {
node_ptr: &&ast.Node,
comments: ?&ast.Node.DocComment,
node_ptr: **ast.Node,
comments: ?*ast.Node.DocComment,
};
const State = union(enum) {
@@ -2904,67 +2904,67 @@ const State = union(enum) {
TopLevelExternOrField: TopLevelExternOrFieldCtx,
ContainerKind: ContainerKindCtx,
ContainerInitArgStart: &ast.Node.ContainerDecl,
ContainerInitArg: &ast.Node.ContainerDecl,
ContainerDecl: &ast.Node.ContainerDecl,
ContainerInitArgStart: *ast.Node.ContainerDecl,
ContainerInitArg: *ast.Node.ContainerDecl,
ContainerDecl: *ast.Node.ContainerDecl,
VarDecl: VarDeclCtx,
VarDeclAlign: &ast.Node.VarDecl,
VarDeclEq: &ast.Node.VarDecl,
VarDeclSemiColon: &ast.Node.VarDecl,
VarDeclAlign: *ast.Node.VarDecl,
VarDeclEq: *ast.Node.VarDecl,
VarDeclSemiColon: *ast.Node.VarDecl,
FnDef: &ast.Node.FnProto,
FnProto: &ast.Node.FnProto,
FnProtoAlign: &ast.Node.FnProto,
FnProtoReturnType: &ast.Node.FnProto,
FnDef: *ast.Node.FnProto,
FnProto: *ast.Node.FnProto,
FnProtoAlign: *ast.Node.FnProto,
FnProtoReturnType: *ast.Node.FnProto,
ParamDecl: &ast.Node.FnProto,
ParamDeclAliasOrComptime: &ast.Node.ParamDecl,
ParamDeclName: &ast.Node.ParamDecl,
ParamDecl: *ast.Node.FnProto,
ParamDeclAliasOrComptime: *ast.Node.ParamDecl,
ParamDeclName: *ast.Node.ParamDecl,
ParamDeclEnd: ParamDeclEndCtx,
ParamDeclComma: &ast.Node.FnProto,
ParamDeclComma: *ast.Node.FnProto,
MaybeLabeledExpression: MaybeLabeledExpressionCtx,
LabeledExpression: LabelCtx,
Inline: InlineCtx,
While: LoopCtx,
WhileContinueExpr: &?&ast.Node,
WhileContinueExpr: *?*ast.Node,
For: LoopCtx,
Else: &?&ast.Node.Else,
Else: *?*ast.Node.Else,
Block: &ast.Node.Block,
Statement: &ast.Node.Block,
Block: *ast.Node.Block,
Statement: *ast.Node.Block,
ComptimeStatement: ComptimeStatementCtx,
Semicolon: &&ast.Node,
Semicolon: **ast.Node,
AsmOutputItems: &ast.Node.Asm.OutputList,
AsmOutputReturnOrType: &ast.Node.AsmOutput,
AsmInputItems: &ast.Node.Asm.InputList,
AsmClobberItems: &ast.Node.Asm.ClobberList,
AsmOutputItems: *ast.Node.Asm.OutputList,
AsmOutputReturnOrType: *ast.Node.AsmOutput,
AsmInputItems: *ast.Node.Asm.InputList,
AsmClobberItems: *ast.Node.Asm.ClobberList,
ExprListItemOrEnd: ExprListCtx,
ExprListCommaOrEnd: ExprListCtx,
FieldInitListItemOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
FieldInitListCommaOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
FieldListCommaOrEnd: &ast.Node.ContainerDecl,
FieldListCommaOrEnd: *ast.Node.ContainerDecl,
FieldInitValue: OptionalCtx,
ErrorTagListItemOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
ErrorTagListCommaOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
SwitchCaseOrEnd: ListSave(ast.Node.Switch.CaseList),
SwitchCaseCommaOrEnd: ListSave(ast.Node.Switch.CaseList),
SwitchCaseFirstItem: &ast.Node.SwitchCase,
SwitchCaseItemCommaOrEnd: &ast.Node.SwitchCase,
SwitchCaseItemOrEnd: &ast.Node.SwitchCase,
SwitchCaseFirstItem: *ast.Node.SwitchCase,
SwitchCaseItemCommaOrEnd: *ast.Node.SwitchCase,
SwitchCaseItemOrEnd: *ast.Node.SwitchCase,
SuspendBody: &ast.Node.Suspend,
AsyncAllocator: &ast.Node.AsyncAttribute,
SuspendBody: *ast.Node.Suspend,
AsyncAllocator: *ast.Node.AsyncAttribute,
AsyncEnd: AsyncEndCtx,
ExternType: ExternTypeCtx,
SliceOrArrayAccess: &ast.Node.SuffixOp,
SliceOrArrayType: &ast.Node.PrefixOp,
AddrOfModifiers: &ast.Node.PrefixOp.AddrOfInfo,
AlignBitRange: &ast.Node.PrefixOp.AddrOfInfo.Align,
SliceOrArrayAccess: *ast.Node.SuffixOp,
SliceOrArrayType: *ast.Node.PrefixOp,
AddrOfModifiers: *ast.Node.PrefixOp.AddrOfInfo,
AlignBitRange: *ast.Node.PrefixOp.AddrOfInfo.Align,
Payload: OptionalCtx,
PointerPayload: OptionalCtx,
@@ -3007,7 +3007,7 @@ const State = union(enum) {
ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
StringLiteral: OptionalCtx,
Identifier: OptionalCtx,
ErrorTag: &&ast.Node,
ErrorTag: **ast.Node,
IfToken: @TagType(Token.Id),
IfTokenSave: ExpectTokenSave,
@@ -3016,7 +3016,7 @@ const State = union(enum) {
OptionalTokenSave: OptionalTokenSave,
};
fn pushDocComment(arena: &mem.Allocator, line_comment: TokenIndex, result: &?&ast.Node.DocComment) !void {
fn pushDocComment(arena: *mem.Allocator, line_comment: TokenIndex, result: *?*ast.Node.DocComment) !void {
const node = blk: {
if (result.*) |comment_node| {
break :blk comment_node;
@@ -3032,8 +3032,8 @@ fn pushDocComment(arena: &mem.Allocator, line_comment: TokenIndex, result: &?&as
try node.lines.push(line_comment);
}
fn eatDocComments(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.DocComment {
var result: ?&ast.Node.DocComment = null;
fn eatDocComments(arena: *mem.Allocator, tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree) !?*ast.Node.DocComment {
var result: ?*ast.Node.DocComment = null;
while (true) {
if (eatToken(tok_it, tree, Token.Id.DocComment)) |line_comment| {
try pushDocComment(arena, line_comment, &result);
@@ -3044,7 +3044,7 @@ fn eatDocComments(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, t
return result;
}
fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, token_ptr: &const Token, token_index: TokenIndex, tree: &ast.Tree) !?&ast.Node {
fn parseStringLiteral(arena: *mem.Allocator, tok_it: *ast.Tree.TokenList.Iterator, token_ptr: *const Token, token_index: TokenIndex, tree: *ast.Tree) !?*ast.Node {
switch (token_ptr.id) {
Token.Id.StringLiteral => {
return &(try createLiteral(arena, ast.Node.StringLiteral, token_index)).base;
@@ -3071,11 +3071,11 @@ fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterato
},
// TODO: We shouldn't need a cast, but:
// zig: /home/jc/Documents/zig/src/ir.cpp:7962: TypeTableEntry* ir_resolve_peer_types(IrAnalyze*, AstNode*, IrInstruction**, size_t): Assertion `err_set_type != nullptr' failed.
else => return (?&ast.Node)(null),
else => return (?*ast.Node)(null),
}
}
fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx, token_ptr: &const Token, token_index: TokenIndex) !bool {
fn parseBlockExpr(stack: *std.ArrayList(State), arena: *mem.Allocator, ctx: *const OptionalCtx, token_ptr: *const Token, token_index: TokenIndex) !bool {
switch (token_ptr.id) {
Token.Id.Keyword_suspend => {
const node = try arena.construct(ast.Node.Suspend{
@@ -3189,7 +3189,7 @@ const ExpectCommaOrEndResult = union(enum) {
parse_error: Error,
};
fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult {
fn expectCommaOrEnd(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult {
const token = nextToken(tok_it, tree);
const token_index = token.index;
const token_ptr = token.ptr;
@@ -3212,7 +3212,7 @@ fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end:
}
}
fn tokenIdToAssignment(id: &const Token.Id) ?ast.Node.InfixOp.Op {
fn tokenIdToAssignment(id: *const Token.Id) ?ast.Node.InfixOp.Op {
// TODO: We have to cast all cases because of this:
// error: expected type '?InfixOp', found '?@TagType(InfixOp)'
return switch (id.*) {
@@ -3307,21 +3307,21 @@ fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
};
}
fn createLiteral(arena: &mem.Allocator, comptime T: type, token_index: TokenIndex) !&T {
fn createLiteral(arena: *mem.Allocator, comptime T: type, token_index: TokenIndex) !*T {
return arena.construct(T{
.base = ast.Node{ .id = ast.Node.typeToId(T) },
.token = token_index,
});
}
fn createToCtxLiteral(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, token_index: TokenIndex) !&T {
fn createToCtxLiteral(arena: *mem.Allocator, opt_ctx: *const OptionalCtx, comptime T: type, token_index: TokenIndex) !*T {
const node = try createLiteral(arena, T, token_index);
opt_ctx.store(&node.base);
return node;
}
fn eatToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
fn eatToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
const token = ??tok_it.peek();
if (token.id == id) {
@@ -3331,7 +3331,7 @@ fn eatToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, id: @TagType(
return null;
}
fn nextToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) AnnotatedToken {
fn nextToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree) AnnotatedToken {
const result = AnnotatedToken{
.index = tok_it.index,
.ptr = ??tok_it.next(),
@@ -3345,7 +3345,7 @@ fn nextToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) AnnotatedTok
}
}
fn prevToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) void {
fn prevToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree) void {
while (true) {
const prev_tok = tok_it.prev() ?? return;
if (prev_tok.id == Token.Id.LineComment) continue;

View File

@@ -1803,7 +1803,7 @@ const io = std.io;
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: &mem.Allocator, anything_changed: &bool) ![]u8 {
fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *bool) ![]u8 {
var stderr_file = try io.getStdErr();
var stderr = &io.FileOutStream.init(&stderr_file).stream;

View File

@@ -13,7 +13,7 @@ pub const Error = error{
};
/// Returns whether anything changed
pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) (@typeOf(stream).Child.Error || Error)!bool {
pub fn render(allocator: *mem.Allocator, stream: var, tree: *ast.Tree) (@typeOf(stream).Child.Error || Error)!bool {
comptime assert(@typeId(@typeOf(stream)) == builtin.TypeId.Pointer);
var anything_changed: bool = false;
@@ -24,13 +24,13 @@ pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) (@typeOf(
const StreamError = @typeOf(stream).Child.Error;
const Stream = std.io.OutStream(StreamError);
anything_changed_ptr: &bool,
anything_changed_ptr: *bool,
child_stream: @typeOf(stream),
stream: Stream,
source_index: usize,
source: []const u8,
fn write(iface_stream: &Stream, bytes: []const u8) StreamError!void {
fn write(iface_stream: *Stream, bytes: []const u8) StreamError!void {
const self = @fieldParentPtr(MyStream, "stream", iface_stream);
if (!self.anything_changed_ptr.*) {
@@ -63,9 +63,9 @@ pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) (@typeOf(
}
fn renderRoot(
allocator: &mem.Allocator,
allocator: *mem.Allocator,
stream: var,
tree: &ast.Tree,
tree: *ast.Tree,
) (@typeOf(stream).Child.Error || Error)!void {
// render all the line comments at the beginning of the file
var tok_it = tree.tokens.iterator(0);
@@ -90,7 +90,7 @@ fn renderRoot(
}
}
fn renderExtraNewline(tree: &ast.Tree, stream: var, start_col: &usize, node: &ast.Node) !void {
fn renderExtraNewline(tree: *ast.Tree, stream: var, start_col: *usize, node: *ast.Node) !void {
const first_token = node.firstToken();
var prev_token = first_token;
while (tree.tokens.at(prev_token - 1).id == Token.Id.DocComment) {
@@ -104,7 +104,7 @@ fn renderExtraNewline(tree: &ast.Tree, stream: var, start_col: &usize, node: &as
}
}
fn renderTopLevelDecl(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, start_col: &usize, decl: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
fn renderTopLevelDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree, indent: usize, start_col: *usize, decl: *ast.Node) (@typeOf(stream).Child.Error || Error)!void {
switch (decl.id) {
ast.Node.Id.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
@@ -214,12 +214,12 @@ fn renderTopLevelDecl(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, i
}
fn renderExpression(
allocator: &mem.Allocator,
allocator: *mem.Allocator,
stream: var,
tree: &ast.Tree,
tree: *ast.Tree,
indent: usize,
start_col: &usize,
base: &ast.Node,
start_col: *usize,
base: *ast.Node,
space: Space,
) (@typeOf(stream).Child.Error || Error)!void {
switch (base.id) {
@@ -1640,12 +1640,12 @@ fn renderExpression(
}
fn renderVarDecl(
allocator: &mem.Allocator,
allocator: *mem.Allocator,
stream: var,
tree: &ast.Tree,
tree: *ast.Tree,
indent: usize,
start_col: &usize,
var_decl: &ast.Node.VarDecl,
start_col: *usize,
var_decl: *ast.Node.VarDecl,
) (@typeOf(stream).Child.Error || Error)!void {
if (var_decl.visib_token) |visib_token| {
try renderToken(tree, stream, visib_token, indent, start_col, Space.Space); // pub
@@ -1696,12 +1696,12 @@ fn renderVarDecl(
}
fn renderParamDecl(
allocator: &mem.Allocator,
allocator: *mem.Allocator,
stream: var,
tree: &ast.Tree,
tree: *ast.Tree,
indent: usize,
start_col: &usize,
base: &ast.Node,
start_col: *usize,
base: *ast.Node,
space: Space,
) (@typeOf(stream).Child.Error || Error)!void {
const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", base);
@@ -1724,12 +1724,12 @@ fn renderParamDecl(
}
fn renderStatement(
allocator: &mem.Allocator,
allocator: *mem.Allocator,
stream: var,
tree: &ast.Tree,
tree: *ast.Tree,
indent: usize,
start_col: &usize,
base: &ast.Node,
start_col: *usize,
base: *ast.Node,
) (@typeOf(stream).Child.Error || Error)!void {
switch (base.id) {
ast.Node.Id.VarDecl => {
@@ -1761,7 +1761,7 @@ const Space = enum {
BlockStart,
};
fn renderToken(tree: &ast.Tree, stream: var, token_index: ast.TokenIndex, indent: usize, start_col: &usize, space: Space) (@typeOf(stream).Child.Error || Error)!void {
fn renderToken(tree: *ast.Tree, stream: var, token_index: ast.TokenIndex, indent: usize, start_col: *usize, space: Space) (@typeOf(stream).Child.Error || Error)!void {
if (space == Space.BlockStart) {
if (start_col.* < indent + indent_delta)
return renderToken(tree, stream, token_index, indent, start_col, Space.Space);
@@ -1928,11 +1928,11 @@ fn renderToken(tree: &ast.Tree, stream: var, token_index: ast.TokenIndex, indent
}
fn renderDocComments(
tree: &ast.Tree,
tree: *ast.Tree,
stream: var,
node: var,
indent: usize,
start_col: &usize,
start_col: *usize,
) (@typeOf(stream).Child.Error || Error)!void {
const comment = node.doc_comments ?? return;
var it = comment.lines.iterator(0);
@@ -1949,7 +1949,7 @@ fn renderDocComments(
}
}
fn nodeIsBlock(base: &const ast.Node) bool {
fn nodeIsBlock(base: *const ast.Node) bool {
return switch (base.id) {
ast.Node.Id.Block,
ast.Node.Id.If,
@@ -1961,7 +1961,7 @@ fn nodeIsBlock(base: &const ast.Node) bool {
};
}
fn nodeCausesSliceOpSpace(base: &ast.Node) bool {
fn nodeCausesSliceOpSpace(base: *ast.Node) bool {
const infix_op = base.cast(ast.Node.InfixOp) ?? return false;
return switch (infix_op.op) {
ast.Node.InfixOp.Op.Period => false,

View File

@@ -200,7 +200,7 @@ pub const Tokenizer = struct {
pending_invalid_token: ?Token,
/// For debugging purposes
pub fn dump(self: &Tokenizer, token: &const Token) void {
pub fn dump(self: *Tokenizer, token: *const Token) void {
std.debug.warn("{} \"{}\"\n", @tagName(token.id), self.buffer[token.start..token.end]);
}
@@ -265,7 +265,7 @@ pub const Tokenizer = struct {
SawAtSign,
};
pub fn next(self: &Tokenizer) Token {
pub fn next(self: *Tokenizer) Token {
if (self.pending_invalid_token) |token| {
self.pending_invalid_token = null;
return token;
@@ -1089,7 +1089,7 @@ pub const Tokenizer = struct {
return result;
}
fn checkLiteralCharacter(self: &Tokenizer) void {
fn checkLiteralCharacter(self: *Tokenizer) void {
if (self.pending_invalid_token != null) return;
const invalid_length = self.getInvalidCharacterLength();
if (invalid_length == 0) return;
@@ -1100,7 +1100,7 @@ pub const Tokenizer = struct {
};
}
fn getInvalidCharacterLength(self: &Tokenizer) u3 {
fn getInvalidCharacterLength(self: *Tokenizer) u3 {
const c0 = self.buffer[self.index];
if (c0 < 0x80) {
if (c0 < 0x20 or c0 == 0x7f) {