diff --git a/src/ast.zig b/src/ast.zig new file mode 100644 index 0000000..8b102a9 --- /dev/null +++ b/src/ast.zig @@ -0,0 +1,3278 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2015-2020 Zig Contributors +// This file is part of [zig](https://ziglang.org/), which is MIT licensed. +// The MIT license requires this copyright notice to be included in all copies +// and substantial portions of the software. +const std = @import("std"); +const assert = std.debug.assert; +const testing = std.testing; +const mem = std.mem; +const Token = @import("tokenizer.zig").Token; + +pub const TokenIndex = usize; +pub const NodeIndex = usize; + +pub const Tree = struct { + /// Reference to externally-owned data. + source: []const u8, + token_ids: []const Token.Id, + token_locs: []const Token.Loc, + errors: []const Error, + root_node: *Node.Root, + + arena: std.heap.ArenaAllocator.State, + gpa: *mem.Allocator, + + /// translate-c uses this to avoid having to emit correct newlines + /// TODO get rid of this hack + generated: bool = false, + + pub fn deinit(self: *Tree) void { + self.gpa.free(self.token_ids); + self.gpa.free(self.token_locs); + self.gpa.free(self.errors); + self.arena.promote(self.gpa).deinit(); + } + + pub fn renderError(self: *Tree, parse_error: *const Error, stream: anytype) !void { + return parse_error.render(self.token_ids, stream); + } + + pub fn tokenSlice(self: *Tree, token_index: TokenIndex) []const u8 { + return self.tokenSliceLoc(self.token_locs[token_index]); + } + + pub fn tokenSliceLoc(self: *Tree, token: Token.Loc) []const u8 { + return self.source[token.start..token.end]; + } + + pub fn getNodeSource(self: *const Tree, node: *const Node) []const u8 { + const first_token = self.token_locs[node.firstToken()]; + const last_token = self.token_locs[node.lastToken()]; + return self.source[first_token.start..last_token.end]; + } + + pub const Location = struct { + line: usize, + column: usize, + line_start: usize, + line_end: usize, + }; + + /// Return 
the Location of the token relative to the offset specified by `start_index`. + pub fn tokenLocationLoc(self: *Tree, start_index: usize, token: Token.Loc) Location { + var loc = Location{ + .line = 0, + .column = 0, + .line_start = start_index, + .line_end = self.source.len, + }; + if (self.generated) + return loc; + const token_start = token.start; + for (self.source[start_index..]) |c, i| { + if (i + start_index == token_start) { + loc.line_end = i + start_index; + while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') : (loc.line_end += 1) {} + return loc; + } + if (c == '\n') { + loc.line += 1; + loc.column = 0; + loc.line_start = i + 1; + } else { + loc.column += 1; + } + } + return loc; + } + + pub fn tokenLocation(self: *Tree, start_index: usize, token_index: TokenIndex) Location { + return self.tokenLocationLoc(start_index, self.token_locs[token_index]); + } + + pub fn tokensOnSameLine(self: *Tree, token1_index: TokenIndex, token2_index: TokenIndex) bool { + return self.tokensOnSameLineLoc(self.token_locs[token1_index], self.token_locs[token2_index]); + } + + pub fn tokensOnSameLineLoc(self: *Tree, token1: Token.Loc, token2: Token.Loc) bool { + return mem.indexOfScalar(u8, self.source[token1.end..token2.start], '\n') == null; + } + + pub fn dump(self: *Tree) void { + self.root_node.base.dump(0); + } + + /// Skips over comments + pub fn prevToken(self: *Tree, token_index: TokenIndex) TokenIndex { + var index = token_index - 1; + while (self.token_ids[index] == Token.Id.LineComment) { + index -= 1; + } + return index; + } + + /// Skips over comments + pub fn nextToken(self: *Tree, token_index: TokenIndex) TokenIndex { + var index = token_index + 1; + while (self.token_ids[index] == Token.Id.LineComment) { + index += 1; + } + return index; + } +}; + +pub const Error = union(enum) { + InvalidToken: InvalidToken, + ExpectedContainerMembers: ExpectedContainerMembers, + ExpectedStringLiteral: ExpectedStringLiteral, + ExpectedIntegerLiteral: 
ExpectedIntegerLiteral, + ExpectedPubItem: ExpectedPubItem, + ExpectedIdentifier: ExpectedIdentifier, + ExpectedStatement: ExpectedStatement, + ExpectedVarDeclOrFn: ExpectedVarDeclOrFn, + ExpectedVarDecl: ExpectedVarDecl, + ExpectedFn: ExpectedFn, + ExpectedReturnType: ExpectedReturnType, + UnattachedDocComment: UnattachedDocComment, + ExpectedEqOrSemi: ExpectedEqOrSemi, + ExpectedSemiOrLBrace: ExpectedSemiOrLBrace, + ExpectedSemiOrElse: ExpectedSemiOrElse, + ExpectedLabelOrLBrace: ExpectedLabelOrLBrace, + ExpectedLBrace: ExpectedLBrace, + ExpectedColonOrRParen: ExpectedColonOrRParen, + ExpectedLabelable: ExpectedLabelable, + ExpectedInlinable: ExpectedInlinable, + ExpectedAsmOutputReturnOrType: ExpectedAsmOutputReturnOrType, + ExpectedCall: ExpectedCall, + ExpectedCallOrFnProto: ExpectedCallOrFnProto, + ExpectedSliceOrRBracket: ExpectedSliceOrRBracket, + ExtraAlignQualifier: ExtraAlignQualifier, + ExtraConstQualifier: ExtraConstQualifier, + ExtraVolatileQualifier: ExtraVolatileQualifier, + ExtraAllowZeroQualifier: ExtraAllowZeroQualifier, + ExpectedTypeExpr: ExpectedTypeExpr, + ExpectedPrimaryTypeExpr: ExpectedPrimaryTypeExpr, + ExpectedParamType: ExpectedParamType, + ExpectedExpr: ExpectedExpr, + ExpectedPrimaryExpr: ExpectedPrimaryExpr, + ExpectedToken: ExpectedToken, + ExpectedCommaOrEnd: ExpectedCommaOrEnd, + ExpectedParamList: ExpectedParamList, + ExpectedPayload: ExpectedPayload, + ExpectedBlockOrAssignment: ExpectedBlockOrAssignment, + ExpectedBlockOrExpression: ExpectedBlockOrExpression, + ExpectedExprOrAssignment: ExpectedExprOrAssignment, + ExpectedPrefixExpr: ExpectedPrefixExpr, + ExpectedLoopExpr: ExpectedLoopExpr, + ExpectedDerefOrUnwrap: ExpectedDerefOrUnwrap, + ExpectedSuffixOp: ExpectedSuffixOp, + ExpectedBlockOrField: ExpectedBlockOrField, + DeclBetweenFields: DeclBetweenFields, + InvalidAnd: InvalidAnd, + AsteriskAfterPointerDereference: AsteriskAfterPointerDereference, + + pub fn render(self: *const Error, tokens: []const Token.Id, stream: 
anytype) !void { + switch (self.*) { + .InvalidToken => |*x| return x.render(tokens, stream), + .ExpectedContainerMembers => |*x| return x.render(tokens, stream), + .ExpectedStringLiteral => |*x| return x.render(tokens, stream), + .ExpectedIntegerLiteral => |*x| return x.render(tokens, stream), + .ExpectedPubItem => |*x| return x.render(tokens, stream), + .ExpectedIdentifier => |*x| return x.render(tokens, stream), + .ExpectedStatement => |*x| return x.render(tokens, stream), + .ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream), + .ExpectedVarDecl => |*x| return x.render(tokens, stream), + .ExpectedFn => |*x| return x.render(tokens, stream), + .ExpectedReturnType => |*x| return x.render(tokens, stream), + .UnattachedDocComment => |*x| return x.render(tokens, stream), + .ExpectedEqOrSemi => |*x| return x.render(tokens, stream), + .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream), + .ExpectedSemiOrElse => |*x| return x.render(tokens, stream), + .ExpectedLabelOrLBrace => |*x| return x.render(tokens, stream), + .ExpectedLBrace => |*x| return x.render(tokens, stream), + .ExpectedColonOrRParen => |*x| return x.render(tokens, stream), + .ExpectedLabelable => |*x| return x.render(tokens, stream), + .ExpectedInlinable => |*x| return x.render(tokens, stream), + .ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream), + .ExpectedCall => |*x| return x.render(tokens, stream), + .ExpectedCallOrFnProto => |*x| return x.render(tokens, stream), + .ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream), + .ExtraAlignQualifier => |*x| return x.render(tokens, stream), + .ExtraConstQualifier => |*x| return x.render(tokens, stream), + .ExtraVolatileQualifier => |*x| return x.render(tokens, stream), + .ExtraAllowZeroQualifier => |*x| return x.render(tokens, stream), + .ExpectedTypeExpr => |*x| return x.render(tokens, stream), + .ExpectedPrimaryTypeExpr => |*x| return x.render(tokens, stream), + .ExpectedParamType => |*x| return 
x.render(tokens, stream), + .ExpectedExpr => |*x| return x.render(tokens, stream), + .ExpectedPrimaryExpr => |*x| return x.render(tokens, stream), + .ExpectedToken => |*x| return x.render(tokens, stream), + .ExpectedCommaOrEnd => |*x| return x.render(tokens, stream), + .ExpectedParamList => |*x| return x.render(tokens, stream), + .ExpectedPayload => |*x| return x.render(tokens, stream), + .ExpectedBlockOrAssignment => |*x| return x.render(tokens, stream), + .ExpectedBlockOrExpression => |*x| return x.render(tokens, stream), + .ExpectedExprOrAssignment => |*x| return x.render(tokens, stream), + .ExpectedPrefixExpr => |*x| return x.render(tokens, stream), + .ExpectedLoopExpr => |*x| return x.render(tokens, stream), + .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream), + .ExpectedSuffixOp => |*x| return x.render(tokens, stream), + .ExpectedBlockOrField => |*x| return x.render(tokens, stream), + .DeclBetweenFields => |*x| return x.render(tokens, stream), + .InvalidAnd => |*x| return x.render(tokens, stream), + .AsteriskAfterPointerDereference => |*x| return x.render(tokens, stream), + } + } + + pub fn loc(self: *const Error) TokenIndex { + switch (self.*) { + .InvalidToken => |x| return x.token, + .ExpectedContainerMembers => |x| return x.token, + .ExpectedStringLiteral => |x| return x.token, + .ExpectedIntegerLiteral => |x| return x.token, + .ExpectedPubItem => |x| return x.token, + .ExpectedIdentifier => |x| return x.token, + .ExpectedStatement => |x| return x.token, + .ExpectedVarDeclOrFn => |x| return x.token, + .ExpectedVarDecl => |x| return x.token, + .ExpectedFn => |x| return x.token, + .ExpectedReturnType => |x| return x.token, + .UnattachedDocComment => |x| return x.token, + .ExpectedEqOrSemi => |x| return x.token, + .ExpectedSemiOrLBrace => |x| return x.token, + .ExpectedSemiOrElse => |x| return x.token, + .ExpectedLabelOrLBrace => |x| return x.token, + .ExpectedLBrace => |x| return x.token, + .ExpectedColonOrRParen => |x| return x.token, + 
.ExpectedLabelable => |x| return x.token, + .ExpectedInlinable => |x| return x.token, + .ExpectedAsmOutputReturnOrType => |x| return x.token, + .ExpectedCall => |x| return x.node.firstToken(), + .ExpectedCallOrFnProto => |x| return x.node.firstToken(), + .ExpectedSliceOrRBracket => |x| return x.token, + .ExtraAlignQualifier => |x| return x.token, + .ExtraConstQualifier => |x| return x.token, + .ExtraVolatileQualifier => |x| return x.token, + .ExtraAllowZeroQualifier => |x| return x.token, + .ExpectedTypeExpr => |x| return x.token, + .ExpectedPrimaryTypeExpr => |x| return x.token, + .ExpectedParamType => |x| return x.token, + .ExpectedExpr => |x| return x.token, + .ExpectedPrimaryExpr => |x| return x.token, + .ExpectedToken => |x| return x.token, + .ExpectedCommaOrEnd => |x| return x.token, + .ExpectedParamList => |x| return x.token, + .ExpectedPayload => |x| return x.token, + .ExpectedBlockOrAssignment => |x| return x.token, + .ExpectedBlockOrExpression => |x| return x.token, + .ExpectedExprOrAssignment => |x| return x.token, + .ExpectedPrefixExpr => |x| return x.token, + .ExpectedLoopExpr => |x| return x.token, + .ExpectedDerefOrUnwrap => |x| return x.token, + .ExpectedSuffixOp => |x| return x.token, + .ExpectedBlockOrField => |x| return x.token, + .DeclBetweenFields => |x| return x.token, + .InvalidAnd => |x| return x.token, + .AsteriskAfterPointerDereference => |x| return x.token, + } + } + + pub const InvalidToken = SingleTokenError("Invalid token '{}'"); + pub const ExpectedContainerMembers = SingleTokenError("Expected test, comptime, var decl, or container field, found '{}'"); + pub const ExpectedStringLiteral = SingleTokenError("Expected string literal, found '{}'"); + pub const ExpectedIntegerLiteral = SingleTokenError("Expected integer literal, found '{}'"); + pub const ExpectedIdentifier = SingleTokenError("Expected identifier, found '{}'"); + pub const ExpectedStatement = SingleTokenError("Expected statement, found '{}'"); + pub const ExpectedVarDeclOrFn 
= SingleTokenError("Expected variable declaration or function, found '{}'"); + pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{}'"); + pub const ExpectedFn = SingleTokenError("Expected function, found '{}'"); + pub const ExpectedReturnType = SingleTokenError("Expected 'var' or return type expression, found '{}'"); + pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{}'"); + pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{}'"); + pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{}'"); + pub const ExpectedLBrace = SingleTokenError("Expected '{{', found '{}'"); + pub const ExpectedLabelOrLBrace = SingleTokenError("Expected label or '{{', found '{}'"); + pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{}'"); + pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found '{}'"); + pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{}'"); + pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Id.Identifier.symbol() ++ "', found '{}'"); + pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{}'"); + pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{}'"); + pub const ExpectedPrimaryTypeExpr = SingleTokenError("Expected primary type expression, found '{}'"); + pub const ExpectedExpr = SingleTokenError("Expected expression, found '{}'"); + pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found '{}'"); + pub const ExpectedParamList = SingleTokenError("Expected parameter list, found '{}'"); + pub const ExpectedPayload = SingleTokenError("Expected loop payload, found '{}'"); + pub const ExpectedBlockOrAssignment = SingleTokenError("Expected block or assignment, found '{}'"); + pub const ExpectedBlockOrExpression = 
SingleTokenError("Expected block or expression, found '{}'"); + pub const ExpectedExprOrAssignment = SingleTokenError("Expected expression or assignment, found '{}'"); + pub const ExpectedPrefixExpr = SingleTokenError("Expected prefix expression, found '{}'"); + pub const ExpectedLoopExpr = SingleTokenError("Expected loop expression, found '{}'"); + pub const ExpectedDerefOrUnwrap = SingleTokenError("Expected pointer dereference or optional unwrap, found '{}'"); + pub const ExpectedSuffixOp = SingleTokenError("Expected pointer dereference, optional unwrap, or field access, found '{}'"); + pub const ExpectedBlockOrField = SingleTokenError("Expected block or field, found '{}'"); + + pub const ExpectedParamType = SimpleError("Expected parameter type"); + pub const ExpectedPubItem = SimpleError("Expected function or variable declaration after pub"); + pub const UnattachedDocComment = SimpleError("Unattached documentation comment"); + pub const ExtraAlignQualifier = SimpleError("Extra align qualifier"); + pub const ExtraConstQualifier = SimpleError("Extra const qualifier"); + pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier"); + pub const ExtraAllowZeroQualifier = SimpleError("Extra allowzero qualifier"); + pub const DeclBetweenFields = SimpleError("Declarations are not allowed between container fields"); + pub const InvalidAnd = SimpleError("`&&` is invalid. Note that `and` is boolean AND."); + pub const AsteriskAfterPointerDereference = SimpleError("`.*` can't be followed by `*`. 
Are you missing a space?"); + + pub const ExpectedCall = struct { + node: *Node, + + pub fn render(self: *const ExpectedCall, tokens: []const Token.Id, stream: anytype) !void { + return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {}", .{ + @tagName(self.node.tag), + }); + } + }; + + pub const ExpectedCallOrFnProto = struct { + node: *Node, + + pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token.Id, stream: anytype) !void { + return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++ + @tagName(Node.Tag.FnProto) ++ ", found {}", .{@tagName(self.node.tag)}); + } + }; + + pub const ExpectedToken = struct { + token: TokenIndex, + expected_id: Token.Id, + + pub fn render(self: *const ExpectedToken, tokens: []const Token.Id, stream: anytype) !void { + const found_token = tokens[self.token]; + switch (found_token) { + .Invalid => { + return stream.print("expected '{}', found invalid bytes", .{self.expected_id.symbol()}); + }, + else => { + const token_name = found_token.symbol(); + return stream.print("expected '{}', found '{}'", .{ self.expected_id.symbol(), token_name }); + }, + } + } + }; + + pub const ExpectedCommaOrEnd = struct { + token: TokenIndex, + end_id: Token.Id, + + pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Id, stream: anytype) !void { + const actual_token = tokens[self.token]; + return stream.print("expected ',' or '{}', found '{}'", .{ + self.end_id.symbol(), + actual_token.symbol(), + }); + } + }; + + fn SingleTokenError(comptime msg: []const u8) type { + return struct { + const ThisError = @This(); + + token: TokenIndex, + + pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: anytype) !void { + const actual_token = tokens[self.token]; + return stream.print(msg, .{actual_token.symbol()}); + } + }; + } + + fn SimpleError(comptime msg: []const u8) type { + return struct { + const ThisError = @This(); + + token: TokenIndex, + + pub fn render(self: *const 
ThisError, tokens: []const Token.Id, stream: anytype) !void { + return stream.writeAll(msg); + } + }; + } +}; + +pub const Node = struct { + tag: Tag, + + pub const Tag = enum { + // Top level + Root, + Use, + TestDecl, + + // Statements + VarDecl, + Defer, + + // Infix operators + Catch, + + // SimpleInfixOp + Add, + AddWrap, + ArrayCat, + ArrayMult, + Assign, + AssignBitAnd, + AssignBitOr, + AssignBitShiftLeft, + AssignBitShiftRight, + AssignBitXor, + AssignDiv, + AssignSub, + AssignSubWrap, + AssignMod, + AssignAdd, + AssignAddWrap, + AssignMul, + AssignMulWrap, + BangEqual, + BitAnd, + BitOr, + BitShiftLeft, + BitShiftRight, + BitXor, + BoolAnd, + BoolOr, + Div, + EqualEqual, + ErrorUnion, + GreaterOrEqual, + GreaterThan, + LessOrEqual, + LessThan, + MergeErrorSets, + Mod, + Mul, + MulWrap, + Period, + Range, + Sub, + SubWrap, + OrElse, + + // SimplePrefixOp + AddressOf, + Await, + BitNot, + BoolNot, + OptionalType, + Negation, + NegationWrap, + Resume, + Try, + + ArrayType, + /// ArrayType but has a sentinel node. + ArrayTypeSentinel, + PtrType, + SliceType, + /// `a[b..c]` + Slice, + /// `a.*` + Deref, + /// `a.?` + UnwrapOptional, + /// `a[b]` + ArrayAccess, + /// `T{a, b}` + ArrayInitializer, + /// ArrayInitializer but with `.` instead of a left-hand-side operand. + ArrayInitializerDot, + /// `T{.a = b}` + StructInitializer, + /// StructInitializer but with `.` instead of a left-hand-side operand. 
+ StructInitializerDot, + /// `foo()` + Call, + + // Control flow + Switch, + While, + For, + If, + Suspend, + Continue, + Break, + Return, + + // Type expressions + AnyType, + ErrorType, + FnProto, + AnyFrameType, + + // Primary expressions + IntegerLiteral, + FloatLiteral, + EnumLiteral, + StringLiteral, + MultilineStringLiteral, + CharLiteral, + BoolLiteral, + NullLiteral, + UndefinedLiteral, + Unreachable, + Identifier, + GroupedExpression, + BuiltinCall, + ErrorSetDecl, + ContainerDecl, + Asm, + Comptime, + Nosuspend, + Block, + LabeledBlock, + + // Misc + DocComment, + SwitchCase, // TODO make this not a child of AST Node + SwitchElse, // TODO make this not a child of AST Node + Else, // TODO make this not a child of AST Node + Payload, // TODO make this not a child of AST Node + PointerPayload, // TODO make this not a child of AST Node + PointerIndexPayload, // TODO make this not a child of AST Node + ContainerField, + ErrorTag, // TODO make this not a child of AST Node + FieldInitializer, // TODO make this not a child of AST Node + + pub fn Type(tag: Tag) type { + return switch (tag) { + .Root => Root, + .Use => Use, + .TestDecl => TestDecl, + .VarDecl => VarDecl, + .Defer => Defer, + .Catch => Catch, + + .Add, + .AddWrap, + .ArrayCat, + .ArrayMult, + .Assign, + .AssignBitAnd, + .AssignBitOr, + .AssignBitShiftLeft, + .AssignBitShiftRight, + .AssignBitXor, + .AssignDiv, + .AssignSub, + .AssignSubWrap, + .AssignMod, + .AssignAdd, + .AssignAddWrap, + .AssignMul, + .AssignMulWrap, + .BangEqual, + .BitAnd, + .BitOr, + .BitShiftLeft, + .BitShiftRight, + .BitXor, + .BoolAnd, + .BoolOr, + .Div, + .EqualEqual, + .ErrorUnion, + .GreaterOrEqual, + .GreaterThan, + .LessOrEqual, + .LessThan, + .MergeErrorSets, + .Mod, + .Mul, + .MulWrap, + .Period, + .Range, + .Sub, + .SubWrap, + .OrElse, + => SimpleInfixOp, + + .AddressOf, + .Await, + .BitNot, + .BoolNot, + .OptionalType, + .Negation, + .NegationWrap, + .Resume, + .Try, + => SimplePrefixOp, + + .Identifier, + 
.BoolLiteral, + .NullLiteral, + .UndefinedLiteral, + .Unreachable, + .AnyType, + .ErrorType, + .IntegerLiteral, + .FloatLiteral, + .StringLiteral, + .CharLiteral, + => OneToken, + + .Continue, + .Break, + .Return, + => ControlFlowExpression, + + .ArrayType => ArrayType, + .ArrayTypeSentinel => ArrayTypeSentinel, + + .PtrType => PtrType, + .SliceType => SliceType, + .Slice => Slice, + .Deref, .UnwrapOptional => SimpleSuffixOp, + .ArrayAccess => ArrayAccess, + + .ArrayInitializer => ArrayInitializer, + .ArrayInitializerDot => ArrayInitializerDot, + + .StructInitializer => StructInitializer, + .StructInitializerDot => StructInitializerDot, + + .Call => Call, + .Switch => Switch, + .While => While, + .For => For, + .If => If, + .Suspend => Suspend, + .FnProto => FnProto, + .AnyFrameType => AnyFrameType, + .EnumLiteral => EnumLiteral, + .MultilineStringLiteral => MultilineStringLiteral, + .GroupedExpression => GroupedExpression, + .BuiltinCall => BuiltinCall, + .ErrorSetDecl => ErrorSetDecl, + .ContainerDecl => ContainerDecl, + .Asm => Asm, + .Comptime => Comptime, + .Nosuspend => Nosuspend, + .Block => Block, + .LabeledBlock => LabeledBlock, + .DocComment => DocComment, + .SwitchCase => SwitchCase, + .SwitchElse => SwitchElse, + .Else => Else, + .Payload => Payload, + .PointerPayload => PointerPayload, + .PointerIndexPayload => PointerIndexPayload, + .ContainerField => ContainerField, + .ErrorTag => ErrorTag, + .FieldInitializer => FieldInitializer, + }; + } + + pub fn isBlock(tag: Tag) bool { + return switch (tag) { + .Block, .LabeledBlock => true, + else => false, + }; + } + }; + + /// Prefer `castTag` to this. 
+ pub fn cast(base: *Node, comptime T: type) ?*T { + if (std.meta.fieldInfo(T, "base").default_value) |default_base| { + return base.castTag(default_base.tag); + } + inline for (@typeInfo(Tag).Enum.fields) |field| { + const tag = @intToEnum(Tag, field.value); + if (base.tag == tag) { + if (T == tag.Type()) { + return @fieldParentPtr(T, "base", base); + } + return null; + } + } + unreachable; + } + + pub fn castTag(base: *Node, comptime tag: Tag) ?*tag.Type() { + if (base.tag == tag) { + return @fieldParentPtr(tag.Type(), "base", base); + } + return null; + } + + pub fn iterate(base: *Node, index: usize) ?*Node { + inline for (@typeInfo(Tag).Enum.fields) |field| { + const tag = @intToEnum(Tag, field.value); + if (base.tag == tag) { + return @fieldParentPtr(tag.Type(), "base", base).iterate(index); + } + } + unreachable; + } + + pub fn firstToken(base: *const Node) TokenIndex { + inline for (@typeInfo(Tag).Enum.fields) |field| { + const tag = @intToEnum(Tag, field.value); + if (base.tag == tag) { + return @fieldParentPtr(tag.Type(), "base", base).firstToken(); + } + } + unreachable; + } + + pub fn lastToken(base: *const Node) TokenIndex { + inline for (@typeInfo(Tag).Enum.fields) |field| { + const tag = @intToEnum(Tag, field.value); + if (base.tag == tag) { + return @fieldParentPtr(tag.Type(), "base", base).lastToken(); + } + } + unreachable; + } + + pub fn requireSemiColon(base: *const Node) bool { + var n = base; + while (true) { + switch (n.tag) { + .Root, + .ContainerField, + .Block, + .LabeledBlock, + .Payload, + .PointerPayload, + .PointerIndexPayload, + .Switch, + .SwitchCase, + .SwitchElse, + .FieldInitializer, + .DocComment, + .TestDecl, + => return false, + + .While => { + const while_node = @fieldParentPtr(While, "base", n); + if (while_node.@"else") |@"else"| { + n = &@"else".base; + continue; + } + + return !while_node.body.tag.isBlock(); + }, + .For => { + const for_node = @fieldParentPtr(For, "base", n); + if (for_node.@"else") |@"else"| { + n = 
&@"else".base; + continue; + } + + return !for_node.body.tag.isBlock(); + }, + .If => { + const if_node = @fieldParentPtr(If, "base", n); + if (if_node.@"else") |@"else"| { + n = &@"else".base; + continue; + } + + return !if_node.body.tag.isBlock(); + }, + .Else => { + const else_node = @fieldParentPtr(Else, "base", n); + n = else_node.body; + continue; + }, + .Defer => { + const defer_node = @fieldParentPtr(Defer, "base", n); + return !defer_node.expr.tag.isBlock(); + }, + .Comptime => { + const comptime_node = @fieldParentPtr(Comptime, "base", n); + return !comptime_node.expr.tag.isBlock(); + }, + .Suspend => { + const suspend_node = @fieldParentPtr(Suspend, "base", n); + if (suspend_node.body) |body| { + return !body.tag.isBlock(); + } + + return true; + }, + .Nosuspend => { + const nosuspend_node = @fieldParentPtr(Nosuspend, "base", n); + return !nosuspend_node.expr.tag.isBlock(); + }, + else => return true, + } + } + } + + /// Asserts the node is a Block or LabeledBlock and returns the statements slice. + pub fn blockStatements(base: *Node) []*Node { + if (base.castTag(.Block)) |block| { + return block.statements(); + } else if (base.castTag(.LabeledBlock)) |labeled_block| { + return labeled_block.statements(); + } else { + unreachable; + } + } + + pub fn findFirstWithId(self: *Node, id: Id) ?*Node { + if (self.id == id) return self; + var child_i: usize = 0; + while (self.iterate(child_i)) |child| : (child_i += 1) { + if (child.findFirstWithId(id)) |result| return result; + } + return null; + } + + pub fn dump(self: *Node, indent: usize) void { + { + var i: usize = 0; + while (i < indent) : (i += 1) { + std.debug.warn(" ", .{}); + } + } + std.debug.warn("{}\n", .{@tagName(self.tag)}); + + var child_i: usize = 0; + while (self.iterate(child_i)) |child| : (child_i += 1) { + child.dump(indent + 2); + } + } + + /// The decls data follows this struct in memory as an array of Node pointers. 
+ pub const Root = struct { + base: Node = Node{ .tag = .Root }, + eof_token: TokenIndex, + decls_len: NodeIndex, + + /// After this the caller must initialize the decls list. + pub fn create(allocator: *mem.Allocator, decls_len: NodeIndex, eof_token: TokenIndex) !*Root { + const bytes = try allocator.alignedAlloc(u8, @alignOf(Root), sizeInBytes(decls_len)); + const self = @ptrCast(*Root, bytes.ptr); + self.* = .{ + .eof_token = eof_token, + .decls_len = decls_len, + }; + return self; + } + + pub fn destroy(self: *Decl, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const Root, index: usize) ?*Node { + var i = index; + + if (i < self.decls_len) return self.declsConst()[i]; + return null; + } + + pub fn decls(self: *Root) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(Root); + return @ptrCast([*]*Node, decls_start)[0..self.decls_len]; + } + + pub fn declsConst(self: *const Root) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Root); + return @ptrCast([*]const *Node, decls_start)[0..self.decls_len]; + } + + pub fn firstToken(self: *const Root) TokenIndex { + if (self.decls_len == 0) return self.eof_token; + return self.declsConst()[0].firstToken(); + } + + pub fn lastToken(self: *const Root) TokenIndex { + if (self.decls_len == 0) return self.eof_token; + return self.declsConst()[self.decls_len - 1].lastToken(); + } + + fn sizeInBytes(decls_len: NodeIndex) usize { + return @sizeOf(Root) + @sizeOf(*Node) * @as(usize, decls_len); + } + }; + + /// Trailed in memory by possibly many things, with each optional thing + /// determined by a bit in `trailer_flags`. 
+ pub const VarDecl = struct { + base: Node = Node{ .tag = .VarDecl }, + trailer_flags: TrailerFlags, + mut_token: TokenIndex, + name_token: TokenIndex, + semicolon_token: TokenIndex, + + pub const TrailerFlags = std.meta.TrailerFlags(struct { + doc_comments: *DocComment, + visib_token: TokenIndex, + thread_local_token: TokenIndex, + eq_token: TokenIndex, + comptime_token: TokenIndex, + extern_export_token: TokenIndex, + lib_name: *Node, + type_node: *Node, + align_node: *Node, + section_node: *Node, + init_node: *Node, + }); + + pub fn getDocComments(self: *const VarDecl) ?*DocComment { + return self.getTrailer(.doc_comments); + } + + pub fn setDocComments(self: *VarDecl, value: *DocComment) void { + self.setTrailer(.doc_comments, value); + } + + pub fn getVisibToken(self: *const VarDecl) ?TokenIndex { + return self.getTrailer(.visib_token); + } + + pub fn setVisibToken(self: *VarDecl, value: TokenIndex) void { + self.setTrailer(.visib_token, value); + } + + pub fn getThreadLocalToken(self: *const VarDecl) ?TokenIndex { + return self.getTrailer(.thread_local_token); + } + + pub fn setThreadLocalToken(self: *VarDecl, value: TokenIndex) void { + self.setTrailer(.thread_local_token, value); + } + + pub fn getEqToken(self: *const VarDecl) ?TokenIndex { + return self.getTrailer(.eq_token); + } + + pub fn setEqToken(self: *VarDecl, value: TokenIndex) void { + self.setTrailer(.eq_token, value); + } + + pub fn getComptimeToken(self: *const VarDecl) ?TokenIndex { + return self.getTrailer(.comptime_token); + } + + pub fn setComptimeToken(self: *VarDecl, value: TokenIndex) void { + self.setTrailer(.comptime_token, value); + } + + pub fn getExternExportToken(self: *const VarDecl) ?TokenIndex { + return self.getTrailer(.extern_export_token); + } + + pub fn setExternExportToken(self: *VarDecl, value: TokenIndex) void { + self.setTrailer(.extern_export_token, value); + } + + pub fn getLibName(self: *const VarDecl) ?*Node { + return self.getTrailer(.lib_name); + } + + pub fn 
setLibName(self: *VarDecl, value: *Node) void { + self.setTrailer(.lib_name, value); + } + + pub fn getTypeNode(self: *const VarDecl) ?*Node { + return self.getTrailer(.type_node); + } + + pub fn setTypeNode(self: *VarDecl, value: *Node) void { + self.setTrailer(.type_node, value); + } + + pub fn getAlignNode(self: *const VarDecl) ?*Node { + return self.getTrailer(.align_node); + } + + pub fn setAlignNode(self: *VarDecl, value: *Node) void { + self.setTrailer(.align_node, value); + } + + pub fn getSectionNode(self: *const VarDecl) ?*Node { + return self.getTrailer(.section_node); + } + + pub fn setSectionNode(self: *VarDecl, value: *Node) void { + self.setTrailer(.section_node, value); + } + + pub fn getInitNode(self: *const VarDecl) ?*Node { + return self.getTrailer(.init_node); + } + + pub fn setInitNode(self: *VarDecl, value: *Node) void { + self.setTrailer(.init_node, value); + } + + pub const RequiredFields = struct { + mut_token: TokenIndex, + name_token: TokenIndex, + semicolon_token: TokenIndex, + }; + + fn getTrailer(self: *const VarDecl, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) { + const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(VarDecl); + return self.trailer_flags.get(trailers_start, field); + } + + fn setTrailer(self: *VarDecl, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void { + const trailers_start = @ptrCast([*]u8, self) + @sizeOf(VarDecl); + self.trailer_flags.set(trailers_start, field, value); + } + + pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*VarDecl { + const trailer_flags = TrailerFlags.init(trailers); + const bytes = try allocator.alignedAlloc(u8, @alignOf(VarDecl), sizeInBytes(trailer_flags)); + const var_decl = @ptrCast(*VarDecl, bytes.ptr); + var_decl.* = .{ + .trailer_flags = trailer_flags, + .mut_token = required.mut_token, + .name_token = required.name_token, + .semicolon_token = required.semicolon_token, + }; 
+ const trailers_start = bytes.ptr + @sizeOf(VarDecl); + trailer_flags.setMany(trailers_start, trailers); + return var_decl; + } + + pub fn destroy(self: *VarDecl, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const VarDecl, index: usize) ?*Node { + var i = index; + + if (self.getTypeNode()) |type_node| { + if (i < 1) return type_node; + i -= 1; + } + + if (self.getAlignNode()) |align_node| { + if (i < 1) return align_node; + i -= 1; + } + + if (self.getSectionNode()) |section_node| { + if (i < 1) return section_node; + i -= 1; + } + + if (self.getInitNode()) |init_node| { + if (i < 1) return init_node; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const VarDecl) TokenIndex { + if (self.getVisibToken()) |visib_token| return visib_token; + if (self.getThreadLocalToken()) |thread_local_token| return thread_local_token; + if (self.getComptimeToken()) |comptime_token| return comptime_token; + if (self.getExternExportToken()) |extern_export_token| return extern_export_token; + assert(self.getLibName() == null); + return self.mut_token; + } + + pub fn lastToken(self: *const VarDecl) TokenIndex { + return self.semicolon_token; + } + + fn sizeInBytes(trailer_flags: TrailerFlags) usize { + return @sizeOf(VarDecl) + trailer_flags.sizeInBytes(); + } + }; + + pub const Use = struct { + base: Node = Node{ .tag = .Use }, + doc_comments: ?*DocComment, + visib_token: ?TokenIndex, + use_token: TokenIndex, + expr: *Node, + semicolon_token: TokenIndex, + + pub fn iterate(self: *const Use, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.expr; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const Use) TokenIndex { + if (self.visib_token) |visib_token| return visib_token; + return self.use_token; + } + + pub fn lastToken(self: *const Use) TokenIndex { + return self.semicolon_token; + } + }; + + pub const ErrorSetDecl = 
struct {
        base: Node = Node{ .tag = .ErrorSetDecl },
        error_token: TokenIndex,
        rbrace_token: TokenIndex,
        decls_len: NodeIndex,

        // The decl node pointers are stored directly after this struct in
        // memory, in the same allocation.

        /// After this the caller must initialize the decls list.
        pub fn alloc(allocator: *mem.Allocator, decls_len: NodeIndex) !*ErrorSetDecl {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(ErrorSetDecl), sizeInBytes(decls_len));
            return @ptrCast(*ErrorSetDecl, bytes.ptr);
        }

        pub fn free(self: *ErrorSetDecl, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + decl pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const ErrorSetDecl, index: usize) ?*Node {
            var i = index;

            if (i < self.decls_len) return self.declsConst()[i];
            i -= self.decls_len;

            return null;
        }

        pub fn firstToken(self: *const ErrorSetDecl) TokenIndex {
            return self.error_token;
        }

        pub fn lastToken(self: *const ErrorSetDecl) TokenIndex {
            return self.rbrace_token;
        }

        pub fn decls(self: *ErrorSetDecl) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(ErrorSetDecl);
            return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
        }

        pub fn declsConst(self: *const ErrorSetDecl) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ErrorSetDecl);
            return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
        }

        fn sizeInBytes(decls_len: NodeIndex) usize {
            return @sizeOf(ErrorSetDecl) + @sizeOf(*Node) * @as(usize, decls_len);
        }
    };

    /// The fields and decls Node pointers directly follow this struct in memory.
    pub const ContainerDecl = struct {
        base: Node = Node{ .tag = .ContainerDecl },
        kind_token: TokenIndex,
        layout_token: ?TokenIndex,
        lbrace_token: TokenIndex,
        rbrace_token: TokenIndex,
        fields_and_decls_len: NodeIndex,
        init_arg_expr: InitArg,

        // Argument inside the container's parentheses, e.g. `enum(u8)` or
        // `union(enum)`; `None` when there are no parentheses.
        pub const InitArg = union(enum) {
            None,
            Enum: ?*Node,
            Type: *Node,
        };

        /// After this the caller must initialize the fields_and_decls list.
        pub fn alloc(allocator: *mem.Allocator, fields_and_decls_len: NodeIndex) !*ContainerDecl {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(ContainerDecl), sizeInBytes(fields_and_decls_len));
            return @ptrCast(*ContainerDecl, bytes.ptr);
        }

        pub fn free(self: *ContainerDecl, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + node pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.fields_and_decls_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const ContainerDecl, index: usize) ?*Node {
            var i = index;

            switch (self.init_arg_expr) {
                .Type => |t| {
                    if (i < 1) return t;
                    i -= 1;
                },
                .None, .Enum => {},
            }

            if (i < self.fields_and_decls_len) return self.fieldsAndDeclsConst()[i];
            i -= self.fields_and_decls_len;

            return null;
        }

        pub fn firstToken(self: *const ContainerDecl) TokenIndex {
            if (self.layout_token) |layout_token| {
                return layout_token;
            }
            return self.kind_token;
        }

        pub fn lastToken(self: *const ContainerDecl) TokenIndex {
            return self.rbrace_token;
        }

        pub fn fieldsAndDecls(self: *ContainerDecl) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(ContainerDecl);
            return @ptrCast([*]*Node, decls_start)[0..self.fields_and_decls_len];
        }

        pub fn fieldsAndDeclsConst(self: *const ContainerDecl) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ContainerDecl);
            return @ptrCast([*]const *Node, decls_start)[0..self.fields_and_decls_len];
        }

        fn sizeInBytes(fields_and_decls_len: NodeIndex) usize {
            return @sizeOf(ContainerDecl) + @sizeOf(*Node) * @as(usize, fields_and_decls_len);
        }
    };

    // A single field of a container: `name: type align(a) = value`.
    pub const ContainerField = struct {
        base: Node = Node{ .tag = .ContainerField },
        doc_comments: ?*DocComment,
        comptime_token: ?TokenIndex,
        name_token: TokenIndex,
        type_expr: ?*Node,
        value_expr: ?*Node,
        align_expr: ?*Node,

        // Returns the `index`-th child node, or null when out of range.
        // Children are visited in source order: type, align, default value.
        pub fn iterate(self: *const ContainerField, index: usize) ?*Node {
            var i = index;

            if (self.type_expr) |type_expr| {
                if (i < 1) return type_expr;
                i -= 1;
            }

            if (self.align_expr) |align_expr| {
                if (i < 1) return align_expr;
                i -= 1;
            }

            if (self.value_expr) |value_expr| {
                if (i < 1) return value_expr;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const ContainerField) TokenIndex {
            return self.comptime_token orelse self.name_token;
        }

        pub fn lastToken(self: *const ContainerField) TokenIndex {
            if (self.value_expr) |value_expr| {
                return value_expr.lastToken();
            }
            if (self.align_expr) |align_expr| {
                // The expression refers to what's inside the parenthesis, the
                // last token is the closing one
                return align_expr.lastToken() + 1;
            }
            if (self.type_expr) |type_expr| {
                return type_expr.lastToken();
            }

            return self.name_token;
        }
    };

    // One name inside an error set declaration.
    pub const ErrorTag = struct {
        base: Node = Node{ .tag = .ErrorTag },
        doc_comments: ?*DocComment,
        name_token: TokenIndex,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const ErrorTag, index: usize) ?*Node {
            var i = index;

            if (self.doc_comments) |comments| {
                if (i < 1) return &comments.base;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const ErrorTag) TokenIndex {
            return self.name_token;
        }

        pub fn lastToken(self: *const ErrorTag) TokenIndex {
            return self.name_token;
        }
    };

    // A leaf node consisting of exactly one token; `base.tag` says which kind.
    pub const OneToken = struct {
        base: Node,
        token: TokenIndex,

        // A single token has no child nodes.
        pub fn iterate(self: *const OneToken, index: usize) ?*Node {
            return null;
        }

        pub fn firstToken(self: *const OneToken) TokenIndex {
            return self.token;
        }

        pub fn lastToken(self: *const OneToken) TokenIndex {
            return self.token;
        }
    };

    /// The params are directly after the FnProto in memory.
    /// Next, each optional thing determined by a bit in `trailer_flags`.
    pub const FnProto = struct {
        base: Node = Node{ .tag = .FnProto },
        trailer_flags: TrailerFlags,
        fn_token: TokenIndex,
        params_len: NodeIndex,
        return_type: ReturnType,

        pub const TrailerFlags = std.meta.TrailerFlags(struct {
            doc_comments: *DocComment,
            body_node: *Node,
            lib_name: *Node, // populated if this is an extern declaration
            align_expr: *Node, // populated if align(A) is present
            section_expr: *Node, // populated if linksection(A) is present
            callconv_expr: *Node, // populated if callconv(A) is present
            visib_token: TokenIndex,
            name_token: TokenIndex,
            var_args_token: TokenIndex,
            extern_export_inline_token: TokenIndex,
            is_extern_prototype: void, // TODO: Remove once extern fn rewriting is
            is_async: void, // TODO: remove once async fn rewriting is
        });

        // Fields present on every FnProto; all optional pieces live in the
        // trailer area selected by `trailer_flags`.
        pub const RequiredFields = struct {
            fn_token: TokenIndex,
            params_len: NodeIndex,
            return_type: ReturnType,
        };

        pub const ReturnType = union(enum) {
            Explicit: *Node,
            InferErrorSet: *Node,
            Invalid: TokenIndex,
        };

        // One function parameter; stored inline in the params array that
        // directly follows the FnProto struct in memory.
        pub const ParamDecl = struct {
            doc_comments: ?*DocComment,
            comptime_token: ?TokenIndex,
            noalias_token: ?TokenIndex,
            name_token: ?TokenIndex,
            param_type: ParamType,

            pub const ParamType = union(enum) {
                any_type: *Node,
                type_expr: *Node,
            };

            // Returns the `index`-th child node, or null when out of range.
            pub fn iterate(self: *const ParamDecl, index: usize) ?*Node {
                var i = index;

                if (i < 1) {
                    switch (self.param_type) {
                        .any_type, .type_expr => |node| return node,
                    }
                }
                i -= 1;

                return null;
            }

            pub fn firstToken(self: *const ParamDecl) TokenIndex {
                if (self.comptime_token) |comptime_token| return comptime_token;
                if (self.noalias_token) |noalias_token| return noalias_token;
                if (self.name_token) |name_token| return name_token;
                switch (self.param_type) {
                    .any_type, .type_expr => |node| return node.firstToken(),
                }
            }

            pub fn lastToken(self: *const ParamDecl) TokenIndex {
                switch (self.param_type) {
                    .any_type, .type_expr => |node| return node.lastToken(),
                }
            }
        };

        /// For debugging purposes.
        pub fn dump(self: *const FnProto) void {
            const trailers_start = @alignCast(
                @alignOf(ParamDecl),
                @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
            );
            std.debug.print("{*} flags: {b} name_token: {} {*} params_len: {}\n", .{
                self,
                self.trailer_flags.bits,
                self.getNameToken(),
                self.trailer_flags.ptrConst(trailers_start, .name_token),
                self.params_len,
            });
        }

        pub fn getDocComments(self: *const FnProto) ?*DocComment {
            return self.getTrailer(.doc_comments);
        }

        pub fn setDocComments(self: *FnProto, value: *DocComment) void {
            self.setTrailer(.doc_comments, value);
        }

        pub fn getBodyNode(self: *const FnProto) ?*Node {
            return self.getTrailer(.body_node);
        }

        pub fn setBodyNode(self: *FnProto, value: *Node) void {
            self.setTrailer(.body_node, value);
        }

        pub fn getLibName(self: *const FnProto) ?*Node {
            return self.getTrailer(.lib_name);
        }

        pub fn setLibName(self: *FnProto, value: *Node) void {
            self.setTrailer(.lib_name, value);
        }

        pub fn getAlignExpr(self: *const FnProto) ?*Node {
            return self.getTrailer(.align_expr);
        }

        pub fn setAlignExpr(self: *FnProto, value: *Node) void {
            self.setTrailer(.align_expr, value);
        }

        pub fn getSectionExpr(self: *const FnProto) ?*Node {
            return self.getTrailer(.section_expr);
        }

        pub fn setSectionExpr(self: *FnProto, value: *Node) void {
            self.setTrailer(.section_expr, value);
        }

        pub fn getCallconvExpr(self: *const FnProto) ?*Node {
            return self.getTrailer(.callconv_expr);
        }

        pub fn setCallconvExpr(self: *FnProto, value: *Node) void {
            self.setTrailer(.callconv_expr, value);
        }

        pub fn getVisibToken(self: *const FnProto) ?TokenIndex {
            return self.getTrailer(.visib_token);
        }

        pub fn setVisibToken(self: *FnProto, value: TokenIndex) void {
            self.setTrailer(.visib_token, value);
        }

        pub fn getNameToken(self: *const FnProto) ?TokenIndex {
            return self.getTrailer(.name_token);
        }

        pub fn setNameToken(self: *FnProto, value: TokenIndex) void {
            self.setTrailer(.name_token, value);
        }

        pub fn getVarArgsToken(self: *const FnProto) ?TokenIndex {
            return self.getTrailer(.var_args_token);
        }

        pub fn setVarArgsToken(self: *FnProto, value: TokenIndex) void {
            self.setTrailer(.var_args_token, value);
        }

        pub fn getExternExportInlineToken(self: *const FnProto) ?TokenIndex {
            return self.getTrailer(.extern_export_inline_token);
        }

        pub fn setExternExportInlineToken(self: *FnProto, value: TokenIndex) void {
            self.setTrailer(.extern_export_inline_token, value);
        }

        pub fn getIsExternPrototype(self: *const FnProto) ?void {
            return self.getTrailer(.is_extern_prototype);
        }

        pub fn setIsExternPrototype(self: *FnProto, value: void) void {
            self.setTrailer(.is_extern_prototype, value);
        }

        pub fn getIsAsync(self: *const FnProto) ?void {
            return self.getTrailer(.is_async);
        }

        pub fn setIsAsync(self: *FnProto, value: void) void {
            self.setTrailer(.is_async, value);
        }

        // Trailer objects live after the params array, which itself follows
        // the FnProto struct in memory; realign to ParamDecl before indexing.
        fn getTrailer(self: *const FnProto, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
            const trailers_start = @alignCast(
                @alignOf(ParamDecl),
                @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
            );
            return self.trailer_flags.get(trailers_start, field);
        }

        fn setTrailer(self: *FnProto, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
            const trailers_start = @alignCast(
                @alignOf(ParamDecl),
                @ptrCast([*]u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
            );
            self.trailer_flags.set(trailers_start, field, value);
        }

        /// After this the caller must initialize the params list.
        // Allocates the FnProto, its params array, and its trailer storage as
        // one allocation. Free with `destroy`.
        pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*FnProto {
            const trailer_flags = TrailerFlags.init(trailers);
            const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(
                required.params_len,
                trailer_flags,
            ));
            const fn_proto = @ptrCast(*FnProto, bytes.ptr);
            fn_proto.* = .{
                .trailer_flags = trailer_flags,
                .fn_token = required.fn_token,
                .params_len = required.params_len,
                .return_type = required.return_type,
            };
            const trailers_start = @alignCast(
                @alignOf(ParamDecl),
                bytes.ptr + @sizeOf(FnProto) + @sizeOf(ParamDecl) * required.params_len,
            );
            trailer_flags.setMany(trailers_start, trailers);
            return fn_proto;
        }

        pub fn destroy(self: *FnProto, allocator: *mem.Allocator) void {
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len, self.trailer_flags)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const FnProto, index: usize) ?*Node {
            var i = index;

            if (self.getLibName()) |lib_name| {
                if (i < 1) return lib_name;
                i -= 1;
            }

            // NOTE(review): both valid tags yield `self.params_len`; the switch
            // on the last param's tag appears to exist only to trip safety
            // checks on an uninitialized params array — confirm.
            const params_len: usize = if (self.params_len == 0)
                0
            else switch (self.paramsConst()[self.params_len - 1].param_type) {
                .any_type, .type_expr => self.params_len,
            };
            if (i < params_len) {
                switch (self.paramsConst()[i].param_type) {
                    .any_type => |n| return n,
                    .type_expr => |n| return n,
                }
            }
            i -= params_len;

            if (self.getAlignExpr()) |align_expr| {
                if (i < 1) return align_expr;
                i -= 1;
            }

            if (self.getSectionExpr()) |section_expr| {
                if (i < 1) return section_expr;
                i -= 1;
            }

            switch (self.return_type) {
                .Explicit, .InferErrorSet => |node| {
                    if (i < 1) return node;
                    i -= 1;
                },
                .Invalid => {},
            }

            if (self.getBodyNode()) |body_node| {
                if (i < 1) return body_node;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const FnProto) TokenIndex {
            if (self.getVisibToken()) |visib_token| return visib_token;
            if (self.getExternExportInlineToken()) |extern_export_inline_token| return extern_export_inline_token;
            // A lib name can only appear alongside an extern/export token.
            assert(self.getLibName() == null);
            return self.fn_token;
        }

        pub fn lastToken(self: *const FnProto) TokenIndex {
            if (self.getBodyNode()) |body_node| return body_node.lastToken();
            switch (self.return_type) {
                .Explicit, .InferErrorSet => |node| return node.lastToken(),
                .Invalid => |tok| return tok,
            }
        }

        pub fn params(self: *FnProto) []ParamDecl {
            const params_start = @ptrCast([*]u8, self) + @sizeOf(FnProto);
            return @ptrCast([*]ParamDecl, params_start)[0..self.params_len];
        }

        pub fn paramsConst(self: *const FnProto) []const ParamDecl {
            const params_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto);
            return @ptrCast([*]const ParamDecl, params_start)[0..self.params_len];
        }

        fn sizeInBytes(params_len: NodeIndex, trailer_flags: TrailerFlags) usize {
            return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len) + trailer_flags.sizeInBytes();
        }
    };

    // `anyframe` or `anyframe->T`.
    pub const AnyFrameType = struct {
        base: Node = Node{ .tag = .AnyFrameType },
        anyframe_token: TokenIndex,
        result: ?Result,

        pub const Result = struct {
            arrow_token: TokenIndex,
            return_type: *Node,
        };

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const AnyFrameType, index: usize) ?*Node {
            var i = index;

            if (self.result) |result| {
                if (i < 1) return result.return_type;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const AnyFrameType) TokenIndex {
            return self.anyframe_token;
        }

        pub fn lastToken(self: *const AnyFrameType) TokenIndex {
            if (self.result) |result| return result.return_type.lastToken();
            return self.anyframe_token;
        }
    };

    /// The statements of the block follow Block directly in memory.
    pub const Block = struct {
        base: Node = Node{ .tag = .Block },
        statements_len: NodeIndex,
        lbrace: TokenIndex,
        rbrace: TokenIndex,

        /// After this the caller must initialize the statements list.
        pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len));
            return @ptrCast(*Block, bytes.ptr);
        }

        pub fn free(self: *Block, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + statement pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Block, index: usize) ?*Node {
            var i = index;

            if (i < self.statements_len) return self.statementsConst()[i];
            i -= self.statements_len;

            return null;
        }

        pub fn firstToken(self: *const Block) TokenIndex {
            return self.lbrace;
        }

        pub fn lastToken(self: *const Block) TokenIndex {
            return self.rbrace;
        }

        pub fn statements(self: *Block) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block);
            return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
        }

        pub fn statementsConst(self: *const Block) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block);
            return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
        }

        fn sizeInBytes(statements_len: NodeIndex) usize {
            return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len);
        }
    };

    /// The statements of the block follow LabeledBlock directly in memory.
    pub const LabeledBlock = struct {
        base: Node = Node{ .tag = .LabeledBlock },
        statements_len: NodeIndex,
        lbrace: TokenIndex,
        rbrace: TokenIndex,
        label: TokenIndex,

        /// After this the caller must initialize the statements list.
        pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*LabeledBlock {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(LabeledBlock), sizeInBytes(statements_len));
            return @ptrCast(*LabeledBlock, bytes.ptr);
        }

        pub fn free(self: *LabeledBlock, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + statement pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const LabeledBlock, index: usize) ?*Node {
            var i = index;

            if (i < self.statements_len) return self.statementsConst()[i];
            i -= self.statements_len;

            return null;
        }

        pub fn firstToken(self: *const LabeledBlock) TokenIndex {
            return self.label;
        }

        pub fn lastToken(self: *const LabeledBlock) TokenIndex {
            return self.rbrace;
        }

        pub fn statements(self: *LabeledBlock) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(LabeledBlock);
            return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
        }

        pub fn statementsConst(self: *const LabeledBlock) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(LabeledBlock);
            return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
        }

        fn sizeInBytes(statements_len: NodeIndex) usize {
            return @sizeOf(LabeledBlock) + @sizeOf(*Node) * @as(usize, statements_len);
        }
    };

    // `defer <expr>` / `errdefer |payload| <expr>`.
    pub const Defer = struct {
        base: Node = Node{ .tag = .Defer },
        defer_token: TokenIndex,
        payload: ?*Node,
        expr: *Node,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Defer, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.expr;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const Defer) TokenIndex {
            return self.defer_token;
        }

        pub fn lastToken(self: *const Defer) TokenIndex {
            return self.expr.lastToken();
        }
    };

    // `comptime <expr>`.
    pub const Comptime = struct {
        base: Node = Node{ .tag = .Comptime },
        doc_comments: ?*DocComment,
        comptime_token: TokenIndex,
        expr: *Node,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Comptime, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.expr;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const Comptime) TokenIndex {
            return self.comptime_token;
        }

        pub fn lastToken(self: *const Comptime) TokenIndex {
            return self.expr.lastToken();
        }
    };

    // `nosuspend <expr>`.
    pub const Nosuspend = struct {
        base: Node = Node{ .tag = .Nosuspend },
        nosuspend_token: TokenIndex,
        expr: *Node,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Nosuspend, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.expr;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const Nosuspend) TokenIndex {
            return self.nosuspend_token;
        }

        pub fn lastToken(self: *const Nosuspend) TokenIndex {
            return self.expr.lastToken();
        }
    };

    // `|error_symbol|` capture.
    pub const Payload = struct {
        base: Node = Node{ .tag = .Payload },
        lpipe: TokenIndex,
        error_symbol: *Node,
        rpipe: TokenIndex,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Payload, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.error_symbol;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const Payload) TokenIndex {
            return self.lpipe;
        }

        pub fn lastToken(self: *const Payload) TokenIndex {
            return self.rpipe;
        }
    };

    // `|*value_symbol|` capture; `ptr_token` is set when `*` is present.
    pub const PointerPayload = struct {
        base: Node = Node{ .tag = .PointerPayload },
        lpipe: TokenIndex,
        ptr_token: ?TokenIndex,
        value_symbol: *Node,
        rpipe: TokenIndex,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const PointerPayload, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.value_symbol;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const PointerPayload) TokenIndex {
            return self.lpipe;
        }

        pub fn lastToken(self: *const PointerPayload) TokenIndex {
            return self.rpipe;
        }
    };

    // `|*value_symbol, index_symbol|` capture.
    pub const PointerIndexPayload = struct {
        base: Node = Node{ .tag = .PointerIndexPayload },
        lpipe: TokenIndex,
        ptr_token: ?TokenIndex,
        value_symbol: *Node,
        index_symbol: ?*Node,
        rpipe: TokenIndex,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const PointerIndexPayload, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.value_symbol;
            i -= 1;

            if (self.index_symbol) |index_symbol| {
                if (i < 1) return index_symbol;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const PointerIndexPayload) TokenIndex {
            return self.lpipe;
        }

        pub fn lastToken(self: *const PointerIndexPayload) TokenIndex {
            return self.rpipe;
        }
    };

    // An `else` clause with optional `|payload|` capture.
    pub const Else = struct {
        base: Node = Node{ .tag = .Else },
        else_token: TokenIndex,
        payload: ?*Node,
        body: *Node,

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const Else, index: usize) ?*Node {
            var i = index;

            if (self.payload) |payload| {
                if (i < 1) return payload;
                i -= 1;
            }

            if (i < 1) return self.body;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const Else) TokenIndex {
            return self.else_token;
        }

        pub fn lastToken(self: *const Else) TokenIndex {
            return self.body.lastToken();
        }
    };

    /// The cases node pointers are found in memory after Switch.
    /// They must be SwitchCase or SwitchElse nodes.
    pub const Switch = struct {
        base: Node = Node{ .tag = .Switch },
        switch_token: TokenIndex,
        rbrace: TokenIndex,
        cases_len: NodeIndex,
        expr: *Node,

        /// After this the caller must initialize the fields_and_decls list.
        pub fn alloc(allocator: *mem.Allocator, cases_len: NodeIndex) !*Switch {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(Switch), sizeInBytes(cases_len));
            return @ptrCast(*Switch, bytes.ptr);
        }

        pub fn free(self: *Switch, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + case pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.cases_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        // The switch operand comes first, then the cases.
        pub fn iterate(self: *const Switch, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.expr;
            i -= 1;

            if (i < self.cases_len) return self.casesConst()[i];
            i -= self.cases_len;

            return null;
        }

        pub fn firstToken(self: *const Switch) TokenIndex {
            return self.switch_token;
        }

        pub fn lastToken(self: *const Switch) TokenIndex {
            return self.rbrace;
        }

        pub fn cases(self: *Switch) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(Switch);
            return @ptrCast([*]*Node, decls_start)[0..self.cases_len];
        }

        pub fn casesConst(self: *const Switch) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Switch);
            return @ptrCast([*]const *Node, decls_start)[0..self.cases_len];
        }

        fn sizeInBytes(cases_len: NodeIndex) usize {
            return @sizeOf(Switch) + @sizeOf(*Node) * @as(usize, cases_len);
        }
    };

    /// Items sub-nodes appear in memory directly following SwitchCase.
    pub const SwitchCase = struct {
        base: Node = Node{ .tag = .SwitchCase },
        arrow_token: TokenIndex,
        payload: ?*Node,
        expr: *Node,
        items_len: NodeIndex,

        /// After this the caller must initialize the fields_and_decls list.
        pub fn alloc(allocator: *mem.Allocator, items_len: NodeIndex) !*SwitchCase {
            const bytes = try allocator.alignedAlloc(u8, @alignOf(SwitchCase), sizeInBytes(items_len));
            return @ptrCast(*SwitchCase, bytes.ptr);
        }

        pub fn free(self: *SwitchCase, allocator: *mem.Allocator) void {
            // Reconstruct the full allocation (struct + item pointers) to free it.
            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.items_len)];
            allocator.free(bytes);
        }

        // Returns the `index`-th child node, or null when out of range.
        pub fn iterate(self: *const SwitchCase, index: usize) ?*Node {
            var i = index;

            if (i < self.items_len) return self.itemsConst()[i];
            i -= self.items_len;

            if (self.payload) |payload| {
                if (i < 1) return payload;
                i -= 1;
            }

            if (i < 1) return self.expr;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const SwitchCase) TokenIndex {
            // NOTE(review): assumes at least one item is present — confirm the
            // parser never produces a SwitchCase with items_len == 0.
            return self.itemsConst()[0].firstToken();
        }

        pub fn lastToken(self: *const SwitchCase) TokenIndex {
            return self.expr.lastToken();
        }

        pub fn items(self: *SwitchCase) []*Node {
            const decls_start = @ptrCast([*]u8, self) + @sizeOf(SwitchCase);
            return @ptrCast([*]*Node, decls_start)[0..self.items_len];
        }

        pub fn itemsConst(self: *const SwitchCase) []const *Node {
            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(SwitchCase);
            return @ptrCast([*]const *Node, decls_start)[0..self.items_len];
        }

        fn sizeInBytes(items_len: NodeIndex) usize {
            return @sizeOf(SwitchCase) + @sizeOf(*Node) * @as(usize, items_len);
        }
    };

    // The `else` prong of a switch.
    pub const SwitchElse = struct {
        base: Node = Node{ .tag = .SwitchElse },
        token: TokenIndex,

        // A switch-else keyword has no child nodes.
        pub fn iterate(self: *const SwitchElse, index: usize) ?*Node {
            return null;
        }

        pub fn firstToken(self: *const SwitchElse) TokenIndex {
            return self.token;
        }

        pub fn lastToken(self: *const SwitchElse) TokenIndex {
            return self.token;
        }
    };

    pub const While = struct {
        base: Node = Node{ .tag = .While },
        label: ?TokenIndex,
        inline_token: ?TokenIndex,
        while_token: TokenIndex,
        condition: *Node,
        payload: ?*Node,
        continue_expr: ?*Node,
        body: *Node,
@"else": ?*Else, + + pub fn iterate(self: *const While, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.condition; + i -= 1; + + if (self.payload) |payload| { + if (i < 1) return payload; + i -= 1; + } + + if (self.continue_expr) |continue_expr| { + if (i < 1) return continue_expr; + i -= 1; + } + + if (i < 1) return self.body; + i -= 1; + + if (self.@"else") |@"else"| { + if (i < 1) return &@"else".base; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const While) TokenIndex { + if (self.label) |label| { + return label; + } + + if (self.inline_token) |inline_token| { + return inline_token; + } + + return self.while_token; + } + + pub fn lastToken(self: *const While) TokenIndex { + if (self.@"else") |@"else"| { + return @"else".body.lastToken(); + } + + return self.body.lastToken(); + } + }; + + pub const For = struct { + base: Node = Node{ .tag = .For }, + label: ?TokenIndex, + inline_token: ?TokenIndex, + for_token: TokenIndex, + array_expr: *Node, + payload: *Node, + body: *Node, + @"else": ?*Else, + + pub fn iterate(self: *const For, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.array_expr; + i -= 1; + + if (i < 1) return self.payload; + i -= 1; + + if (i < 1) return self.body; + i -= 1; + + if (self.@"else") |@"else"| { + if (i < 1) return &@"else".base; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const For) TokenIndex { + if (self.label) |label| { + return label; + } + + if (self.inline_token) |inline_token| { + return inline_token; + } + + return self.for_token; + } + + pub fn lastToken(self: *const For) TokenIndex { + if (self.@"else") |@"else"| { + return @"else".body.lastToken(); + } + + return self.body.lastToken(); + } + }; + + pub const If = struct { + base: Node = Node{ .tag = .If }, + if_token: TokenIndex, + condition: *Node, + payload: ?*Node, + body: *Node, + @"else": ?*Else, + + pub fn iterate(self: *const If, index: usize) ?*Node { + var i = index; + + if (i < 1) return 
self.condition; + i -= 1; + + if (self.payload) |payload| { + if (i < 1) return payload; + i -= 1; + } + + if (i < 1) return self.body; + i -= 1; + + if (self.@"else") |@"else"| { + if (i < 1) return &@"else".base; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const If) TokenIndex { + return self.if_token; + } + + pub fn lastToken(self: *const If) TokenIndex { + if (self.@"else") |@"else"| { + return @"else".body.lastToken(); + } + + return self.body.lastToken(); + } + }; + + pub const Catch = struct { + base: Node = Node{ .tag = .Catch }, + op_token: TokenIndex, + lhs: *Node, + rhs: *Node, + payload: ?*Node, + + pub fn iterate(self: *const Catch, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (self.payload) |payload| { + if (i < 1) return payload; + i -= 1; + } + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const Catch) TokenIndex { + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const Catch) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const SimpleInfixOp = struct { + base: Node, + op_token: TokenIndex, + lhs: *Node, + rhs: *Node, + + pub fn iterate(self: *const SimpleInfixOp, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const SimpleInfixOp) TokenIndex { + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const SimpleInfixOp) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const SimplePrefixOp = struct { + base: Node, + op_token: TokenIndex, + rhs: *Node, + + const Self = @This(); + + pub fn iterate(self: *const Self, index: usize) ?*Node { + if (index == 0) return self.rhs; + return null; + } + + pub fn firstToken(self: *const Self) TokenIndex { + return self.op_token; + } + + pub fn lastToken(self: *const Self) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub 
const ArrayType = struct { + base: Node = Node{ .tag = .ArrayType }, + op_token: TokenIndex, + rhs: *Node, + len_expr: *Node, + + pub fn iterate(self: *const ArrayType, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.len_expr; + i -= 1; + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const ArrayType) TokenIndex { + return self.op_token; + } + + pub fn lastToken(self: *const ArrayType) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const ArrayTypeSentinel = struct { + base: Node = Node{ .tag = .ArrayTypeSentinel }, + op_token: TokenIndex, + rhs: *Node, + len_expr: *Node, + sentinel: *Node, + + pub fn iterate(self: *const ArrayTypeSentinel, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.len_expr; + i -= 1; + + if (i < 1) return self.sentinel; + i -= 1; + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const ArrayTypeSentinel) TokenIndex { + return self.op_token; + } + + pub fn lastToken(self: *const ArrayTypeSentinel) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const PtrType = struct { + base: Node = Node{ .tag = .PtrType }, + op_token: TokenIndex, + rhs: *Node, + /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents + /// one of these possibly-null things. Then we have them directly follow the PtrType in memory. 
+ ptr_info: PtrInfo = .{}, + + pub fn iterate(self: *const PtrType, index: usize) ?*Node { + var i = index; + + if (self.ptr_info.sentinel) |sentinel| { + if (i < 1) return sentinel; + i -= 1; + } + + if (self.ptr_info.align_info) |align_info| { + if (i < 1) return align_info.node; + i -= 1; + } + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const PtrType) TokenIndex { + return self.op_token; + } + + pub fn lastToken(self: *const PtrType) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const SliceType = struct { + base: Node = Node{ .tag = .SliceType }, + op_token: TokenIndex, + rhs: *Node, + /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents + /// one of these possibly-null things. Then we have them directly follow the SliceType in memory. + ptr_info: PtrInfo = .{}, + + pub fn iterate(self: *const SliceType, index: usize) ?*Node { + var i = index; + + if (self.ptr_info.sentinel) |sentinel| { + if (i < 1) return sentinel; + i -= 1; + } + + if (self.ptr_info.align_info) |align_info| { + if (i < 1) return align_info.node; + i -= 1; + } + + if (i < 1) return self.rhs; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const SliceType) TokenIndex { + return self.op_token; + } + + pub fn lastToken(self: *const SliceType) TokenIndex { + return self.rhs.lastToken(); + } + }; + + pub const FieldInitializer = struct { + base: Node = Node{ .tag = .FieldInitializer }, + period_token: TokenIndex, + name_token: TokenIndex, + expr: *Node, + + pub fn iterate(self: *const FieldInitializer, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.expr; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const FieldInitializer) TokenIndex { + return self.period_token; + } + + pub fn lastToken(self: *const FieldInitializer) TokenIndex { + return self.expr.lastToken(); + } + }; + + /// Elements occur directly in memory after ArrayInitializer. 
+ pub const ArrayInitializer = struct { + base: Node = Node{ .tag = .ArrayInitializer }, + rtoken: TokenIndex, + list_len: NodeIndex, + lhs: *Node, + + /// After this the caller must initialize the fields_and_decls list. + pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializer { + const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializer), sizeInBytes(list_len)); + return @ptrCast(*ArrayInitializer, bytes.ptr); + } + + pub fn free(self: *ArrayInitializer, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const ArrayInitializer, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (i < self.list_len) return self.listConst()[i]; + i -= self.list_len; + + return null; + } + + pub fn firstToken(self: *const ArrayInitializer) TokenIndex { + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const ArrayInitializer) TokenIndex { + return self.rtoken; + } + + pub fn list(self: *ArrayInitializer) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializer); + return @ptrCast([*]*Node, decls_start)[0..self.list_len]; + } + + pub fn listConst(self: *const ArrayInitializer) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializer); + return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; + } + + fn sizeInBytes(list_len: NodeIndex) usize { + return @sizeOf(ArrayInitializer) + @sizeOf(*Node) * @as(usize, list_len); + } + }; + + /// Elements occur directly in memory after ArrayInitializerDot. + pub const ArrayInitializerDot = struct { + base: Node = Node{ .tag = .ArrayInitializerDot }, + dot: TokenIndex, + rtoken: TokenIndex, + list_len: NodeIndex, + + /// After this the caller must initialize the fields_and_decls list. 
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializerDot { + const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializerDot), sizeInBytes(list_len)); + return @ptrCast(*ArrayInitializerDot, bytes.ptr); + } + + pub fn free(self: *ArrayInitializerDot, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const ArrayInitializerDot, index: usize) ?*Node { + var i = index; + + if (i < self.list_len) return self.listConst()[i]; + i -= self.list_len; + + return null; + } + + pub fn firstToken(self: *const ArrayInitializerDot) TokenIndex { + return self.dot; + } + + pub fn lastToken(self: *const ArrayInitializerDot) TokenIndex { + return self.rtoken; + } + + pub fn list(self: *ArrayInitializerDot) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializerDot); + return @ptrCast([*]*Node, decls_start)[0..self.list_len]; + } + + pub fn listConst(self: *const ArrayInitializerDot) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializerDot); + return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; + } + + fn sizeInBytes(list_len: NodeIndex) usize { + return @sizeOf(ArrayInitializerDot) + @sizeOf(*Node) * @as(usize, list_len); + } + }; + + /// Elements occur directly in memory after StructInitializer. + pub const StructInitializer = struct { + base: Node = Node{ .tag = .StructInitializer }, + rtoken: TokenIndex, + list_len: NodeIndex, + lhs: *Node, + + /// After this the caller must initialize the fields_and_decls list. 
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializer { + const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializer), sizeInBytes(list_len)); + return @ptrCast(*StructInitializer, bytes.ptr); + } + + pub fn free(self: *StructInitializer, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const StructInitializer, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (i < self.list_len) return self.listConst()[i]; + i -= self.list_len; + + return null; + } + + pub fn firstToken(self: *const StructInitializer) TokenIndex { + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const StructInitializer) TokenIndex { + return self.rtoken; + } + + pub fn list(self: *StructInitializer) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializer); + return @ptrCast([*]*Node, decls_start)[0..self.list_len]; + } + + pub fn listConst(self: *const StructInitializer) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializer); + return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; + } + + fn sizeInBytes(list_len: NodeIndex) usize { + return @sizeOf(StructInitializer) + @sizeOf(*Node) * @as(usize, list_len); + } + }; + + /// Elements occur directly in memory after StructInitializerDot. + pub const StructInitializerDot = struct { + base: Node = Node{ .tag = .StructInitializerDot }, + dot: TokenIndex, + rtoken: TokenIndex, + list_len: NodeIndex, + + /// After this the caller must initialize the fields_and_decls list. 
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializerDot { + const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializerDot), sizeInBytes(list_len)); + return @ptrCast(*StructInitializerDot, bytes.ptr); + } + + pub fn free(self: *StructInitializerDot, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const StructInitializerDot, index: usize) ?*Node { + var i = index; + + if (i < self.list_len) return self.listConst()[i]; + i -= self.list_len; + + return null; + } + + pub fn firstToken(self: *const StructInitializerDot) TokenIndex { + return self.dot; + } + + pub fn lastToken(self: *const StructInitializerDot) TokenIndex { + return self.rtoken; + } + + pub fn list(self: *StructInitializerDot) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializerDot); + return @ptrCast([*]*Node, decls_start)[0..self.list_len]; + } + + pub fn listConst(self: *const StructInitializerDot) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializerDot); + return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; + } + + fn sizeInBytes(list_len: NodeIndex) usize { + return @sizeOf(StructInitializerDot) + @sizeOf(*Node) * @as(usize, list_len); + } + }; + + /// Parameter nodes directly follow Call in memory. + pub const Call = struct { + base: Node = Node{ .tag = .Call }, + rtoken: TokenIndex, + lhs: *Node, + params_len: NodeIndex, + async_token: ?TokenIndex, + + /// After this the caller must initialize the fields_and_decls list. 
+ pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*Call { + const bytes = try allocator.alignedAlloc(u8, @alignOf(Call), sizeInBytes(params_len)); + return @ptrCast(*Call, bytes.ptr); + } + + pub fn free(self: *Call, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const Call, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (i < self.params_len) return self.paramsConst()[i]; + i -= self.params_len; + + return null; + } + + pub fn firstToken(self: *const Call) TokenIndex { + if (self.async_token) |async_token| return async_token; + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const Call) TokenIndex { + return self.rtoken; + } + + pub fn params(self: *Call) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(Call); + return @ptrCast([*]*Node, decls_start)[0..self.params_len]; + } + + pub fn paramsConst(self: *const Call) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Call); + return @ptrCast([*]const *Node, decls_start)[0..self.params_len]; + } + + fn sizeInBytes(params_len: NodeIndex) usize { + return @sizeOf(Call) + @sizeOf(*Node) * @as(usize, params_len); + } + }; + + pub const ArrayAccess = struct { + base: Node = Node{ .tag = .ArrayAccess }, + rtoken: TokenIndex, + lhs: *Node, + index_expr: *Node, + + pub fn iterate(self: *const ArrayAccess, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.lhs; + i -= 1; + + if (i < 1) return self.index_expr; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const ArrayAccess) TokenIndex { + return self.lhs.firstToken(); + } + + pub fn lastToken(self: *const ArrayAccess) TokenIndex { + return self.rtoken; + } + }; + + pub const SimpleSuffixOp = struct { + base: Node, + rtoken: TokenIndex, + lhs: *Node, + + pub fn iterate(self: *const SimpleSuffixOp, index: usize) ?*Node { + var 
            i = index;

            if (i < 1) return self.lhs;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const SimpleSuffixOp) TokenIndex {
            return self.lhs.firstToken();
        }

        pub fn lastToken(self: *const SimpleSuffixOp) TokenIndex {
            return self.rtoken;
        }
    };

    /// Slice expression, e.g. `lhs[start..end]` or `lhs[start..end :sentinel]`.
    pub const Slice = struct {
        base: Node = Node{ .tag = .Slice },
        /// The closing `]` token.
        rtoken: TokenIndex,
        lhs: *Node,
        start: *Node,
        /// Upper bound; null for an open-ended slice (`lhs[start..]`).
        end: ?*Node,
        /// Sentinel expression, if present.
        sentinel: ?*Node,

        /// Returns the index-th child node in source order
        /// (lhs, start, then end and sentinel when present), or null when exhausted.
        pub fn iterate(self: *const Slice, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.lhs;
            i -= 1;

            if (i < 1) return self.start;
            i -= 1;

            if (self.end) |end| {
                if (i < 1) return end;
                i -= 1;
            }
            if (self.sentinel) |sentinel| {
                if (i < 1) return sentinel;
                i -= 1;
            }

            return null;
        }

        pub fn firstToken(self: *const Slice) TokenIndex {
            return self.lhs.firstToken();
        }

        pub fn lastToken(self: *const Slice) TokenIndex {
            return self.rtoken;
        }
    };

    /// Parenthesized expression: `(expr)`.
    pub const GroupedExpression = struct {
        base: Node = Node{ .tag = .GroupedExpression },
        lparen: TokenIndex,
        expr: *Node,
        rparen: TokenIndex,

        /// The inner expression is the only child.
        pub fn iterate(self: *const GroupedExpression, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.expr;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const GroupedExpression) TokenIndex {
            return self.lparen;
        }

        pub fn lastToken(self: *const GroupedExpression) TokenIndex {
            return self.rparen;
        }
    };

    /// Trailed in memory by possibly many things, with each optional thing
    /// determined by a bit in `trailer_flags`.
+ /// Can be: return, break, continue + pub const ControlFlowExpression = struct { + base: Node, + trailer_flags: TrailerFlags, + ltoken: TokenIndex, + + pub const TrailerFlags = std.meta.TrailerFlags(struct { + rhs: *Node, + label: TokenIndex, + }); + + pub const RequiredFields = struct { + tag: Tag, + ltoken: TokenIndex, + }; + + pub fn getRHS(self: *const ControlFlowExpression) ?*Node { + return self.getTrailer(.rhs); + } + + pub fn setRHS(self: *ControlFlowExpression, value: *Node) void { + self.setTrailer(.rhs, value); + } + + pub fn getLabel(self: *const ControlFlowExpression) ?TokenIndex { + return self.getTrailer(.label); + } + + pub fn setLabel(self: *ControlFlowExpression, value: TokenIndex) void { + self.setTrailer(.label, value); + } + + fn getTrailer(self: *const ControlFlowExpression, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) { + const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(ControlFlowExpression); + return self.trailer_flags.get(trailers_start, field); + } + + fn setTrailer(self: *ControlFlowExpression, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void { + const trailers_start = @ptrCast([*]u8, self) + @sizeOf(ControlFlowExpression); + self.trailer_flags.set(trailers_start, field, value); + } + + pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*ControlFlowExpression { + const trailer_flags = TrailerFlags.init(trailers); + const bytes = try allocator.alignedAlloc(u8, @alignOf(ControlFlowExpression), sizeInBytes(trailer_flags)); + const ctrl_flow_expr = @ptrCast(*ControlFlowExpression, bytes.ptr); + ctrl_flow_expr.* = .{ + .base = .{ .tag = required.tag }, + .trailer_flags = trailer_flags, + .ltoken = required.ltoken, + }; + const trailers_start = bytes.ptr + @sizeOf(ControlFlowExpression); + trailer_flags.setMany(trailers_start, trailers); + return ctrl_flow_expr; + } + + pub fn destroy(self: *ControlFlowExpression, allocator: 
*mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const ControlFlowExpression, index: usize) ?*Node { + var i = index; + + if (self.getRHS()) |rhs| { + if (i < 1) return rhs; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const ControlFlowExpression) TokenIndex { + return self.ltoken; + } + + pub fn lastToken(self: *const ControlFlowExpression) TokenIndex { + if (self.getRHS()) |rhs| { + return rhs.lastToken(); + } + + if (self.getLabel()) |label| { + return label; + } + + return self.ltoken; + } + + fn sizeInBytes(trailer_flags: TrailerFlags) usize { + return @sizeOf(ControlFlowExpression) + trailer_flags.sizeInBytes(); + } + }; + + pub const Suspend = struct { + base: Node = Node{ .tag = .Suspend }, + suspend_token: TokenIndex, + body: ?*Node, + + pub fn iterate(self: *const Suspend, index: usize) ?*Node { + var i = index; + + if (self.body) |body| { + if (i < 1) return body; + i -= 1; + } + + return null; + } + + pub fn firstToken(self: *const Suspend) TokenIndex { + return self.suspend_token; + } + + pub fn lastToken(self: *const Suspend) TokenIndex { + if (self.body) |body| { + return body.lastToken(); + } + + return self.suspend_token; + } + }; + + pub const EnumLiteral = struct { + base: Node = Node{ .tag = .EnumLiteral }, + dot: TokenIndex, + name: TokenIndex, + + pub fn iterate(self: *const EnumLiteral, index: usize) ?*Node { + return null; + } + + pub fn firstToken(self: *const EnumLiteral) TokenIndex { + return self.dot; + } + + pub fn lastToken(self: *const EnumLiteral) TokenIndex { + return self.name; + } + }; + + /// Parameters are in memory following BuiltinCall. + pub const BuiltinCall = struct { + base: Node = Node{ .tag = .BuiltinCall }, + params_len: NodeIndex, + builtin_token: TokenIndex, + rparen_token: TokenIndex, + + /// After this the caller must initialize the fields_and_decls list. 
+ pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*BuiltinCall { + const bytes = try allocator.alignedAlloc(u8, @alignOf(BuiltinCall), sizeInBytes(params_len)); + return @ptrCast(*BuiltinCall, bytes.ptr); + } + + pub fn free(self: *BuiltinCall, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const BuiltinCall, index: usize) ?*Node { + var i = index; + + if (i < self.params_len) return self.paramsConst()[i]; + i -= self.params_len; + + return null; + } + + pub fn firstToken(self: *const BuiltinCall) TokenIndex { + return self.builtin_token; + } + + pub fn lastToken(self: *const BuiltinCall) TokenIndex { + return self.rparen_token; + } + + pub fn params(self: *BuiltinCall) []*Node { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(BuiltinCall); + return @ptrCast([*]*Node, decls_start)[0..self.params_len]; + } + + pub fn paramsConst(self: *const BuiltinCall) []const *Node { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(BuiltinCall); + return @ptrCast([*]const *Node, decls_start)[0..self.params_len]; + } + + fn sizeInBytes(params_len: NodeIndex) usize { + return @sizeOf(BuiltinCall) + @sizeOf(*Node) * @as(usize, params_len); + } + }; + + /// The string literal tokens appear directly in memory after MultilineStringLiteral. + pub const MultilineStringLiteral = struct { + base: Node = Node{ .tag = .MultilineStringLiteral }, + lines_len: TokenIndex, + + /// After this the caller must initialize the lines list. 
+ pub fn alloc(allocator: *mem.Allocator, lines_len: NodeIndex) !*MultilineStringLiteral { + const bytes = try allocator.alignedAlloc(u8, @alignOf(MultilineStringLiteral), sizeInBytes(lines_len)); + return @ptrCast(*MultilineStringLiteral, bytes.ptr); + } + + pub fn free(self: *MultilineStringLiteral, allocator: *mem.Allocator) void { + const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.lines_len)]; + allocator.free(bytes); + } + + pub fn iterate(self: *const MultilineStringLiteral, index: usize) ?*Node { + return null; + } + + pub fn firstToken(self: *const MultilineStringLiteral) TokenIndex { + return self.linesConst()[0]; + } + + pub fn lastToken(self: *const MultilineStringLiteral) TokenIndex { + return self.linesConst()[self.lines_len - 1]; + } + + pub fn lines(self: *MultilineStringLiteral) []TokenIndex { + const decls_start = @ptrCast([*]u8, self) + @sizeOf(MultilineStringLiteral); + return @ptrCast([*]TokenIndex, decls_start)[0..self.lines_len]; + } + + pub fn linesConst(self: *const MultilineStringLiteral) []const TokenIndex { + const decls_start = @ptrCast([*]const u8, self) + @sizeOf(MultilineStringLiteral); + return @ptrCast([*]const TokenIndex, decls_start)[0..self.lines_len]; + } + + fn sizeInBytes(lines_len: NodeIndex) usize { + return @sizeOf(MultilineStringLiteral) + @sizeOf(TokenIndex) * @as(usize, lines_len); + } + }; + + pub const Asm = struct { + base: Node = Node{ .tag = .Asm }, + asm_token: TokenIndex, + rparen: TokenIndex, + volatile_token: ?TokenIndex, + template: *Node, + outputs: []Output, + inputs: []Input, + /// A clobber node must be a StringLiteral or MultilineStringLiteral. 
+ clobbers: []*Node, + + pub const Output = struct { + lbracket: TokenIndex, + symbolic_name: *Node, + constraint: *Node, + kind: Kind, + rparen: TokenIndex, + + pub const Kind = union(enum) { + Variable: *OneToken, + Return: *Node, + }; + + pub fn iterate(self: *const Output, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.symbolic_name; + i -= 1; + + if (i < 1) return self.constraint; + i -= 1; + + switch (self.kind) { + .Variable => |variable_name| { + if (i < 1) return &variable_name.base; + i -= 1; + }, + .Return => |return_type| { + if (i < 1) return return_type; + i -= 1; + }, + } + + return null; + } + + pub fn firstToken(self: *const Output) TokenIndex { + return self.lbracket; + } + + pub fn lastToken(self: *const Output) TokenIndex { + return self.rparen; + } + }; + + pub const Input = struct { + lbracket: TokenIndex, + symbolic_name: *Node, + constraint: *Node, + expr: *Node, + rparen: TokenIndex, + + pub fn iterate(self: *const Input, index: usize) ?*Node { + var i = index; + + if (i < 1) return self.symbolic_name; + i -= 1; + + if (i < 1) return self.constraint; + i -= 1; + + if (i < 1) return self.expr; + i -= 1; + + return null; + } + + pub fn firstToken(self: *const Input) TokenIndex { + return self.lbracket; + } + + pub fn lastToken(self: *const Input) TokenIndex { + return self.rparen; + } + }; + + pub fn iterate(self: *const Asm, index: usize) ?*Node { + var i = index; + + if (i < self.outputs.len * 3) switch (i % 3) { + 0 => return self.outputs[i / 3].symbolic_name, + 1 => return self.outputs[i / 3].constraint, + 2 => switch (self.outputs[i / 3].kind) { + .Variable => |variable_name| return &variable_name.base, + .Return => |return_type| return return_type, + }, + else => unreachable, + }; + i -= self.outputs.len * 3; + + if (i < self.inputs.len * 3) switch (i % 3) { + 0 => return self.inputs[i / 3].symbolic_name, + 1 => return self.inputs[i / 3].constraint, + 2 => return self.inputs[i / 3].expr, + else => unreachable, + }; + 
i -= self.inputs.len * 3; + + return null; + } + + pub fn firstToken(self: *const Asm) TokenIndex { + return self.asm_token; + } + + pub fn lastToken(self: *const Asm) TokenIndex { + return self.rparen; + } + }; + + /// TODO remove from the Node base struct + /// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken() + /// and forwards to find same-line doc comments. + pub const DocComment = struct { + base: Node = Node{ .tag = .DocComment }, + /// Points to the first doc comment token. API users are expected to iterate over the + /// tokens array, looking for more doc comments, ignoring line comments, and stopping + /// at the first other token. + first_line: TokenIndex, + + pub fn iterate(self: *const DocComment, index: usize) ?*Node { + return null; + } + + pub fn firstToken(self: *const DocComment) TokenIndex { + return self.first_line; + } + + /// Returns the first doc comment line. Be careful, this may not be the desired behavior, + /// which would require the tokens array. 
        pub fn lastToken(self: *const DocComment) TokenIndex {
            return self.first_line;
        }
    };

    /// A `test "..." { ... }` declaration.
    pub const TestDecl = struct {
        base: Node = Node{ .tag = .TestDecl },
        /// Doc comments attached above the `test` keyword, if any.
        doc_comments: ?*DocComment,
        test_token: TokenIndex,
        /// The test name node (presumably a string literal — TODO confirm against the parser).
        name: *Node,
        /// The block executed by the test.
        body_node: *Node,

        /// Only the body is iterated as a child; the name node is not visited here.
        pub fn iterate(self: *const TestDecl, index: usize) ?*Node {
            var i = index;

            if (i < 1) return self.body_node;
            i -= 1;

            return null;
        }

        pub fn firstToken(self: *const TestDecl) TokenIndex {
            return self.test_token;
        }

        pub fn lastToken(self: *const TestDecl) TokenIndex {
            return self.body_node.lastToken();
        }
    };
};

/// Pointer/slice type modifiers gathered while parsing: `allowzero`,
/// `align(...)`, `const`, `volatile` and the sentinel expression.
/// All fields default to null, meaning "modifier not present".
pub const PtrInfo = struct {
    allowzero_token: ?TokenIndex = null,
    align_info: ?Align = null,
    const_token: ?TokenIndex = null,
    volatile_token: ?TokenIndex = null,
    sentinel: ?*Node = null,

    pub const Align = struct {
        /// The alignment expression inside `align(...)`.
        node: *Node,
        /// Optional bit range following the alignment expression
        /// (NOTE(review): presumably `align(a:start:end)` — confirm against the parser).
        bit_range: ?BitRange = null,

        pub const BitRange = struct {
            start: *Node,
            end: *Node,
        };
    };
};

// An empty Root has no declarations, so iterate(0) must immediately return null.
test "iterate" {
    var root = Node.Root{
        .base = Node{ .tag = Node.Tag.Root },
        .decls_len = 0,
        .eof_token = 0,
    };
    var base = &root.base;
    testing.expect(base.iterate(0) == null);
}
diff --git a/src/parse.zig b/src/parse.zig
new file mode 100644
index 0000000..1cfa4a4
--- /dev/null
+++ b/src/parse.zig
@@ -0,0 +1,3378 @@
// SPDX-License-Identifier: MIT
// Copyright (c) 2015-2020 Zig Contributors
// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
// The MIT license requires this copyright notice to be included in all copies
// and substantial portions of the software.
const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const ast = @import("ast.zig");
const Node = ast.Node;
const Tree = ast.Tree;
const AstError = ast.Error;
const TokenIndex = ast.TokenIndex;
const NodeIndex = ast.NodeIndex;

const lexer = @import("tokenizer.zig");
const Token = lexer.Token;
const Tokenizer = lexer.Tokenizer;

pub const Error = error{ParseError} || Allocator.Error;

/// Parses `source` into an ast.Tree.
/// Result should be freed with tree.deinit() when there are
/// no more references to any of the tokens or nodes.
pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!*Tree {

    var token_ids = std.ArrayList(Token.Id).init(gpa);
    defer token_ids.deinit();
    var token_locs = std.ArrayList(Token.Loc).init(gpa);
    defer token_locs.deinit();

    // Pre-reserve an estimated capacity (roughly one token per 8 source bytes)
    // so the appends below rarely reallocate.
    const estimated_token_count = source.len / 8;
    try token_ids.ensureCapacity(estimated_token_count);
    try token_locs.ensureCapacity(estimated_token_count);

    // Tokenize the entire source up front; the trailing .Eof token is stored too.
    var tokenizer = Tokenizer.init(source);
    while (true) {
        const token = tokenizer.next();
        try token_ids.append(token.id);
        try token_locs.append(token.loc);
        if (token.id == .Eof) break;
    }

    // The parser borrows the token arrays (`.items`); ownership is transferred
    // to the Tree below via toOwnedSlice().
    var parser: Parser = .{
        .source = source,
        .arena = std.heap.ArenaAllocator.init(gpa), // AST nodes live in this arena
        .gpa = gpa,
        .token_ids = token_ids.items,
        .token_locs = token_locs.items,
        .errors = .{}, // parse errors accumulated during parsing
        .tok_i = 0, // index of the token currently being analyzed
    };
    defer parser.errors.deinit(gpa);
    errdefer parser.arena.deinit();

    // Skip leading line comments so parsing starts at the first real token.
    while (token_ids.items[parser.tok_i] == .LineComment) parser.tok_i += 1;

    // Perform parsing, called once.
    const root_node = try parser.parseRoot();

    // The Tree itself is allocated in the parser's arena. It takes ownership of
    // the token slices and the error list: toOwnedSlice() hands over the buffers
    // and empties the ArrayLists, so the deinit calls deferred above are no-ops.
    const tree = try parser.arena.allocator.create(Tree);
    tree.* = .{
        .gpa = gpa,
        .source = source,
        .token_ids = token_ids.toOwnedSlice(),
        .token_locs = token_locs.toOwnedSlice(),
        .errors = parser.errors.toOwnedSlice(gpa),
        .root_node = root_node,
        .arena = parser.arena.state,
    };
    return tree;
}

/// Represents in-progress parsing, will be converted to an ast.Tree after completion.
const Parser = struct {
    arena: std.heap.ArenaAllocator,
    gpa: *Allocator,
    source: []const u8,
    token_ids: []const Token.Id,
    token_locs: []const Token.Loc,
    tok_i: TokenIndex,
    errors: std.ArrayListUnmanaged(AstError),

    /// Root <- skip ContainerMembers eof
    fn parseRoot(p: *Parser) Allocator.Error!*Node.Root {
        // Parse all top-level declarations.
        const decls = try parseContainerMembers(p, true);
        defer p.gpa.free(decls);

        // parseContainerMembers will try to skip as much
        // invalid tokens as it can, so the next token can only be the EOF.
        const eof_token = p.eatToken(.Eof).?;

        // Root stores its decls in trailing memory: create with the final count,
        // then copy the pointers in.
        const decls_len = @intCast(NodeIndex, decls.len);
        const node = try Node.Root.create(&p.arena.allocator, decls_len, eof_token);
        // std.mem.copy argument order: T, dest, src.
        std.mem.copy(*Node, node.decls(), decls);

        return node; // Root node.
    }

    /// ContainerMembers
    ///     <- TestDecl ContainerMembers
    ///      / TopLevelComptime ContainerMembers
    ///      / KEYWORD_pub? TopLevelDecl ContainerMembers
    ///      / ContainerField COMMA ContainerMembers
    ///      / ContainerField
    ///      /
+ fn parseContainerMembers(p: *Parser, top_level: bool) ![]*Node { + std.debug.print("parseContainerMembers: is top? {}\n", .{top_level}); + // list: all nodes in the ast. + var list = std.ArrayList(*Node).init(p.gpa); + defer list.deinit(); + + // field_state: union of enum. + // Tagged union: eligible to use in switch expressions and coerce their value. + // Example: switch (some_tagged_union) { SomeType => |value| print("{}\n", value); } + // If a '*' is placed before the variable name, it's a pointer to the value inside + // the tagged union. + // Example: switch (some_tagged_union) { SomeType => |*value| value.* += 1; } + // @TagType can be used to get the right enum type. + var field_state: union(enum) { + /// no fields have been seen + none, + /// currently parsing fields + seen, + /// saw fields and then a declaration after them. + /// payload is first token of previous declaration. + end: TokenIndex, // TokenIndex is defined as usize in std.zig.ast. + /// there was a declaration between fields, don't report more errors + err, + } = .none; + + // True start of parsing. + while (true) { + const token = p.nextToken(); + switch (p.token_ids[token]) { + .Eof => { + p.putBackToken(token); + break; + }, + else => { + std.debug.print("token: {}\n", .{p.token_ids[token]}); + continue; + }, + } + } + +// // Documentation comments. Ignored. 
+// if (try p.parseContainerDocComments()) |node| { +// std.debug.print("found: Doc Comments: {}\n", .{node}); +// try list.append(node); +// continue; +// } +// +// const doc_comments = try p.parseDocComment(); +// +// if (p.parseContainerField() catch |err| switch (err) { +// error.OutOfMemory => return error.OutOfMemory, +// error.ParseError => { +// // attempt to recover +// p.findNextContainerMember(); +// continue; +// }, +// }) |node| { +// std.debug.print("found: ContainerField: {}\n", .{node}); +// switch (field_state) { +// .none => field_state = .seen, +// .err, .seen => {}, +// .end => |tok| { +// try p.errors.append(p.gpa, .{ +// .DeclBetweenFields = .{ .token = tok }, +// }); +// // continue parsing, error will be reported later +// field_state = .err; +// }, +// } +// +// const field = node.cast(Node.ContainerField).?; +// field.doc_comments = doc_comments; +// try list.append(node); +// const comma = p.eatToken(.Comma) orelse { +// // try to continue parsing +// const index = p.tok_i; +// p.findNextContainerMember(); +// const next = p.token_ids[p.tok_i]; +// switch (next) { +// .Eof => { +// // no invalid tokens were found +// if (index == p.tok_i) break; +// +// // Invalid tokens, add error and exit +// try p.errors.append(p.gpa, .{ +// .ExpectedToken = .{ .token = index, .expected_id = .Comma }, +// }); +// break; +// }, +// else => { +// if (next == .RBrace) { +// if (!top_level) break; +// _ = p.nextToken(); +// } +// +// // add error and continue +// try p.errors.append(p.gpa, .{ +// .ExpectedToken = .{ .token = index, .expected_id = .Comma }, +// }); +// continue; +// }, +// } +// }; +// if (try p.parseAppendedDocComment(comma)) |appended_comment| +// field.doc_comments = appended_comment; +// continue; +// } +// +// // Dangling doc comment +// if (doc_comments != null) { +// try p.errors.append(p.gpa, .{ +// .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() }, +// }); +// } +// +// const next = p.token_ids[p.tok_i]; +// switch 
(next) { +// .Eof => break, +// .Keyword_comptime => { +// _ = p.nextToken(); +// try p.errors.append(p.gpa, .{ +// .ExpectedBlockOrField = .{ .token = p.tok_i }, +// }); +// }, +// else => { +// const index = p.tok_i; +// if (next == .RBrace) { +// if (!top_level) break; +// _ = p.nextToken(); +// } +// +// // this was likely not supposed to end yet, +// // try to find the next declaration +// p.findNextContainerMember(); +// try p.errors.append(p.gpa, .{ +// .ExpectedContainerMembers = .{ .token = index }, +// }); +// }, +// } +// } + + return list.toOwnedSlice(); + } + +// /// Attempts to find next container member by searching for certain tokens +// fn findNextContainerMember(p: *Parser) void { +// var level: u32 = 0; +// while (true) { +// const tok = p.nextToken(); +// switch (p.token_ids[tok]) { +// // any of these can start a new top level declaration +// .Keyword_test, +// .Keyword_comptime, +// .Keyword_pub, +// .Keyword_export, +// .Keyword_extern, +// .Keyword_inline, +// .Keyword_noinline, +// .Keyword_usingnamespace, +// .Keyword_threadlocal, +// .Keyword_const, +// .Keyword_var, +// .Keyword_fn, +// .Identifier, +// => { +// if (level == 0) { +// p.putBackToken(tok); +// return; +// } +// }, +// .Comma, .Semicolon => { +// // this decl was likely meant to end here +// if (level == 0) { +// return; +// } +// }, +// .LParen, .LBracket, .LBrace => level += 1, +// .RParen, .RBracket => { +// if (level != 0) level -= 1; +// }, +// .RBrace => { +// if (level == 0) { +// // end of container, exit +// p.putBackToken(tok); +// return; +// } +// level -= 1; +// }, +// .Eof => { +// p.putBackToken(tok); +// return; +// }, +// else => {}, +// } +// } +// } +// +// /// Attempts to find the next statement by searching for a semicolon +// fn findNextStmt(p: *Parser) void { +// var level: u32 = 0; +// while (true) { +// const tok = p.nextToken(); +// switch (p.token_ids[tok]) { +// .LBrace => level += 1, +// .RBrace => { +// if (level == 0) { +// 
p.putBackToken(tok); +// return; +// } +// level -= 1; +// }, +// .Semicolon => { +// if (level == 0) { +// return; +// } +// }, +// .Eof => { +// p.putBackToken(tok); +// return; +// }, +// else => {}, +// } +// } +// } +// +// /// Eat a multiline container doc comment +// fn parseContainerDocComments(p: *Parser) !?*Node { +// if (p.eatToken(.ContainerDocComment)) |first_line| { +// while (p.eatToken(.ContainerDocComment)) |_| {} +// const node = try p.arena.allocator.create(Node.DocComment); +// node.* = .{ .first_line = first_line }; +// return &node.base; +// } +// return null; +// } +// +// /// TopLevelComptime <- KEYWORD_comptime BlockExpr +// fn parseTopLevelComptime(p: *Parser) !?*Node { +// const tok = p.eatToken(.Keyword_comptime) orelse return null; +// const lbrace = p.eatToken(.LBrace) orelse { +// p.putBackToken(tok); +// return null; +// }; +// p.putBackToken(lbrace); +// const block_node = try p.expectNode(parseBlockExpr, .{ +// .ExpectedLabelOrLBrace = .{ .token = p.tok_i }, +// }); +// +// const comptime_node = try p.arena.allocator.create(Node.Comptime); +// comptime_node.* = .{ +// .doc_comments = null, +// .comptime_token = tok, +// .expr = block_node, +// }; +// return &comptime_node.base; +// } +// +// /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? 
(Keyword_anytype / TypeExpr) +// fn parseFnProto(p: *Parser, level: enum { top_level, as_type }, fields: struct { +// doc_comments: ?*Node.DocComment = null, +// visib_token: ?TokenIndex = null, +// extern_export_inline_token: ?TokenIndex = null, +// lib_name: ?*Node = null, +// }) !?*Node { +// // TODO: Remove once extern/async fn rewriting is +// var is_async: ?void = null; +// var is_extern_prototype: ?void = null; +// const cc_token: ?TokenIndex = blk: { +// if (p.eatToken(.Keyword_extern)) |token| { +// is_extern_prototype = {}; +// break :blk token; +// } +// if (p.eatToken(.Keyword_async)) |token| { +// is_async = {}; +// break :blk token; +// } +// break :blk null; +// }; +// const fn_token = p.eatToken(.Keyword_fn) orelse { +// if (cc_token) |token| +// p.putBackToken(token); +// return null; +// }; +// const name_token = p.eatToken(.Identifier); +// const lparen = try p.expectToken(.LParen); +// const params = try p.parseParamDeclList(); +// defer p.gpa.free(params); +// const var_args_token = p.eatToken(.Ellipsis3); +// const rparen = try p.expectToken(.RParen); +// const align_expr = try p.parseByteAlign(); +// const section_expr = try p.parseLinkSection(); +// const callconv_expr = try p.parseCallconv(); +// const exclamation_token = p.eatToken(.Bang); +// +// const return_type_expr = (try p.parseAnyType()) orelse +// try p.expectNodeRecoverable(parseTypeExpr, .{ +// // most likely the user forgot to specify the return type. +// // Mark return type as invalid and try to continue. +// .ExpectedReturnType = .{ .token = p.tok_i }, +// }); +// +// // TODO https://github.com/ziglang/zig/issues/3750 +// const R = Node.FnProto.ReturnType; +// const return_type = if (return_type_expr == null) +// R{ .Invalid = rparen } +// else if (exclamation_token != null) +// R{ .InferErrorSet = return_type_expr.? } +// else +// R{ .Explicit = return_type_expr.? 
}; +// +// const body_node: ?*Node = switch (level) { +// .top_level => blk: { +// if (p.eatToken(.Semicolon)) |_| { +// break :blk null; +// } +// const body_block = (try p.parseBlock(null)) orelse { +// // Since parseBlock only return error.ParseError on +// // a missing '}' we can assume this function was +// // supposed to end here. +// try p.errors.append(p.gpa, .{ .ExpectedSemiOrLBrace = .{ .token = p.tok_i } }); +// break :blk null; +// }; +// break :blk body_block; +// }, +// .as_type => null, +// }; +// +// const fn_proto_node = try Node.FnProto.create(&p.arena.allocator, .{ +// .params_len = params.len, +// .fn_token = fn_token, +// .return_type = return_type, +// }, .{ +// .doc_comments = fields.doc_comments, +// .visib_token = fields.visib_token, +// .name_token = name_token, +// .var_args_token = var_args_token, +// .extern_export_inline_token = fields.extern_export_inline_token, +// .body_node = body_node, +// .lib_name = fields.lib_name, +// .align_expr = align_expr, +// .section_expr = section_expr, +// .callconv_expr = callconv_expr, +// .is_extern_prototype = is_extern_prototype, +// .is_async = is_async, +// }); +// std.mem.copy(Node.FnProto.ParamDecl, fn_proto_node.params(), params); +// +// return &fn_proto_node.base; +// } +// +// /// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? 
SEMICOLON +// fn parseVarDecl(p: *Parser, fields: struct { +// doc_comments: ?*Node.DocComment = null, +// visib_token: ?TokenIndex = null, +// thread_local_token: ?TokenIndex = null, +// extern_export_token: ?TokenIndex = null, +// lib_name: ?*Node = null, +// comptime_token: ?TokenIndex = null, +// }) !?*Node { +// const mut_token = p.eatToken(.Keyword_const) orelse +// p.eatToken(.Keyword_var) orelse +// return null; +// +// const name_token = try p.expectToken(.Identifier); +// const type_node = if (p.eatToken(.Colon) != null) +// try p.expectNode(parseTypeExpr, .{ +// .ExpectedTypeExpr = .{ .token = p.tok_i }, +// }) +// else +// null; +// const align_node = try p.parseByteAlign(); +// const section_node = try p.parseLinkSection(); +// const eq_token = p.eatToken(.Equal); +// const init_node = if (eq_token != null) blk: { +// break :blk try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// } else null; +// const semicolon_token = try p.expectToken(.Semicolon); +// +// const doc_comments = fields.doc_comments orelse try p.parseAppendedDocComment(semicolon_token); +// +// const node = try Node.VarDecl.create(&p.arena.allocator, .{ +// .mut_token = mut_token, +// .name_token = name_token, +// .semicolon_token = semicolon_token, +// }, .{ +// .doc_comments = doc_comments, +// .visib_token = fields.visib_token, +// .thread_local_token = fields.thread_local_token, +// .eq_token = eq_token, +// .comptime_token = fields.comptime_token, +// .extern_export_token = fields.extern_export_token, +// .lib_name = fields.lib_name, +// .type_node = type_node, +// .align_node = align_node, +// .section_node = section_node, +// .init_node = init_node, +// }); +// return &node.base; +// } +// +// /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? 
+// fn parseContainerField(p: *Parser) !?*Node { +// const comptime_token = p.eatToken(.Keyword_comptime); +// const name_token = p.eatToken(.Identifier) orelse { +// if (comptime_token) |t| p.putBackToken(t); +// return null; +// }; +// +// var align_expr: ?*Node = null; +// var type_expr: ?*Node = null; +// if (p.eatToken(.Colon)) |_| { +// if (p.eatToken(.Keyword_anytype) orelse p.eatToken(.Keyword_var)) |anytype_tok| { +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .AnyType }, +// .token = anytype_tok, +// }; +// type_expr = &node.base; +// } else { +// type_expr = try p.expectNode(parseTypeExpr, .{ +// .ExpectedTypeExpr = .{ .token = p.tok_i }, +// }); +// align_expr = try p.parseByteAlign(); +// } +// } +// +// const value_expr = if (p.eatToken(.Equal)) |_| +// try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }) +// else +// null; +// +// const node = try p.arena.allocator.create(Node.ContainerField); +// node.* = .{ +// .doc_comments = null, +// .comptime_token = comptime_token, +// .name_token = name_token, +// .type_expr = type_expr, +// .value_expr = value_expr, +// .align_expr = align_expr, +// }; +// return &node.base; +// } +// +// /// Statement +// /// <- KEYWORD_comptime? VarDecl +// /// / KEYWORD_comptime BlockExprStatement +// /// / KEYWORD_nosuspend BlockExprStatement +// /// / KEYWORD_suspend (SEMICOLON / BlockExprStatement) +// /// / KEYWORD_defer BlockExprStatement +// /// / KEYWORD_errdefer Payload? 
BlockExprStatement +// /// / IfStatement +// /// / LabeledStatement +// /// / SwitchExpr +// /// / AssignExpr SEMICOLON +// fn parseStatement(p: *Parser) Error!?*Node { +// const comptime_token = p.eatToken(.Keyword_comptime); +// +// if (try p.parseVarDecl(.{ +// .comptime_token = comptime_token, +// })) |node| { +// return node; +// } +// +// if (comptime_token) |token| { +// const block_expr = try p.expectNode(parseBlockExprStatement, .{ +// .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.Comptime); +// node.* = .{ +// .doc_comments = null, +// .comptime_token = token, +// .expr = block_expr, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| { +// const block_expr = try p.expectNode(parseBlockExprStatement, .{ +// .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.Nosuspend); +// node.* = .{ +// .nosuspend_token = nosuspend_token, +// .expr = block_expr, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_suspend)) |suspend_token| { +// const semicolon = p.eatToken(.Semicolon); +// +// const body_node = if (semicolon == null) blk: { +// break :blk try p.expectNode(parseBlockExprStatement, .{ +// .ExpectedBlockOrExpression = .{ .token = p.tok_i }, +// }); +// } else null; +// +// const node = try p.arena.allocator.create(Node.Suspend); +// node.* = .{ +// .suspend_token = suspend_token, +// .body = body_node, +// }; +// return &node.base; +// } +// +// const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer); +// if (defer_token) |token| { +// const payload = if (p.token_ids[token] == .Keyword_errdefer) +// try p.parsePayload() +// else +// null; +// const expr_node = try p.expectNode(parseBlockExprStatement, .{ +// .ExpectedBlockOrExpression = .{ .token = p.tok_i }, +// }); +// const node = try p.arena.allocator.create(Node.Defer); +// node.* = 
.{ +// .defer_token = token, +// .expr = expr_node, +// .payload = payload, +// }; +// return &node.base; +// } +// +// if (try p.parseIfStatement()) |node| return node; +// if (try p.parseLabeledStatement()) |node| return node; +// if (try p.parseSwitchExpr()) |node| return node; +// if (try p.parseAssignExpr()) |node| { +// _ = try p.expectTokenRecoverable(.Semicolon); +// return node; +// } +// +// return null; +// } +// +// /// IfStatement +// /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )? +// /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement ) +// fn parseIfStatement(p: *Parser) !?*Node { +// const if_node = (try p.parseIfPrefix()) orelse return null; +// const if_prefix = if_node.cast(Node.If).?; +// +// const block_expr = (try p.parseBlockExpr()); +// const assign_expr = if (block_expr == null) +// try p.expectNode(parseAssignExpr, .{ +// .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, +// }) +// else +// null; +// +// const semicolon = if (assign_expr != null) p.eatToken(.Semicolon) else null; +// +// const else_node = if (semicolon == null) blk: { +// const else_token = p.eatToken(.Keyword_else) orelse break :blk null; +// const payload = try p.parsePayload(); +// const else_body = try p.expectNode(parseStatement, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.Else); +// node.* = .{ +// .else_token = else_token, +// .payload = payload, +// .body = else_body, +// }; +// +// break :blk node; +// } else null; +// +// if (block_expr) |body| { +// if_prefix.body = body; +// if_prefix.@"else" = else_node; +// return if_node; +// } +// +// if (assign_expr) |body| { +// if_prefix.body = body; +// if (semicolon != null) return if_node; +// if (else_node != null) { +// if_prefix.@"else" = else_node; +// return if_node; +// } +// try p.errors.append(p.gpa, .{ +// .ExpectedSemiOrElse = .{ .token = p.tok_i }, +// }); +// } +// +// return if_node; +// } +// +// /// 
LabeledStatement <- BlockLabel? (Block / LoopStatement) +// fn parseLabeledStatement(p: *Parser) !?*Node { +// var colon: TokenIndex = undefined; +// const label_token = p.parseBlockLabel(&colon); +// +// if (try p.parseBlock(label_token)) |node| return node; +// +// if (try p.parseLoopStatement()) |node| { +// if (node.cast(Node.For)) |for_node| { +// for_node.label = label_token; +// } else if (node.cast(Node.While)) |while_node| { +// while_node.label = label_token; +// } else unreachable; +// return node; +// } +// +// if (label_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExpectedLabelable = .{ .token = p.tok_i }, +// }); +// return error.ParseError; +// } +// +// return null; +// } +// +// /// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement) +// fn parseLoopStatement(p: *Parser) !?*Node { +// const inline_token = p.eatToken(.Keyword_inline); +// +// if (try p.parseForStatement()) |node| { +// node.cast(Node.For).?.inline_token = inline_token; +// return node; +// } +// +// if (try p.parseWhileStatement()) |node| { +// node.cast(Node.While).?.inline_token = inline_token; +// return node; +// } +// if (inline_token == null) return null; +// +// // If we've seen "inline", there should have been a "for" or "while" +// try p.errors.append(p.gpa, .{ +// .ExpectedInlinable = .{ .token = p.tok_i }, +// }); +// return error.ParseError; +// } +// +// /// ForStatement +// /// <- ForPrefix BlockExpr ( KEYWORD_else Statement )? 
+// /// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement ) +// fn parseForStatement(p: *Parser) !?*Node { +// const node = (try p.parseForPrefix()) orelse return null; +// const for_prefix = node.cast(Node.For).?; +// +// if (try p.parseBlockExpr()) |block_expr_node| { +// for_prefix.body = block_expr_node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const statement_node = try p.expectNode(parseStatement, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = null, +// .body = statement_node, +// }; +// for_prefix.@"else" = else_node; +// +// return node; +// } +// +// return node; +// } +// +// for_prefix.body = try p.expectNode(parseAssignExpr, .{ +// .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, +// }); +// +// if (p.eatToken(.Semicolon) != null) return node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const statement_node = try p.expectNode(parseStatement, .{ +// .ExpectedStatement = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = null, +// .body = statement_node, +// }; +// for_prefix.@"else" = else_node; +// return node; +// } +// +// try p.errors.append(p.gpa, .{ +// .ExpectedSemiOrElse = .{ .token = p.tok_i }, +// }); +// +// return node; +// } +// +// /// WhileStatement +// /// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )? +// /// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? 
Statement ) +// fn parseWhileStatement(p: *Parser) !?*Node { +// const node = (try p.parseWhilePrefix()) orelse return null; +// const while_prefix = node.cast(Node.While).?; +// +// if (try p.parseBlockExpr()) |block_expr_node| { +// while_prefix.body = block_expr_node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const payload = try p.parsePayload(); +// +// const statement_node = try p.expectNode(parseStatement, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = payload, +// .body = statement_node, +// }; +// while_prefix.@"else" = else_node; +// +// return node; +// } +// +// return node; +// } +// +// while_prefix.body = try p.expectNode(parseAssignExpr, .{ +// .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, +// }); +// +// if (p.eatToken(.Semicolon) != null) return node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const payload = try p.parsePayload(); +// +// const statement_node = try p.expectNode(parseStatement, .{ +// .ExpectedStatement = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = payload, +// .body = statement_node, +// }; +// while_prefix.@"else" = else_node; +// return node; +// } +// +// try p.errors.append(p.gpa, .{ +// .ExpectedSemiOrElse = .{ .token = p.tok_i }, +// }); +// +// return node; +// } +// +// /// BlockExprStatement +// /// <- BlockExpr +// /// / AssignExpr SEMICOLON +// fn parseBlockExprStatement(p: *Parser) !?*Node { +// if (try p.parseBlockExpr()) |node| return node; +// if (try p.parseAssignExpr()) |node| { +// _ = try p.expectTokenRecoverable(.Semicolon); +// return node; +// } +// return null; +// } +// +// /// BlockExpr <- BlockLabel? 
Block +// fn parseBlockExpr(p: *Parser) Error!?*Node { +// var colon: TokenIndex = undefined; +// const label_token = p.parseBlockLabel(&colon); +// const block_node = (try p.parseBlock(label_token)) orelse { +// if (label_token) |label| { +// p.putBackToken(label + 1); // ":" +// p.putBackToken(label); // IDENTIFIER +// } +// return null; +// }; +// return block_node; +// } +// +// /// AssignExpr <- Expr (AssignOp Expr)? +// fn parseAssignExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseAssignOp, parseExpr, .Once); +// } +// +// /// Expr <- KEYWORD_try* BoolOrExpr +// fn parseExpr(p: *Parser) Error!?*Node { +// return p.parsePrefixOpExpr(parseTry, parseBoolOrExpr); +// } +// +// /// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)* +// fn parseBoolOrExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr( +// SimpleBinOpParseFn(.Keyword_or, .BoolOr), +// parseBoolAndExpr, +// .Infinitely, +// ); +// } +// +// /// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)* +// fn parseBoolAndExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr( +// SimpleBinOpParseFn(.Keyword_and, .BoolAnd), +// parseCompareExpr, +// .Infinitely, +// ); +// } +// +// /// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)? 
+// fn parseCompareExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseCompareOp, parseBitwiseExpr, .Once); +// } +// +// /// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)* +// fn parseBitwiseExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseBitwiseOp, parseBitShiftExpr, .Infinitely); +// } +// +// /// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)* +// fn parseBitShiftExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseBitShiftOp, parseAdditionExpr, .Infinitely); +// } +// +// /// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)* +// fn parseAdditionExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseAdditionOp, parseMultiplyExpr, .Infinitely); +// } +// +// /// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)* +// fn parseMultiplyExpr(p: *Parser) !?*Node { +// return p.parseBinOpExpr(parseMultiplyOp, parsePrefixExpr, .Infinitely); +// } +// +// /// PrefixExpr <- PrefixOp* PrimaryExpr +// fn parsePrefixExpr(p: *Parser) !?*Node { +// return p.parsePrefixOpExpr(parsePrefixOp, parsePrimaryExpr); +// } +// +// /// PrimaryExpr +// /// <- AsmExpr +// /// / IfExpr +// /// / KEYWORD_break BreakLabel? Expr? +// /// / KEYWORD_comptime Expr +// /// / KEYWORD_nosuspend Expr +// /// / KEYWORD_continue BreakLabel? +// /// / KEYWORD_resume Expr +// /// / KEYWORD_return Expr? +// /// / BlockLabel? 
LoopExpr +// /// / Block +// /// / CurlySuffixExpr +// fn parsePrimaryExpr(p: *Parser) !?*Node { +// if (try p.parseAsmExpr()) |node| return node; +// if (try p.parseIfExpr()) |node| return node; +// +// if (p.eatToken(.Keyword_break)) |token| { +// const label = try p.parseBreakLabel(); +// const expr_node = try p.parseExpr(); +// const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{ +// .tag = .Break, +// .ltoken = token, +// }, .{ +// .label = label, +// .rhs = expr_node, +// }); +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_comptime)) |token| { +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// const node = try p.arena.allocator.create(Node.Comptime); +// node.* = .{ +// .doc_comments = null, +// .comptime_token = token, +// .expr = expr_node, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_nosuspend)) |token| { +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// const node = try p.arena.allocator.create(Node.Nosuspend); +// node.* = .{ +// .nosuspend_token = token, +// .expr = expr_node, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_continue)) |token| { +// const label = try p.parseBreakLabel(); +// const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{ +// .tag = .Continue, +// .ltoken = token, +// }, .{ +// .label = label, +// .rhs = null, +// }); +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_resume)) |token| { +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// const node = try p.arena.allocator.create(Node.SimplePrefixOp); +// node.* = .{ +// .base = .{ .tag = .Resume }, +// .op_token = token, +// .rhs = expr_node, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_return)) |token| { +// const expr_node = try p.parseExpr(); +// const node = try 
Node.ControlFlowExpression.create(&p.arena.allocator, .{ +// .tag = .Return, +// .ltoken = token, +// }, .{ +// .rhs = expr_node, +// }); +// return &node.base; +// } +// +// var colon: TokenIndex = undefined; +// const label = p.parseBlockLabel(&colon); +// if (try p.parseLoopExpr()) |node| { +// if (node.cast(Node.For)) |for_node| { +// for_node.label = label; +// } else if (node.cast(Node.While)) |while_node| { +// while_node.label = label; +// } else unreachable; +// return node; +// } +// if (label) |token| { +// p.putBackToken(token + 1); // ":" +// p.putBackToken(token); // IDENTIFIER +// } +// +// if (try p.parseBlock(null)) |node| return node; +// if (try p.parseCurlySuffixExpr()) |node| return node; +// +// return null; +// } +// +// /// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)? +// fn parseIfExpr(p: *Parser) !?*Node { +// return p.parseIf(parseExpr); +// } +// +// /// Block <- LBRACE Statement* RBRACE +// fn parseBlock(p: *Parser, label_token: ?TokenIndex) !?*Node { +// const lbrace = p.eatToken(.LBrace) orelse return null; +// +// var statements = std.ArrayList(*Node).init(p.gpa); +// defer statements.deinit(); +// +// while (true) { +// const statement = (p.parseStatement() catch |err| switch (err) { +// error.OutOfMemory => return error.OutOfMemory, +// error.ParseError => { +// // try to skip to the next statement +// p.findNextStmt(); +// continue; +// }, +// }) orelse break; +// try statements.append(statement); +// } +// +// const rbrace = try p.expectToken(.RBrace); +// +// const statements_len = @intCast(NodeIndex, statements.items.len); +// +// if (label_token) |label| { +// const block_node = try Node.LabeledBlock.alloc(&p.arena.allocator, statements_len); +// block_node.* = .{ +// .label = label, +// .lbrace = lbrace, +// .statements_len = statements_len, +// .rbrace = rbrace, +// }; +// std.mem.copy(*Node, block_node.statements(), statements.items); +// return &block_node.base; +// } else { +// const block_node = try 
Node.Block.alloc(&p.arena.allocator, statements_len); +// block_node.* = .{ +// .lbrace = lbrace, +// .statements_len = statements_len, +// .rbrace = rbrace, +// }; +// std.mem.copy(*Node, block_node.statements(), statements.items); +// return &block_node.base; +// } +// } +// +// /// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr) +// fn parseLoopExpr(p: *Parser) !?*Node { +// const inline_token = p.eatToken(.Keyword_inline); +// +// if (try p.parseForExpr()) |node| { +// node.cast(Node.For).?.inline_token = inline_token; +// return node; +// } +// +// if (try p.parseWhileExpr()) |node| { +// node.cast(Node.While).?.inline_token = inline_token; +// return node; +// } +// +// if (inline_token == null) return null; +// +// // If we've seen "inline", there should have been a "for" or "while" +// try p.errors.append(p.gpa, .{ +// .ExpectedInlinable = .{ .token = p.tok_i }, +// }); +// return error.ParseError; +// } +// +// /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)? +// fn parseForExpr(p: *Parser) !?*Node { +// const node = (try p.parseForPrefix()) orelse return null; +// const for_prefix = node.cast(Node.For).?; +// +// const body_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// for_prefix.body = body_node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const body = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = null, +// .body = body, +// }; +// +// for_prefix.@"else" = else_node; +// } +// +// return node; +// } +// +// /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)? 
+// fn parseWhileExpr(p: *Parser) !?*Node { +// const node = (try p.parseWhilePrefix()) orelse return null; +// const while_prefix = node.cast(Node.While).?; +// +// const body_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// while_prefix.body = body_node; +// +// if (p.eatToken(.Keyword_else)) |else_token| { +// const payload = try p.parsePayload(); +// const body = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = payload, +// .body = body, +// }; +// +// while_prefix.@"else" = else_node; +// } +// +// return node; +// } +// +// /// CurlySuffixExpr <- TypeExpr InitList? +// fn parseCurlySuffixExpr(p: *Parser) !?*Node { +// const lhs = (try p.parseTypeExpr()) orelse return null; +// const suffix_op = (try p.parseInitList(lhs)) orelse return lhs; +// return suffix_op; +// } +// +// /// InitList +// /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE +// /// / LBRACE Expr (COMMA Expr)* COMMA? 
RBRACE +// /// / LBRACE RBRACE +// fn parseInitList(p: *Parser, lhs: *Node) !?*Node { +// const lbrace = p.eatToken(.LBrace) orelse return null; +// var init_list = std.ArrayList(*Node).init(p.gpa); +// defer init_list.deinit(); +// +// if (try p.parseFieldInit()) |field_init| { +// try init_list.append(field_init); +// while (p.eatToken(.Comma)) |_| { +// const next = (try p.parseFieldInit()) orelse break; +// try init_list.append(next); +// } +// const node = try Node.StructInitializer.alloc(&p.arena.allocator, init_list.items.len); +// node.* = .{ +// .lhs = lhs, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = init_list.items.len, +// }; +// std.mem.copy(*Node, node.list(), init_list.items); +// return &node.base; +// } +// +// if (try p.parseExpr()) |expr| { +// try init_list.append(expr); +// while (p.eatToken(.Comma)) |_| { +// const next = (try p.parseExpr()) orelse break; +// try init_list.append(next); +// } +// const node = try Node.ArrayInitializer.alloc(&p.arena.allocator, init_list.items.len); +// node.* = .{ +// .lhs = lhs, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = init_list.items.len, +// }; +// std.mem.copy(*Node, node.list(), init_list.items); +// return &node.base; +// } +// +// const node = try p.arena.allocator.create(Node.StructInitializer); +// node.* = .{ +// .lhs = lhs, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = 0, +// }; +// return &node.base; +// } +// +// /// InitList +// /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE +// /// / LBRACE Expr (COMMA Expr)* COMMA? 
RBRACE +// /// / LBRACE RBRACE +// fn parseAnonInitList(p: *Parser, dot: TokenIndex) !?*Node { +// const lbrace = p.eatToken(.LBrace) orelse return null; +// var init_list = std.ArrayList(*Node).init(p.gpa); +// defer init_list.deinit(); +// +// if (try p.parseFieldInit()) |field_init| { +// try init_list.append(field_init); +// while (p.eatToken(.Comma)) |_| { +// const next = (try p.parseFieldInit()) orelse break; +// try init_list.append(next); +// } +// const node = try Node.StructInitializerDot.alloc(&p.arena.allocator, init_list.items.len); +// node.* = .{ +// .dot = dot, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = init_list.items.len, +// }; +// std.mem.copy(*Node, node.list(), init_list.items); +// return &node.base; +// } +// +// if (try p.parseExpr()) |expr| { +// try init_list.append(expr); +// while (p.eatToken(.Comma)) |_| { +// const next = (try p.parseExpr()) orelse break; +// try init_list.append(next); +// } +// const node = try Node.ArrayInitializerDot.alloc(&p.arena.allocator, init_list.items.len); +// node.* = .{ +// .dot = dot, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = init_list.items.len, +// }; +// std.mem.copy(*Node, node.list(), init_list.items); +// return &node.base; +// } +// +// const node = try p.arena.allocator.create(Node.StructInitializerDot); +// node.* = .{ +// .dot = dot, +// .rtoken = try p.expectToken(.RBrace), +// .list_len = 0, +// }; +// return &node.base; +// } +// +// /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr +// fn parseTypeExpr(p: *Parser) Error!?*Node { +// return p.parsePrefixOpExpr(parsePrefixTypeOp, parseErrorUnionExpr); +// } +// +// /// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)? 
+// fn parseErrorUnionExpr(p: *Parser) !?*Node { +// const suffix_expr = (try p.parseSuffixExpr()) orelse return null; +// +// if (try SimpleBinOpParseFn(.Bang, .ErrorUnion)(p)) |node| { +// const error_union = node.castTag(.ErrorUnion).?; +// const type_expr = try p.expectNode(parseTypeExpr, .{ +// .ExpectedTypeExpr = .{ .token = p.tok_i }, +// }); +// error_union.lhs = suffix_expr; +// error_union.rhs = type_expr; +// return node; +// } +// +// return suffix_expr; +// } +// +// /// SuffixExpr +// /// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments +// /// / PrimaryTypeExpr (SuffixOp / FnCallArguments)* +// fn parseSuffixExpr(p: *Parser) !?*Node { +// const maybe_async = p.eatToken(.Keyword_async); +// if (maybe_async) |async_token| { +// const token_fn = p.eatToken(.Keyword_fn); +// if (token_fn != null) { +// // TODO: remove this hack when async fn rewriting is +// // HACK: If we see the keyword `fn`, then we assume that +// // we are parsing an async fn proto, and not a call. +// // We therefore put back all tokens consumed by the async +// // prefix... 
+// p.putBackToken(token_fn.?); +// p.putBackToken(async_token); +// return p.parsePrimaryTypeExpr(); +// } +// var res = try p.expectNode(parsePrimaryTypeExpr, .{ +// .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i }, +// }); +// +// while (try p.parseSuffixOp(res)) |node| { +// res = node; +// } +// +// const params = (try p.parseFnCallArguments()) orelse { +// try p.errors.append(p.gpa, .{ +// .ExpectedParamList = .{ .token = p.tok_i }, +// }); +// // ignore this, continue parsing +// return res; +// }; +// defer p.gpa.free(params.list); +// const node = try Node.Call.alloc(&p.arena.allocator, params.list.len); +// node.* = .{ +// .lhs = res, +// .params_len = params.list.len, +// .async_token = async_token, +// .rtoken = params.rparen, +// }; +// std.mem.copy(*Node, node.params(), params.list); +// return &node.base; +// } +// if (try p.parsePrimaryTypeExpr()) |expr| { +// var res = expr; +// +// while (true) { +// if (try p.parseSuffixOp(res)) |node| { +// res = node; +// continue; +// } +// if (try p.parseFnCallArguments()) |params| { +// defer p.gpa.free(params.list); +// const call = try Node.Call.alloc(&p.arena.allocator, params.list.len); +// call.* = .{ +// .lhs = res, +// .params_len = params.list.len, +// .async_token = null, +// .rtoken = params.rparen, +// }; +// std.mem.copy(*Node, call.params(), params.list); +// res = &call.base; +// continue; +// } +// break; +// } +// return res; +// } +// +// return null; +// } +// +// /// PrimaryTypeExpr +// /// <- BUILTINIDENTIFIER FnCallArguments +// /// / CHAR_LITERAL +// /// / ContainerDecl +// /// / DOT IDENTIFIER +// /// / ErrorSetDecl +// /// / FLOAT +// /// / FnProto +// /// / GroupedExpr +// /// / LabeledTypeExpr +// /// / IDENTIFIER +// /// / IfTypeExpr +// /// / INTEGER +// /// / KEYWORD_comptime TypeExpr +// /// / KEYWORD_error DOT IDENTIFIER +// /// / KEYWORD_false +// /// / KEYWORD_null +// /// / KEYWORD_anyframe +// /// / KEYWORD_true +// /// / KEYWORD_undefined +// /// / KEYWORD_unreachable +// 
/// / STRINGLITERAL +// /// / SwitchExpr +// fn parsePrimaryTypeExpr(p: *Parser) !?*Node { +// if (try p.parseBuiltinCall()) |node| return node; +// if (p.eatToken(.CharLiteral)) |token| { +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .CharLiteral }, +// .token = token, +// }; +// return &node.base; +// } +// if (try p.parseContainerDecl()) |node| return node; +// if (try p.parseAnonLiteral()) |node| return node; +// if (try p.parseErrorSetDecl()) |node| return node; +// if (try p.parseFloatLiteral()) |node| return node; +// if (try p.parseFnProto(.as_type, .{})) |node| return node; +// if (try p.parseGroupedExpr()) |node| return node; +// if (try p.parseLabeledTypeExpr()) |node| return node; +// if (try p.parseIdentifier()) |node| return node; +// if (try p.parseIfTypeExpr()) |node| return node; +// if (try p.parseIntegerLiteral()) |node| return node; +// if (p.eatToken(.Keyword_comptime)) |token| { +// const expr = (try p.parseTypeExpr()) orelse return null; +// const node = try p.arena.allocator.create(Node.Comptime); +// node.* = .{ +// .doc_comments = null, +// .comptime_token = token, +// .expr = expr, +// }; +// return &node.base; +// } +// if (p.eatToken(.Keyword_error)) |token| { +// const period = try p.expectTokenRecoverable(.Period); +// const identifier = try p.expectNodeRecoverable(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// const global_error_set = try p.createLiteral(.ErrorType, token); +// if (period == null or identifier == null) return global_error_set; +// +// const node = try p.arena.allocator.create(Node.SimpleInfixOp); +// node.* = .{ +// .base = Node{ .tag = .Period }, +// .op_token = period.?, +// .lhs = global_error_set, +// .rhs = identifier.?, +// }; +// return &node.base; +// } +// if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(.BoolLiteral, token); +// if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(.NullLiteral, 
token); +// if (p.eatToken(.Keyword_anyframe)) |token| { +// const node = try p.arena.allocator.create(Node.AnyFrameType); +// node.* = .{ +// .anyframe_token = token, +// .result = null, +// }; +// return &node.base; +// } +// if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(.BoolLiteral, token); +// if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(.UndefinedLiteral, token); +// if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(.Unreachable, token); +// if (try p.parseStringLiteral()) |node| return node; +// if (try p.parseSwitchExpr()) |node| return node; +// +// return null; +// } +// +// /// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto +// fn parseContainerDecl(p: *Parser) !?*Node { +// const layout_token = p.eatToken(.Keyword_extern) orelse +// p.eatToken(.Keyword_packed); +// +// const node = (try p.parseContainerDeclAuto()) orelse { +// if (layout_token) |token| +// p.putBackToken(token); +// return null; +// }; +// node.cast(Node.ContainerDecl).?.*.layout_token = layout_token; +// return node; +// } +// +// /// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE +// fn parseErrorSetDecl(p: *Parser) !?*Node { +// const error_token = p.eatToken(.Keyword_error) orelse return null; +// if (p.eatToken(.LBrace) == null) { +// // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error +// p.putBackToken(error_token); +// return null; +// } +// const decls = try p.parseErrorTagList(); +// defer p.gpa.free(decls); +// const rbrace = try p.expectToken(.RBrace); +// +// const node = try Node.ErrorSetDecl.alloc(&p.arena.allocator, decls.len); +// node.* = .{ +// .error_token = error_token, +// .decls_len = decls.len, +// .rbrace_token = rbrace, +// }; +// std.mem.copy(*Node, node.decls(), decls); +// return &node.base; +// } +// +// /// GroupedExpr <- LPAREN Expr RPAREN +// fn parseGroupedExpr(p: *Parser) !?*Node { +// const lparen = p.eatToken(.LParen) 
orelse return null; +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// const rparen = try p.expectToken(.RParen); +// +// const node = try p.arena.allocator.create(Node.GroupedExpression); +// node.* = .{ +// .lparen = lparen, +// .expr = expr, +// .rparen = rparen, +// }; +// return &node.base; +// } +// +// /// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)? +// fn parseIfTypeExpr(p: *Parser) !?*Node { +// return p.parseIf(parseTypeExpr); +// } +// +// /// LabeledTypeExpr +// /// <- BlockLabel Block +// /// / BlockLabel? LoopTypeExpr +// fn parseLabeledTypeExpr(p: *Parser) !?*Node { +// var colon: TokenIndex = undefined; +// const label = p.parseBlockLabel(&colon); +// +// if (label) |label_token| { +// if (try p.parseBlock(label_token)) |node| return node; +// } +// +// if (try p.parseLoopTypeExpr()) |node| { +// switch (node.tag) { +// .For => node.cast(Node.For).?.label = label, +// .While => node.cast(Node.While).?.label = label, +// else => unreachable, +// } +// return node; +// } +// +// if (label) |token| { +// p.putBackToken(colon); +// p.putBackToken(token); +// } +// return null; +// } +// +// /// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr) +// fn parseLoopTypeExpr(p: *Parser) !?*Node { +// const inline_token = p.eatToken(.Keyword_inline); +// +// if (try p.parseForTypeExpr()) |node| { +// node.cast(Node.For).?.inline_token = inline_token; +// return node; +// } +// +// if (try p.parseWhileTypeExpr()) |node| { +// node.cast(Node.While).?.inline_token = inline_token; +// return node; +// } +// +// if (inline_token == null) return null; +// +// // If we've seen "inline", there should have been a "for" or "while" +// try p.errors.append(p.gpa, .{ +// .ExpectedInlinable = .{ .token = p.tok_i }, +// }); +// return error.ParseError; +// } +// +// /// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)? 
+// fn parseForTypeExpr(p: *Parser) !?*Node {
+// const node = (try p.parseForPrefix()) orelse return null;
+// const for_prefix = node.cast(Node.For).?;
+//
+// const type_expr = try p.expectNode(parseTypeExpr, .{
+// .ExpectedTypeExpr = .{ .token = p.tok_i },
+// });
+// for_prefix.body = type_expr;
+//
+// if (p.eatToken(.Keyword_else)) |else_token| {
+// const else_expr = try p.expectNode(parseTypeExpr, .{
+// .ExpectedTypeExpr = .{ .token = p.tok_i },
+// });
+//
+// const else_node = try p.arena.allocator.create(Node.Else);
+// else_node.* = .{
+// .else_token = else_token,
+// .payload = null,
+// .body = else_expr,
+// };
+//
+// for_prefix.@"else" = else_node;
+// }
+//
+// return node;
+// }
+//
+// /// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
+// fn parseWhileTypeExpr(p: *Parser) !?*Node {
+// const node = (try p.parseWhilePrefix()) orelse return null;
+// const while_prefix = node.cast(Node.While).?;
+//
+// const type_expr = try p.expectNode(parseTypeExpr, .{
+// .ExpectedTypeExpr = .{ .token = p.tok_i },
+// });
+// while_prefix.body = type_expr;
+//
+// if (p.eatToken(.Keyword_else)) |else_token| {
+// const payload = try p.parsePayload();
+//
+// const else_expr = try p.expectNode(parseTypeExpr, .{
+// .ExpectedTypeExpr = .{ .token = p.tok_i },
+// });
+//
+// const else_node = try p.arena.allocator.create(Node.Else);
+// else_node.* = .{
+// .else_token = else_token,
+// .payload = payload,
+// .body = else_expr,
+// };
+//
+// while_prefix.@"else" = else_node;
+// }
+//
+// return node;
+// }
+//
+// /// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
+// fn parseSwitchExpr(p: *Parser) !?*Node {
+// const switch_token = p.eatToken(.Keyword_switch) orelse return null;
+// _ = try p.expectToken(.LParen);
+// const expr_node = try p.expectNode(parseExpr, .{
+// .ExpectedExpr = .{ .token = p.tok_i },
+// });
+// _ = try p.expectToken(.RParen);
+// _ = try p.expectToken(.LBrace);
+// const cases 
= try p.parseSwitchProngList(); +// defer p.gpa.free(cases); +// const rbrace = try p.expectToken(.RBrace); +// +// const node = try Node.Switch.alloc(&p.arena.allocator, cases.len); +// node.* = .{ +// .switch_token = switch_token, +// .expr = expr_node, +// .cases_len = cases.len, +// .rbrace = rbrace, +// }; +// std.mem.copy(*Node, node.cases(), cases); +// return &node.base; +// } +// +// /// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN +// /// AsmOutput <- COLON AsmOutputList AsmInput? +// /// AsmInput <- COLON AsmInputList AsmClobbers? +// /// AsmClobbers <- COLON StringList +// /// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL? +// fn parseAsmExpr(p: *Parser) !?*Node { +// const asm_token = p.eatToken(.Keyword_asm) orelse return null; +// const volatile_token = p.eatToken(.Keyword_volatile); +// _ = try p.expectToken(.LParen); +// const template = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// var arena_outputs: []Node.Asm.Output = &[0]Node.Asm.Output{}; +// var arena_inputs: []Node.Asm.Input = &[0]Node.Asm.Input{}; +// var arena_clobbers: []*Node = &[0]*Node{}; +// +// if (p.eatToken(.Colon) != null) { +// const outputs = try p.parseAsmOutputList(); +// defer p.gpa.free(outputs); +// arena_outputs = try p.arena.allocator.dupe(Node.Asm.Output, outputs); +// +// if (p.eatToken(.Colon) != null) { +// const inputs = try p.parseAsmInputList(); +// defer p.gpa.free(inputs); +// arena_inputs = try p.arena.allocator.dupe(Node.Asm.Input, inputs); +// +// if (p.eatToken(.Colon) != null) { +// const clobbers = try ListParseFn(*Node, parseStringLiteral)(p); +// defer p.gpa.free(clobbers); +// arena_clobbers = try p.arena.allocator.dupe(*Node, clobbers); +// } +// } +// } +// +// const node = try p.arena.allocator.create(Node.Asm); +// node.* = .{ +// .asm_token = asm_token, +// .volatile_token = volatile_token, +// .template = template, +// .outputs = arena_outputs, +// .inputs = arena_inputs, 
+// .clobbers = arena_clobbers, +// .rparen = try p.expectToken(.RParen), +// }; +// +// return &node.base; +// } +// +// /// DOT IDENTIFIER +// fn parseAnonLiteral(p: *Parser) !?*Node { +// const dot = p.eatToken(.Period) orelse return null; +// +// // anon enum literal +// if (p.eatToken(.Identifier)) |name| { +// const node = try p.arena.allocator.create(Node.EnumLiteral); +// node.* = .{ +// .dot = dot, +// .name = name, +// }; +// return &node.base; +// } +// +// if (try p.parseAnonInitList(dot)) |node| { +// return node; +// } +// +// p.putBackToken(dot); +// return null; +// } +// +// /// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN +// fn parseAsmOutputItem(p: *Parser) !?Node.Asm.Output { +// const lbracket = p.eatToken(.LBracket) orelse return null; +// const name = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RBracket); +// +// const constraint = try p.expectNode(parseStringLiteral, .{ +// .ExpectedStringLiteral = .{ .token = p.tok_i }, +// }); +// +// _ = try p.expectToken(.LParen); +// const kind: Node.Asm.Output.Kind = blk: { +// if (p.eatToken(.Arrow) != null) { +// const return_ident = try p.expectNode(parseTypeExpr, .{ +// .ExpectedTypeExpr = .{ .token = p.tok_i }, +// }); +// break :blk .{ .Return = return_ident }; +// } +// const variable = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// break :blk .{ .Variable = variable.castTag(.Identifier).? 
}; +// }; +// const rparen = try p.expectToken(.RParen); +// +// return Node.Asm.Output{ +// .lbracket = lbracket, +// .symbolic_name = name, +// .constraint = constraint, +// .kind = kind, +// .rparen = rparen, +// }; +// } +// +// /// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN +// fn parseAsmInputItem(p: *Parser) !?Node.Asm.Input { +// const lbracket = p.eatToken(.LBracket) orelse return null; +// const name = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RBracket); +// +// const constraint = try p.expectNode(parseStringLiteral, .{ +// .ExpectedStringLiteral = .{ .token = p.tok_i }, +// }); +// +// _ = try p.expectToken(.LParen); +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// const rparen = try p.expectToken(.RParen); +// +// return Node.Asm.Input{ +// .lbracket = lbracket, +// .symbolic_name = name, +// .constraint = constraint, +// .expr = expr, +// .rparen = rparen, +// }; +// } +// +// /// BreakLabel <- COLON IDENTIFIER +// fn parseBreakLabel(p: *Parser) !?TokenIndex { +// _ = p.eatToken(.Colon) orelse return null; +// const ident = try p.expectToken(.Identifier); +// return ident; +// } +// +// /// BlockLabel <- IDENTIFIER COLON +// fn parseBlockLabel(p: *Parser, colon_token: *TokenIndex) ?TokenIndex { +// const identifier = p.eatToken(.Identifier) orelse return null; +// if (p.eatToken(.Colon)) |colon| { +// colon_token.* = colon; +// return identifier; +// } +// p.putBackToken(identifier); +// return null; +// } +// +// /// FieldInit <- DOT IDENTIFIER EQUAL Expr +// fn parseFieldInit(p: *Parser) !?*Node { +// const period_token = p.eatToken(.Period) orelse return null; +// const name_token = p.eatToken(.Identifier) orelse { +// // Because of anon literals `.{` is also valid. 
+// p.putBackToken(period_token); +// return null; +// }; +// const eq_token = p.eatToken(.Equal) orelse { +// // `.Name` may also be an enum literal, which is a later rule. +// p.putBackToken(name_token); +// p.putBackToken(period_token); +// return null; +// }; +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.FieldInitializer); +// node.* = .{ +// .period_token = period_token, +// .name_token = name_token, +// .expr = expr_node, +// }; +// return &node.base; +// } +// +// /// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN +// fn parseWhileContinueExpr(p: *Parser) !?*Node { +// _ = p.eatToken(.Colon) orelse return null; +// _ = try p.expectToken(.LParen); +// const node = try p.expectNode(parseAssignExpr, .{ +// .ExpectedExprOrAssignment = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// return node; +// } +// +// /// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN +// fn parseLinkSection(p: *Parser) !?*Node { +// _ = p.eatToken(.Keyword_linksection) orelse return null; +// _ = try p.expectToken(.LParen); +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// return expr_node; +// } +// +// /// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN +// fn parseCallconv(p: *Parser) !?*Node { +// _ = p.eatToken(.Keyword_callconv) orelse return null; +// _ = try p.expectToken(.LParen); +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// return expr_node; +// } +// +// /// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? 
ParamType +// fn parseParamDecl(p: *Parser) !?Node.FnProto.ParamDecl { +// const doc_comments = try p.parseDocComment(); +// const noalias_token = p.eatToken(.Keyword_noalias); +// const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null; +// const name_token = blk: { +// const identifier = p.eatToken(.Identifier) orelse break :blk null; +// if (p.eatToken(.Colon) != null) break :blk identifier; +// p.putBackToken(identifier); // ParamType may also be an identifier +// break :blk null; +// }; +// const param_type = (try p.parseParamType()) orelse { +// // Only return cleanly if no keyword, identifier, or doc comment was found +// if (noalias_token == null and +// comptime_token == null and +// name_token == null and +// doc_comments == null) +// { +// return null; +// } +// try p.errors.append(p.gpa, .{ +// .ExpectedParamType = .{ .token = p.tok_i }, +// }); +// return error.ParseError; +// }; +// +// return Node.FnProto.ParamDecl{ +// .doc_comments = doc_comments, +// .comptime_token = comptime_token, +// .noalias_token = noalias_token, +// .name_token = name_token, +// .param_type = param_type, +// }; +// } +// +// /// ParamType +// /// <- Keyword_anytype +// /// / DOT3 +// /// / TypeExpr +// fn parseParamType(p: *Parser) !?Node.FnProto.ParamDecl.ParamType { +// // TODO cast from tuple to error union is broken +// const P = Node.FnProto.ParamDecl.ParamType; +// if (try p.parseAnyType()) |node| return P{ .any_type = node }; +// if (try p.parseTypeExpr()) |node| return P{ .type_expr = node }; +// return null; +// } +// +// /// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload? 
+// fn parseIfPrefix(p: *Parser) !?*Node { +// const if_token = p.eatToken(.Keyword_if) orelse return null; +// _ = try p.expectToken(.LParen); +// const condition = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// const payload = try p.parsePtrPayload(); +// +// const node = try p.arena.allocator.create(Node.If); +// node.* = .{ +// .if_token = if_token, +// .condition = condition, +// .payload = payload, +// .body = undefined, // set by caller +// .@"else" = null, +// }; +// return &node.base; +// } +// +// /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? +// fn parseWhilePrefix(p: *Parser) !?*Node { +// const while_token = p.eatToken(.Keyword_while) orelse return null; +// +// _ = try p.expectToken(.LParen); +// const condition = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// +// const payload = try p.parsePtrPayload(); +// const continue_expr = try p.parseWhileContinueExpr(); +// +// const node = try p.arena.allocator.create(Node.While); +// node.* = .{ +// .label = null, +// .inline_token = null, +// .while_token = while_token, +// .condition = condition, +// .payload = payload, +// .continue_expr = continue_expr, +// .body = undefined, // set by caller +// .@"else" = null, +// }; +// return &node.base; +// } +// +// /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload +// fn parseForPrefix(p: *Parser) !?*Node { +// const for_token = p.eatToken(.Keyword_for) orelse return null; +// +// _ = try p.expectToken(.LParen); +// const array_expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// +// const payload = try p.expectNode(parsePtrIndexPayload, .{ +// .ExpectedPayload = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.For); +// node.* = .{ +// .label = null, +// 
.inline_token = null, +// .for_token = for_token, +// .array_expr = array_expr, +// .payload = payload, +// .body = undefined, // set by caller +// .@"else" = null, +// }; +// return &node.base; +// } +// +// /// Payload <- PIPE IDENTIFIER PIPE +// fn parsePayload(p: *Parser) !?*Node { +// const lpipe = p.eatToken(.Pipe) orelse return null; +// const identifier = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// const rpipe = try p.expectToken(.Pipe); +// +// const node = try p.arena.allocator.create(Node.Payload); +// node.* = .{ +// .lpipe = lpipe, +// .error_symbol = identifier, +// .rpipe = rpipe, +// }; +// return &node.base; +// } +// +// /// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE +// fn parsePtrPayload(p: *Parser) !?*Node { +// const lpipe = p.eatToken(.Pipe) orelse return null; +// const asterisk = p.eatToken(.Asterisk); +// const identifier = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// const rpipe = try p.expectToken(.Pipe); +// +// const node = try p.arena.allocator.create(Node.PointerPayload); +// node.* = .{ +// .lpipe = lpipe, +// .ptr_token = asterisk, +// .value_symbol = identifier, +// .rpipe = rpipe, +// }; +// return &node.base; +// } +// +// /// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? 
PIPE +// fn parsePtrIndexPayload(p: *Parser) !?*Node { +// const lpipe = p.eatToken(.Pipe) orelse return null; +// const asterisk = p.eatToken(.Asterisk); +// const identifier = try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// +// const index = if (p.eatToken(.Comma) == null) +// null +// else +// try p.expectNode(parseIdentifier, .{ +// .ExpectedIdentifier = .{ .token = p.tok_i }, +// }); +// +// const rpipe = try p.expectToken(.Pipe); +// +// const node = try p.arena.allocator.create(Node.PointerIndexPayload); +// node.* = .{ +// .lpipe = lpipe, +// .ptr_token = asterisk, +// .value_symbol = identifier, +// .index_symbol = index, +// .rpipe = rpipe, +// }; +// return &node.base; +// } +// +// /// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr +// fn parseSwitchProng(p: *Parser) !?*Node { +// const node = (try p.parseSwitchCase()) orelse return null; +// const arrow = try p.expectToken(.EqualAngleBracketRight); +// const payload = try p.parsePtrPayload(); +// const expr = try p.expectNode(parseAssignExpr, .{ +// .ExpectedExprOrAssignment = .{ .token = p.tok_i }, +// }); +// +// const switch_case = node.cast(Node.SwitchCase).?; +// switch_case.arrow_token = arrow; +// switch_case.payload = payload; +// switch_case.expr = expr; +// +// return node; +// } +// +// /// SwitchCase +// /// <- SwitchItem (COMMA SwitchItem)* COMMA? 
+// /// / KEYWORD_else +// fn parseSwitchCase(p: *Parser) !?*Node { +// var list = std.ArrayList(*Node).init(p.gpa); +// defer list.deinit(); +// +// if (try p.parseSwitchItem()) |first_item| { +// try list.append(first_item); +// while (p.eatToken(.Comma) != null) { +// const next_item = (try p.parseSwitchItem()) orelse break; +// try list.append(next_item); +// } +// } else if (p.eatToken(.Keyword_else)) |else_token| { +// const else_node = try p.arena.allocator.create(Node.SwitchElse); +// else_node.* = .{ +// .token = else_token, +// }; +// try list.append(&else_node.base); +// } else return null; +// +// const node = try Node.SwitchCase.alloc(&p.arena.allocator, list.items.len); +// node.* = .{ +// .items_len = list.items.len, +// .arrow_token = undefined, // set by caller +// .payload = null, +// .expr = undefined, // set by caller +// }; +// std.mem.copy(*Node, node.items(), list.items); +// return &node.base; +// } +// +// /// SwitchItem <- Expr (DOT3 Expr)? +// fn parseSwitchItem(p: *Parser) !?*Node { +// const expr = (try p.parseExpr()) orelse return null; +// if (p.eatToken(.Ellipsis3)) |token| { +// const range_end = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// const node = try p.arena.allocator.create(Node.SimpleInfixOp); +// node.* = .{ +// .base = Node{ .tag = .Range }, +// .op_token = token, +// .lhs = expr, +// .rhs = range_end, +// }; +// return &node.base; +// } +// return expr; +// } +// +// /// AssignOp +// /// <- ASTERISKEQUAL +// /// / SLASHEQUAL +// /// / PERCENTEQUAL +// /// / PLUSEQUAL +// /// / MINUSEQUAL +// /// / LARROW2EQUAL +// /// / RARROW2EQUAL +// /// / AMPERSANDEQUAL +// /// / CARETEQUAL +// /// / PIPEEQUAL +// /// / ASTERISKPERCENTEQUAL +// /// / PLUSPERCENTEQUAL +// /// / MINUSPERCENTEQUAL +// /// / EQUAL +// fn parseAssignOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .AsteriskEqual => .AssignMul, +// .SlashEqual 
=> .AssignDiv, +// .PercentEqual => .AssignMod, +// .PlusEqual => .AssignAdd, +// .MinusEqual => .AssignSub, +// .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft, +// .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight, +// .AmpersandEqual => .AssignBitAnd, +// .CaretEqual => .AssignBitXor, +// .PipeEqual => .AssignBitOr, +// .AsteriskPercentEqual => .AssignMulWrap, +// .PlusPercentEqual => .AssignAddWrap, +// .MinusPercentEqual => .AssignSubWrap, +// .Equal => .Assign, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// const node = try p.arena.allocator.create(Node.SimpleInfixOp); +// node.* = .{ +// .base = .{ .tag = op }, +// .op_token = token, +// .lhs = undefined, // set by caller +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// +// /// CompareOp +// /// <- EQUALEQUAL +// /// / EXCLAMATIONMARKEQUAL +// /// / LARROW +// /// / RARROW +// /// / LARROWEQUAL +// /// / RARROWEQUAL +// fn parseCompareOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .EqualEqual => .EqualEqual, +// .BangEqual => .BangEqual, +// .AngleBracketLeft => .LessThan, +// .AngleBracketRight => .GreaterThan, +// .AngleBracketLeftEqual => .LessOrEqual, +// .AngleBracketRightEqual => .GreaterOrEqual, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// return p.createInfixOp(token, op); +// } +// +// /// BitwiseOp +// /// <- AMPERSAND +// /// / CARET +// /// / PIPE +// /// / KEYWORD_orelse +// /// / KEYWORD_catch Payload? 
+// fn parseBitwiseOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .Ampersand => .BitAnd, +// .Caret => .BitXor, +// .Pipe => .BitOr, +// .Keyword_orelse => .OrElse, +// .Keyword_catch => { +// const payload = try p.parsePayload(); +// const node = try p.arena.allocator.create(Node.Catch); +// node.* = .{ +// .op_token = token, +// .lhs = undefined, // set by caller +// .rhs = undefined, // set by caller +// .payload = payload, +// }; +// return &node.base; +// }, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// return p.createInfixOp(token, op); +// } +// +// /// BitShiftOp +// /// <- LARROW2 +// /// / RARROW2 +// fn parseBitShiftOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .AngleBracketAngleBracketLeft => .BitShiftLeft, +// .AngleBracketAngleBracketRight => .BitShiftRight, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// return p.createInfixOp(token, op); +// } +// +// /// AdditionOp +// /// <- PLUS +// /// / MINUS +// /// / PLUS2 +// /// / PLUSPERCENT +// /// / MINUSPERCENT +// fn parseAdditionOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .Plus => .Add, +// .Minus => .Sub, +// .PlusPlus => .ArrayCat, +// .PlusPercent => .AddWrap, +// .MinusPercent => .SubWrap, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// return p.createInfixOp(token, op); +// } +// +// /// MultiplyOp +// /// <- PIPE2 +// /// / ASTERISK +// /// / SLASH +// /// / PERCENT +// /// / ASTERISK2 +// /// / ASTERISKPERCENT +// fn parseMultiplyOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// const op: Node.Tag = switch (p.token_ids[token]) { +// .PipePipe => .MergeErrorSets, +// .Asterisk => .Mul, +// .Slash => .Div, +// .Percent => .Mod, +// .AsteriskAsterisk => .ArrayMult, +// 
.AsteriskPercent => .MulWrap, +// else => { +// p.putBackToken(token); +// return null; +// }, +// }; +// +// return p.createInfixOp(token, op); +// } +// +// /// PrefixOp +// /// <- EXCLAMATIONMARK +// /// / MINUS +// /// / TILDE +// /// / MINUSPERCENT +// /// / AMPERSAND +// /// / KEYWORD_try +// /// / KEYWORD_await +// fn parsePrefixOp(p: *Parser) !?*Node { +// const token = p.nextToken(); +// switch (p.token_ids[token]) { +// .Bang => return p.allocSimplePrefixOp(.BoolNot, token), +// .Minus => return p.allocSimplePrefixOp(.Negation, token), +// .Tilde => return p.allocSimplePrefixOp(.BitNot, token), +// .MinusPercent => return p.allocSimplePrefixOp(.NegationWrap, token), +// .Ampersand => return p.allocSimplePrefixOp(.AddressOf, token), +// .Keyword_try => return p.allocSimplePrefixOp(.Try, token), +// .Keyword_await => return p.allocSimplePrefixOp(.Await, token), +// else => { +// p.putBackToken(token); +// return null; +// }, +// } +// } +// +// fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Tag, token: TokenIndex) !?*Node { +// const node = try p.arena.allocator.create(Node.SimplePrefixOp); +// node.* = .{ +// .base = .{ .tag = tag }, +// .op_token = token, +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// +// // TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on +// // pointers. Consider updating this rule: +// // ... +// // / ArrayTypeStart +// // / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* +// // / PtrTypeStart ... +// +// /// PrefixTypeOp +// /// <- QUESTIONMARK +// /// / KEYWORD_anyframe MINUSRARROW +// /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* +// /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? 
RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* +// fn parsePrefixTypeOp(p: *Parser) !?*Node { +// if (p.eatToken(.QuestionMark)) |token| { +// const node = try p.arena.allocator.create(Node.SimplePrefixOp); +// node.* = .{ +// .base = .{ .tag = .OptionalType }, +// .op_token = token, +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Keyword_anyframe)) |token| { +// const arrow = p.eatToken(.Arrow) orelse { +// p.putBackToken(token); +// return null; +// }; +// const node = try p.arena.allocator.create(Node.AnyFrameType); +// node.* = .{ +// .anyframe_token = token, +// .result = .{ +// .arrow_token = arrow, +// .return_type = undefined, // set by caller +// }, +// }; +// return &node.base; +// } +// +// if (try p.parsePtrTypeStart()) |node| { +// // If the token encountered was **, there will be two nodes instead of one. +// // The attributes should be applied to the rightmost operator. +// var ptr_info = if (node.cast(Node.PtrType)) |ptr_type| +// if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk) +// &ptr_type.rhs.cast(Node.PtrType).?.ptr_info +// else +// &ptr_type.ptr_info +// else if (node.cast(Node.SliceType)) |slice_type| +// &slice_type.ptr_info +// else +// unreachable; +// +// while (true) { +// if (p.eatToken(.Keyword_align)) |align_token| { +// const lparen = try p.expectToken(.LParen); +// const expr_node = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// // Optional bit range +// const bit_range = if (p.eatToken(.Colon)) |_| bit_range_value: { +// const range_start = try p.expectNode(parseIntegerLiteral, .{ +// .ExpectedIntegerLiteral = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.Colon); +// const range_end = try p.expectNode(parseIntegerLiteral, .{ +// .ExpectedIntegerLiteral = .{ .token = p.tok_i }, +// }); +// +// break :bit_range_value ast.PtrInfo.Align.BitRange{ +// .start = range_start, +// .end = range_end, +// }; +// } 
else null; +// _ = try p.expectToken(.RParen); +// +// if (ptr_info.align_info != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraAlignQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// +// ptr_info.align_info = ast.PtrInfo.Align{ +// .node = expr_node, +// .bit_range = bit_range, +// }; +// +// continue; +// } +// if (p.eatToken(.Keyword_const)) |const_token| { +// if (ptr_info.const_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraConstQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// ptr_info.const_token = const_token; +// continue; +// } +// if (p.eatToken(.Keyword_volatile)) |volatile_token| { +// if (ptr_info.volatile_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// ptr_info.volatile_token = volatile_token; +// continue; +// } +// if (p.eatToken(.Keyword_allowzero)) |allowzero_token| { +// if (ptr_info.allowzero_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// ptr_info.allowzero_token = allowzero_token; +// continue; +// } +// break; +// } +// +// return node; +// } +// +// if (try p.parseArrayTypeStart()) |node| { +// if (node.cast(Node.SliceType)) |slice_type| { +// // Collect pointer qualifiers in any order, but disallow duplicates +// while (true) { +// if (try p.parseByteAlign()) |align_expr| { +// if (slice_type.ptr_info.align_info != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraAlignQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// slice_type.ptr_info.align_info = ast.PtrInfo.Align{ +// .node = align_expr, +// .bit_range = null, +// }; +// continue; +// } +// if (p.eatToken(.Keyword_const)) |const_token| { +// if (slice_type.ptr_info.const_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraConstQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// 
slice_type.ptr_info.const_token = const_token; +// continue; +// } +// if (p.eatToken(.Keyword_volatile)) |volatile_token| { +// if (slice_type.ptr_info.volatile_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// slice_type.ptr_info.volatile_token = volatile_token; +// continue; +// } +// if (p.eatToken(.Keyword_allowzero)) |allowzero_token| { +// if (slice_type.ptr_info.allowzero_token != null) { +// try p.errors.append(p.gpa, .{ +// .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 }, +// }); +// continue; +// } +// slice_type.ptr_info.allowzero_token = allowzero_token; +// continue; +// } +// break; +// } +// } +// return node; +// } +// +// return null; +// } +// +// /// SuffixOp +// /// <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET +// /// / DOT IDENTIFIER +// /// / DOTASTERISK +// /// / DOTQUESTIONMARK +// fn parseSuffixOp(p: *Parser, lhs: *Node) !?*Node { +// if (p.eatToken(.LBracket)) |_| { +// const index_expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// +// if (p.eatToken(.Ellipsis2) != null) { +// const end_expr = try p.parseExpr(); +// const sentinel: ?*Node = if (p.eatToken(.Colon) != null) +// try p.parseExpr() +// else +// null; +// const rtoken = try p.expectToken(.RBracket); +// const node = try p.arena.allocator.create(Node.Slice); +// node.* = .{ +// .lhs = lhs, +// .rtoken = rtoken, +// .start = index_expr, +// .end = end_expr, +// .sentinel = sentinel, +// }; +// return &node.base; +// } +// +// const rtoken = try p.expectToken(.RBracket); +// const node = try p.arena.allocator.create(Node.ArrayAccess); +// node.* = .{ +// .lhs = lhs, +// .rtoken = rtoken, +// .index_expr = index_expr, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.PeriodAsterisk)) |period_asterisk| { +// const node = try p.arena.allocator.create(Node.SimpleSuffixOp); +// node.* = .{ +// .base = .{ .tag = .Deref }, +// .lhs 
= lhs, +// .rtoken = period_asterisk, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.Period)) |period| { +// if (try p.parseIdentifier()) |identifier| { +// const node = try p.arena.allocator.create(Node.SimpleInfixOp); +// node.* = .{ +// .base = Node{ .tag = .Period }, +// .op_token = period, +// .lhs = lhs, +// .rhs = identifier, +// }; +// return &node.base; +// } +// if (p.eatToken(.QuestionMark)) |question_mark| { +// const node = try p.arena.allocator.create(Node.SimpleSuffixOp); +// node.* = .{ +// .base = .{ .tag = .UnwrapOptional }, +// .lhs = lhs, +// .rtoken = question_mark, +// }; +// return &node.base; +// } +// try p.errors.append(p.gpa, .{ +// .ExpectedSuffixOp = .{ .token = p.tok_i }, +// }); +// return null; +// } +// +// return null; +// } +// +// /// FnCallArguments <- LPAREN ExprList RPAREN +// /// ExprList <- (Expr COMMA)* Expr? +// fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList { +// if (p.eatToken(.LParen) == null) return null; +// const list = try ListParseFn(*Node, parseExpr)(p); +// errdefer p.gpa.free(list); +// const rparen = try p.expectToken(.RParen); +// return AnnotatedParamList{ .list = list, .rparen = rparen }; +// } +// +// const AnnotatedParamList = struct { +// list: []*Node, +// rparen: TokenIndex, +// }; +// +// /// ArrayTypeStart <- LBRACKET Expr? 
RBRACKET +// fn parseArrayTypeStart(p: *Parser) !?*Node { +// const lbracket = p.eatToken(.LBracket) orelse return null; +// const expr = try p.parseExpr(); +// const sentinel = if (p.eatToken(.Colon)) |_| +// try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }) +// else +// null; +// const rbracket = try p.expectToken(.RBracket); +// +// if (expr) |len_expr| { +// if (sentinel) |s| { +// const node = try p.arena.allocator.create(Node.ArrayTypeSentinel); +// node.* = .{ +// .op_token = lbracket, +// .rhs = undefined, // set by caller +// .len_expr = len_expr, +// .sentinel = s, +// }; +// return &node.base; +// } else { +// const node = try p.arena.allocator.create(Node.ArrayType); +// node.* = .{ +// .op_token = lbracket, +// .rhs = undefined, // set by caller +// .len_expr = len_expr, +// }; +// return &node.base; +// } +// } +// +// const node = try p.arena.allocator.create(Node.SliceType); +// node.* = .{ +// .op_token = lbracket, +// .rhs = undefined, // set by caller +// .ptr_info = .{ .sentinel = sentinel }, +// }; +// return &node.base; +// } +// +// /// PtrTypeStart +// /// <- ASTERISK +// /// / ASTERISK2 +// /// / PTRUNKNOWN +// /// / PTRC +// fn parsePtrTypeStart(p: *Parser) !?*Node { +// if (p.eatToken(.Asterisk)) |asterisk| { +// const sentinel = if (p.eatToken(.Colon)) |_| +// try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }) +// else +// null; +// const node = try p.arena.allocator.create(Node.PtrType); +// node.* = .{ +// .op_token = asterisk, +// .rhs = undefined, // set by caller +// .ptr_info = .{ .sentinel = sentinel }, +// }; +// return &node.base; +// } +// +// if (p.eatToken(.AsteriskAsterisk)) |double_asterisk| { +// const node = try p.arena.allocator.create(Node.PtrType); +// node.* = .{ +// .op_token = double_asterisk, +// .rhs = undefined, // set by caller +// }; +// +// // Special case for **, which is its own token +// const child = try p.arena.allocator.create(Node.PtrType); 
+// child.* = .{ +// .op_token = double_asterisk, +// .rhs = undefined, // set by caller +// }; +// node.rhs = &child.base; +// +// return &node.base; +// } +// if (p.eatToken(.LBracket)) |lbracket| { +// const asterisk = p.eatToken(.Asterisk) orelse { +// p.putBackToken(lbracket); +// return null; +// }; +// if (p.eatToken(.Identifier)) |ident| { +// const token_loc = p.token_locs[ident]; +// const token_slice = p.source[token_loc.start..token_loc.end]; +// if (!std.mem.eql(u8, token_slice, "c")) { +// p.putBackToken(ident); +// } else { +// _ = try p.expectToken(.RBracket); +// const node = try p.arena.allocator.create(Node.PtrType); +// node.* = .{ +// .op_token = lbracket, +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// } +// const sentinel = if (p.eatToken(.Colon)) |_| +// try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }) +// else +// null; +// _ = try p.expectToken(.RBracket); +// const node = try p.arena.allocator.create(Node.PtrType); +// node.* = .{ +// .op_token = lbracket, +// .rhs = undefined, // set by caller +// .ptr_info = .{ .sentinel = sentinel }, +// }; +// return &node.base; +// } +// return null; +// } +// +// /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE +// fn parseContainerDeclAuto(p: *Parser) !?*Node { +// const container_decl_type = (try p.parseContainerDeclType()) orelse return null; +// const lbrace = try p.expectToken(.LBrace); +// const members = try p.parseContainerMembers(false); +// defer p.gpa.free(members); +// const rbrace = try p.expectToken(.RBrace); +// +// const members_len = @intCast(NodeIndex, members.len); +// const node = try Node.ContainerDecl.alloc(&p.arena.allocator, members_len); +// node.* = .{ +// .layout_token = null, +// .kind_token = container_decl_type.kind_token, +// .init_arg_expr = container_decl_type.init_arg_expr, +// .fields_and_decls_len = members_len, +// .lbrace_token = lbrace, +// .rbrace_token = rbrace, +// }; +// 
std.mem.copy(*Node, node.fieldsAndDecls(), members); +// return &node.base; +// } +// +// /// Holds temporary data until we are ready to construct the full ContainerDecl AST node. +// const ContainerDeclType = struct { +// kind_token: TokenIndex, +// init_arg_expr: Node.ContainerDecl.InitArg, +// }; +// +// /// ContainerDeclType +// /// <- KEYWORD_struct +// /// / KEYWORD_enum (LPAREN Expr RPAREN)? +// /// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)? +// fn parseContainerDeclType(p: *Parser) !?ContainerDeclType { +// const kind_token = p.nextToken(); +// +// const init_arg_expr = switch (p.token_ids[kind_token]) { +// .Keyword_struct => Node.ContainerDecl.InitArg{ .None = {} }, +// .Keyword_enum => blk: { +// if (p.eatToken(.LParen) != null) { +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// break :blk Node.ContainerDecl.InitArg{ .Type = expr }; +// } +// break :blk Node.ContainerDecl.InitArg{ .None = {} }; +// }, +// .Keyword_union => blk: { +// if (p.eatToken(.LParen) != null) { +// if (p.eatToken(.Keyword_enum) != null) { +// if (p.eatToken(.LParen) != null) { +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// _ = try p.expectToken(.RParen); +// break :blk Node.ContainerDecl.InitArg{ .Enum = expr }; +// } +// _ = try p.expectToken(.RParen); +// break :blk Node.ContainerDecl.InitArg{ .Enum = null }; +// } +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// break :blk Node.ContainerDecl.InitArg{ .Type = expr }; +// } +// break :blk Node.ContainerDecl.InitArg{ .None = {} }; +// }, +// else => { +// p.putBackToken(kind_token); +// return null; +// }, +// }; +// +// return ContainerDeclType{ +// .kind_token = kind_token, +// .init_arg_expr = init_arg_expr, +// }; +// } +// 
+// /// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN +// fn parseByteAlign(p: *Parser) !?*Node { +// _ = p.eatToken(.Keyword_align) orelse return null; +// _ = try p.expectToken(.LParen); +// const expr = try p.expectNode(parseExpr, .{ +// .ExpectedExpr = .{ .token = p.tok_i }, +// }); +// _ = try p.expectToken(.RParen); +// return expr; +// } +// +// /// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER? +// /// Only ErrorSetDecl parses an IdentifierList +// fn parseErrorTagList(p: *Parser) ![]*Node { +// return ListParseFn(*Node, parseErrorTag)(p); +// } +// +// /// SwitchProngList <- (SwitchProng COMMA)* SwitchProng? +// fn parseSwitchProngList(p: *Parser) ![]*Node { +// return ListParseFn(*Node, parseSwitchProng)(p); +// } +// +// /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem? +// fn parseAsmOutputList(p: *Parser) Error![]Node.Asm.Output { +// return ListParseFn(Node.Asm.Output, parseAsmOutputItem)(p); +// } +// +// /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem? +// fn parseAsmInputList(p: *Parser) Error![]Node.Asm.Input { +// return ListParseFn(Node.Asm.Input, parseAsmInputItem)(p); +// } +// +// /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl? 
+// fn parseParamDeclList(p: *Parser) ![]Node.FnProto.ParamDecl { +// return ListParseFn(Node.FnProto.ParamDecl, parseParamDecl)(p); +// } +// +// const NodeParseFn = fn (p: *Parser) Error!?*Node; +// +// fn ListParseFn(comptime E: type, comptime nodeParseFn: anytype) ParseFn([]E) { +// return struct { +// pub fn parse(p: *Parser) ![]E { +// var list = std.ArrayList(E).init(p.gpa); +// defer list.deinit(); +// +// while (try nodeParseFn(p)) |item| { +// try list.append(item); +// +// switch (p.token_ids[p.tok_i]) { +// .Comma => _ = p.nextToken(), +// // all possible delimiters +// .Colon, .RParen, .RBrace, .RBracket => break, +// else => { +// // this is likely just a missing comma, +// // continue parsing this list and give an error +// try p.errors.append(p.gpa, .{ +// .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, +// }); +// }, +// } +// } +// return list.toOwnedSlice(); +// } +// }.parse; +// } +// +// fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.Tag) NodeParseFn { +// return struct { +// pub fn parse(p: *Parser) Error!?*Node { +// const op_token = if (token == .Keyword_and) switch (p.token_ids[p.tok_i]) { +// .Keyword_and => p.nextToken(), +// .Invalid_ampersands => blk: { +// try p.errors.append(p.gpa, .{ +// .InvalidAnd = .{ .token = p.tok_i }, +// }); +// break :blk p.nextToken(); +// }, +// else => return null, +// } else p.eatToken(token) orelse return null; +// +// const node = try p.arena.allocator.create(Node.SimpleInfixOp); +// node.* = .{ +// .base = .{ .tag = op }, +// .op_token = op_token, +// .lhs = undefined, // set by caller +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// }.parse; +// } +// +// // Helper parsers not included in the grammar +// +// fn parseBuiltinCall(p: *Parser) !?*Node { +// const token = p.eatToken(.Builtin) orelse return null; +// const params = (try p.parseFnCallArguments()) orelse { +// try p.errors.append(p.gpa, .{ +// .ExpectedParamList = .{ .token = 
p.tok_i }, +// }); +// +// // lets pretend this was an identifier so we can continue parsing +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .Identifier }, +// .token = token, +// }; +// return &node.base; +// }; +// defer p.gpa.free(params.list); +// +// const node = try Node.BuiltinCall.alloc(&p.arena.allocator, params.list.len); +// node.* = .{ +// .builtin_token = token, +// .params_len = params.list.len, +// .rparen_token = params.rparen, +// }; +// std.mem.copy(*Node, node.params(), params.list); +// return &node.base; +// } +// +// fn parseErrorTag(p: *Parser) !?*Node { +// const doc_comments = try p.parseDocComment(); // no need to rewind on failure +// const token = p.eatToken(.Identifier) orelse return null; +// +// const node = try p.arena.allocator.create(Node.ErrorTag); +// node.* = .{ +// .doc_comments = doc_comments, +// .name_token = token, +// }; +// return &node.base; +// } +// +// fn parseIdentifier(p: *Parser) !?*Node { +// const token = p.eatToken(.Identifier) orelse return null; +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .Identifier }, +// .token = token, +// }; +// return &node.base; +// } +// +// fn parseAnyType(p: *Parser) !?*Node { +// const token = p.eatToken(.Keyword_anytype) orelse +// p.eatToken(.Keyword_var) orelse return null; // TODO remove in next release cycle +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .AnyType }, +// .token = token, +// }; +// return &node.base; +// } +// +// fn createLiteral(p: *Parser, tag: ast.Node.Tag, token: TokenIndex) !*Node { +// const result = try p.arena.allocator.create(Node.OneToken); +// result.* = .{ +// .base = .{ .tag = tag }, +// .token = token, +// }; +// return &result.base; +// } +// +// fn parseStringLiteralSingle(p: *Parser) !?*Node { +// if (p.eatToken(.StringLiteral)) |token| { +// const node = try 
p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .StringLiteral }, +// .token = token, +// }; +// return &node.base; +// } +// return null; +// } +// +// // string literal or multiline string literal +// fn parseStringLiteral(p: *Parser) !?*Node { +// if (try p.parseStringLiteralSingle()) |node| return node; +// +// if (p.eatToken(.MultilineStringLiteralLine)) |first_line| { +// const start_tok_i = p.tok_i; +// var tok_i = start_tok_i; +// var count: usize = 1; // including first_line +// while (true) : (tok_i += 1) { +// switch (p.token_ids[tok_i]) { +// .LineComment => continue, +// .MultilineStringLiteralLine => count += 1, +// else => break, +// } +// } +// +// const node = try Node.MultilineStringLiteral.alloc(&p.arena.allocator, count); +// node.* = .{ .lines_len = count }; +// const lines = node.lines(); +// tok_i = start_tok_i; +// lines[0] = first_line; +// count = 1; +// while (true) : (tok_i += 1) { +// switch (p.token_ids[tok_i]) { +// .LineComment => continue, +// .MultilineStringLiteralLine => { +// lines[count] = tok_i; +// count += 1; +// }, +// else => break, +// } +// } +// p.tok_i = tok_i; +// return &node.base; +// } +// +// return null; +// } +// +// fn parseIntegerLiteral(p: *Parser) !?*Node { +// const token = p.eatToken(.IntegerLiteral) orelse return null; +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .IntegerLiteral }, +// .token = token, +// }; +// return &node.base; +// } +// +// fn parseFloatLiteral(p: *Parser) !?*Node { +// const token = p.eatToken(.FloatLiteral) orelse return null; +// const node = try p.arena.allocator.create(Node.OneToken); +// node.* = .{ +// .base = .{ .tag = .FloatLiteral }, +// .token = token, +// }; +// return &node.base; +// } +// +// fn parseTry(p: *Parser) !?*Node { +// const token = p.eatToken(.Keyword_try) orelse return null; +// const node = try p.arena.allocator.create(Node.SimplePrefixOp); +// node.* = .{ +// .base = .{ 
.tag = .Try }, +// .op_token = token, +// .rhs = undefined, // set by caller +// }; +// return &node.base; +// } +// +// /// IfPrefix Body (KEYWORD_else Payload? Body)? +// fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node { +// const node = (try p.parseIfPrefix()) orelse return null; +// const if_prefix = node.cast(Node.If).?; +// +// if_prefix.body = try p.expectNode(bodyParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// +// const else_token = p.eatToken(.Keyword_else) orelse return node; +// const payload = try p.parsePayload(); +// const else_expr = try p.expectNode(bodyParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// const else_node = try p.arena.allocator.create(Node.Else); +// else_node.* = .{ +// .else_token = else_token, +// .payload = payload, +// .body = else_expr, +// }; +// if_prefix.@"else" = else_node; +// +// return node; +// } +// +// /// Eat a multiline doc comment +// fn parseDocComment(p: *Parser) !?*Node.DocComment { +// if (p.eatToken(.DocComment)) |first_line| { +// while (p.eatToken(.DocComment)) |_| {} +// const node = try p.arena.allocator.create(Node.DocComment); +// node.* = .{ .first_line = first_line }; +// return node; +// } +// return null; +// } +// +// fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool { +// return std.mem.indexOfScalar(u8, p.source[p.token_locs[token1].end..p.token_locs[token2].start], '\n') == null; +// } +// +// /// Eat a single-line doc comment on the same line as another node +// fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment { +// const comment_token = p.eatToken(.DocComment) orelse return null; +// if (p.tokensOnSameLine(after_token, comment_token)) { +// const node = try p.arena.allocator.create(Node.DocComment); +// node.* = .{ .first_line = comment_token }; +// return node; +// } +// p.putBackToken(comment_token); +// return null; +// } +// +// /// Op* Child +// fn parsePrefixOpExpr(p: *Parser, comptime 
opParseFn: NodeParseFn, comptime childParseFn: NodeParseFn) Error!?*Node { +// if (try opParseFn(p)) |first_op| { +// var rightmost_op = first_op; +// while (true) { +// switch (rightmost_op.tag) { +// .AddressOf, +// .Await, +// .BitNot, +// .BoolNot, +// .OptionalType, +// .Negation, +// .NegationWrap, +// .Resume, +// .Try, +// => { +// if (try opParseFn(p)) |rhs| { +// rightmost_op.cast(Node.SimplePrefixOp).?.rhs = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// .ArrayType => { +// if (try opParseFn(p)) |rhs| { +// rightmost_op.cast(Node.ArrayType).?.rhs = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// .ArrayTypeSentinel => { +// if (try opParseFn(p)) |rhs| { +// rightmost_op.cast(Node.ArrayTypeSentinel).?.rhs = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// .SliceType => { +// if (try opParseFn(p)) |rhs| { +// rightmost_op.cast(Node.SliceType).?.rhs = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// .PtrType => { +// var ptr_type = rightmost_op.cast(Node.PtrType).?; +// // If the token encountered was **, there will be two nodes +// if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk) { +// rightmost_op = ptr_type.rhs; +// ptr_type = rightmost_op.cast(Node.PtrType).?; +// } +// if (try opParseFn(p)) |rhs| { +// ptr_type.rhs = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// .AnyFrameType => { +// const prom = rightmost_op.cast(Node.AnyFrameType).?; +// if (try opParseFn(p)) |rhs| { +// prom.result.?.return_type = rhs; +// rightmost_op = rhs; +// } else break; +// }, +// else => unreachable, +// } +// } +// +// // If any prefix op existed, a child node on the RHS is required +// switch (rightmost_op.tag) { +// .AddressOf, +// .Await, +// .BitNot, +// .BoolNot, +// .OptionalType, +// .Negation, +// .NegationWrap, +// .Resume, +// .Try, +// => { +// const prefix_op = rightmost_op.cast(Node.SimplePrefixOp).?; +// prefix_op.rhs = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// 
}); +// }, +// .ArrayType => { +// const prefix_op = rightmost_op.cast(Node.ArrayType).?; +// prefix_op.rhs = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// }, +// .ArrayTypeSentinel => { +// const prefix_op = rightmost_op.cast(Node.ArrayTypeSentinel).?; +// prefix_op.rhs = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// }, +// .PtrType => { +// const prefix_op = rightmost_op.cast(Node.PtrType).?; +// prefix_op.rhs = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// }, +// .SliceType => { +// const prefix_op = rightmost_op.cast(Node.SliceType).?; +// prefix_op.rhs = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// }, +// .AnyFrameType => { +// const prom = rightmost_op.cast(Node.AnyFrameType).?; +// prom.result.?.return_type = try p.expectNode(childParseFn, .{ +// .InvalidToken = .{ .token = p.tok_i }, +// }); +// }, +// else => unreachable, +// } +// +// return first_op; +// } +// +// // Otherwise, the child node is optional +// return childParseFn(p); +// } +// +// /// Child (Op Child)* +// /// Child (Op Child)? 
+// fn parseBinOpExpr(
+//     p: *Parser,
+//     opParseFn: NodeParseFn,
+//     childParseFn: NodeParseFn,
+//     chain: enum {
+//         Once,
+//         Infinitely,
+//     },
+// ) Error!?*Node {
+//     var res = (try childParseFn(p)) orelse return null;
+//
+//     while (try opParseFn(p)) |node| {
+//         const right = try p.expectNode(childParseFn, .{
+//             .InvalidToken = .{ .token = p.tok_i },
+//         });
+//         const left = res;
+//         res = node;
+//
+//         if (node.castTag(.Catch)) |op| {
+//             op.lhs = left;
+//             op.rhs = right;
+//         } else if (node.cast(Node.SimpleInfixOp)) |op| {
+//             op.lhs = left;
+//             op.rhs = right;
+//         }
+//
+//         switch (chain) {
+//             .Once => break,
+//             .Infinitely => continue,
+//         }
+//     }
+//
+//     return res;
+// }
+//
+// fn createInfixOp(p: *Parser, op_token: TokenIndex, tag: Node.Tag) !*Node {
+//     const node = try p.arena.allocator.create(Node.SimpleInfixOp);
+//     node.* = .{
+//         .base = Node{ .tag = tag },
+//         .op_token = op_token,
+//         .lhs = undefined, // set by caller
+//         .rhs = undefined, // set by caller
+//     };
+//     return &node.base;
+// }
+//
+    // eatToken: consume and return the current token index only when its id
+    // matches `id`; otherwise leave the cursor in place and return null.
+    fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex {
+        return if (p.token_ids[p.tok_i] == id) p.nextToken() else null;
+    }
+
+    // expectToken: either returns the token or an error.
+    // On mismatch, expectTokenRecoverable has already appended an
+    // ExpectedToken error to p.errors before error.ParseError is returned.
+    fn expectToken(p: *Parser, id: Token.Id) Error!TokenIndex {
+        return (try p.expectTokenRecoverable(id)) orelse error.ParseError;
+    }
+
+    // expectTokenRecoverable: either returns the token or null if not the one expected.
+    // Also, appends the error inside p.errors.
+    fn expectTokenRecoverable(p: *Parser, id: Token.Id) !?TokenIndex {
+        const token = p.nextToken();
+        if (p.token_ids[token] != id) {
+            try p.errors.append(p.gpa, .{
+                .ExpectedToken = .{ .token = token, .expected_id = id },
+            });
+            // go back so that we can recover properly
+            p.putBackToken(token);
+            return null;
+        }
+        return token;
+    }
+
+    // nextToken: provide the TokenIndex of the current token, but increases the tok_i
+    // inside the Parser structure.
+    fn nextToken(p: *Parser) TokenIndex {
+        const result = p.tok_i;
+        p.tok_i += 1;
+        // Line comments are always skipped when advancing (see the loop below),
+        // so the token being consumed here can never be one.
+        assert(p.token_ids[result] != .LineComment);
+        if (p.tok_i >= p.token_ids.len) return result;
+
+        // Advance past any line comments so the parser never sees them.
+        while (true) {
+            if (p.token_ids[p.tok_i] != .LineComment) return result;
+            p.tok_i += 1;
+        }
+    }
+
+    // putBackToken: go back one token (comment-line tokens are skipped and ignored).
+    // Example: a function searching for a declaration reads a token
+    // "my-variable", so it puts the token back and returns.
+    // The caller now has the start of a declaration as its current token.
+    fn putBackToken(p: *Parser, putting_back: TokenIndex) void {
+        while (p.tok_i > 0) {
+            p.tok_i -= 1;
+            if (p.token_ids[p.tok_i] == .LineComment) continue;
+            // Only the most recently consumed token may be put back.
+            assert(putting_back == p.tok_i);
+            return;
+        }
+    }
+
+    /// TODO Delete this function. I don't like the inversion of control.
+    /// Runs parseFn and returns its node. If parseFn produced no node,
+    /// `err` has been recorded and error.ParseError is returned.
+    fn expectNode(
+        p: *Parser,
+        parseFn: NodeParseFn,
+        /// if parsing fails
+        err: AstError,
+    ) Error!*Node {
+        return (try p.expectNodeRecoverable(parseFn, err)) orelse return error.ParseError;
+    }
+
+    /// TODO Delete this function. I don't like the inversion of control.
+    /// Runs parseFn; on null, appends `err` to p.errors and returns null
+    /// so the caller can keep parsing (error recovery).
+    fn expectNodeRecoverable(
+        p: *Parser,
+        parseFn: NodeParseFn,
+        /// if parsing fails
+        err: AstError,
+    ) !?*Node {
+        return (try parseFn(p)) orelse {
+            try p.errors.append(p.gpa, err);
+            return null;
+        };
+    }
+};
+
+// Type of a parser function producing a T (or a parse Error).
+fn ParseFn(comptime T: type) type {
+    return fn (p: *Parser) Error!T;
+}
+
+test "std.zig.parser" {
+    _ = @import("parser_test.zig");
+}
diff --git a/src/parser_test.zig b/src/parser_test.zig
new file mode 100644
index 0000000..135fd44
--- /dev/null
+++ b/src/parser_test.zig
@@ -0,0 +1,127 @@
+// test "recovery: invalid parameter" {
+//     try testError(
+//         \\fn main() void {
+//         \\    a(comptime T: type)
+//         \\}
+//     , &[_]Error{
+//         .ExpectedToken,
+//     });
+// }
+
+const std = @import("std");
+const mem = std.mem;
+const warn = std.debug.warn;
+const io = std.io;
+const maxInt = std.math.maxInt;
+const process = std.process;
+const fs = std.fs;
+
+const ast = @import("ast.zig");
+
+const own_parser = @import("./parse.zig");
+
+// testParse: parse `source` and print every parse error to stderr, with the
+// offending line and a run of '~' markers under the bad token; fails with
+// error.ParseError when the tree contains any error.
+fn testParse(source: []const u8, allocator: *mem.Allocator) !void {
+    const stderr = io.getStdErr().outStream();
+
+    const tree = try own_parser.parse(allocator, source);
+    defer tree.deinit();
+
+    for (tree.errors) |*parse_error| {
+        const token = tree.token_locs[parse_error.loc()];
+        const loc = tree.tokenLocation(0, parse_error.loc());
+        try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
+        try tree.renderError(parse_error, stderr);
+        try stderr.print("\n{}\n", .{source[loc.line_start..loc.line_end]});
+        {
+            // Pad up to the error column.
+            var i: usize = 0;
+            while (i < loc.column) : (i += 1) {
+                try stderr.writeAll(" ");
+            }
+        }
+        {
+            // Underline the whole offending token.
+            const caret_count = token.end - token.start;
+            var i: usize = 0;
+            while (i < caret_count) : (i += 1) {
+                try stderr.writeAll("~");
+            }
+        }
+        try stderr.writeAll("\n");
+    }
+    if (tree.errors.len != 0) {
+        return error.ParseError;
+    }
+}
+
+// The enum tag type of ast.Error, so tests can name expected errors by tag.
+const Error = @TagType(ast.Error);
+
+// testError: parse `source` and check that exactly the expected error tags
+// were produced, in order.
+fn testError(source: []const u8, expected_errors: []const Error) !void {
+    const tree = try own_parser.parse(std.testing.allocator, source);
+    defer tree.deinit();
+
+    std.testing.expect(tree.errors.len == expected_errors.len);
+    for (expected_errors) |expected, i| {
+        std.testing.expect(expected == tree.errors[i]);
+    }
+}
+
+
+// nextArg: return args[idx] and advance idx, or null once the arguments
+// are exhausted.
+fn nextArg(args: [][]const u8, idx: *usize) ?[]const u8 {
+    if (idx.* >= args.len) return null;
+    defer idx.* += 1;
+    return args[idx.*];
+}
+
+// get_file_size: size in bytes of the file at `path`, obtained via stat().
+pub fn get_file_size(path: []const u8) !u64 {
+    var file = try fs.cwd().openFile(path, .{});
+    defer file.close();
+
+    // Stat the file to learn its size; the caller allocates the buffer.
+    var file_stat = try file.stat();
+    return file_stat.size;
+}
+
+pub fn parser_analyze() !void {
+    // 1. get an allocator.
+    // 2. get the file path.
+    // 3. get the file size, and allocate file_size+1 bytes.
+    // 4. get the content of the file.
+    // 5. perform the analysis, and print each element.
+
+    // Create an allocator, for the arguments and the file.
+    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+    defer arena.deinit();
+
+    const allocator = &arena.allocator;
+    var args = try process.argsAlloc(allocator);
+    defer process.argsFree(allocator, args);
+
+    // Get the file path.
+    var arg_idx: usize = 1; // Skipping the executable binary name.
+    const gui_file_path = nextArg(args, &arg_idx) orelse {
+        warn("Expected first argument to be path to gui file\n", .{});
+        return error.InvalidArgs;
+    };
+
+    // Get the file size.
+    const file_size = try get_file_size(gui_file_path);
+
+    // Allocate memory for the whole file plus one extra byte.
+    const buffer = try allocator.alloc(u8, file_size + 1); // Last value will be a null-byte.
+    buffer[file_size] = 0;
+    const content = try fs.cwd().readFile(gui_file_path, buffer);
+    // print("file content is: {}", .{content}); // Working.
+
+    // Tokenize and parse the file content.
+ // const tokens = try getAllTokens(allocator, content); + // for(tokens.items) |token| { + // print("{s:20} => {}\n", .{@tagName(token.id), buffer[token.loc.start..token.loc.end]}); + // } + + try testParse(content, allocator); +} + + +pub fn main() !void { + try parser_analyze(); +} + diff --git a/src/tokenizer.zig b/src/tokenizer.zig index 7b3c536..b160ce9 100644 --- a/src/tokenizer.zig +++ b/src/tokenizer.zig @@ -182,7 +182,6 @@ pub const Token = struct { .Tilde => "~", .Keyword_property => "property", - .Keyword_and => "and", .Keyword_false => "false", .Keyword_null => "null", .Keyword_true => "true", @@ -1629,12 +1628,12 @@ test "tokenizer - comments with literal tab" { }); } -//test "tokenizer - pipe and then invalid" { -// testTokenize("||=", &[_]Token.Id{ -// .PipePipe, -// .Equal, -// }); -//} +test "tokenizer - pipe and then invalid" { + testTokenize("||=", &[_]Token.Id{ + .PipePipe, + .Equal, + }); +} //test "tokenizer - line comment and doc comment" { // testTokenize("//", &[_]Token.Id{.LineComment});