Diffstat (limited to 'lib/std'):

-rw-r--r--  lib/std/zig/ast.zig    | 1152
-rw-r--r--  lib/std/zig/parse.zig  |  398
-rw-r--r--  lib/std/zig/render.zig |  456

3 files changed, 1003 insertions(+), 1003 deletions(-)
diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 6a56c0e242..0b6133e789 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -199,148 +199,148 @@ pub const Tree = struct { var end_offset: TokenIndex = 0; var n = node; while (true) switch (tags[n]) { - .Root => return 0, - - .UsingNamespace, - .TestDecl, - .ErrDefer, - .Defer, - .BoolNot, - .Negation, - .BitNot, - .NegationWrap, - .AddressOf, - .Try, - .Await, - .OptionalType, - .Switch, - .SwitchComma, - .IfSimple, - .If, - .Suspend, - .Resume, - .Continue, - .Break, - .Return, - .AnyFrameType, - .Identifier, - .AnyFrameLiteral, - .CharLiteral, - .IntegerLiteral, - .FloatLiteral, - .FalseLiteral, - .TrueLiteral, - .NullLiteral, - .UndefinedLiteral, - .UnreachableLiteral, - .StringLiteral, - .GroupedExpression, - .BuiltinCallTwo, - .BuiltinCallTwoComma, - .BuiltinCall, - .BuiltinCallComma, - .ErrorSetDecl, - .AnyType, - .Comptime, - .Nosuspend, - .AsmSimple, - .Asm, - .FnProtoSimple, - .FnProtoMulti, - .FnProtoOne, - .FnProto, - .ArrayType, - .ArrayTypeSentinel, - .ErrorValue, + .root => return 0, + + .@"usingnamespace", + .test_decl, + .@"errdefer", + .@"defer", + .bool_not, + .negation, + .bit_not, + .negation_wrap, + .address_of, + .@"try", + .@"await", + .optional_type, + .@"switch", + .switch_comma, + .if_simple, + .@"if", + .@"suspend", + .@"resume", + .@"continue", + .@"break", + .@"return", + .anyframe_type, + .identifier, + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .string_literal, + .grouped_expression, + .builtin_call_two, + .builtin_call_two_comma, + .builtin_call, + .builtin_call_comma, + .error_set_decl, + .@"anytype", + .@"comptime", + .@"nosuspend", + .asm_simple, + .@"asm", + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + .array_type, + .array_type_sentinel, + .error_value, => return main_tokens[n] - end_offset, - .ArrayInitDot, - .ArrayInitDotComma, - .ArrayInitDotTwo, - .ArrayInitDotTwoComma, - .StructInitDot, - .StructInitDotComma, - .StructInitDotTwo, - .StructInitDotTwoComma, - .EnumLiteral, + .array_init_dot, + .array_init_dot_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .enum_literal, => return main_tokens[n] - 1 - end_offset, - .Catch, - .FieldAccess, - .UnwrapOptional, - .EqualEqual, - .BangEqual, - .LessThan, - .GreaterThan, - .LessOrEqual, - .GreaterOrEqual, - .AssignMul, - .AssignDiv, - .AssignMod, - .AssignAdd, - .AssignSub, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitAnd, - .AssignBitXor, - .AssignBitOr, - .AssignMulWrap, - .AssignAddWrap, - .AssignSubWrap, - .Assign, - .MergeErrorSets, - .Mul, - .Div, - .Mod, - .ArrayMult, - .MulWrap, - .Add, - .Sub, - .ArrayCat, - .AddWrap, - .SubWrap, - .BitShiftLeft, - .BitShiftRight, - .BitAnd, - .BitXor, - .BitOr, - .OrElse, - .BoolAnd, - .BoolOr, - .SliceOpen, - .Slice, - .SliceSentinel, - .Deref, - .ArrayAccess, - .ArrayInitOne, - .ArrayInitOneComma, - .ArrayInit, - .ArrayInitComma, - .StructInitOne, - .StructInitOneComma, - .StructInit, - .StructInitComma, - .CallOne, - .CallOneComma, - .Call, - .CallComma, - .SwitchRange, - .FnDecl, - .ErrorUnion, + .@"catch", + .field_access, + .unwrap_optional, + .equal_equal, + .bang_equal, + .less_than, + .greater_than, + .less_or_equal, + .greater_or_equal, + .assign_mul, + .assign_div, + .assign_mod, + .assign_add, + 
.assign_sub, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_and, + .assign_bit_xor, + .assign_bit_or, + .assign_mul_wrap, + .assign_add_wrap, + .assign_sub_wrap, + .assign, + .merge_error_sets, + .mul, + .div, + .mod, + .array_mult, + .mul_wrap, + .add, + .sub, + .array_cat, + .add_wrap, + .sub_wrap, + .bit_shift_left, + .bit_shift_right, + .bit_and, + .bit_xor, + .bit_or, + .@"orelse", + .bool_and, + .bool_or, + .slice_open, + .slice, + .slice_sentinel, + .deref, + .array_access, + .array_init_one, + .array_init_one_comma, + .array_init, + .array_init_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init, + .struct_init_comma, + .call_one, + .call_one_comma, + .call, + .call_comma, + .switch_range, + .fn_decl, + .error_union, => n = datas[n].lhs, - .AsyncCallOne, - .AsyncCallOneComma, - .AsyncCall, - .AsyncCallComma, + .async_call_one, + .async_call_one_comma, + .async_call, + .async_call_comma, => { end_offset += 1; // async token n = datas[n].lhs; }, - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, + .container_field_init, + .container_field_align, + .container_field, => { const name_token = main_tokens[n]; if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) { @@ -349,10 +349,10 @@ pub const Tree = struct { return name_token - end_offset; }, - .GlobalVarDecl, - .LocalVarDecl, - .SimpleVarDecl, - .AlignedVarDecl, + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, => { var i = main_tokens[n]; // mut token while (i > 0) { @@ -372,10 +372,10 @@ pub const Tree = struct { return i - end_offset; }, - .Block, - .BlockSemicolon, - .BlockTwo, - .BlockTwoSemicolon, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, => { // Look for a label. const lbrace = main_tokens[n]; @@ -385,18 +385,18 @@ pub const Tree = struct { return lbrace - end_offset; }, - .ContainerDecl, - .ContainerDeclComma, - .ContainerDeclTwo, - .ContainerDeclTwoComma, - .ContainerDeclArg, - .ContainerDeclArgComma, - .TaggedUnion, - .TaggedUnionComma, - .TaggedUnionTwo, - .TaggedUnionTwoComma, - .TaggedUnionEnumTag, - .TaggedUnionEnumTagComma, + .container_decl, + .container_decl_comma, + .container_decl_two, + .container_decl_two_comma, + .container_decl_arg, + .container_decl_arg_comma, + .tagged_union, + .tagged_union_comma, + .tagged_union_two, + .tagged_union_two_comma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, => { const main_token = main_tokens[n]; switch (token_tags[main_token - 1]) { @@ -406,10 +406,10 @@ pub const Tree = struct { return main_token - end_offset; }, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .PtrTypeBitRange, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, => { const main_token = main_tokens[n]; return switch (token_tags[main_token]) { @@ -424,29 +424,29 @@ pub const Tree = struct { } - end_offset; }, - .SwitchCaseOne => { + .switch_case_one => { if (datas[n].lhs == 0) { return main_tokens[n] - 1 - end_offset; // else token } else { n = datas[n].lhs; } }, - .SwitchCase => { + .switch_case => { const extra = tree.extraData(datas[n].lhs, Node.SubRange); assert(extra.end - extra.start > 0); n = extra.start; }, - .AsmOutput, .AsmInput => { + .asm_output, .asm_input => { assert(token_tags[main_tokens[n] - 1] == .l_bracket); return main_tokens[n] - 1 - end_offset; }, - .WhileSimple, - .WhileCont, - .While, - .ForSimple, - .For, + .while_simple, + .while_cont, + .@"while", + .for_simple, + .@"for", => { const main_token = main_tokens[n]; 
return switch (token_tags[main_token - 1]) { @@ -465,115 +465,115 @@ pub const Tree = struct { var n = node; var end_offset: TokenIndex = 0; while (true) switch (tags[n]) { - .Root => return @intCast(TokenIndex, tree.tokens.len - 1), - - .UsingNamespace, - .BoolNot, - .Negation, - .BitNot, - .NegationWrap, - .AddressOf, - .Try, - .Await, - .OptionalType, - .Resume, - .Nosuspend, - .Comptime, + .root => return @intCast(TokenIndex, tree.tokens.len - 1), + + .@"usingnamespace", + .bool_not, + .negation, + .bit_not, + .negation_wrap, + .address_of, + .@"try", + .@"await", + .optional_type, + .@"resume", + .@"nosuspend", + .@"comptime", => n = datas[n].lhs, - .TestDecl, - .ErrDefer, - .Defer, - .Catch, - .EqualEqual, - .BangEqual, - .LessThan, - .GreaterThan, - .LessOrEqual, - .GreaterOrEqual, - .AssignMul, - .AssignDiv, - .AssignMod, - .AssignAdd, - .AssignSub, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitAnd, - .AssignBitXor, - .AssignBitOr, - .AssignMulWrap, - .AssignAddWrap, - .AssignSubWrap, - .Assign, - .MergeErrorSets, - .Mul, - .Div, - .Mod, - .ArrayMult, - .MulWrap, - .Add, - .Sub, - .ArrayCat, - .AddWrap, - .SubWrap, - .BitShiftLeft, - .BitShiftRight, - .BitAnd, - .BitXor, - .BitOr, - .OrElse, - .BoolAnd, - .BoolOr, - .AnyFrameType, - .ErrorUnion, - .IfSimple, - .WhileSimple, - .ForSimple, - .FnProtoSimple, - .FnProtoMulti, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .PtrTypeBitRange, - .ArrayType, - .SwitchCaseOne, - .SwitchCase, - .SwitchRange, + .test_decl, + .@"errdefer", + .@"defer", + .@"catch", + .equal_equal, + .bang_equal, + .less_than, + .greater_than, + .less_or_equal, + .greater_or_equal, + .assign_mul, + .assign_div, + .assign_mod, + .assign_add, + .assign_sub, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_and, + .assign_bit_xor, + .assign_bit_or, + .assign_mul_wrap, + .assign_add_wrap, + .assign_sub_wrap, + .assign, + .merge_error_sets, + .mul, + .div, + .mod, + .array_mult, + .mul_wrap, + .add, + .sub, + .array_cat, + .add_wrap, + .sub_wrap, + .bit_shift_left, + .bit_shift_right, + .bit_and, + .bit_xor, + .bit_or, + .@"orelse", + .bool_and, + .bool_or, + .anyframe_type, + .error_union, + .if_simple, + .while_simple, + .for_simple, + .fn_proto_simple, + .fn_proto_multi, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, + .array_type, + .switch_case_one, + .switch_case, + .switch_range, => n = datas[n].rhs, - .FieldAccess, - .UnwrapOptional, - .GroupedExpression, - .StringLiteral, - .ErrorSetDecl, - .AsmSimple, - .AsmOutput, - .AsmInput, - .ErrorValue, + .field_access, + .unwrap_optional, + .grouped_expression, + .string_literal, + .error_set_decl, + .asm_simple, + .asm_output, + .asm_input, + .error_value, => return datas[n].rhs + end_offset, - .AnyType, - .AnyFrameLiteral, - .CharLiteral, - .IntegerLiteral, - .FloatLiteral, - .FalseLiteral, - .TrueLiteral, - .NullLiteral, - .UndefinedLiteral, - .UnreachableLiteral, - .Identifier, - .Deref, - .EnumLiteral, + .@"anytype", + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .identifier, + .deref, + .enum_literal, => return main_tokens[n] + end_offset, - .Return => if (datas[n].lhs != 0) { + .@"return" => if (datas[n].lhs != 0) { n = datas[n].lhs; } else { return main_tokens[n] + end_offset; }, - .Call, .AsyncCall => { + .call, .async_call => { end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, 
Node.SubRange); if (params.end - params.start == 0) { @@ -581,7 +581,7 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, - .TaggedUnionEnumTag => { + .tagged_union_enum_tag => { const members = tree.extraData(datas[n].rhs, Node.SubRange); if (members.end - members.start == 0) { end_offset += 4; // for the rparen + rparen + lbrace + rbrace @@ -591,16 +591,16 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, - .CallComma, - .AsyncCallComma, - .TaggedUnionEnumTagComma, + .call_comma, + .async_call_comma, + .tagged_union_enum_tag_comma, => { end_offset += 2; // for the comma + rparen/rbrace const params = tree.extraData(datas[n].rhs, Node.SubRange); assert(params.end > params.start); n = tree.extra_data[params.end - 1]; // last parameter }, - .Switch => { + .@"switch" => { const cases = tree.extraData(datas[n].rhs, Node.SubRange); if (cases.end - cases.start == 0) { end_offset += 3; // rparen, lbrace, rbrace @@ -610,7 +610,7 @@ pub const Tree = struct { n = tree.extra_data[cases.end - 1]; // last case } }, - .ContainerDeclArg => { + .container_decl_arg => { const members = tree.extraData(datas[n].rhs, Node.SubRange); if (members.end - members.start == 0) { end_offset += 1; // for the rparen @@ -620,53 +620,53 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, - .Asm => { + .@"asm" => { const extra = tree.extraData(datas[n].rhs, Node.Asm); return extra.rparen + end_offset; }, - .ArrayInit, - .StructInit, + .array_init, + .struct_init, => { const elements = tree.extraData(datas[n].rhs, Node.SubRange); assert(elements.end - elements.start > 0); end_offset += 1; // for the rbrace n = tree.extra_data[elements.end - 1]; // last element }, - .ArrayInitComma, - .StructInitComma, - .ContainerDeclArgComma, - .SwitchComma, + .array_init_comma, + .struct_init_comma, + .container_decl_arg_comma, + .switch_comma, => { const members = tree.extraData(datas[n].rhs, Node.SubRange); assert(members.end - members.start > 0); end_offset += 2; // for the comma + rbrace n = tree.extra_data[members.end - 1]; // last parameter }, - .ArrayInitDot, - .StructInitDot, - .Block, - .ContainerDecl, - .TaggedUnion, - .BuiltinCall, + .array_init_dot, + .struct_init_dot, + .block, + .container_decl, + .tagged_union, + .builtin_call, => { assert(datas[n].rhs - datas[n].lhs > 0); end_offset += 1; // for the rbrace n = tree.extra_data[datas[n].rhs - 1]; // last statement }, - .ArrayInitDotComma, - .StructInitDotComma, - .BlockSemicolon, - .ContainerDeclComma, - .TaggedUnionComma, - .BuiltinCallComma, + .array_init_dot_comma, + .struct_init_dot_comma, + .block_semicolon, + .container_decl_comma, + .tagged_union_comma, + .builtin_call_comma, => { assert(datas[n].rhs - datas[n].lhs > 0); end_offset += 2; // for the comma/semicolon + rbrace/rparen n = tree.extra_data[datas[n].rhs - 1]; // last member }, - .CallOne, - .AsyncCallOne, - .ArrayAccess, + .call_one, + .async_call_one, + .array_access, => { end_offset += 1; // for the rparen/rbracket if (datas[n].rhs == 0) { @@ -674,12 +674,12 @@ pub const Tree = struct { } n = datas[n].rhs; }, - .ArrayInitDotTwo, - .BlockTwo, - .BuiltinCallTwo, - .StructInitDotTwo, - .ContainerDeclTwo, - .TaggedUnionTwo, + .array_init_dot_two, + .block_two, + .builtin_call_two, + .struct_init_dot_two, + .container_decl_two, + .tagged_union_two, => { if (datas[n].rhs != 0) { end_offset += 1; // for the rparen/rbrace @@ -689,25 +689,25 @@ pub const Tree = struct { n = datas[n].lhs; } else { 
switch (tags[n]) { - .ArrayInitDotTwo, - .BlockTwo, - .StructInitDotTwo, + .array_init_dot_two, + .block_two, + .struct_init_dot_two, => end_offset += 1, // rbrace - .BuiltinCallTwo, - .ContainerDeclTwo, + .builtin_call_two, + .container_decl_two, => end_offset += 2, // lparen/lbrace + rparen/rbrace - .TaggedUnionTwo => end_offset += 5, // (enum) {} + .tagged_union_two => end_offset += 5, // (enum) {} else => unreachable, } return main_tokens[n] + end_offset; } }, - .ArrayInitDotTwoComma, - .BuiltinCallTwoComma, - .BlockTwoSemicolon, - .StructInitDotTwoComma, - .ContainerDeclTwoComma, - .TaggedUnionTwoComma, + .array_init_dot_two_comma, + .builtin_call_two_comma, + .block_two_semicolon, + .struct_init_dot_two_comma, + .container_decl_two_comma, + .tagged_union_two_comma, => { end_offset += 2; // for the comma/semicolon + rbrace/rparen if (datas[n].rhs != 0) { @@ -718,7 +718,7 @@ pub const Tree = struct { unreachable; } }, - .SimpleVarDecl => { + .simple_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -728,7 +728,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .AlignedVarDecl => { + .aligned_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -739,7 +739,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .GlobalVarDecl => { + .global_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { @@ -758,7 +758,7 @@ pub const Tree = struct { } } }, - .LocalVarDecl => { + .local_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { @@ -774,7 +774,7 @@ pub const Tree = struct { } } }, - .ContainerFieldInit => { + .container_field_init => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -783,7 +783,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .ContainerFieldAlign => { + .container_field_align => { if (datas[n].rhs != 0) { end_offset += 1; // for the rparen n = datas[n].rhs; @@ -793,7 +793,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .ContainerField => { + .container_field => { const extra = tree.extraData(datas[n].rhs, Node.ContainerField); if (extra.value_expr != 0) { n = extra.value_expr; @@ -807,8 +807,8 @@ pub const Tree = struct { } }, - .ArrayInitOne, - .StructInitOne, + .array_init_one, + .struct_init_one, => { end_offset += 1; // rbrace if (datas[n].rhs == 0) { @@ -817,37 +817,37 @@ pub const Tree = struct { n = datas[n].rhs; } }, - .SliceOpen, - .CallOneComma, - .AsyncCallOneComma, - .ArrayInitOneComma, - .StructInitOneComma, + .slice_open, + .call_one_comma, + .async_call_one_comma, + .array_init_one_comma, + .struct_init_one_comma, => { end_offset += 2; // ellipsis2 + rbracket, or comma + rparen n = datas[n].rhs; assert(n != 0); }, - .Slice => { + .slice => { const extra = tree.extraData(datas[n].rhs, Node.Slice); assert(extra.end != 0); // should have used SliceOpen end_offset += 1; // rbracket n = extra.end; }, - .SliceSentinel => { + .slice_sentinel => { const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel); assert(extra.sentinel != 0); // should have used Slice end_offset += 1; // rbracket n = extra.sentinel; }, - .Continue => { + .@"continue" => { if (datas[n].lhs != 0) { return datas[n].lhs + end_offset; } else { return main_tokens[n] + end_offset; } }, - .Break => { + .@"break" => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -856,14 +856,14 @@ pub const Tree = struct { return main_tokens[n] + 
end_offset; } }, - .FnDecl => { + .fn_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { n = datas[n].lhs; } }, - .FnProtoOne => { + .fn_proto_one => { const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); // linksection, callconv, align can appear in any order, so we // find the last one here. @@ -897,7 +897,7 @@ pub const Tree = struct { n = max_node; end_offset += max_offset; }, - .FnProto => { + .fn_proto => { const extra = tree.extraData(datas[n].lhs, Node.FnProto); // linksection, callconv, align can appear in any order, so we // find the last one here. @@ -931,29 +931,29 @@ pub const Tree = struct { n = max_node; end_offset += max_offset; }, - .WhileCont => { + .while_cont => { const extra = tree.extraData(datas[n].rhs, Node.WhileCont); assert(extra.then_expr != 0); n = extra.then_expr; }, - .While => { + .@"while" => { const extra = tree.extraData(datas[n].rhs, Node.While); assert(extra.else_expr != 0); n = extra.else_expr; }, - .If, .For => { + .@"if", .@"for" => { const extra = tree.extraData(datas[n].rhs, Node.If); assert(extra.else_expr != 0); n = extra.else_expr; }, - .Suspend => { + .@"suspend" => { if (datas[n].lhs != 0) { n = datas[n].lhs; } else { return main_tokens[n] + end_offset; } }, - .ArrayTypeSentinel => { + .array_type_sentinel => { const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel); n = extra.elem_type; }, @@ -967,7 +967,7 @@ pub const Tree = struct { } pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .GlobalVarDecl); + assert(tree.nodes.items(.tag)[node] == .global_var_decl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.GlobalVarDecl); return tree.fullVarDecl(.{ @@ -980,7 +980,7 @@ pub const Tree = struct { } pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .LocalVarDecl); + assert(tree.nodes.items(.tag)[node] == .local_var_decl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.LocalVarDecl); return tree.fullVarDecl(.{ @@ -993,7 +993,7 @@ pub const Tree = struct { } pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .SimpleVarDecl); + assert(tree.nodes.items(.tag)[node] == .simple_var_decl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ .type_node = data.lhs, @@ -1005,7 +1005,7 @@ pub const Tree = struct { } pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .AlignedVarDecl); + assert(tree.nodes.items(.tag)[node] == .aligned_var_decl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ .type_node = 0, @@ -1017,7 +1017,7 @@ pub const Tree = struct { } pub fn ifSimple(tree: Tree, node: Node.Index) full.If { - assert(tree.nodes.items(.tag)[node] == .IfSimple); + assert(tree.nodes.items(.tag)[node] == .if_simple); const data = tree.nodes.items(.data)[node]; return tree.fullIf(.{ .cond_expr = data.lhs, @@ -1028,7 +1028,7 @@ pub const Tree = struct { } pub fn ifFull(tree: Tree, node: Node.Index) full.If { - assert(tree.nodes.items(.tag)[node] == .If); + assert(tree.nodes.items(.tag)[node] == .@"if"); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.If); return tree.fullIf(.{ @@ -1040,7 +1040,7 @@ pub const Tree = struct { } pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == 
.ContainerField); + assert(tree.nodes.items(.tag)[node] == .container_field); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.ContainerField); return tree.fullContainerField(.{ @@ -1052,7 +1052,7 @@ pub const Tree = struct { } pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == .ContainerFieldInit); + assert(tree.nodes.items(.tag)[node] == .container_field_init); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ .name_token = tree.nodes.items(.main_token)[node], @@ -1063,7 +1063,7 @@ pub const Tree = struct { } pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == .ContainerFieldAlign); + assert(tree.nodes.items(.tag)[node] == .container_field_align); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ .name_token = tree.nodes.items(.main_token)[node], @@ -1074,7 +1074,7 @@ pub const Tree = struct { } pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoSimple); + assert(tree.nodes.items(.tag)[node] == .fn_proto_simple); const data = tree.nodes.items(.data)[node]; buffer[0] = data.lhs; const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1]; @@ -1089,7 +1089,7 @@ pub const Tree = struct { } pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoMulti); + assert(tree.nodes.items(.tag)[node] == .fn_proto_multi); const data = tree.nodes.items(.data)[node]; const params_range = tree.extraData(data.lhs, Node.SubRange); const params = tree.extra_data[params_range.start..params_range.end]; @@ -1104,7 +1104,7 @@ pub const Tree = struct { } pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoOne); + assert(tree.nodes.items(.tag)[node] == .fn_proto_one); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProtoOne); buffer[0] = extra.param; @@ -1120,7 +1120,7 @@ pub const Tree = struct { } pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProto); + assert(tree.nodes.items(.tag)[node] == .fn_proto); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProto); const params = tree.extra_data[extra.params_start..extra.params_end]; @@ -1135,8 +1135,8 @@ pub const Tree = struct { } pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitOne or - tree.nodes.items(.tag)[node] == .StructInitOneComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_one or + tree.nodes.items(.tag)[node] == .struct_init_one_comma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; @@ -1148,8 +1148,8 @@ pub const Tree = struct { } pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitDotTwo or - tree.nodes.items(.tag)[node] == .StructInitDotTwoComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or + tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const fields = if 
(data.rhs != 0) @@ -1166,8 +1166,8 @@ pub const Tree = struct { } pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitDot or - tree.nodes.items(.tag)[node] == .StructInitDotComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_dot or + tree.nodes.items(.tag)[node] == .struct_init_dot_comma); const data = tree.nodes.items(.data)[node]; return tree.fullStructInit(.{ .lbrace = tree.nodes.items(.main_token)[node], @@ -1177,8 +1177,8 @@ pub const Tree = struct { } pub fn structInit(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInit or - tree.nodes.items(.tag)[node] == .StructInitComma); + assert(tree.nodes.items(.tag)[node] == .struct_init or + tree.nodes.items(.tag)[node] == .struct_init_comma); const data = tree.nodes.items(.data)[node]; const fields_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullStructInit(.{ @@ -1189,8 +1189,8 @@ pub const Tree = struct { } pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitOne or - tree.nodes.items(.tag)[node] == .ArrayInitOneComma); + assert(tree.nodes.items(.tag)[node] == .array_init_one or + tree.nodes.items(.tag)[node] == .array_init_one_comma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; @@ -1204,8 +1204,8 @@ pub const Tree = struct { } pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitDotTwo or - tree.nodes.items(.tag)[node] == .ArrayInitDotTwoComma); + assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or + tree.nodes.items(.tag)[node] == .array_init_dot_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const elements = if (data.rhs != 0) @@ -1224,8 +1224,8 @@ pub const Tree = struct { } pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitDot or - tree.nodes.items(.tag)[node] == .ArrayInitDotComma); + assert(tree.nodes.items(.tag)[node] == .array_init_dot or + tree.nodes.items(.tag)[node] == .array_init_dot_comma); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1237,8 +1237,8 @@ pub const Tree = struct { } pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInit or - tree.nodes.items(.tag)[node] == .ArrayInitComma); + assert(tree.nodes.items(.tag)[node] == .array_init or + tree.nodes.items(.tag)[node] == .array_init_comma); const data = tree.nodes.items(.data)[node]; const elem_range = tree.extraData(data.rhs, Node.SubRange); return .{ @@ -1251,7 +1251,7 @@ pub const Tree = struct { } pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType { - assert(tree.nodes.items(.tag)[node] == .ArrayType); + assert(tree.nodes.items(.tag)[node] == .array_type); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1264,7 +1264,7 @@ pub const Tree = struct { } pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType { - assert(tree.nodes.items(.tag)[node] == .ArrayTypeSentinel); + assert(tree.nodes.items(.tag)[node] == .array_type_sentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel); return .{ @@ -1278,7 +1278,7 @@ pub const Tree = struct { } pub fn 
ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeAligned); + assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1291,7 +1291,7 @@ pub const Tree = struct { } pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeSentinel); + assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1304,7 +1304,7 @@ pub const Tree = struct { } pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrType); + assert(tree.nodes.items(.tag)[node] == .ptr_type); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrType); return tree.fullPtrType(.{ @@ -1318,7 +1318,7 @@ pub const Tree = struct { } pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeBitRange); + assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange); return tree.fullPtrType(.{ @@ -1332,7 +1332,7 @@ pub const Tree = struct { } pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .SliceOpen); + assert(tree.nodes.items(.tag)[node] == .slice_open); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1346,7 +1346,7 @@ pub const Tree = struct { } pub fn slice(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .Slice); + assert(tree.nodes.items(.tag)[node] == .slice); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.Slice); return .{ @@ -1361,7 +1361,7 @@ pub const Tree = struct { } pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .SliceSentinel); + assert(tree.nodes.items(.tag)[node] == .slice_sentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.SliceSentinel); return .{ @@ -1376,8 +1376,8 @@ pub const Tree = struct { } pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or - tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); + assert(tree.nodes.items(.tag)[node] == .container_decl_two or + tree.nodes.items(.tag)[node] == .container_decl_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1395,8 +1395,8 @@ pub const Tree = struct { } pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDecl or - tree.nodes.items(.tag)[node] == .ContainerDeclComma); + assert(tree.nodes.items(.tag)[node] == .container_decl or + tree.nodes.items(.tag)[node] == .container_decl_comma); const data = tree.nodes.items(.data)[node]; return tree.fullContainerDecl(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1407,8 +1407,8 @@ pub const Tree = struct { } pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg or - tree.nodes.items(.tag)[node] == 
.ContainerDeclArgComma); + assert(tree.nodes.items(.tag)[node] == .container_decl_arg or + tree.nodes.items(.tag)[node] == .container_decl_arg_comma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullContainerDecl(.{ @@ -1420,8 +1420,8 @@ pub const Tree = struct { } pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo or - tree.nodes.items(.tag)[node] == .TaggedUnionTwoComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union_two or + tree.nodes.items(.tag)[node] == .tagged_union_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1440,8 +1440,8 @@ pub const Tree = struct { } pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnion or - tree.nodes.items(.tag)[node] == .TaggedUnionComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union or + tree.nodes.items(.tag)[node] == .tagged_union_comma); const data = tree.nodes.items(.data)[node]; const main_token = tree.nodes.items(.main_token)[node]; return tree.fullContainerDecl(.{ @@ -1453,8 +1453,8 @@ pub const Tree = struct { } pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag or - tree.nodes.items(.tag)[node] == .TaggedUnionEnumTagComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or + tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_comma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); const main_token = tree.nodes.items(.main_token)[node]; @@ -1765,7 +1765,7 @@ pub const Tree = struct { } const outputs_end: usize = for (info.items) |item, i| { switch (node_tags[item]) { - .AsmOutput => continue, + .asm_output => continue, else => break i, } } else info.items.len; @@ -2162,7 +2162,7 @@ pub const Error = union(enum) { pub fn render(self: ExpectedCall, tree: Tree, stream: anytype) !void { const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {s}", .{ + return stream.print("expected " ++ @tagName(Node.Tag.call) ++ ", found {s}", .{ @tagName(node_tag), }); } @@ -2173,8 +2173,8 @@ pub const Error = union(enum) { pub fn render(self: ExpectedCallOrFnProto, tree: Tree, stream: anytype) !void { const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++ - @tagName(Node.Tag.FnProto) ++ ", found {s}", .{@tagName(node_tag)}); + return stream.print("expected " ++ @tagName(Node.Tag.call) ++ " or " ++ + @tagName(Node.Tag.fn_proto) ++ ", found {s}", .{@tagName(node_tag)}); } }; @@ -2305,433 +2305,433 @@ pub const Node = struct { /// Tree.lastToken() pub const Tag = enum { /// sub_list[lhs...rhs] - Root, + root, /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`. - UsingNamespace, + @"usingnamespace", /// lhs is test name token (must be string literal), if any. /// rhs is the body node. - TestDecl, + test_decl, /// lhs is the index into extra_data. /// rhs is the initialization expression, if any. /// main_token is `var` or `const`. - GlobalVarDecl, + global_var_decl, /// `var a: x align(y) = rhs` /// lhs is the index into extra_data. /// main_token is `var` or `const`. 
- LocalVarDecl, + local_var_decl, /// `var a: lhs = rhs`. lhs and rhs may be unused. /// Can be local or global. /// main_token is `var` or `const`. - SimpleVarDecl, + simple_var_decl, /// `var a align(lhs) = rhs`. lhs and rhs may be unused. /// Can be local or global. /// main_token is `var` or `const`. - AlignedVarDecl, + aligned_var_decl, /// lhs is the identifier token payload if any, /// rhs is the deferred expression. - ErrDefer, + @"errdefer", /// lhs is unused. /// rhs is the deferred expression. - Defer, + @"defer", /// lhs catch rhs /// lhs catch |err| rhs /// main_token is the catch /// payload is determined by looking at the prev tokens before rhs. - Catch, + @"catch", /// `lhs.a`. main_token is the dot. rhs is the identifier token index. - FieldAccess, + field_access, /// `lhs.?`. main_token is the dot. rhs is the `?` token index. - UnwrapOptional, + unwrap_optional, /// `lhs == rhs`. main_token is op. - EqualEqual, + equal_equal, /// `lhs != rhs`. main_token is op. - BangEqual, + bang_equal, /// `lhs < rhs`. main_token is op. - LessThan, + less_than, /// `lhs > rhs`. main_token is op. - GreaterThan, + greater_than, /// `lhs <= rhs`. main_token is op. - LessOrEqual, + less_or_equal, /// `lhs >= rhs`. main_token is op. - GreaterOrEqual, + greater_or_equal, /// `lhs *= rhs`. main_token is op. - AssignMul, + assign_mul, /// `lhs /= rhs`. main_token is op. - AssignDiv, + assign_div, /// `lhs *= rhs`. main_token is op. - AssignMod, + assign_mod, /// `lhs += rhs`. main_token is op. - AssignAdd, + assign_add, /// `lhs -= rhs`. main_token is op. - AssignSub, + assign_sub, /// `lhs <<= rhs`. main_token is op. - AssignBitShiftLeft, + assign_bit_shift_left, /// `lhs >>= rhs`. main_token is op. - AssignBitShiftRight, + assign_bit_shift_right, /// `lhs &= rhs`. main_token is op. - AssignBitAnd, + assign_bit_and, /// `lhs ^= rhs`. main_token is op. - AssignBitXor, + assign_bit_xor, /// `lhs |= rhs`. main_token is op. - AssignBitOr, + assign_bit_or, /// `lhs *%= rhs`. main_token is op. - AssignMulWrap, + assign_mul_wrap, /// `lhs +%= rhs`. main_token is op. - AssignAddWrap, + assign_add_wrap, /// `lhs -%= rhs`. main_token is op. - AssignSubWrap, + assign_sub_wrap, /// `lhs = rhs`. main_token is op. - Assign, + assign, /// `lhs || rhs`. main_token is the `||`. - MergeErrorSets, + merge_error_sets, /// `lhs * rhs`. main_token is the `*`. - Mul, + mul, /// `lhs / rhs`. main_token is the `/`. - Div, + div, /// `lhs % rhs`. main_token is the `%`. - Mod, + mod, /// `lhs ** rhs`. main_token is the `**`. - ArrayMult, + array_mult, /// `lhs *% rhs`. main_token is the `*%`. - MulWrap, + mul_wrap, /// `lhs + rhs`. main_token is the `+`. - Add, + add, /// `lhs - rhs`. main_token is the `-`. - Sub, + sub, /// `lhs ++ rhs`. main_token is the `++`. - ArrayCat, + array_cat, /// `lhs +% rhs`. main_token is the `+%`. - AddWrap, + add_wrap, /// `lhs -% rhs`. main_token is the `-%`. - SubWrap, + sub_wrap, /// `lhs << rhs`. main_token is the `<<`. - BitShiftLeft, + bit_shift_left, /// `lhs >> rhs`. main_token is the `>>`. - BitShiftRight, + bit_shift_right, /// `lhs & rhs`. main_token is the `&`. - BitAnd, + bit_and, /// `lhs ^ rhs`. main_token is the `^`. - BitXor, + bit_xor, /// `lhs | rhs`. main_token is the `|`. - BitOr, + bit_or, /// `lhs orelse rhs`. main_token is the `orelse`. - OrElse, + @"orelse", /// `lhs and rhs`. main_token is the `and`. - BoolAnd, + bool_and, /// `lhs or rhs`. main_token is the `or`. - BoolOr, + bool_or, /// `op lhs`. rhs unused. main_token is op. 
- BoolNot, + bool_not, /// `op lhs`. rhs unused. main_token is op. - Negation, + negation, /// `op lhs`. rhs unused. main_token is op. - BitNot, + bit_not, /// `op lhs`. rhs unused. main_token is op. - NegationWrap, + negation_wrap, /// `op lhs`. rhs unused. main_token is op. - AddressOf, + address_of, /// `op lhs`. rhs unused. main_token is op. - Try, + @"try", /// `op lhs`. rhs unused. main_token is op. - Await, + @"await", /// `?lhs`. rhs unused. main_token is the `?`. - OptionalType, + optional_type, /// `[lhs]rhs`. lhs can be omitted to make it a slice. - ArrayType, - /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`. - ArrayTypeSentinel, + array_type, + /// `[lhs:a]b`. `array_type_sentinel[rhs]`. + array_type_sentinel, /// `[*]align(lhs) rhs`. lhs can be omitted. /// `*align(lhs) rhs`. lhs can be omitted. /// `[]rhs`. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeAligned, + ptr_type_aligned, /// `[*:lhs]rhs`. lhs can be omitted. /// `*rhs`. /// `[:lhs]rhs`. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeSentinel, - /// lhs is index into PtrType. rhs is the element type expression. + ptr_type_sentinel, + /// lhs is index into ptr_type. rhs is the element type expression. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrType, - /// lhs is index into PtrTypeBitRange. rhs is the element type expression. + ptr_type, + /// lhs is index into ptr_type_bit_range. rhs is the element type expression. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeBitRange, + ptr_type_bit_range, /// `lhs[rhs..]` /// main_token is the lbracket. - SliceOpen, + slice_open, /// `lhs[b..c]`. rhs is index into Slice /// main_token is the lbracket. - Slice, + slice, /// `lhs[b..c :d]`. rhs is index into SliceSentinel /// main_token is the lbracket. - SliceSentinel, + slice_sentinel, /// `lhs.*`. rhs is unused. - Deref, + deref, /// `lhs[rhs]`. - ArrayAccess, + array_access, /// `lhs{rhs}`. rhs can be omitted. - ArrayInitOne, + array_init_one, /// `lhs{rhs,}`. rhs can *not* be omitted - ArrayInitOneComma, + array_init_one_comma, /// `.{lhs, rhs}`. lhs and rhs can be omitted. - ArrayInitDotTwo, - /// Same as `ArrayInitDotTwo` except there is known to be a trailing comma + array_init_dot_two, + /// Same as `array_init_dot_two` except there is known to be a trailing comma /// before the final rbrace. - ArrayInitDotTwoComma, + array_init_dot_two_comma, /// `.{a, b}`. `sub_list[lhs..rhs]`. - ArrayInitDot, - /// Same as `ArrayInitDot` except there is known to be a trailing comma + array_init_dot, + /// Same as `array_init_dot` except there is known to be a trailing comma /// before the final rbrace. - ArrayInitDotComma, + array_init_dot_comma, /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. - ArrayInit, - /// Same as `ArrayInit` except there is known to be a trailing comma + array_init, + /// Same as `array_init` except there is known to be a trailing comma /// before the final rbrace. 
- ArrayInitComma, + array_init_comma, /// `lhs{.a = rhs}`. rhs can be omitted making it empty. /// main_token is the lbrace. - StructInitOne, + struct_init_one, /// `lhs{.a = rhs,}`. rhs can *not* be omitted. /// main_token is the lbrace. - StructInitOneComma, + struct_init_one_comma, /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. /// main_token is the lbrace. /// No trailing comma before the rbrace. - StructInitDotTwo, - /// Same as `StructInitDotTwo` except there is known to be a trailing comma + struct_init_dot_two, + /// Same as `struct_init_dot_two` except there is known to be a trailing comma /// before the final rbrace. - StructInitDotTwoComma, + struct_init_dot_two_comma, /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. /// main_token is the lbrace. - StructInitDot, - /// Same as `StructInitDot` except there is known to be a trailing comma + struct_init_dot, + /// Same as `struct_init_dot` except there is known to be a trailing comma /// before the final rbrace. - StructInitDotComma, + struct_init_dot_comma, /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. /// lhs can be omitted which means `.{.a = b, .c = d}`. /// main_token is the lbrace. - StructInit, - /// Same as `StructInit` except there is known to be a trailing comma + struct_init, + /// Same as `struct_init` except there is known to be a trailing comma /// before the final rbrace. - StructInitComma, + struct_init_comma, /// `lhs(rhs)`. rhs can be omitted. - CallOne, + call_one, /// `lhs(rhs,)`. rhs can be omitted. - CallOneComma, + call_one_comma, /// `async lhs(rhs)`. rhs can be omitted. - AsyncCallOne, + async_call_one, /// `async lhs(rhs,)`. - AsyncCallOneComma, + async_call_one_comma, /// `lhs(a, b, c)`. `SubRange[rhs]`. /// main_token is the `(`. - Call, + call, /// `lhs(a, b, c,)`. `SubRange[rhs]`. /// main_token is the `(`. - CallComma, + call_comma, /// `async lhs(a, b, c)`. `SubRange[rhs]`. /// main_token is the `(`. - AsyncCall, + async_call, /// `async lhs(a, b, c,)`. `SubRange[rhs]`. /// main_token is the `(`. - AsyncCallComma, + async_call_comma, /// `switch(lhs) {}`. `SubRange[rhs]`. - Switch, - /// Same as Switch except there is known to be a trailing comma + @"switch", + /// Same as switch except there is known to be a trailing comma /// before the final rbrace - SwitchComma, + switch_comma, /// `lhs => rhs`. If lhs is omitted it means `else`. /// main_token is the `=>` - SwitchCaseOne, + switch_case_one, /// `a, b, c => rhs`. `SubRange[lhs]`. /// main_token is the `=>` - SwitchCase, + switch_case, /// `lhs...rhs`. - SwitchRange, + switch_range, /// `while (lhs) rhs`. /// `while (lhs) |x| rhs`. - WhileSimple, + while_simple, /// `while (lhs) : (a) b`. `WhileCont[rhs]`. /// `while (lhs) : (a) b`. `WhileCont[rhs]`. - WhileCont, + while_cont, /// `while (lhs) : (a) b else c`. `While[rhs]`. /// `while (lhs) |x| : (a) b else c`. `While[rhs]`. /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`. - While, + @"while", /// `for (lhs) rhs`. - ForSimple, + for_simple, /// `for (lhs) a else b`. `if_list[rhs]`. - For, + @"for", /// `if (lhs) rhs`. /// `if (lhs) |a| rhs`. - IfSimple, + if_simple, /// `if (lhs) a else b`. `If[rhs]`. /// `if (lhs) |x| a else b`. `If[rhs]`. /// `if (lhs) |x| a else |y| b`. `If[rhs]`. - If, + @"if", /// `suspend lhs`. lhs can be omitted. rhs is unused. - Suspend, + @"suspend", /// `resume lhs`. rhs is unused. - Resume, + @"resume", /// `continue`. lhs is token index of label if any. rhs is unused. 
- Continue, + @"continue", /// `break :lhs rhs` /// both lhs and rhs may be omitted. - Break, + @"break", /// `return lhs`. lhs can be omitted. rhs is unused. - Return, + @"return", /// `fn(a: lhs) rhs`. lhs can be omitted. /// anytype and ... parameters are omitted from the AST tree. - FnProtoSimple, + fn_proto_simple, /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. /// anytype and ... parameters are omitted from the AST tree. - FnProtoMulti, + fn_proto_multi, /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`. /// zero or one parameters. /// anytype and ... parameters are omitted from the AST tree. - FnProtoOne, + fn_proto_one, /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`. /// anytype and ... parameters are omitted from the AST tree. - FnProto, - /// lhs is the FnProto. + fn_proto, + /// lhs is the fn_proto. /// rhs is the function body block if non-zero. - /// if rhs is zero, the funtion decl has no body (e.g. an extern function) - FnDecl, + /// if rhs is zero, the function decl has no body (e.g. an extern function) + fn_decl, /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. - AnyFrameType, + anyframe_type, /// Both lhs and rhs unused. - AnyFrameLiteral, + anyframe_literal, /// Both lhs and rhs unused. - CharLiteral, + char_literal, /// Both lhs and rhs unused. - IntegerLiteral, + integer_literal, /// Both lhs and rhs unused. - FloatLiteral, + float_literal, /// Both lhs and rhs unused. - FalseLiteral, + false_literal, /// Both lhs and rhs unused. - TrueLiteral, + true_literal, /// Both lhs and rhs unused. - NullLiteral, + null_literal, /// Both lhs and rhs unused. - UndefinedLiteral, + undefined_literal, /// Both lhs and rhs unused. - UnreachableLiteral, + unreachable_literal, /// Both lhs and rhs unused. /// Most identifiers will not have explicit AST nodes, however for expressions /// which could be one of many different kinds of AST nodes, there will be an - /// Identifier AST node for it. - Identifier, + /// identifier AST node for it. + identifier, /// lhs is the dot token index, rhs unused, main_token is the identifier. - EnumLiteral, + enum_literal, /// main_token is the first token index (redundant with lhs) /// lhs is the first token index; rhs is the last token index. - /// Could be a series of MultilineStringLiteralLine tokens, or a single - /// StringLiteral token. - StringLiteral, + /// Could be a series of multiline_string_literal_line tokens, or a single + /// string_literal token. + string_literal, /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. - GroupedExpression, + grouped_expression, /// `@a(lhs, rhs)`. lhs and rhs may be omitted. - BuiltinCallTwo, - /// Same as BuiltinCallTwo but there is known to be a trailing comma before the rparen. - BuiltinCallTwoComma, + builtin_call_two, + /// Same as builtin_call_two but there is known to be a trailing comma before the rparen. + builtin_call_two_comma, /// `@a(b, c)`. `sub_list[lhs..rhs]`. - BuiltinCall, - /// Same as BuiltinCall but there is known to be a trailing comma before the rparen. - BuiltinCallComma, + builtin_call, + /// Same as builtin_call but there is known to be a trailing comma before the rparen. + builtin_call_comma, /// `error{a, b}`. /// rhs is the rbrace, lhs is unused. - ErrorSetDecl, + error_set_decl, /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. 
- ContainerDecl, + container_decl, /// Same as ContainerDecl but there is known to be a trailing comma before the rbrace. - ContainerDeclComma, + container_decl_comma, /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`. /// lhs or rhs can be omitted. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. - ContainerDeclTwo, + container_decl_two, /// Same as ContainerDeclTwo except there is known to be a trailing comma /// before the rbrace. - ContainerDeclTwoComma, + container_decl_two_comma, /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`. - ContainerDeclArg, - /// Same as ContainerDeclArg but there is known to be a trailing comma before the rbrace. - ContainerDeclArgComma, + container_decl_arg, + /// Same as container_decl_arg but there is known to be a trailing comma before the rbrace. + container_decl_arg_comma, /// `union(enum) {}`. `sub_list[lhs..rhs]`. /// Note that tagged unions with explicitly provided enums are represented - /// by `ContainerDeclArg`. - TaggedUnion, - /// Same as TaggedUnion but there is known to be a trailing comma before the rbrace. - TaggedUnionComma, + /// by `container_decl_arg`. + tagged_union, + /// Same as tagged_union but there is known to be a trailing comma before the rbrace. + tagged_union_comma, /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted. /// Note that tagged unions with explicitly provided enums are represented - /// by `ContainerDeclArg`. - TaggedUnionTwo, - /// Same as TaggedUnionTwo but there is known to be a trailing comma before the rbrace. - TaggedUnionTwoComma, + /// by `container_decl_arg`. + tagged_union_two, + /// Same as tagged_union_two but there is known to be a trailing comma before the rbrace. + tagged_union_two_comma, /// `union(enum(lhs)) {}`. `SubRange[rhs]`. - TaggedUnionEnumTag, - /// Same as TaggedUnionEnumTag but there is known to be a trailing comma + tagged_union_enum_tag, + /// Same as tagged_union_enum_tag but there is known to be a trailing comma /// before the rbrace. - TaggedUnionEnumTagComma, + tagged_union_enum_tag_comma, /// `a: lhs = rhs,`. lhs and rhs can be omitted. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. - ContainerFieldInit, + container_field_init, /// `a: lhs align(rhs),`. rhs can be omitted. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. - ContainerFieldAlign, + container_field_align, /// `a: lhs align(c) = d,`. `container_field_list[rhs]`. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. - ContainerField, + container_field, /// `anytype`. both lhs and rhs unused. /// Used by `ContainerField`. - AnyType, + @"anytype", /// `comptime lhs`. rhs unused. - Comptime, + @"comptime", /// `nosuspend lhs`. rhs unused. - Nosuspend, + @"nosuspend", /// `{lhs rhs}`. rhs or lhs can be omitted. /// main_token points at the lbrace. - BlockTwo, - /// Same as BlockTwo but there is known to be a semicolon before the rbrace. - BlockTwoSemicolon, + block_two, + /// Same as block_two but there is known to be a semicolon before the rbrace. + block_two_semicolon, /// `{}`. `sub_list[lhs..rhs]`. /// main_token points at the lbrace. - Block, - /// Same as Block but there is known to be a semicolon before the rbrace. - BlockSemicolon, + block, + /// Same as block but there is known to be a semicolon before the rbrace. + block_semicolon, /// `asm(lhs)`. rhs is the token index of the rparen. 
- AsmSimple, + asm_simple, /// `asm(lhs, a)`. `Asm[rhs]`. - Asm, + @"asm", /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen. /// `[a] "b" (-> lhs)`. rhs is token index of the rparen. /// main_token is `a`. - AsmOutput, + asm_output, /// `[a] "b" (lhs)`. rhs is token index of the rparen. /// main_token is `a`. - AsmInput, + asm_input, /// `error.a`. lhs is token index of `.`. rhs is token index of `a`. - ErrorValue, + error_value, /// `lhs!rhs`. main_token is the `!`. - ErrorUnion, + error_union, pub fn isContainerField(tag: Tag) bool { return switch (tag) { - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, + .container_field_init, + .container_field_align, + .container_field, => true, else => false, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 855f889794..6eb617910c 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -57,7 +57,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { // Root node must be index 0. // Root <- skip ContainerMembers eof parser.nodes.appendAssumeCapacity(.{ - .tag = .Root, + .tag = .root, .main_token = 0, .data = .{ .lhs = undefined, @@ -251,7 +251,7 @@ const Parser = struct { }; if (block != 0) { const comptime_node = try p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = comptime_token, .data = .{ .lhs = block, @@ -477,7 +477,7 @@ const Parser = struct { const block_node = try p.parseBlock(); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); return p.addNode(.{ - .tag = .TestDecl, + .tag = .test_decl, .main_token = test_token, .data = .{ .lhs = name_token orelse 0, @@ -517,7 +517,7 @@ const Parser = struct { const semicolon_token = p.nextToken(); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ - .tag = .FnDecl, + .tag = .fn_decl, .main_token = p.nodes.items(.main_token)[fn_proto], .data = .{ .lhs = fn_proto, @@ -529,7 +529,7 @@ const Parser = struct { const body_block = try p.parseBlock(); assert(body_block != 0); return p.addNode(.{ - .tag = .FnDecl, + .tag = .fn_decl, .main_token = p.nodes.items(.main_token)[fn_proto], .data = .{ .lhs = fn_proto, @@ -587,7 +587,7 @@ const Parser = struct { const semicolon_token = try p.expectToken(.semicolon); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ - .tag = .UsingNamespace, + .tag = .@"usingnamespace", .main_token = usingnamespace_token, .data = .{ .lhs = expr, @@ -627,7 +627,7 @@ const Parser = struct { if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { switch (params) { .zero_or_one => |param| return p.addNode(.{ - .tag = .FnProtoSimple, + .tag = .fn_proto_simple, .main_token = fn_token, .data = .{ .lhs = param, @@ -637,7 +637,7 @@ const Parser = struct { .multi => |list| { const span = try p.listToSpan(list); return p.addNode(.{ - .tag = .FnProtoMulti, + .tag = .fn_proto_multi, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ @@ -652,7 +652,7 @@ const Parser = struct { } switch (params) { .zero_or_one => |param| return p.addNode(.{ - .tag = .FnProtoOne, + .tag = .fn_proto_one, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.FnProtoOne{ @@ -667,7 +667,7 @@ const Parser = struct { .multi => |list| { const span = try p.listToSpan(list); return p.addNode(.{ - .tag = .FnProto, + .tag = .fn_proto, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.FnProto{ @@ -698,7 +698,7 @@ const Parser = struct { if (section_node == 0) { if (align_node == 0) { return p.addNode(.{ - .tag = 
.SimpleVarDecl, + .tag = .simple_var_decl, .main_token = mut_token, .data = .{ .lhs = type_node, @@ -707,7 +707,7 @@ const Parser = struct { }); } else if (type_node == 0) { return p.addNode(.{ - .tag = .AlignedVarDecl, + .tag = .aligned_var_decl, .main_token = mut_token, .data = .{ .lhs = align_node, @@ -716,7 +716,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .LocalVarDecl, + .tag = .local_var_decl, .main_token = mut_token, .data = .{ .lhs = try p.addExtra(Node.LocalVarDecl{ @@ -729,7 +729,7 @@ const Parser = struct { } } else { return p.addNode(.{ - .tag = .GlobalVarDecl, + .tag = .global_var_decl, .main_token = mut_token, .data = .{ .lhs = try p.addExtra(Node.GlobalVarDecl{ @@ -753,7 +753,7 @@ const Parser = struct { if (p.eatToken(.colon)) |_| { if (p.eatToken(.keyword_anytype)) |anytype_tok| { type_expr = try p.addNode(.{ - .tag = .AnyType, + .tag = .@"anytype", .main_token = anytype_tok, .data = .{ .lhs = undefined, @@ -770,7 +770,7 @@ const Parser = struct { if (align_expr == 0) { return p.addNode(.{ - .tag = .ContainerFieldInit, + .tag = .container_field_init, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -779,7 +779,7 @@ const Parser = struct { }); } else if (value_expr == 0) { return p.addNode(.{ - .tag = .ContainerFieldAlign, + .tag = .container_field_align, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -788,7 +788,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .ContainerField, + .tag = .container_field, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -833,7 +833,7 @@ const Parser = struct { if (comptime_token) |token| { return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = token, .data = .{ .lhs = try p.expectBlockExprStatement(), @@ -845,7 +845,7 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .keyword_nosuspend => { return p.addNode(.{ - .tag = .Nosuspend, + .tag = .@"nosuspend", .main_token = p.nextToken(), .data = .{ .lhs = try p.expectBlockExprStatement(), @@ -860,7 +860,7 @@ const Parser = struct { else try p.expectBlockExprStatement(); return p.addNode(.{ - .tag = .Suspend, + .tag = .@"suspend", .main_token = token, .data = .{ .lhs = block_expr, @@ -869,7 +869,7 @@ const Parser = struct { }); }, .keyword_defer => return p.addNode(.{ - .tag = .Defer, + .tag = .@"defer", .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -877,7 +877,7 @@ const Parser = struct { }, }), .keyword_errdefer => return p.addNode(.{ - .tag = .ErrDefer, + .tag = .@"errdefer", .main_token = p.nextToken(), .data = .{ .lhs = try p.parsePayload(), @@ -947,7 +947,7 @@ const Parser = struct { } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ - .tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -963,7 +963,7 @@ const Parser = struct { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } return p.addNode(.{ - .tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -974,7 +974,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); return p.addNode(.{ - .tag = .If, + .tag = .@"if", .main_token = if_token, .data = .{ .lhs = condition, @@ -1041,7 +1041,7 @@ const Parser = struct { } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1057,7 +1057,7 @@ const Parser = struct { return p.fail(.{ .ExpectedSemiOrElse = .{ 
.token = p.tok_i } }); } return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1066,7 +1066,7 @@ const Parser = struct { }); }; return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1103,7 +1103,7 @@ const Parser = struct { if (p.eatToken(.semicolon)) |_| { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1112,7 +1112,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1133,7 +1133,7 @@ const Parser = struct { } if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1142,7 +1142,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1157,7 +1157,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -1233,20 +1233,20 @@ const Parser = struct { if (expr == 0) return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .asterisk_equal => .AssignMul, - .slash_equal => .AssignDiv, - .percent_equal => .AssignMod, - .plus_equal => .AssignAdd, - .minus_equal => .AssignSub, - .angle_bracket_angle_bracket_left_equal => .AssignBitShiftLeft, - .angle_bracket_angle_bracket_right_equal => .AssignBitShiftRight, - .ampersand_equal => .AssignBitAnd, - .caret_equal => .AssignBitXor, - .pipe_equal => .AssignBitOr, - .asterisk_percent_equal => .AssignMulWrap, - .plus_percent_equal => .AssignAddWrap, - .minus_percent_equal => .AssignSubWrap, - .equal => .Assign, + .asterisk_equal => .assign_mul, + .slash_equal => .assign_div, + .percent_equal => .assign_mod, + .plus_equal => .assign_add, + .minus_equal => .assign_sub, + .angle_bracket_angle_bracket_left_equal => .assign_bit_shift_left, + .angle_bracket_angle_bracket_right_equal => .assign_bit_shift_right, + .ampersand_equal => .assign_bit_and, + .caret_equal => .assign_bit_xor, + .pipe_equal => .assign_bit_or, + .asterisk_percent_equal => .assign_mul_wrap, + .plus_percent_equal => .assign_add_wrap, + .minus_percent_equal => .assign_sub_wrap, + .equal => .assign, else => return expr, }; return p.addNode(.{ @@ -1295,7 +1295,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .BoolOr, + .tag = .bool_or, .main_token = or_token, .data = .{ .lhs = res, @@ -1322,7 +1322,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .BoolAnd, + .tag = .bool_and, .main_token = and_token, .data = .{ .lhs = res, @@ -1348,12 +1348,12 @@ const Parser = struct { if (expr == 0) return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .equal_equal => .EqualEqual, - .bang_equal => .BangEqual, - .angle_bracket_left => .LessThan, - .angle_bracket_right => .GreaterThan, - .angle_bracket_left_equal => .LessOrEqual, - .angle_bracket_right_equal => .GreaterOrEqual, + .equal_equal => .equal_equal, + .bang_equal => .bang_equal, + .angle_bracket_left => .less_than, + .angle_bracket_right => .greater_than, + 
.angle_bracket_left_equal => .less_or_equal, + .angle_bracket_right_equal => .greater_or_equal, else => return expr, }; return p.addNode(.{ @@ -1379,10 +1379,10 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .ampersand => .BitAnd, - .caret => .BitXor, - .pipe => .BitOr, - .keyword_orelse => .OrElse, + .ampersand => .bit_and, + .caret => .bit_xor, + .pipe => .bit_or, + .keyword_orelse => .@"orelse", .keyword_catch => { const catch_token = p.nextToken(); _ = try p.parsePayload(); @@ -1391,7 +1391,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .Catch, + .tag = .@"catch", .main_token = catch_token, .data = .{ .lhs = res, @@ -1432,8 +1432,8 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .angle_bracket_angle_bracket_left => .BitShiftLeft, - .angle_bracket_angle_bracket_right => .BitShiftRight, + .angle_bracket_angle_bracket_left => .bit_shift_left, + .angle_bracket_angle_bracket_right => .bit_shift_right, else => return res, }; res = try p.addNode(.{ @@ -1469,11 +1469,11 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .plus => .Add, - .minus => .Sub, - .plus_plus => .ArrayCat, - .plus_percent => .AddWrap, - .minus_percent => .SubWrap, + .plus => .add, + .minus => .sub, + .plus_plus => .array_cat, + .plus_percent => .add_wrap, + .minus_percent => .sub_wrap, else => return res, }; res = try p.addNode(.{ @@ -1509,12 +1509,12 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .pipe_pipe => .MergeErrorSets, - .asterisk => .Mul, - .slash => .Div, - .percent => .Mod, - .asterisk_asterisk => .ArrayMult, - .asterisk_percent => .MulWrap, + .pipe_pipe => .merge_error_sets, + .asterisk => .mul, + .slash => .div, + .percent => .mod, + .asterisk_asterisk => .array_mult, + .asterisk_percent => .mul_wrap, else => return res, }; res = try p.addNode(.{ @@ -1547,13 +1547,13 @@ const Parser = struct { /// / KEYWORD_await fn parsePrefixExpr(p: *Parser) Error!Node.Index { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .bang => .BoolNot, - .minus => .Negation, - .tilde => .BitNot, - .minus_percent => .NegationWrap, - .ampersand => .AddressOf, - .keyword_try => .Try, - .keyword_await => .Await, + .bang => .bool_not, + .minus => .negation, + .tilde => .bit_not, + .minus_percent => .negation_wrap, + .ampersand => .address_of, + .keyword_try => .@"try", + .keyword_await => .@"await", else => return p.parsePrimaryExpr(), }; return p.addNode(.{ @@ -1588,7 +1588,7 @@ const Parser = struct { fn parseTypeExpr(p: *Parser) Error!Node.Index { switch (p.token_tags[p.tok_i]) { .question_mark => return p.addNode(.{ - .tag = .OptionalType, + .tag = .optional_type, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectTypeExpr(), @@ -1597,7 +1597,7 @@ const Parser = struct { }), .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) { .arrow => return p.addNode(.{ - .tag = .AnyFrameType, + .tag = .anyframe_type, .main_token = p.nextToken(), .data = .{ .lhs = p.nextToken(), @@ -1612,7 +1612,7 @@ const Parser = struct { const elem_type = try p.expectTypeExpr(); if (mods.bit_range_start == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1621,7 +1621,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = 
.ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1642,7 +1642,7 @@ const Parser = struct { const inner: Node.Index = inner: { if (mods.bit_range_start == 0) { break :inner try p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1651,7 +1651,7 @@ const Parser = struct { }); } else { break :inner try p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = .ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1666,7 +1666,7 @@ const Parser = struct { } }; return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = 0, @@ -1698,7 +1698,7 @@ const Parser = struct { if (mods.bit_range_start == 0) { if (sentinel == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1707,7 +1707,7 @@ const Parser = struct { }); } else if (mods.align_node == 0) { return p.addNode(.{ - .tag = .PtrTypeSentinel, + .tag = .ptr_type_sentinel, .main_token = asterisk, .data = .{ .lhs = sentinel, @@ -1716,7 +1716,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .ptr_type, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrType{ @@ -1729,7 +1729,7 @@ const Parser = struct { } } else { return p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = .ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1762,7 +1762,7 @@ const Parser = struct { if (len_expr == 0) { if (sentinel == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = lbracket, .data = .{ .lhs = mods.align_node, @@ -1771,7 +1771,7 @@ const Parser = struct { }); } else if (mods.align_node == 0) { return p.addNode(.{ - .tag = .PtrTypeSentinel, + .tag = .ptr_type_sentinel, .main_token = lbracket, .data = .{ .lhs = sentinel, @@ -1780,7 +1780,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .ptr_type, .main_token = lbracket, .data = .{ .lhs = try p.addExtra(Node.PtrType{ @@ -1800,7 +1800,7 @@ const Parser = struct { } if (sentinel == 0) { return p.addNode(.{ - .tag = .ArrayType, + .tag = .array_type, .main_token = lbracket, .data = .{ .lhs = len_expr, @@ -1809,7 +1809,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .ArrayTypeSentinel, + .tag = .array_type_sentinel, .main_token = lbracket, .data = .{ .lhs = len_expr, @@ -1854,7 +1854,7 @@ const Parser = struct { .keyword_break => { p.tok_i += 1; return p.addNode(.{ - .tag = .Break, + .tag = .@"break", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseBreakLabel(), @@ -1865,7 +1865,7 @@ const Parser = struct { .keyword_continue => { p.tok_i += 1; return p.addNode(.{ - .tag = .Continue, + .tag = .@"continue", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseBreakLabel(), @@ -1876,7 +1876,7 @@ const Parser = struct { .keyword_comptime => { p.tok_i += 1; return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1887,7 +1887,7 @@ const Parser = struct { .keyword_nosuspend => { p.tok_i += 1; return p.addNode(.{ - .tag = .Nosuspend, + .tag = .@"nosuspend", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1898,7 +1898,7 @@ const Parser = struct { .keyword_resume => { p.tok_i += 1; return p.addNode(.{ - .tag 
= .Resume, + .tag = .@"resume", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1909,7 +1909,7 @@ const Parser = struct { .keyword_return => { p.tok_i += 1; return p.addNode(.{ - .tag = .Return, + .tag = .@"return", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseExpr(), @@ -1976,7 +1976,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = .BlockTwo, + .tag = .block_two, .main_token = lbrace, .data = .{ .lhs = 0, @@ -1989,7 +1989,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ - .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, + .tag = if (semicolon) .block_two_semicolon else .block_two, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -2001,7 +2001,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ - .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, + .tag = if (semicolon) .block_two_semicolon else .block_two, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -2025,7 +2025,7 @@ const Parser = struct { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; const statements_span = try p.listToSpan(statements.items); return p.addNode(.{ - .tag = if (semicolon) .BlockSemicolon else .Block, + .tag = if (semicolon) .block_semicolon else .block, .main_token = lbrace, .data = .{ .lhs = statements_span.start, @@ -2046,7 +2046,7 @@ const Parser = struct { const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2056,7 +2056,7 @@ const Parser = struct { }; const else_expr = try p.expectExpr(); return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2082,7 +2082,7 @@ const Parser = struct { const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -2091,7 +2091,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -2106,7 +2106,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectExpr(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -2134,7 +2134,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = .StructInitOne, + .tag = .struct_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2147,7 +2147,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .StructInitOneComma else .StructInitOne, + .tag = if (comma_one != null) .struct_init_one_comma else .struct_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2192,7 +2192,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .comma) .StructInitComma else .StructInit, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .struct_init_comma else .struct_init, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2208,7 +2208,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); 
if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .ArrayInitOneComma else .ArrayInitOne, + .tag = if (comma_one != null) .array_init_one_comma else .array_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2239,7 +2239,7 @@ const Parser = struct { _ = try p.expectToken(.r_brace); const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (trailing_comma) .ArrayInitComma else .ArrayInit, + .tag = if (trailing_comma) .array_init_comma else .array_init, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2257,7 +2257,7 @@ const Parser = struct { if (suffix_expr == 0) return null_node; const bang = p.eatToken(.bang) orelse return suffix_expr; return p.addNode(.{ - .tag = .ErrorUnion, + .tag = .error_union, .main_token = bang, .data = .{ .lhs = suffix_expr, @@ -2286,7 +2286,7 @@ const Parser = struct { }; if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .AsyncCallOne, + .tag = .async_call_one, .main_token = lparen, .data = .{ .lhs = res, @@ -2298,7 +2298,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma, + .tag = if (comma_one == null) .async_call_one else .async_call_one_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2325,7 +2325,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); return p.addNode(.{ - .tag = .AsyncCallComma, + .tag = .async_call_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2342,7 +2342,7 @@ const Parser = struct { .r_paren => { const span = try p.listToSpan(param_list.items); return p.addNode(.{ - .tag = .AsyncCall, + .tag = .async_call, .main_token = lparen, .data = .{ .lhs = res, @@ -2387,7 +2387,7 @@ const Parser = struct { const lparen = p.eatToken(.l_paren) orelse return res; if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ - .tag = .CallOne, + .tag = .call_one, .main_token = lparen, .data = .{ .lhs = res, @@ -2399,7 +2399,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ - .tag = if (comma_one == null) .CallOne else .CallOneComma, + .tag = if (comma_one == null) .call_one else .call_one_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2426,7 +2426,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ - .tag = .CallComma, + .tag = .call_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2443,7 +2443,7 @@ const Parser = struct { .r_paren => { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ - .tag = .Call, + .tag = .call, .main_token = lparen, .data = .{ .lhs = res, @@ -2518,7 +2518,7 @@ const Parser = struct { fn parsePrimaryTypeExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { .char_literal => return p.addNode(.{ - .tag = .CharLiteral, + .tag = .char_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2526,7 +2526,7 @@ const Parser = struct { }, }), .integer_literal => return p.addNode(.{ - .tag = .IntegerLiteral, + .tag = .integer_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2534,7 +2534,7 @@ const Parser = struct { }, }), .float_literal => return p.addNode(.{ - .tag = .FloatLiteral, + .tag = .float_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2542,7 +2542,7 @@ const Parser = struct { }, }), 
.keyword_false => return p.addNode(.{ - .tag = .FalseLiteral, + .tag = .false_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2550,7 +2550,7 @@ const Parser = struct { }, }), .keyword_true => return p.addNode(.{ - .tag = .TrueLiteral, + .tag = .true_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2558,7 +2558,7 @@ const Parser = struct { }, }), .keyword_null => return p.addNode(.{ - .tag = .NullLiteral, + .tag = .null_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2566,7 +2566,7 @@ const Parser = struct { }, }), .keyword_undefined => return p.addNode(.{ - .tag = .UndefinedLiteral, + .tag = .undefined_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2574,7 +2574,7 @@ const Parser = struct { }, }), .keyword_unreachable => return p.addNode(.{ - .tag = .UnreachableLiteral, + .tag = .unreachable_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2582,7 +2582,7 @@ const Parser = struct { }, }), .keyword_anyframe => return p.addNode(.{ - .tag = .AnyFrameLiteral, + .tag = .anyframe_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2592,7 +2592,7 @@ const Parser = struct { .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = main_token, .data = .{ .lhs = main_token, @@ -2620,7 +2620,7 @@ const Parser = struct { => return p.parseContainerDeclAuto(), .keyword_comptime => return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = p.nextToken(), .data = .{ .lhs = try p.expectTypeExpr(), @@ -2633,7 +2633,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = first_line, .data = .{ .lhs = first_line, @@ -2662,7 +2662,7 @@ const Parser = struct { return p.parseWhileTypeExpr(); }, else => return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2671,7 +2671,7 @@ const Parser = struct { }), }, else => return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2681,7 +2681,7 @@ const Parser = struct { }, .period => switch (p.token_tags[p.tok_i + 1]) { .identifier => return p.addNode(.{ - .tag = .EnumLiteral, + .tag = .enum_literal, .data = .{ .lhs = p.nextToken(), // dot .rhs = undefined, @@ -2697,7 +2697,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = .StructInitDotTwo, + .tag = .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = 0, @@ -2710,7 +2710,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .StructInitDotTwoComma else .StructInitDotTwo, + .tag = if (comma_one != null) .struct_init_dot_two_comma else .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2727,7 +2727,7 @@ const Parser = struct { const comma_two = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_two != null) .StructInitDotTwoComma else .StructInitDotTwo, + .tag = if (comma_two != null) .struct_init_dot_two_comma else .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2778,7 +2778,7 @@ const Parser = struct { const span = try p.listToSpan(init_list.items); const trailing_comma = p.token_tags[p.tok_i - 2] == .comma; return p.addNode(.{ - .tag = if 
(trailing_comma) .StructInitDotComma else .StructInitDot, + .tag = if (trailing_comma) .struct_init_dot_comma else .struct_init_dot, .main_token = lbrace, .data = .{ .lhs = span.start, @@ -2791,7 +2791,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, + .tag = if (comma_one != null) .array_init_dot_two_comma else .array_init_dot_two, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2808,7 +2808,7 @@ const Parser = struct { const comma_two = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_two != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, + .tag = if (comma_two != null) .array_init_dot_two_comma else .array_init_dot_two, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2858,7 +2858,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .comma) .ArrayInitDotComma else .ArrayInitDot, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .array_init_dot_comma else .array_init_dot, .main_token = lbrace, .data = .{ .lhs = span.start, @@ -2875,7 +2875,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |rbrace| { return p.addNode(.{ - .tag = .ErrorSetDecl, + .tag = .error_set_decl, .main_token = error_token, .data = .{ .lhs = undefined, @@ -2913,7 +2913,7 @@ const Parser = struct { } } return p.addNode(.{ - .tag = .ErrorSetDecl, + .tag = .error_set_decl, .main_token = error_token, .data = .{ .lhs = undefined, @@ -2922,7 +2922,7 @@ const Parser = struct { }); }, else => return p.addNode(.{ - .tag = .ErrorValue, + .tag = .error_value, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectToken(.period), @@ -2931,7 +2931,7 @@ const Parser = struct { }), }, .l_paren => return p.addNode(.{ - .tag = .GroupedExpression, + .tag = .grouped_expression, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectExpr(), @@ -2962,7 +2962,7 @@ const Parser = struct { const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2972,7 +2972,7 @@ const Parser = struct { }; const else_expr = try p.expectTypeExpr(); return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2998,7 +2998,7 @@ const Parser = struct { const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -3007,7 +3007,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -3022,7 +3022,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectTypeExpr(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -3047,7 +3047,7 @@ const Parser = struct { _ = try p.expectToken(.r_brace); return p.addNode(.{ - .tag = if (trailing_comma) .SwitchComma else .Switch, + .tag = if (trailing_comma) .switch_comma else .@"switch", .main_token = switch_token, .data = .{ .lhs = expr_node, @@ -3074,7 +3074,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |rparen| { return p.addNode(.{ - .tag = .AsmSimple, 
+ .tag = .asm_simple, .main_token = asm_token, .data = .{ .lhs = template, @@ -3140,7 +3140,7 @@ const Parser = struct { const rparen = try p.expectToken(.r_paren); const span = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .Asm, + .tag = .@"asm", .main_token = asm_token, .data = .{ .lhs = template, @@ -3170,7 +3170,7 @@ const Parser = struct { }; const rparen = try p.expectToken(.r_paren); return p.addNode(.{ - .tag = .AsmOutput, + .tag = .asm_output, .main_token = identifier, .data = .{ .lhs = type_expr, @@ -3189,7 +3189,7 @@ const Parser = struct { const expr = try p.expectExpr(); const rparen = try p.expectToken(.r_paren); return p.addNode(.{ - .tag = .AsmInput, + .tag = .asm_input, .main_token = identifier, .data = .{ .lhs = expr, @@ -3336,7 +3336,7 @@ const Parser = struct { const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCaseOne, + .tag = .switch_case_one, .main_token = arrow_token, .data = .{ .lhs = 0, @@ -3350,7 +3350,7 @@ const Parser = struct { if (p.eatToken(.equal_angle_bracket_right)) |arrow_token| { _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCaseOne, + .tag = .switch_case_one, .main_token = arrow_token, .data = .{ .lhs = first_item, @@ -3372,7 +3372,7 @@ const Parser = struct { const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCase, + .tag = .switch_case, .main_token = arrow_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ @@ -3391,7 +3391,7 @@ const Parser = struct { if (p.eatToken(.ellipsis3)) |token| { return p.addNode(.{ - .tag = .SwitchRange, + .tag = .switch_range, .main_token = token, .data = .{ .lhs = expr, @@ -3485,7 +3485,7 @@ const Parser = struct { if (end_expr == 0) { _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .SliceOpen, + .tag = .slice_open, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3497,7 +3497,7 @@ const Parser = struct { const sentinel = try p.parseExpr(); _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .SliceSentinel, + .tag = .slice_sentinel, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3511,7 +3511,7 @@ const Parser = struct { } else { _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .Slice, + .tag = .slice, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3525,7 +3525,7 @@ const Parser = struct { } _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .ArrayAccess, + .tag = .array_access, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3534,7 +3534,7 @@ const Parser = struct { }); }, .period_asterisk => return p.addNode(.{ - .tag = .Deref, + .tag = .deref, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3545,7 +3545,7 @@ const Parser = struct { const period_asterisk = p.nextToken(); try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } }); return p.addNode(.{ - .tag = .Deref, + .tag = .deref, .main_token = period_asterisk, .data = .{ .lhs = lhs, @@ -3555,7 +3555,7 @@ const Parser = struct { }, .period => switch (p.token_tags[p.tok_i + 1]) { .identifier => return p.addNode(.{ - .tag = .FieldAccess, + .tag = .field_access, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3563,7 +3563,7 @@ const Parser = struct { }, }), .question_mark => return p.addNode(.{ - .tag = .UnwrapOptional, + .tag = .unwrap_optional, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3613,8 +3613,8 @@ const Parser = struct { _ = try 
p.expectToken(.r_brace); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionEnumTagComma, - false => .TaggedUnionEnumTag, + true => .tagged_union_enum_tag_comma, + false => .tagged_union_enum_tag, }, .main_token = main_token, .data = .{ @@ -3631,8 +3631,8 @@ const Parser = struct { if (members.len <= 2) { return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionTwoComma, - false => .TaggedUnionTwo, + true => .tagged_union_two_comma, + false => .tagged_union_two, }, .main_token = main_token, .data = .{ @@ -3644,8 +3644,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionComma, - false => .TaggedUnion, + true => .tagged_union_comma, + false => .tagged_union, }, .main_token = main_token, .data = .{ @@ -3673,8 +3673,8 @@ const Parser = struct { if (members.len <= 2) { return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclTwoComma, - false => .ContainerDeclTwo, + true => .container_decl_two_comma, + false => .container_decl_two, }, .main_token = main_token, .data = .{ @@ -3686,8 +3686,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclComma, - false => .ContainerDecl, + true => .container_decl_comma, + false => .container_decl, }, .main_token = main_token, .data = .{ @@ -3700,8 +3700,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclArgComma, - false => .ContainerDeclArg, + true => .container_decl_arg_comma, + false => .container_decl_arg, }, .main_token = main_token, .data = .{ @@ -3860,7 +3860,7 @@ const Parser = struct { }); // Pretend this was an identifier so we can continue parsing. 
return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = builtin_token, .data = .{ .lhs = undefined, @@ -3870,7 +3870,7 @@ const Parser = struct { }; if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = 0, @@ -3883,7 +3883,7 @@ const Parser = struct { .comma => { if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwoComma, + .tag = .builtin_call_two_comma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3893,7 +3893,7 @@ const Parser = struct { } }, .r_paren => return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3914,7 +3914,7 @@ const Parser = struct { .comma => { if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwoComma, + .tag = .builtin_call_two_comma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3924,7 +3924,7 @@ const Parser = struct { } }, .r_paren => return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3954,7 +3954,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const params = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .BuiltinCallComma, + .tag = .builtin_call_comma, .main_token = builtin_token, .data = .{ .lhs = params.start, @@ -3967,7 +3967,7 @@ const Parser = struct { .r_paren => { const params = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .BuiltinCall, + .tag = .builtin_call, .main_token = builtin_token, .data = .{ .lhs = params.start, @@ -3993,7 +3993,7 @@ const Parser = struct { .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = main_token, .data = .{ .lhs = main_token, @@ -4007,7 +4007,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = first_line, .data = .{ .lhs = first_line, @@ -4029,7 +4029,7 @@ const Parser = struct { fn expectIntegerLiteral(p: *Parser) !Node.Index { return p.addNode(.{ - .tag = .IntegerLiteral, + .tag = .integer_literal, .main_token = try p.expectToken(.integer_literal), .data = .{ .lhs = undefined, @@ -4050,7 +4050,7 @@ const Parser = struct { if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{ - .tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -4062,7 +4062,7 @@ const Parser = struct { if (else_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); return p.addNode(.{ - .tag = .If, + .tag = .@"if", .main_token = if_token, .data = .{ .lhs = condition, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0f14856fdf..1510409ae1 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -96,7 +96,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E const datas = tree.nodes.items(.data); try renderDocComments(ais, tree, tree.firstToken(decl)); switch (tree.nodes.items(.tag)[decl]) { - .FnDecl => { + .fn_decl => { // Some examples: // pub extern "foo" fn ... // export fn ... 
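
The renamed tags in these hunks rely on Zig's @"..." identifier syntax: a field whose name collides with a keyword (if, comptime, asm, ...) is declared as @"if" and referenced as .@"if", while non-keyword names such as fn_decl need no quoting. The following is a minimal sketch of that mechanism only, using a made-up miniature Tag enum rather than the real ast.Node.Tag:

const std = @import("std");

// Made-up miniature tag enum; the real ast.Node.Tag has far more variants.
const Tag = enum {
    identifier,
    fn_decl,
    @"if", // keyword names must be written as @"..." to be usable as fields
    @"comptime",
};

fn describe(tag: Tag) []const u8 {
    // Quoted and unquoted fields are referenced and switched on the same way.
    return switch (tag) {
        .identifier => "identifier",
        .fn_decl => "function declaration",
        .@"if" => "if expression",
        .@"comptime" => "comptime block",
    };
}

test "keyword field names" {
    std.debug.assert(std.mem.eql(u8, describe(.@"if"), "if expression"));
    std.debug.assert(std.mem.eql(u8, describe(.fn_decl), "function declaration"));
}

Quoted and unquoted fields behave identically in switches and comparisons, which is why the hunks here can swap names without touching any of the surrounding parser or renderer logic.
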
@@ -132,16 +132,16 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E return renderToken(ais, tree, tree.lastToken(fn_proto) + 1, space); // semicolon } }, - .FnProtoSimple, - .FnProtoMulti, - .FnProtoOne, - .FnProto, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, => { try renderExpression(ais, tree, decl, .None); return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, - .UsingNamespace => { + .@"usingnamespace" => { const main_token = main_tokens[decl]; const expr = datas[decl].lhs; if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) { @@ -152,12 +152,12 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ; }, - .GlobalVarDecl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), - .LocalVarDecl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), - .SimpleVarDecl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), - .AlignedVarDecl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), + .global_var_decl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), + .local_var_decl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), + .simple_var_decl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), + .aligned_var_decl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), - .TestDecl => { + .test_decl => { const test_token = main_tokens[decl]; try renderToken(ais, tree, test_token, .Space); if (token_tags[test_token + 1] == .string_literal) { @@ -166,12 +166,12 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E try renderExpression(ais, tree, datas[decl].rhs, space); }, - .ContainerFieldInit => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), - .ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), - .ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space), - .Comptime => return renderExpression(ais, tree, decl, space), + .container_field_init => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), + .container_field_align => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), + .container_field => return renderContainerField(ais, tree, tree.containerField(decl), space), + .@"comptime" => return renderExpression(ais, tree, decl, space), - .Root => unreachable, + .root => unreachable, else => unreachable, } } @@ -182,29 +182,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); switch (node_tags[node]) { - .Identifier, - .IntegerLiteral, - .FloatLiteral, - .StringLiteral, - .CharLiteral, - .TrueLiteral, - .FalseLiteral, - .NullLiteral, - .UnreachableLiteral, - .UndefinedLiteral, - .AnyFrameLiteral, + .identifier, + .integer_literal, + .float_literal, + .string_literal, + .char_literal, + .true_literal, + .false_literal, + .null_literal, + .unreachable_literal, + .undefined_literal, + .anyframe_literal, => return renderToken(ais, tree, main_tokens[node], space), - .ErrorValue => { + .error_value => { try renderToken(ais, tree, main_tokens[node], .None); try renderToken(ais, tree, main_tokens[node] + 1, .None); return renderToken(ais, tree, main_tokens[node] + 2, space); }, - .AnyType => return renderToken(ais, tree, 
main_tokens[node], space), + .@"anytype" => return renderToken(ais, tree, main_tokens[node], space), - .BlockTwo, - .BlockTwoSemicolon, + .block_two, + .block_two_semicolon, => { const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { @@ -215,14 +215,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBlock(ais, tree, node, statements[0..2], space); } }, - .Block, - .BlockSemicolon, + .block, + .block_semicolon, => { const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBlock(ais, tree, node, statements, space); }, - .ErrDefer => { + .@"errdefer" => { const defer_token = main_tokens[node]; const payload_token = datas[node].lhs; const expr = datas[node].rhs; @@ -236,20 +236,20 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, expr, space); }, - .Defer => { + .@"defer" => { const defer_token = main_tokens[node]; const expr = datas[node].rhs; try renderToken(ais, tree, defer_token, .Space); return renderExpression(ais, tree, expr, space); }, - .Comptime, .Nosuspend => { + .@"comptime", .@"nosuspend" => { const comptime_token = main_tokens[node]; const block = datas[node].lhs; try renderToken(ais, tree, comptime_token, .Space); return renderExpression(ais, tree, block, space); }, - .Suspend => { + .@"suspend" => { const suspend_token = main_tokens[node]; const body = datas[node].lhs; if (body != 0) { @@ -260,7 +260,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Catch => { + .@"catch" => { const main_token = main_tokens[node]; const fallback_first = tree.firstToken(datas[node].rhs); @@ -283,15 +283,15 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderExpression(ais, tree, datas[node].rhs, space); // fallback }, - .FieldAccess => { + .field_access => { const field_access = datas[node]; try renderExpression(ais, tree, field_access.lhs, .None); try renderToken(ais, tree, main_tokens[node], .None); return renderToken(ais, tree, field_access.rhs, space); }, - .ErrorUnion, - .SwitchRange, + .error_union, + .switch_range, => { const infix = datas[node]; try renderExpression(ais, tree, infix.lhs, .None); @@ -299,45 +299,45 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, infix.rhs, space); }, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Sub, - .SubWrap, - .OrElse, + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + 
.mod, + .mul, + .mul_wrap, + .sub, + .sub_wrap, + .@"orelse", => { const infix = datas[node]; try renderExpression(ais, tree, infix.lhs, .Space); @@ -353,75 +353,75 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, infix.rhs, space); }, - .BitNot, - .BoolNot, - .Negation, - .NegationWrap, - .OptionalType, - .AddressOf, + .bit_not, + .bool_not, + .negation, + .negation_wrap, + .optional_type, + .address_of, => { try renderToken(ais, tree, main_tokens[node], .None); return renderExpression(ais, tree, datas[node].lhs, space); }, - .Try, - .Resume, - .Await, + .@"try", + .@"resume", + .@"await", => { try renderToken(ais, tree, main_tokens[node], .Space); return renderExpression(ais, tree, datas[node].lhs, space); }, - .ArrayType => return renderArrayType(ais, tree, tree.arrayType(node), space), - .ArrayTypeSentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), + .array_type => return renderArrayType(ais, tree, tree.arrayType(node), space), + .array_type_sentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), - .PtrTypeAligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), - .PtrTypeSentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), - .PtrType => return renderPtrType(ais, tree, tree.ptrType(node), space), - .PtrTypeBitRange => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), + .ptr_type_aligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), + .ptr_type_sentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), + .ptr_type => return renderPtrType(ais, tree, tree.ptrType(node), space), + .ptr_type_bit_range => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), - .ArrayInitOne, .ArrayInitOneComma => { + .array_init_one, .array_init_one_comma => { var elements: [1]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitOne(&elements, node), space); }, - .ArrayInitDotTwo, .ArrayInitDotTwoComma => { + .array_init_dot_two, .array_init_dot_two_comma => { var elements: [2]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitDotTwo(&elements, node), space); }, - .ArrayInitDot, - .ArrayInitDotComma, + .array_init_dot, + .array_init_dot_comma, => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), - .ArrayInit, - .ArrayInitComma, + .array_init, + .array_init_comma, => return renderArrayInit(ais, tree, tree.arrayInit(node), space), - .StructInitOne, .StructInitOneComma => { + .struct_init_one, .struct_init_one_comma => { var fields: [1]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitOne(&fields, node), space); }, - .StructInitDotTwo, .StructInitDotTwoComma => { + .struct_init_dot_two, .struct_init_dot_two_comma => { var fields: [2]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitDotTwo(&fields, node), space); }, - .StructInitDot, - .StructInitDotComma, + .struct_init_dot, + .struct_init_dot_comma, => return renderStructInit(ais, tree, tree.structInitDot(node), space), - .StructInit, - .StructInitComma, + .struct_init, + .struct_init_comma, => return renderStructInit(ais, tree, tree.structInit(node), space), - .CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => { + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { var params: [1]ast.Node.Index = undefined; return renderCall(ais, tree, tree.callOne(¶ms, node), 
space); }, - .Call, - .CallComma, - .AsyncCall, - .AsyncCallComma, + .call, + .call_comma, + .async_call, + .async_call_comma, => return renderCall(ais, tree, tree.callFull(node), space), - .ArrayAccess => { + .array_access => { const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; const rbracket = tree.lastToken(suffix.rhs) + 1; @@ -431,22 +431,22 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbracket, space); // ] }, - .SliceOpen => try renderSlice(ais, tree, tree.sliceOpen(node), space), - .Slice => try renderSlice(ais, tree, tree.slice(node), space), - .SliceSentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), + .slice_open => try renderSlice(ais, tree, tree.sliceOpen(node), space), + .slice => try renderSlice(ais, tree, tree.slice(node), space), + .slice_sentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), - .Deref => { + .deref => { try renderExpression(ais, tree, datas[node].lhs, .None); return renderToken(ais, tree, main_tokens[node], space); }, - .UnwrapOptional => { + .unwrap_optional => { try renderExpression(ais, tree, datas[node].lhs, .None); try renderToken(ais, tree, main_tokens[node], .None); return renderToken(ais, tree, datas[node].rhs, space); }, - .Break => { + .@"break" => { const main_token = main_tokens[node]; const label_token = datas[node].lhs; const target = datas[node].rhs; @@ -467,7 +467,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Continue => { + .@"continue" => { const main_token = main_tokens[node]; const label = datas[node].lhs; if (label != 0) { @@ -479,7 +479,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Return => { + .@"return" => { if (datas[node].lhs != 0) { try renderToken(ais, tree, main_tokens[node], .Space); try renderExpression(ais, tree, datas[node].lhs, space); @@ -488,7 +488,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .GroupedExpression => { + .grouped_expression => { ais.pushIndentNextLine(); try renderToken(ais, tree, main_tokens[node], .None); // lparen try renderExpression(ais, tree, datas[node].lhs, .None); @@ -496,32 +496,32 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, datas[node].rhs, space); // rparen }, - .ContainerDecl, - .ContainerDeclComma, + .container_decl, + .container_decl_comma, => return renderContainerDecl(ais, tree, tree.containerDecl(node), space), - .ContainerDeclTwo, .ContainerDeclTwoComma => { + .container_decl_two, .container_decl_two_comma => { var buffer: [2]ast.Node.Index = undefined; return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space); }, - .ContainerDeclArg, - .ContainerDeclArgComma, + .container_decl_arg, + .container_decl_arg_comma, => return renderContainerDecl(ais, tree, tree.containerDeclArg(node), space), - .TaggedUnion, - .TaggedUnionComma, + .tagged_union, + .tagged_union_comma, => return renderContainerDecl(ais, tree, tree.taggedUnion(node), space), - .TaggedUnionTwo, .TaggedUnionTwoComma => { + .tagged_union_two, .tagged_union_two_comma => { var buffer: [2]ast.Node.Index = undefined; return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space); }, - .TaggedUnionEnumTag, - .TaggedUnionEnumTagComma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, => return renderContainerDecl(ais, tree, 
tree.taggedUnionEnumTag(node), space), // TODO: handle comments properly - .ErrorSetDecl => { + .error_set_decl => { const error_token = main_tokens[node]; const lbrace = error_token + 1; const rbrace = datas[node].rhs; @@ -569,7 +569,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .BuiltinCallTwo, .BuiltinCallTwoComma => { + .builtin_call_two, .builtin_call_two_comma => { if (datas[node].lhs == 0) { const params = [_]ast.Node.Index{}; return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); @@ -581,23 +581,23 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); } }, - .BuiltinCall, .BuiltinCallComma => { + .builtin_call, .builtin_call_comma => { const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBuiltinCall(ais, tree, main_tokens[node], params, space); }, - .FnProtoSimple => { + .fn_proto_simple => { var params: [1]ast.Node.Index = undefined; return renderFnProto(ais, tree, tree.fnProtoSimple(¶ms, node), space); }, - .FnProtoMulti => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), - .FnProtoOne => { + .fn_proto_multi => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), + .fn_proto_one => { var params: [1]ast.Node.Index = undefined; return renderFnProto(ais, tree, tree.fnProtoOne(¶ms, node), space); }, - .FnProto => return renderFnProto(ais, tree, tree.fnProto(node), space), + .fn_proto => return renderFnProto(ais, tree, tree.fnProto(node), space), - .AnyFrameType => { + .anyframe_type => { const main_token = main_tokens[node]; if (datas[node].rhs != 0) { try renderToken(ais, tree, main_token, .None); // anyframe @@ -608,8 +608,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Switch, - .SwitchComma, + .@"switch", + .switch_comma, => { const switch_token = main_tokens[node]; const condition = datas[node].lhs; @@ -635,39 +635,39 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, tree.lastToken(node), space); // rbrace }, - .SwitchCaseOne => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), - .SwitchCase => return renderSwitchCase(ais, tree, tree.switchCase(node), space), + .switch_case_one => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), + .switch_case => return renderSwitchCase(ais, tree, tree.switchCase(node), space), - .WhileSimple => return renderWhile(ais, tree, tree.whileSimple(node), space), - .WhileCont => return renderWhile(ais, tree, tree.whileCont(node), space), - .While => return renderWhile(ais, tree, tree.whileFull(node), space), - .ForSimple => return renderWhile(ais, tree, tree.forSimple(node), space), - .For => return renderWhile(ais, tree, tree.forFull(node), space), + .while_simple => return renderWhile(ais, tree, tree.whileSimple(node), space), + .while_cont => return renderWhile(ais, tree, tree.whileCont(node), space), + .@"while" => return renderWhile(ais, tree, tree.whileFull(node), space), + .for_simple => return renderWhile(ais, tree, tree.forSimple(node), space), + .@"for" => return renderWhile(ais, tree, tree.forFull(node), space), - .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), - .If => return renderIf(ais, tree, tree.ifFull(node), space), + .if_simple => return renderIf(ais, tree, tree.ifSimple(node), space), + .@"if" => return renderIf(ais, tree, tree.ifFull(node), space), - 
.AsmSimple => return renderAsm(ais, tree, tree.asmSimple(node), space), - .Asm => return renderAsm(ais, tree, tree.asmFull(node), space), + .asm_simple => return renderAsm(ais, tree, tree.asmSimple(node), space), + .@"asm" => return renderAsm(ais, tree, tree.asmFull(node), space), - .EnumLiteral => { + .enum_literal => { try renderToken(ais, tree, main_tokens[node] - 1, .None); // . return renderToken(ais, tree, main_tokens[node], space); // name }, - .FnDecl => unreachable, - .ContainerField => unreachable, - .ContainerFieldInit => unreachable, - .ContainerFieldAlign => unreachable, - .Root => unreachable, - .GlobalVarDecl => unreachable, - .LocalVarDecl => unreachable, - .SimpleVarDecl => unreachable, - .AlignedVarDecl => unreachable, - .UsingNamespace => unreachable, - .TestDecl => unreachable, - .AsmOutput => unreachable, - .AsmInput => unreachable, + .fn_decl => unreachable, + .container_field => unreachable, + .container_field_init => unreachable, + .container_field_align => unreachable, + .root => unreachable, + .global_var_decl => unreachable, + .local_var_decl => unreachable, + .simple_var_decl => unreachable, + .aligned_var_decl => unreachable, + .@"usingnamespace" => unreachable, + .test_decl => unreachable, + .asm_output => unreachable, + .asm_input => unreachable, } } @@ -814,7 +814,7 @@ fn renderAsmOutput( const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); - assert(node_tags[asm_output] == .AsmOutput); + assert(node_tags[asm_output] == .asm_output); const symbolic_name = main_tokens[asm_output]; try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket @@ -842,7 +842,7 @@ fn renderAsmInput( const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); - assert(node_tags[asm_input] == .AsmInput); + assert(node_tags[asm_input] == .asm_input); const symbolic_name = main_tokens[asm_input]; try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket @@ -1516,10 +1516,10 @@ fn renderBlock( try renderToken(ais, tree, lbrace, .Newline); for (statements) |stmt, i| { switch (node_tags[stmt]) { - .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + .global_var_decl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), else => try renderExpression(ais, tree, stmt, .Semicolon), } } @@ -1867,7 +1867,7 @@ fn renderCall( try renderExpression(ais, tree, param_node, Space.None); // Unindent the comma for multiline string literals - const is_multiline_string = node_tags[param_node] == .StringLiteral and + const is_multiline_string = node_tags[param_node] == .string_literal and token_tags[main_tokens[param_node]] == .multiline_string_literal_line; if (is_multiline_string) ais.popIndent(); @@ -2031,19 +2031,19 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error fn nodeIsBlock(tag: ast.Node.Tag) bool { return switch (tag) { - .Block, - .BlockSemicolon, - .BlockTwo, - .BlockTwoSemicolon, - .If, - .IfSimple, - .For, - 
.ForSimple, - .While, - .WhileSimple, - .WhileCont, - .Switch, - .SwitchComma, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + .@"if", + .if_simple, + .@"for", + .for_simple, + .@"while", + .while_simple, + .while_cont, + .@"switch", + .switch_comma, => true, else => false, }; @@ -2051,47 +2051,47 @@ fn nodeIsBlock(tag: ast.Node.Tag) bool { fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { return switch (tag) { - .Catch, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Sub, - .SubWrap, - .OrElse, + .@"catch", + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .sub, + .sub_wrap, + .@"orelse", => true, else => false, |
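
Throughout these render.zig hunks the renamed tags are read back out of column slices such as tree.nodes.items(.tag), tree.nodes.items(.main_token), and tree.nodes.items(.data). The sketch below illustrates that struct-of-arrays access pattern with std.MultiArrayList and a hypothetical miniature Node; the field names and integer widths are placeholders, not the layout declared in ast.zig:

const std = @import("std");

// Hypothetical miniature node type; the real ast.Node carries a much larger
// Tag plus token/data fields, but the column-wise access looks the same.
const Node = struct {
    tag: Tag,
    main_token: u32,
    data: Data,

    const Tag = enum { root, identifier, @"return" };
    const Data = struct { lhs: u32, rhs: u32 };
};

test "column-wise access to node tags" {
    const gpa = std.testing.allocator;

    var nodes: std.MultiArrayList(Node) = .{};
    defer nodes.deinit(gpa);

    try nodes.append(gpa, .{ .tag = .root, .main_token = 0, .data = .{ .lhs = 0, .rhs = 0 } });
    try nodes.append(gpa, .{ .tag = .@"return", .main_token = 3, .data = .{ .lhs = 1, .rhs = 0 } });

    // items(.tag) yields just the tag column, analogous to the
    // tree.nodes.items(.tag) slices taken in renderMember/renderExpression above.
    const tags = nodes.slice().items(.tag);
    std.debug.assert(tags[0] == .root);
    std.debug.assert(tags[1] == .@"return");
}

Keeping tag, main_token, and data in separate columns lets each function scan one dense array at a time, which is presumably why renderMember and renderExpression pull the .tag, .main_token, and .data slices out once up front before dispatching on the node tag.
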
