//! Abstract Syntax Tree for Zig source code.

/// Reference to externally-owned data.
source: [:0]const u8,

tokens: TokenList.Slice,
/// The root AST node is assumed to be index 0. Since there can be no
/// references to the root node, this means 0 is available to indicate null.
nodes: NodeList.Slice,
extra_data: []Node.Index,

errors: []const Error,

const std = @import("../std.zig");
const assert = std.debug.assert;
const testing = std.testing;
const mem = std.mem;
const Token = std.zig.Token;
const Tree = @This();

pub const TokenIndex = u32;
pub const ByteOffset = u32;

/// Struct-of-arrays storage: one tag and one byte offset into `source` per token.
pub const TokenList = std.MultiArrayList(struct {
    tag: Token.Tag,
    start: ByteOffset,
});
pub const NodeList = std.MultiArrayList(Node);

/// Line/column of a token plus the byte range of the line containing it.
pub const Location = struct {
    line: usize,
    column: usize,
    line_start: usize,
    line_end: usize,
};

/// Frees the token list, node list, extra data, and error list.
/// `source` is externally owned (see field doc) and is not freed here.
pub fn deinit(tree: *Tree, gpa: mem.Allocator) void {
    tree.tokens.deinit(gpa);
    tree.nodes.deinit(gpa);
    gpa.free(tree.extra_data);
    gpa.free(tree.errors);
    // Poison the value so accidental use-after-deinit trips safety checks.
    tree.* = undefined;
}

pub const RenderError = error{
    /// Ran out of memory allocating call stack frames to complete rendering, or
    /// ran out of memory allocating space in the output buffer.
    OutOfMemory,
};

/// `gpa` is used for allocating the resulting formatted source code, as well as
/// for allocating extra stack memory if needed, because this function utilizes recursion.
/// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006.
/// Caller owns the returned slice of bytes, allocated with `gpa`.
pub fn render(tree: Tree, gpa: mem.Allocator) RenderError![]u8 {
    var buffer = std.ArrayList(u8).init(gpa);
    defer buffer.deinit();

    try tree.renderToArrayList(&buffer);
    // Ownership transfers to the caller; the deferred deinit then frees nothing.
    return buffer.toOwnedSlice();
}

/// Renders formatted source into an existing buffer instead of allocating a new slice.
pub fn renderToArrayList(tree: Tree, buffer: *std.ArrayList(u8)) RenderError!void {
    return @import("./render.zig").renderTree(buffer, tree);
}

/// Computes the line/column `Location` of `token_index` by scanning `source`
/// forward from `start_offset`. Pass a cached offset of a known earlier line
/// start to avoid rescanning from the beginning of the file.
pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location {
    var loc = Location{
        .line = 0,
        .column = 0,
        .line_start = start_offset,
        .line_end = self.source.len,
    };
    const token_start = self.tokens.items(.start)[token_index];
    for (self.source[start_offset..]) |c, i| {
        if (i + start_offset == token_start) {
            // Found the token; extend line_end to the terminating newline (or EOF).
            loc.line_end = i + start_offset;
            while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') {
                loc.line_end += 1;
            }
            return loc;
        }
        if (c == '\n') {
            loc.line += 1;
            loc.column = 0;
            // NOTE(review): `i` is relative to `start_offset`, but `line_start`
            // was initialized to the absolute `start_offset` — looks like this
            // should be `i + start_offset + 1`; confirm against callers.
            loc.line_start = i + 1;
        } else {
            loc.column += 1;
        }
    }
    return loc;
}

/// Returns the source bytes of `token_index`.
pub fn tokenSlice(tree: Tree, token_index: TokenIndex) []const u8 {
    const token_starts = tree.tokens.items(.start);
    const token_tags = tree.tokens.items(.tag);
    const token_tag = token_tags[token_index];

    // Many tokens can be determined entirely by their tag.
    if (token_tag.lexeme()) |lexeme| {
        return lexeme;
    }

    // For some tokens, re-tokenization is needed to find the end.
105 var tokenizer: std.zig.Tokenizer = .{ 106 .buffer = tree.source, 107 .index = token_starts[token_index], 108 .pending_invalid_token = null, 109 }; 110 const token = tokenizer.next(); 111 assert(token.tag == token_tag); 112 return tree.source[token.loc.start..token.loc.end]; 113} 114 115pub fn extraData(tree: Tree, index: usize, comptime T: type) T { 116 const fields = std.meta.fields(T); 117 var result: T = undefined; 118 inline for (fields) |field, i| { 119 comptime assert(field.field_type == Node.Index); 120 @field(result, field.name) = tree.extra_data[index + i]; 121 } 122 return result; 123} 124 125pub fn rootDecls(tree: Tree) []const Node.Index { 126 // Root is always index 0. 127 const nodes_data = tree.nodes.items(.data); 128 return tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; 129} 130 131pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void { 132 const token_tags = tree.tokens.items(.tag); 133 switch (parse_error.tag) { 134 .asterisk_after_ptr_deref => { 135 // Note that the token will point at the `.*` but ideally the source 136 // location would point to the `*` after the `.*`. 137 return stream.writeAll("'.*' cannot be followed by '*'. 
Are you missing a space?"); 138 }, 139 .chained_comparison_operators => { 140 return stream.writeAll("comparison operators cannot be chained"); 141 }, 142 .decl_between_fields => { 143 return stream.writeAll("declarations are not allowed between container fields"); 144 }, 145 .expected_block => { 146 return stream.print("expected block or field, found '{s}'", .{ 147 token_tags[parse_error.token].symbol(), 148 }); 149 }, 150 .expected_block_or_assignment => { 151 return stream.print("expected block or assignment, found '{s}'", .{ 152 token_tags[parse_error.token].symbol(), 153 }); 154 }, 155 .expected_block_or_expr => { 156 return stream.print("expected block or expression, found '{s}'", .{ 157 token_tags[parse_error.token].symbol(), 158 }); 159 }, 160 .expected_block_or_field => { 161 return stream.print("expected block or field, found '{s}'", .{ 162 token_tags[parse_error.token].symbol(), 163 }); 164 }, 165 .expected_container_members => { 166 return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{ 167 token_tags[parse_error.token].symbol(), 168 }); 169 }, 170 .expected_expr => { 171 return stream.print("expected expression, found '{s}'", .{ 172 token_tags[parse_error.token].symbol(), 173 }); 174 }, 175 .expected_expr_or_assignment => { 176 return stream.print("expected expression or assignment, found '{s}'", .{ 177 token_tags[parse_error.token].symbol(), 178 }); 179 }, 180 .expected_fn => { 181 return stream.print("expected function, found '{s}'", .{ 182 token_tags[parse_error.token].symbol(), 183 }); 184 }, 185 .expected_inlinable => { 186 return stream.print("expected 'while' or 'for', found '{s}'", .{ 187 token_tags[parse_error.token].symbol(), 188 }); 189 }, 190 .expected_labelable => { 191 return stream.print("expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'", .{ 192 token_tags[parse_error.token].symbol(), 193 }); 194 }, 195 .expected_param_list => { 196 return stream.print("expected parameter list, 
found '{s}'", .{ 197 token_tags[parse_error.token].symbol(), 198 }); 199 }, 200 .expected_prefix_expr => { 201 return stream.print("expected prefix expression, found '{s}'", .{ 202 token_tags[parse_error.token].symbol(), 203 }); 204 }, 205 .expected_primary_type_expr => { 206 return stream.print("expected primary type expression, found '{s}'", .{ 207 token_tags[parse_error.token].symbol(), 208 }); 209 }, 210 .expected_pub_item => { 211 return stream.writeAll("expected function or variable declaration after pub"); 212 }, 213 .expected_return_type => { 214 return stream.print("expected return type expression, found '{s}'", .{ 215 token_tags[parse_error.token].symbol(), 216 }); 217 }, 218 .expected_semi_or_else => { 219 return stream.print("expected ';' or 'else', found '{s}'", .{ 220 token_tags[parse_error.token].symbol(), 221 }); 222 }, 223 .expected_semi_or_lbrace => { 224 return stream.print("expected ';' or '{{', found '{s}'", .{ 225 token_tags[parse_error.token].symbol(), 226 }); 227 }, 228 .expected_statement => { 229 return stream.print("expected statement, found '{s}'", .{ 230 token_tags[parse_error.token].symbol(), 231 }); 232 }, 233 .expected_string_literal => { 234 return stream.print("expected string literal, found '{s}'", .{ 235 token_tags[parse_error.token].symbol(), 236 }); 237 }, 238 .expected_suffix_op => { 239 return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{ 240 token_tags[parse_error.token].symbol(), 241 }); 242 }, 243 .expected_type_expr => { 244 return stream.print("expected type expression, found '{s}'", .{ 245 token_tags[parse_error.token].symbol(), 246 }); 247 }, 248 .expected_var_decl => { 249 return stream.print("expected variable declaration, found '{s}'", .{ 250 token_tags[parse_error.token].symbol(), 251 }); 252 }, 253 .expected_var_decl_or_fn => { 254 return stream.print("expected variable declaration or function, found '{s}'", .{ 255 token_tags[parse_error.token].symbol(), 256 }); 257 
        },
        .expected_loop_payload => {
            return stream.print("expected loop payload, found '{s}'", .{
                token_tags[parse_error.token].symbol(),
            });
        },
        .expected_container => {
            return stream.print("expected a struct, enum or union, found '{s}'", .{
                token_tags[parse_error.token].symbol(),
            });
        },
        .extern_fn_body => {
            return stream.writeAll("extern functions have no body");
        },
        .extra_addrspace_qualifier => {
            return stream.writeAll("extra addrspace qualifier");
        },
        .extra_align_qualifier => {
            return stream.writeAll("extra align qualifier");
        },
        .extra_allowzero_qualifier => {
            return stream.writeAll("extra allowzero qualifier");
        },
        .extra_const_qualifier => {
            return stream.writeAll("extra const qualifier");
        },
        .extra_volatile_qualifier => {
            return stream.writeAll("extra volatile qualifier");
        },
        .ptr_mod_on_array_child_type => {
            return stream.print("pointer modifier '{s}' not allowed on array child type", .{
                token_tags[parse_error.token].symbol(),
            });
        },
        .invalid_bit_range => {
            return stream.writeAll("bit range not allowed on slices and arrays");
        },
        .invalid_token => {
            return stream.print("invalid token: '{s}'", .{
                token_tags[parse_error.token].symbol(),
            });
        },
        .same_line_doc_comment => {
            return stream.writeAll("same line documentation comment");
        },
        .unattached_doc_comment => {
            return stream.writeAll("unattached documentation comment");
        },
        .varargs_nonfinal => {
            return stream.writeAll("function prototype has parameter after varargs");
        },

        // Carries the expected tag out-of-band in `parse_error.extra`.
        .expected_token => {
            const found_tag = token_tags[parse_error.token];
            const expected_symbol = parse_error.extra.expected_tag.symbol();
            switch (found_tag) {
                .invalid => return stream.print("expected '{s}', found invalid bytes", .{
                    expected_symbol,
                }),
                else => return stream.print("expected '{s}', found '{s}'", .{
                    expected_symbol, found_tag.symbol(),
                }),
            }
        },
    }
}

/// Returns the index of the first token belonging to `node`, including any
/// leading modifiers/labels (e.g. `pub`, `extern`, `async`, a block label)
/// that are not themselves the node's main token. Iterative: descends into
/// the leftmost child while accumulating `end_offset` tokens to back up by.
pub fn firstToken(tree: Tree, node: Node.Index) TokenIndex {
    const tags = tree.nodes.items(.tag);
    const datas = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);
    const token_tags = tree.tokens.items(.tag);
    var end_offset: TokenIndex = 0;
    var n = node;
    while (true) switch (tags[n]) {
        .root => return 0,

        // For these, the main token is already the first token of the node.
        .test_decl,
        .@"errdefer",
        .@"defer",
        .bool_not,
        .negation,
        .bit_not,
        .negation_wrap,
        .address_of,
        .@"try",
        .@"await",
        .optional_type,
        .@"switch",
        .switch_comma,
        .if_simple,
        .@"if",
        .@"suspend",
        .@"resume",
        .@"continue",
        .@"break",
        .@"return",
        .anyframe_type,
        .identifier,
        .anyframe_literal,
        .char_literal,
        .integer_literal,
        .float_literal,
        .unreachable_literal,
        .string_literal,
        .multiline_string_literal,
        .grouped_expression,
        .builtin_call_two,
        .builtin_call_two_comma,
        .builtin_call,
        .builtin_call_comma,
        .error_set_decl,
        .@"anytype",
        .@"comptime",
        .@"nosuspend",
        .asm_simple,
        .@"asm",
        .array_type,
        .array_type_sentinel,
        .error_value,
        => return main_tokens[n] - end_offset,

        // These start one token before the main token (the leading `.`).
        .array_init_dot,
        .array_init_dot_comma,
        .array_init_dot_two,
        .array_init_dot_two_comma,
        .struct_init_dot,
        .struct_init_dot_comma,
        .struct_init_dot_two,
        .struct_init_dot_two_comma,
        .enum_literal,
        => return main_tokens[n] - 1 - end_offset,

        // Infix/postfix forms: the first token lives in the left operand.
        .@"catch",
        .field_access,
        .unwrap_optional,
        .equal_equal,
        .bang_equal,
        .less_than,
        .greater_than,
        .less_or_equal,
        .greater_or_equal,
        .assign_mul,
        .assign_div,
        .assign_mod,
        .assign_add,
        .assign_sub,
        .assign_shl,
        .assign_shl_sat,
        .assign_shr,
        .assign_bit_and,
        .assign_bit_xor,
        .assign_bit_or,
        .assign_mul_wrap,
        .assign_add_wrap,
        .assign_sub_wrap,
        .assign_mul_sat,
        .assign_add_sat,
        .assign_sub_sat,
        .assign,
        .merge_error_sets,
        .mul,
        .div,
        .mod,
        .array_mult,
        .mul_wrap,
        .mul_sat,
        .add,
        .sub,
        .array_cat,
        .add_wrap,
        .sub_wrap,
        .add_sat,
        .sub_sat,
        .shl,
        .shl_sat,
        .shr,
        .bit_and,
        .bit_xor,
        .bit_or,
        .@"orelse",
        .bool_and,
        .bool_or,
        .slice_open,
        .slice,
        .slice_sentinel,
        .deref,
        .array_access,
        .array_init_one,
        .array_init_one_comma,
        .array_init,
        .array_init_comma,
        .struct_init_one,
        .struct_init_one_comma,
        .struct_init,
        .struct_init_comma,
        .call_one,
        .call_one_comma,
        .call,
        .call_comma,
        .switch_range,
        .error_union,
        => n = datas[n].lhs,

        .fn_decl,
        .fn_proto_simple,
        .fn_proto_multi,
        .fn_proto_one,
        .fn_proto,
        => {
            // Scan backwards over the modifiers that may precede `fn`
            // (string_literal covers `extern "libname"`).
            var i = main_tokens[n]; // fn token
            while (i > 0) {
                i -= 1;
                switch (token_tags[i]) {
                    .keyword_extern,
                    .keyword_export,
                    .keyword_pub,
                    .keyword_inline,
                    .keyword_noinline,
                    .string_literal,
                    => continue,

                    else => return i + 1 - end_offset,
                }
            }
            // Reached token 0 while still inside modifiers.
            return i - end_offset;
        },

        .@"usingnamespace" => {
            const main_token = main_tokens[n];
            if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
                end_offset += 1;
            }
            return main_token - end_offset;
        },

        .async_call_one,
        .async_call_one_comma,
        .async_call,
        .async_call_comma,
        => {
            end_offset += 1; // async token
            n = datas[n].lhs;
        },

        .container_field_init,
        .container_field_align,
        .container_field,
        => {
            const name_token = main_tokens[n];
            if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
                end_offset += 1;
            }
            return name_token - end_offset;
        },

        .global_var_decl,
        .local_var_decl,
        .simple_var_decl,
        .aligned_var_decl,
        => {
            var i =
                main_tokens[n]; // mut token
            // Scan backwards over modifiers preceding `const`/`var`.
            while (i > 0) {
                i -= 1;
                switch (token_tags[i]) {
                    .keyword_extern,
                    .keyword_export,
                    .keyword_comptime,
                    .keyword_pub,
                    .keyword_threadlocal,
                    .string_literal,
                    => continue,

                    else => return i + 1 - end_offset,
                }
            }
            return i - end_offset;
        },

        .block,
        .block_semicolon,
        .block_two,
        .block_two_semicolon,
        => {
            // Look for a label.
            const lbrace = main_tokens[n];
            if (token_tags[lbrace - 1] == .colon and
                token_tags[lbrace - 2] == .identifier)
            {
                end_offset += 2;
            }
            return lbrace - end_offset;
        },

        .container_decl,
        .container_decl_trailing,
        .container_decl_two,
        .container_decl_two_trailing,
        .container_decl_arg,
        .container_decl_arg_trailing,
        .tagged_union,
        .tagged_union_trailing,
        .tagged_union_two,
        .tagged_union_two_trailing,
        .tagged_union_enum_tag,
        .tagged_union_enum_tag_trailing,
        => {
            const main_token = main_tokens[n];
            // Account for a leading `packed` or `extern` qualifier.
            switch (token_tags[main_token - 1]) {
                .keyword_packed, .keyword_extern => end_offset += 1,
                else => {},
            }
            return main_token - end_offset;
        },

        .ptr_type_aligned,
        .ptr_type_sentinel,
        .ptr_type,
        .ptr_type_bit_range,
        => {
            const main_token = main_tokens[n];
            return switch (token_tags[main_token]) {
                .asterisk,
                .asterisk_asterisk,
                // `[*]T` / `[*:s]T`: include the leading lbracket.
                => switch (token_tags[main_token - 1]) {
                    .l_bracket => main_token - 1,
                    else => main_token,
                },
                .l_bracket => main_token,
                else => unreachable,
            } - end_offset;
        },

        .switch_case_one => {
            if (datas[n].lhs == 0) {
                return main_tokens[n] - 1 - end_offset; // else token
            } else {
                n = datas[n].lhs;
            }
        },
        .switch_case => {
            const extra = tree.extraData(datas[n].lhs, Node.SubRange);
            assert(extra.end - extra.start > 0);
            n = tree.extra_data[extra.start];
        },

        .asm_output, .asm_input => {
            assert(token_tags[main_tokens[n] - 1] == .l_bracket);
            return main_tokens[n] - 1 - end_offset;
        },

        .while_simple,
        .while_cont,
        .@"while",
        .for_simple,
        .@"for",
        => {
            // Look for a label and inline.
            const main_token = main_tokens[n];
            var result = main_token;
            if (token_tags[result - 1] == .keyword_inline) {
                result -= 1;
            }
            if (token_tags[result - 1] == .colon) {
                result -= 2;
            }
            return result - end_offset;
        },
    };
}

/// Returns the index of the last token belonging to `node`, including any
/// trailing closing tokens (rparen/rbrace/comma) counted via `end_offset`.
/// Iterative mirror of `firstToken`: descends into the rightmost child.
pub fn lastToken(tree: Tree, node: Node.Index) TokenIndex {
    const tags = tree.nodes.items(.tag);
    const datas = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);
    const token_starts = tree.tokens.items(.start);
    const token_tags = tree.tokens.items(.tag);
    var n = node;
    var end_offset: TokenIndex = 0;
    while (true) switch (tags[n]) {
        .root => return @intCast(TokenIndex, tree.tokens.len - 1),

        // Prefix forms: the last token lives in the operand (lhs).
        .@"usingnamespace",
        .bool_not,
        .negation,
        .bit_not,
        .negation_wrap,
        .address_of,
        .@"try",
        .@"await",
        .optional_type,
        .@"resume",
        .@"nosuspend",
        .@"comptime",
        => n = datas[n].lhs,

        // Infix forms: the last token lives in the right operand (rhs).
        .test_decl,
        .@"errdefer",
        .@"defer",
        .@"catch",
        .equal_equal,
        .bang_equal,
        .less_than,
        .greater_than,
        .less_or_equal,
        .greater_or_equal,
        .assign_mul,
        .assign_div,
        .assign_mod,
        .assign_add,
        .assign_sub,
        .assign_shl,
        .assign_shl_sat,
        .assign_shr,
        .assign_bit_and,
        .assign_bit_xor,
        .assign_bit_or,
        .assign_mul_wrap,
        .assign_add_wrap,
        .assign_sub_wrap,
        .assign_mul_sat,
        .assign_add_sat,
        .assign_sub_sat,
        .assign,
        .merge_error_sets,
        .mul,
        .div,
        .mod,
        .array_mult,
        .mul_wrap,
        .mul_sat,
        .add,
        .sub,
        .array_cat,
        .add_wrap,
        .sub_wrap,
        .add_sat,
        .sub_sat,
        .shl,
        .shl_sat,
        .shr,
        .bit_and,
        .bit_xor,
        .bit_or,
        .@"orelse",
        .bool_and,
        .bool_or,
        .anyframe_type,
        .error_union,
        .if_simple,
        .while_simple,
        .for_simple,
        .fn_proto_simple,
        .fn_proto_multi,
        .ptr_type_aligned,
        .ptr_type_sentinel,
        .ptr_type,
        .ptr_type_bit_range,
        .array_type,
        .switch_case_one,
        .switch_case,
        .switch_range,
        => n = datas[n].rhs,

        // For these, rhs is itself a token index (not a node index).
        .field_access,
        .unwrap_optional,
        .grouped_expression,
        .multiline_string_literal,
        .error_set_decl,
        .asm_simple,
        .asm_output,
        .asm_input,
        .error_value,
        => return datas[n].rhs + end_offset,

        // Single-token nodes.
        .@"anytype",
        .anyframe_literal,
        .char_literal,
        .integer_literal,
        .float_literal,
        .unreachable_literal,
        .identifier,
        .deref,
        .enum_literal,
        .string_literal,
        => return main_tokens[n] + end_offset,

        .@"return" => if (datas[n].lhs != 0) {
            n = datas[n].lhs;
        } else {
            return main_tokens[n] + end_offset;
        },

        .call, .async_call => {
            end_offset += 1; // for the rparen
            const params = tree.extraData(datas[n].rhs, Node.SubRange);
            if (params.end - params.start == 0) {
                return main_tokens[n] + end_offset;
            }
            n = tree.extra_data[params.end - 1]; // last parameter
        },
        .tagged_union_enum_tag => {
            const members = tree.extraData(datas[n].rhs, Node.SubRange);
            if (members.end - members.start == 0) {
                end_offset += 4; // for the rparen + rparen + lbrace + rbrace
                n = datas[n].lhs;
            } else {
                end_offset += 1; // for the rbrace
                n = tree.extra_data[members.end - 1]; // last parameter
            }
        },
        .call_comma,
        .async_call_comma,
        .tagged_union_enum_tag_trailing,
        => {
            end_offset += 2; // for the comma/semicolon + rparen/rbrace
            const params = tree.extraData(datas[n].rhs, Node.SubRange);
            assert(params.end > params.start);
            n = tree.extra_data[params.end - 1]; // last parameter
        },
        .@"switch" => {
            const cases = tree.extraData(datas[n].rhs, Node.SubRange);
            if (cases.end - cases.start == 0) {
                end_offset += 3; // rparen, lbrace, rbrace
                n = datas[n].lhs; // condition expression
            } else {
                end_offset += 1; // for the rbrace
                n = tree.extra_data[cases.end - 1]; // last case
            }
        },
        .container_decl_arg => {
            const members = tree.extraData(datas[n].rhs, Node.SubRange);
            if (members.end - members.start == 0) {
                end_offset += 3; // for the rparen + lbrace + rbrace
                n = datas[n].lhs;
            } else {
                end_offset += 1; // for the rbrace
                n = tree.extra_data[members.end - 1]; // last parameter
            }
        },
        .@"asm" => {
            // The rparen token index is stored directly in the extra data.
            const extra = tree.extraData(datas[n].rhs, Node.Asm);
            return extra.rparen + end_offset;
        },
        .array_init,
        .struct_init,
        => {
            const elements = tree.extraData(datas[n].rhs, Node.SubRange);
            assert(elements.end - elements.start > 0);
            end_offset += 1; // for the rbrace
            n = tree.extra_data[elements.end - 1]; // last element
        },
        .array_init_comma,
        .struct_init_comma,
        .container_decl_arg_trailing,
        .switch_comma,
        => {
            const members = tree.extraData(datas[n].rhs, Node.SubRange);
            assert(members.end - members.start > 0);
            end_offset += 2; // for the comma + rbrace
            n = tree.extra_data[members.end - 1]; // last parameter
        },
        .array_init_dot,
        .struct_init_dot,
        .block,
        .container_decl,
        .tagged_union,
        .builtin_call,
        => {
            // lhs..rhs is the sub-list range directly in extra_data.
            assert(datas[n].rhs - datas[n].lhs > 0);
            end_offset += 1; // for the rbrace
            n = tree.extra_data[datas[n].rhs - 1]; // last statement
        },
        .array_init_dot_comma,
        .struct_init_dot_comma,
        .block_semicolon,
        .container_decl_trailing,
        .tagged_union_trailing,
        .builtin_call_comma,
        => {
            assert(datas[n].rhs - datas[n].lhs > 0);
            end_offset += 2; // for the comma/semicolon + rbrace/rparen
            n = tree.extra_data[datas[n].rhs - 1]; // last member
        },
        .call_one,
        .async_call_one,
        .array_access,
        => {
            end_offset += 1; // for the rparen/rbracket
            if (datas[n].rhs == 0) {
                // Zero-argument call: nothing to descend into.
                return main_tokens[n] + end_offset;
            }
            n = datas[n].rhs;
        },
        .array_init_dot_two,
        .block_two,
        .builtin_call_two,
        .struct_init_dot_two,
        .container_decl_two,
        .tagged_union_two,
        => {
            if (datas[n].rhs != 0) {
                end_offset += 1; // for the rparen/rbrace
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                end_offset += 1; // for the rparen/rbrace
                n = datas[n].lhs;
            } else {
                // Both elements absent: the span is determined entirely by
                // the surrounding delimiter tokens.
                switch (tags[n]) {
                    .array_init_dot_two,
                    .block_two,
                    .struct_init_dot_two,
                    => end_offset += 1, // rbrace
                    .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
                    .container_decl_two => {
                        var i: u32 = 2; // lbrace + rbrace
                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
                        end_offset += i;
                    },
                    .tagged_union_two => {
                        var i: u32 = 5; // (enum) {}
                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
                        end_offset += i;
                    },
                    else => unreachable,
                }
                return main_tokens[n] + end_offset;
            }
        },
        .array_init_dot_two_comma,
        .builtin_call_two_comma,
        .block_two_semicolon,
        .struct_init_dot_two_comma,
        .container_decl_two_trailing,
        .tagged_union_two_trailing,
        => {
            end_offset += 2; // for the comma/semicolon + rbrace/rparen
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                // A trailing comma implies at least one element.
                unreachable;
            }
        },
        .simple_var_decl => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                end_offset += 1; // from mut token to name
                return main_tokens[n] + end_offset;
            }
        },
        .aligned_var_decl => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                end_offset += 1; // for the rparen
                n = datas[n].lhs;
            } else {
                end_offset += 1; // from mut token to name
                return main_tokens[n] + end_offset;
            }
        },
        .global_var_decl => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else {
                // No initializer: the last token is in the rightmost of the
                // optional clauses, checked in source order (section is last).
                const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
                if (extra.section_node != 0) {
                    end_offset += 1; // for the rparen
                    n = extra.section_node;
                } else if (extra.align_node != 0) {
                    end_offset += 1; // for the rparen
                    n = extra.align_node;
                } else if (extra.type_node != 0) {
                    n = extra.type_node;
                } else {
                    end_offset += 1; // from mut token to name
                    return main_tokens[n] + end_offset;
                }
            }
        },
        .local_var_decl => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else {
                const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
                if (extra.align_node != 0) {
                    end_offset += 1; // for the rparen
                    n = extra.align_node;
                } else if (extra.type_node != 0) {
                    n = extra.type_node;
                } else {
                    end_offset += 1; // from mut token to name
                    return main_tokens[n] + end_offset;
                }
            }
        },
        .container_field_init => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                return main_tokens[n] + end_offset;
            }
        },
        .container_field_align => {
            if (datas[n].rhs != 0) {
                end_offset += 1; // for the rparen
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                return main_tokens[n] + end_offset;
            }
        },
        .container_field => {
            const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
            if (extra.value_expr != 0) {
                n = extra.value_expr;
            } else if (extra.align_expr != 0) {
                end_offset += 1; // for the rparen
                n = extra.align_expr;
            } else if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                return main_tokens[n] + end_offset;
            }
        },

        .array_init_one,
        .struct_init_one,
        => {
            end_offset += 1; // rbrace
            if
                (datas[n].rhs == 0) {
                return main_tokens[n] + end_offset;
            } else {
                n = datas[n].rhs;
            }
        },
        .slice_open,
        .call_one_comma,
        .async_call_one_comma,
        .array_init_one_comma,
        .struct_init_one_comma,
        => {
            end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
            n = datas[n].rhs;
            assert(n != 0);
        },
        .slice => {
            const extra = tree.extraData(datas[n].rhs, Node.Slice);
            assert(extra.end != 0); // should have used slice_open
            end_offset += 1; // rbracket
            n = extra.end;
        },
        .slice_sentinel => {
            const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
            assert(extra.sentinel != 0); // should have used slice
            end_offset += 1; // rbracket
            n = extra.sentinel;
        },

        .@"continue" => {
            // lhs, when present, is the label token index.
            if (datas[n].lhs != 0) {
                return datas[n].lhs + end_offset;
            } else {
                return main_tokens[n] + end_offset;
            }
        },
        .@"break" => {
            // rhs is an optional operand node; lhs an optional label token.
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else if (datas[n].lhs != 0) {
                return datas[n].lhs + end_offset;
            } else {
                return main_tokens[n] + end_offset;
            }
        },
        .fn_decl => {
            if (datas[n].rhs != 0) {
                n = datas[n].rhs;
            } else {
                n = datas[n].lhs;
            }
        },
        .fn_proto_one => {
            const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne);
            // addrspace, linksection, callconv, align can appear in any order, so we
            // find the last one here.
            // Start from the return type and keep whichever clause begins
            // latest in the source (compared by token byte offset).
            var max_node: Node.Index = datas[n].rhs;
            var max_start = token_starts[main_tokens[max_node]];
            var max_offset: TokenIndex = 0;
            if (extra.align_expr != 0) {
                const start = token_starts[main_tokens[extra.align_expr]];
                if (start > max_start) {
                    max_node = extra.align_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.addrspace_expr != 0) {
                const start = token_starts[main_tokens[extra.addrspace_expr]];
                if (start > max_start) {
                    max_node = extra.addrspace_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.section_expr != 0) {
                const start = token_starts[main_tokens[extra.section_expr]];
                if (start > max_start) {
                    max_node = extra.section_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.callconv_expr != 0) {
                const start = token_starts[main_tokens[extra.callconv_expr]];
                if (start > max_start) {
                    max_node = extra.callconv_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            n = max_node;
            end_offset += max_offset;
        },
        .fn_proto => {
            const extra = tree.extraData(datas[n].lhs, Node.FnProto);
            // addrspace, linksection, callconv, align can appear in any order, so we
            // find the last one here.
            // Same latest-clause scan as fn_proto_one above.
            var max_node: Node.Index = datas[n].rhs;
            var max_start = token_starts[main_tokens[max_node]];
            var max_offset: TokenIndex = 0;
            if (extra.align_expr != 0) {
                const start = token_starts[main_tokens[extra.align_expr]];
                if (start > max_start) {
                    max_node = extra.align_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.addrspace_expr != 0) {
                const start = token_starts[main_tokens[extra.addrspace_expr]];
                if (start > max_start) {
                    max_node = extra.addrspace_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.section_expr != 0) {
                const start = token_starts[main_tokens[extra.section_expr]];
                if (start > max_start) {
                    max_node = extra.section_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            if (extra.callconv_expr != 0) {
                const start = token_starts[main_tokens[extra.callconv_expr]];
                if (start > max_start) {
                    max_node = extra.callconv_expr;
                    max_start = start;
                    max_offset = 1; // for the rparen
                }
            }
            n = max_node;
            end_offset += max_offset;
        },
        .while_cont => {
            const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
            assert(extra.then_expr != 0);
            n = extra.then_expr;
        },
        .@"while" => {
            const extra = tree.extraData(datas[n].rhs, Node.While);
            assert(extra.else_expr != 0);
            n = extra.else_expr;
        },
        .@"if", .@"for" => {
            const extra = tree.extraData(datas[n].rhs, Node.If);
            assert(extra.else_expr != 0);
            n = extra.else_expr;
        },
        .@"suspend" => {
            if (datas[n].lhs != 0) {
                n = datas[n].lhs;
            } else {
                return main_tokens[n] + end_offset;
            }
        },
        .array_type_sentinel => {
            const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
            n = extra.elem_type;
        },
    };
}

/// Returns true when no newline appears in the source between the starts of
/// `token1` and `token2`.
pub fn tokensOnSameLine(tree: Tree, token1:
    TokenIndex, token2: TokenIndex) bool {
    const token_starts = tree.tokens.items(.start);
    const source = tree.source[token_starts[token1]..token_starts[token2]];
    return mem.indexOfScalar(u8, source, '\n') == null;
}

/// Returns the slice of `source` covered by `node`: from the start of its
/// first token through the end of its last token.
pub fn getNodeSource(tree: Tree, node: Node.Index) []const u8 {
    const token_starts = tree.tokens.items(.start);
    const first_token = tree.firstToken(node);
    const last_token = tree.lastToken(node);
    const start = token_starts[first_token];
    const end = token_starts[last_token] + tree.tokenSlice(last_token).len;
    return tree.source[start..end];
}

/// Expands a `.global_var_decl` node into the uniform `full.VarDecl` view.
pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
    assert(tree.nodes.items(.tag)[node] == .global_var_decl);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.GlobalVarDecl);
    return tree.fullVarDecl(.{
        .type_node = extra.type_node,
        .align_node = extra.align_node,
        .addrspace_node = extra.addrspace_node,
        .section_node = extra.section_node,
        .init_node = data.rhs,
        .mut_token = tree.nodes.items(.main_token)[node],
    });
}

/// Expands a `.local_var_decl` node (type + align only) into `full.VarDecl`.
pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
    assert(tree.nodes.items(.tag)[node] == .local_var_decl);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.LocalVarDecl);
    return tree.fullVarDecl(.{
        .type_node = extra.type_node,
        .align_node = extra.align_node,
        .addrspace_node = 0,
        .section_node = 0,
        .init_node = data.rhs,
        .mut_token = tree.nodes.items(.main_token)[node],
    });
}

/// Expands a `.simple_var_decl` node (optional type, no clauses) into `full.VarDecl`.
pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
    assert(tree.nodes.items(.tag)[node] == .simple_var_decl);
    const data = tree.nodes.items(.data)[node];
    return tree.fullVarDecl(.{
        .type_node = data.lhs,
        .align_node = 0,
        .addrspace_node = 0,
        .section_node = 0,
        .init_node =
data.rhs, 1212 .mut_token = tree.nodes.items(.main_token)[node], 1213 }); 1214} 1215 1216pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl { 1217 assert(tree.nodes.items(.tag)[node] == .aligned_var_decl); 1218 const data = tree.nodes.items(.data)[node]; 1219 return tree.fullVarDecl(.{ 1220 .type_node = 0, 1221 .align_node = data.lhs, 1222 .addrspace_node = 0, 1223 .section_node = 0, 1224 .init_node = data.rhs, 1225 .mut_token = tree.nodes.items(.main_token)[node], 1226 }); 1227} 1228 1229pub fn ifSimple(tree: Tree, node: Node.Index) full.If { 1230 assert(tree.nodes.items(.tag)[node] == .if_simple); 1231 const data = tree.nodes.items(.data)[node]; 1232 return tree.fullIf(.{ 1233 .cond_expr = data.lhs, 1234 .then_expr = data.rhs, 1235 .else_expr = 0, 1236 .if_token = tree.nodes.items(.main_token)[node], 1237 }); 1238} 1239 1240pub fn ifFull(tree: Tree, node: Node.Index) full.If { 1241 assert(tree.nodes.items(.tag)[node] == .@"if"); 1242 const data = tree.nodes.items(.data)[node]; 1243 const extra = tree.extraData(data.rhs, Node.If); 1244 return tree.fullIf(.{ 1245 .cond_expr = data.lhs, 1246 .then_expr = extra.then_expr, 1247 .else_expr = extra.else_expr, 1248 .if_token = tree.nodes.items(.main_token)[node], 1249 }); 1250} 1251 1252pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField { 1253 assert(tree.nodes.items(.tag)[node] == .container_field); 1254 const data = tree.nodes.items(.data)[node]; 1255 const extra = tree.extraData(data.rhs, Node.ContainerField); 1256 return tree.fullContainerField(.{ 1257 .name_token = tree.nodes.items(.main_token)[node], 1258 .type_expr = data.lhs, 1259 .value_expr = extra.value_expr, 1260 .align_expr = extra.align_expr, 1261 }); 1262} 1263 1264pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField { 1265 assert(tree.nodes.items(.tag)[node] == .container_field_init); 1266 const data = tree.nodes.items(.data)[node]; 1267 return tree.fullContainerField(.{ 1268 .name_token = 
tree.nodes.items(.main_token)[node],
        .type_expr = data.lhs,
        .value_expr = data.rhs,
        .align_expr = 0,
    });
}

/// Asserts the node is a `.container_field_align`: type in lhs, align
/// expression in rhs, no default value.
pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField {
    assert(tree.nodes.items(.tag)[node] == .container_field_align);
    const data = tree.nodes.items(.data)[node];
    return tree.fullContainerField(.{
        .name_token = tree.nodes.items(.main_token)[node],
        .type_expr = data.lhs,
        .value_expr = 0,
        .align_expr = data.rhs,
    });
}

/// Asserts the node is a `.fn_proto_simple` (at most one parameter and no
/// optional clauses). `buffer` backs the returned params slice and must
/// outlive the result.
pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
    assert(tree.nodes.items(.tag)[node] == .fn_proto_simple);
    const data = tree.nodes.items(.data)[node];
    buffer[0] = data.lhs;
    // lhs == 0 is the null node index, meaning "no parameter".
    const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1];
    return tree.fullFnProto(.{
        .proto_node = node,
        .fn_token = tree.nodes.items(.main_token)[node],
        .return_type = data.rhs,
        .params = params,
        .align_expr = 0,
        .addrspace_expr = 0,
        .section_expr = 0,
        .callconv_expr = 0,
    });
}

/// Asserts the node is a `.fn_proto_multi`; the parameter list lives in
/// `extra_data`, addressed by a `Node.SubRange`.
pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto {
    assert(tree.nodes.items(.tag)[node] == .fn_proto_multi);
    const data = tree.nodes.items(.data)[node];
    const params_range = tree.extraData(data.lhs, Node.SubRange);
    const params = tree.extra_data[params_range.start..params_range.end];
    return tree.fullFnProto(.{
        .proto_node = node,
        .fn_token = tree.nodes.items(.main_token)[node],
        .return_type = data.rhs,
        .params = params,
        .align_expr = 0,
        .addrspace_expr = 0,
        .section_expr = 0,
        .callconv_expr = 0,
    });
}

/// Asserts the node is a `.fn_proto_one`: at most one parameter plus
/// optional clauses, all in `Node.FnProtoOne` extra data. `buffer` backs
/// the returned params slice and must outlive the result.
pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
    assert(tree.nodes.items(.tag)[node] == .fn_proto_one);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.FnProtoOne);
    buffer[0] = extra.param;
    // param == 0 is the null node index, meaning "no parameter".
    const params = if (extra.param == 0) buffer[0..0] else buffer[0..1];
    return tree.fullFnProto(.{
        .proto_node = node,
        .fn_token = tree.nodes.items(.main_token)[node],
        .return_type = data.rhs,
        .params = params,
        .align_expr = extra.align_expr,
        .addrspace_expr = extra.addrspace_expr,
        .section_expr = extra.section_expr,
        .callconv_expr = extra.callconv_expr,
    });
}

/// Asserts the node is a `.fn_proto` (full form): parameter range and all
/// optional clauses live in `Node.FnProto` extra data.
pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto {
    assert(tree.nodes.items(.tag)[node] == .fn_proto);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.FnProto);
    const params = tree.extra_data[extra.params_start..extra.params_end];
    return tree.fullFnProto(.{
        .proto_node = node,
        .fn_token = tree.nodes.items(.main_token)[node],
        .return_type = data.rhs,
        .params = params,
        .align_expr = extra.align_expr,
        .addrspace_expr = extra.addrspace_expr,
        .section_expr = extra.section_expr,
        .callconv_expr = extra.callconv_expr,
    });
}

/// Asserts the node is a `.struct_init_one` or `.struct_init_one_comma`.
/// `buffer` backs the returned fields slice and must outlive the result.
pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit {
    assert(tree.nodes.items(.tag)[node] == .struct_init_one or
        tree.nodes.items(.tag)[node] == .struct_init_one_comma);
    const data = tree.nodes.items(.data)[node];
    buffer[0] = data.rhs;
    // rhs == 0 is the null node index, meaning "no field initializer".
    const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
    return tree.fullStructInit(.{
        .lbrace = tree.nodes.items(.main_token)[node],
        .fields = fields,
        .type_expr = data.lhs,
    });
}

/// Asserts the node is a `.struct_init_dot_two` or
/// `.struct_init_dot_two_comma`. `buffer` backs the returned fields slice
/// and must outlive the result.
pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit {
    assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or
        tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    // 0 is the null node index; size the slice by the populated slots.
    const fields = if (data.rhs != 0)
        buffer[0..2]
else if (data.lhs != 0)
        buffer[0..1]
    else
        buffer[0..0];
    return tree.fullStructInit(.{
        .lbrace = tree.nodes.items(.main_token)[node],
        .fields = fields,
        .type_expr = 0,
    });
}

/// Asserts the node is a `.struct_init_dot` or `.struct_init_dot_comma`;
/// field initializers are the `extra_data` range [lhs, rhs).
pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit {
    assert(tree.nodes.items(.tag)[node] == .struct_init_dot or
        tree.nodes.items(.tag)[node] == .struct_init_dot_comma);
    const data = tree.nodes.items(.data)[node];
    return tree.fullStructInit(.{
        .lbrace = tree.nodes.items(.main_token)[node],
        .fields = tree.extra_data[data.lhs..data.rhs],
        .type_expr = 0,
    });
}

/// Asserts the node is a `.struct_init` or `.struct_init_comma`: type
/// expression in lhs, field initializers addressed by a `Node.SubRange`.
pub fn structInit(tree: Tree, node: Node.Index) full.StructInit {
    assert(tree.nodes.items(.tag)[node] == .struct_init or
        tree.nodes.items(.tag)[node] == .struct_init_comma);
    const data = tree.nodes.items(.data)[node];
    const fields_range = tree.extraData(data.rhs, Node.SubRange);
    return tree.fullStructInit(.{
        .lbrace = tree.nodes.items(.main_token)[node],
        .fields = tree.extra_data[fields_range.start..fields_range.end],
        .type_expr = data.lhs,
    });
}

/// Asserts the node is an `.array_init_one` or `.array_init_one_comma`.
/// `buffer` backs the returned elements slice and must outlive the result.
pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit {
    assert(tree.nodes.items(.tag)[node] == .array_init_one or
        tree.nodes.items(.tag)[node] == .array_init_one_comma);
    const data = tree.nodes.items(.data)[node];
    buffer[0] = data.rhs;
    // rhs == 0 is the null node index, meaning "no element".
    const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
    return .{
        .ast = .{
            .lbrace = tree.nodes.items(.main_token)[node],
            .elements = elements,
            .type_expr = data.lhs,
        },
    };
}

/// Asserts the node is an `.array_init_dot_two` or
/// `.array_init_dot_two_comma`. `buffer` backs the returned elements
/// slice and must outlive the result.
pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit {
    assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or
        tree.nodes.items(.tag)[node] == .array_init_dot_two_comma);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    // 0 is the null node index; size the slice by the populated slots.
    const elements = if (data.rhs != 0)
        buffer[0..2]
    else if (data.lhs != 0)
        buffer[0..1]
    else
        buffer[0..0];
    return .{
        .ast = .{
            .lbrace = tree.nodes.items(.main_token)[node],
            .elements = elements,
            .type_expr = 0,
        },
    };
}

/// Asserts the node is an `.array_init_dot` or `.array_init_dot_comma`;
/// elements are the `extra_data` range [lhs, rhs).
pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit {
    assert(tree.nodes.items(.tag)[node] == .array_init_dot or
        tree.nodes.items(.tag)[node] == .array_init_dot_comma);
    const data = tree.nodes.items(.data)[node];
    return .{
        .ast = .{
            .lbrace = tree.nodes.items(.main_token)[node],
            .elements = tree.extra_data[data.lhs..data.rhs],
            .type_expr = 0,
        },
    };
}

/// Asserts the node is an `.array_init` or `.array_init_comma`: type
/// expression in lhs, elements addressed by a `Node.SubRange`.
pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit {
    assert(tree.nodes.items(.tag)[node] == .array_init or
        tree.nodes.items(.tag)[node] == .array_init_comma);
    const data = tree.nodes.items(.data)[node];
    const elem_range = tree.extraData(data.rhs, Node.SubRange);
    return .{
        .ast = .{
            .lbrace = tree.nodes.items(.main_token)[node],
            .elements = tree.extra_data[elem_range.start..elem_range.end],
            .type_expr = data.lhs,
        },
    };
}

/// Asserts the node is an `.array_type` (no sentinel; sentinel is 0).
pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType {
    assert(tree.nodes.items(.tag)[node] == .array_type);
    const data = tree.nodes.items(.data)[node];
    return .{
        .ast = .{
            .lbracket = tree.nodes.items(.main_token)[node],
            .elem_count = data.lhs,
            .sentinel = 0,
            .elem_type = data.rhs,
        },
    };
}

/// Asserts the node is an `.array_type_sentinel` and that a sentinel is
/// actually present in its `Node.ArrayTypeSentinel` extra data.
pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType {
    assert(tree.nodes.items(.tag)[node] == .array_type_sentinel);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel);
    assert(extra.sentinel != 0);
    return .{
        .ast = .{
            .lbracket = tree.nodes.items(.main_token)[node],
.elem_count = data.lhs,
            .sentinel = extra.sentinel,
            .elem_type = extra.elem_type,
        },
    };
}

/// Asserts the node is a `.ptr_type_aligned`: align node in lhs, child
/// type in rhs; no sentinel, addrspace, or bit range.
pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
    assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
    const data = tree.nodes.items(.data)[node];
    return tree.fullPtrType(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .align_node = data.lhs,
        .addrspace_node = 0,
        .sentinel = 0,
        .bit_range_start = 0,
        .bit_range_end = 0,
        .child_type = data.rhs,
    });
}

/// Asserts the node is a `.ptr_type_sentinel`: sentinel in lhs, child
/// type in rhs; no align, addrspace, or bit range.
pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
    assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
    const data = tree.nodes.items(.data)[node];
    return tree.fullPtrType(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .align_node = 0,
        .addrspace_node = 0,
        .sentinel = data.lhs,
        .bit_range_start = 0,
        .bit_range_end = 0,
        .child_type = data.rhs,
    });
}

/// Asserts the node is a `.ptr_type`: align/addrspace/sentinel live in
/// `Node.PtrType` extra data; no bit range.
pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType {
    assert(tree.nodes.items(.tag)[node] == .ptr_type);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.PtrType);
    return tree.fullPtrType(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .align_node = extra.align_node,
        .addrspace_node = extra.addrspace_node,
        .sentinel = extra.sentinel,
        .bit_range_start = 0,
        .bit_range_end = 0,
        .child_type = data.rhs,
    });
}

/// Asserts the node is a `.ptr_type_bit_range`: all clauses, including
/// the bit range, live in `Node.PtrTypeBitRange` extra data.
pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
    assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
    return tree.fullPtrType(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .align_node = extra.align_node,
        .addrspace_node = extra.addrspace_node,
        .sentinel = extra.sentinel,
        .bit_range_start = extra.bit_range_start,
        .bit_range_end = extra.bit_range_end,
        .child_type = data.rhs,
    });
}

/// Asserts the node is a `.slice_open` (`a[b..]`): sliced operand in lhs,
/// start expression in rhs; no end or sentinel.
pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice {
    assert(tree.nodes.items(.tag)[node] == .slice_open);
    const data = tree.nodes.items(.data)[node];
    return .{
        .ast = .{
            .sliced = data.lhs,
            .lbracket = tree.nodes.items(.main_token)[node],
            .start = data.rhs,
            .end = 0,
            .sentinel = 0,
        },
    };
}

/// Asserts the node is a `.slice` (`a[b..c]`): start/end live in
/// `Node.Slice` extra data; no sentinel.
pub fn slice(tree: Tree, node: Node.Index) full.Slice {
    assert(tree.nodes.items(.tag)[node] == .slice);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.Slice);
    return .{
        .ast = .{
            .sliced = data.lhs,
            .lbracket = tree.nodes.items(.main_token)[node],
            .start = extra.start,
            .end = extra.end,
            .sentinel = 0,
        },
    };
}

/// Asserts the node is a `.slice_sentinel`: start/end/sentinel live in
/// `Node.SliceSentinel` extra data.
pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice {
    assert(tree.nodes.items(.tag)[node] == .slice_sentinel);
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.SliceSentinel);
    return .{
        .ast = .{
            .sliced = data.lhs,
            .lbracket = tree.nodes.items(.main_token)[node],
            .start = extra.start,
            .end = extra.end,
            .sentinel = extra.sentinel,
        },
    };
}

/// Asserts the node is a `.container_decl_two` or
/// `.container_decl_two_trailing` (at most two members, stored inline).
/// `buffer` backs the returned members slice and must outlive the result.
pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .container_decl_two or
        tree.nodes.items(.tag)[node] == .container_decl_two_trailing);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    // 0 is the null node index; size the slice by the populated slots.
    const members = if (data.rhs != 0)
        buffer[0..2]
    else if (data.lhs != 0)
        buffer[0..1]
    else
        buffer[0..0];
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
.members = members,
        .arg = 0,
    });
}

/// Asserts the node is a `.container_decl` or `.container_decl_trailing`;
/// members are the `extra_data` range [lhs, rhs).
pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .container_decl or
        tree.nodes.items(.tag)[node] == .container_decl_trailing);
    const data = tree.nodes.items(.data)[node];
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
        .members = tree.extra_data[data.lhs..data.rhs],
        .arg = 0,
    });
}

/// Asserts the node is a `.container_decl_arg` or
/// `.container_decl_arg_trailing`: argument expression in lhs, members
/// addressed by a `Node.SubRange`.
pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .container_decl_arg or
        tree.nodes.items(.tag)[node] == .container_decl_arg_trailing);
    const data = tree.nodes.items(.data)[node];
    const members_range = tree.extraData(data.rhs, Node.SubRange);
    return tree.fullContainerDecl(.{
        .main_token = tree.nodes.items(.main_token)[node],
        .enum_token = null,
        .members = tree.extra_data[members_range.start..members_range.end],
        .arg = data.lhs,
    });
}

/// Asserts the node is a `.tagged_union_two` or
/// `.tagged_union_two_trailing` (at most two members, stored inline).
/// `buffer` backs the returned members slice and must outlive the result.
pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .tagged_union_two or
        tree.nodes.items(.tag)[node] == .tagged_union_two_trailing);
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{ data.lhs, data.rhs };
    // 0 is the null node index; size the slice by the populated slots.
    const members = if (data.rhs != 0)
        buffer[0..2]
    else if (data.lhs != 0)
        buffer[0..1]
    else
        buffer[0..0];
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        .members = members,
        .arg = 0,
    });
}

/// Asserts the node is a `.tagged_union` or `.tagged_union_trailing`;
/// members are the `extra_data` range [lhs, rhs).
pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .tagged_union or
        tree.nodes.items(.tag)[node] == .tagged_union_trailing);
    const data = tree.nodes.items(.data)[node];
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        .members = tree.extra_data[data.lhs..data.rhs],
        .arg = 0,
    });
}

/// Asserts the node is a `.tagged_union_enum_tag` or
/// `.tagged_union_enum_tag_trailing`: enum tag expression in lhs, members
/// addressed by a `Node.SubRange`.
pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl {
    assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or
        tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_trailing);
    const data = tree.nodes.items(.data)[node];
    const members_range = tree.extraData(data.rhs, Node.SubRange);
    const main_token = tree.nodes.items(.main_token)[node];
    return tree.fullContainerDecl(.{
        .main_token = main_token,
        .enum_token = main_token + 2, // union lparen enum
        .members = tree.extra_data[members_range.start..members_range.end],
        .arg = data.lhs,
    });
}

/// Assembles a switch case with zero or one value. The single value is
/// viewed in place through a pointer into the node data (lhs); lhs == 0
/// means no value (the `else` prong).
pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase {
    const data = &tree.nodes.items(.data)[node];
    const values: *[1]Node.Index = &data.lhs;
    return tree.fullSwitchCase(.{
        .values = if (data.lhs == 0) values[0..0] else values[0..1],
        .arrow_token = tree.nodes.items(.main_token)[node],
        .target_expr = data.rhs,
    });
}

/// Assembles a switch case whose values are addressed by a
/// `Node.SubRange` in extra data.
pub fn switchCase(tree: Tree, node: Node.Index) full.SwitchCase {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.lhs, Node.SubRange);
    return tree.fullSwitchCase(.{
        .values = tree.extra_data[extra.start..extra.end],
        .arrow_token = tree.nodes.items(.main_token)[node],
        .target_expr = data.rhs,
    });
}

/// Assembles an asm expression with no outputs/inputs/clobbers: template
/// in lhs, closing rparen token in rhs.
pub fn asmSimple(tree: Tree, node: Node.Index) full.Asm {
    const data = tree.nodes.items(.data)[node];
    return tree.fullAsm(.{
        .asm_token = tree.nodes.items(.main_token)[node],
        .template = data.lhs,
        .items = &.{},
        .rparen = data.rhs,
    });
}

/// Assembles an asm expression whose output/input items and rparen live
/// in `Node.Asm` extra data.
pub fn
asmFull(tree: Tree, node: Node.Index) full.Asm {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.Asm);
    return tree.fullAsm(.{
        .asm_token = tree.nodes.items(.main_token)[node],
        .template = data.lhs,
        .items = tree.extra_data[extra.items_start..extra.items_end],
        .rparen = extra.rparen,
    });
}

/// Assembles a while loop with no continue expression and no else branch
/// (both reported as 0).
pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
    const data = tree.nodes.items(.data)[node];
    return tree.fullWhile(.{
        .while_token = tree.nodes.items(.main_token)[node],
        .cond_expr = data.lhs,
        .cont_expr = 0,
        .then_expr = data.rhs,
        .else_expr = 0,
    });
}

/// Assembles a while loop with a continue expression but no else branch;
/// cont/then live in `Node.WhileCont` extra data.
pub fn whileCont(tree: Tree, node: Node.Index) full.While {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.WhileCont);
    return tree.fullWhile(.{
        .while_token = tree.nodes.items(.main_token)[node],
        .cond_expr = data.lhs,
        .cont_expr = extra.cont_expr,
        .then_expr = extra.then_expr,
        .else_expr = 0,
    });
}

/// Assembles a while loop with both a continue expression and an else
/// branch; all stored in `Node.While` extra data.
pub fn whileFull(tree: Tree, node: Node.Index) full.While {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.While);
    return tree.fullWhile(.{
        .while_token = tree.nodes.items(.main_token)[node],
        .cond_expr = data.lhs,
        .cont_expr = extra.cont_expr,
        .then_expr = extra.then_expr,
        .else_expr = extra.else_expr,
    });
}

/// Assembles a for loop with no else branch. For loops reuse the While
/// view; they never have a continue expression.
pub fn forSimple(tree: Tree, node: Node.Index) full.While {
    const data = tree.nodes.items(.data)[node];
    return tree.fullWhile(.{
        .while_token = tree.nodes.items(.main_token)[node],
        .cond_expr = data.lhs,
        .cont_expr = 0,
        .then_expr = data.rhs,
        .else_expr = 0,
    });
}

/// Assembles a for loop with an else branch; then/else live in `Node.If`
/// extra data.
pub fn forFull(tree: Tree, node: Node.Index) full.While {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.If);
    return tree.fullWhile(.{
        .while_token = tree.nodes.items(.main_token)[node],
        .cond_expr = data.lhs,
        .cont_expr = 0,
        .then_expr = extra.then_expr,
        .else_expr = extra.else_expr,
    });
}

/// Assembles a call with zero or one argument stored inline in rhs.
/// `buffer` backs the returned params slice and must outlive the result.
pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call {
    const data = tree.nodes.items(.data)[node];
    buffer.* = .{data.rhs};
    // rhs == 0 is the null node index, meaning "no argument".
    const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0];
    return tree.fullCall(.{
        .lparen = tree.nodes.items(.main_token)[node],
        .fn_expr = data.lhs,
        .params = params,
    });
}

/// Assembles a call whose arguments are addressed by a `Node.SubRange`.
pub fn callFull(tree: Tree, node: Node.Index) full.Call {
    const data = tree.nodes.items(.data)[node];
    const extra = tree.extraData(data.rhs, Node.SubRange);
    return tree.fullCall(.{
        .lparen = tree.nodes.items(.main_token)[node],
        .fn_expr = data.lhs,
        .params = tree.extra_data[extra.start..extra.end],
    });
}

/// Scans backwards from the `var`/`const` token to pick up any
/// pub/extern/export/threadlocal/comptime modifiers and an extern lib
/// name string literal.
fn fullVarDecl(tree: Tree, info: full.VarDecl.Components) full.VarDecl {
    const token_tags = tree.tokens.items(.tag);
    var result: full.VarDecl = .{
        .ast = info,
        .visib_token = null,
        .extern_export_token = null,
        .lib_name = null,
        .threadlocal_token = null,
        .comptime_token = null,
    };
    var i = info.mut_token;
    while (i > 0) {
        i -= 1;
        switch (token_tags[i]) {
            .keyword_extern, .keyword_export => result.extern_export_token = i,
            .keyword_comptime => result.comptime_token = i,
            .keyword_pub => result.visib_token = i,
            .keyword_threadlocal => result.threadlocal_token = i,
            .string_literal => result.lib_name = i,
            else => break,
        }
    }
    return result;
}

/// Locates the payload and error capture tokens of an if expression by
/// inspecting the tokens around the condition and then branches.
fn fullIf(tree: Tree, info: full.If.Components) full.If {
    const token_tags = tree.tokens.items(.tag);
    var result: full.If = .{
        .ast = info,
        .payload_token = null,
        .error_token = null,
        .else_token = undefined,
    };
    // if (cond_expr)
// if (cond_expr) |x|
    //           ^    ^
    const payload_pipe = tree.lastToken(info.cond_expr) + 2;
    if (token_tags[payload_pipe] == .pipe) {
        result.payload_token = payload_pipe + 1;
    }
    if (info.else_expr != 0) {
        // then_expr else |x|
        //           ^    ^
        result.else_token = tree.lastToken(info.then_expr) + 1;
        if (token_tags[result.else_token + 1] == .pipe) {
            result.error_token = result.else_token + 2;
        }
    }
    return result;
}

/// Picks up an optional `comptime` token immediately preceding the field
/// name.
fn fullContainerField(tree: Tree, info: full.ContainerField.Components) full.ContainerField {
    const token_tags = tree.tokens.items(.tag);
    var result: full.ContainerField = .{
        .ast = info,
        .comptime_token = null,
    };
    // comptime name: type = init,
    // ^
    if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
        result.comptime_token = info.name_token - 1;
    }
    return result;
}

/// Scans backwards from the `fn` token for modifiers and an extern lib
/// name, then forwards for the optional function name and the lparen.
fn fullFnProto(tree: Tree, info: full.FnProto.Components) full.FnProto {
    const token_tags = tree.tokens.items(.tag);
    var result: full.FnProto = .{
        .ast = info,
        .visib_token = null,
        .extern_export_inline_token = null,
        .lib_name = null,
        .name_token = null,
        .lparen = undefined,
    };
    var i = info.fn_token;
    while (i > 0) {
        i -= 1;
        switch (token_tags[i]) {
            .keyword_extern,
            .keyword_export,
            .keyword_inline,
            .keyword_noinline,
            => result.extern_export_inline_token = i,
            .keyword_pub => result.visib_token = i,
            .string_literal => result.lib_name = i,
            else => break,
        }
    }
    const after_fn_token = info.fn_token + 1;
    if (token_tags[after_fn_token] == .identifier) {
        result.name_token = after_fn_token;
        result.lparen = after_fn_token + 1;
    } else {
        result.lparen = after_fn_token;
    }
    assert(token_tags[result.lparen] == .l_paren);

    return result;
}

/// Struct initializers have no modifier tokens to locate; this simply
/// wraps the components.
fn fullStructInit(tree: Tree, info: full.StructInit.Components) full.StructInit {
    _ = tree;
    var result: full.StructInit = .{
        .ast = info,
    };
    return result;
}

/// Classifies the pointer size from the tokens at `main_token` and then
/// scans for allowzero/const/volatile modifier tokens.
fn fullPtrType(tree: Tree, info: full.PtrType.Components) full.PtrType {
    const token_tags = tree.tokens.items(.tag);
    // TODO: looks like stage1 isn't quite smart enough to handle enum
    // literals in some places here
    const Size = std.builtin.TypeInfo.Pointer.Size;
    const size: Size = switch (token_tags[info.main_token]) {
        .asterisk,
        .asterisk_asterisk,
        => switch (token_tags[info.main_token + 1]) {
            .r_bracket, .colon => .Many,
            .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Size.C else .One,
            else => .One,
        },
        .l_bracket => Size.Slice,
        else => unreachable,
    };
    var result: full.PtrType = .{
        .size = size,
        .allowzero_token = null,
        .const_token = null,
        .volatile_token = null,
        .ast = info,
    };
    // We need to be careful that we don't iterate over any sub-expressions
    // here while looking for modifiers as that could result in false
    // positives. Therefore, start after a sentinel if there is one and
    // skip over any align node and bit range nodes.
var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else info.main_token;
    const end = tree.firstToken(info.child_type);
    while (i < end) : (i += 1) {
        switch (token_tags[i]) {
            .keyword_allowzero => result.allowzero_token = i,
            .keyword_const => result.const_token = i,
            .keyword_volatile => result.volatile_token = i,
            .keyword_align => {
                assert(info.align_node != 0);
                // Jump past the align clause (and bit range, if present)
                // so its sub-expression tokens are not misread as modifiers.
                if (info.bit_range_end != 0) {
                    assert(info.bit_range_start != 0);
                    i = tree.lastToken(info.bit_range_end) + 1;
                } else {
                    i = tree.lastToken(info.align_node) + 1;
                }
            },
            else => {},
        }
    }
    return result;
}

/// Picks up an optional `extern`/`packed` layout token immediately
/// preceding the container keyword.
fn fullContainerDecl(tree: Tree, info: full.ContainerDecl.Components) full.ContainerDecl {
    const token_tags = tree.tokens.items(.tag);
    var result: full.ContainerDecl = .{
        .ast = info,
        .layout_token = null,
    };
    switch (token_tags[info.main_token - 1]) {
        .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
        else => {},
    }
    return result;
}

/// Locates the optional payload capture token right after the `=>` arrow.
fn fullSwitchCase(tree: Tree, info: full.SwitchCase.Components) full.SwitchCase {
    const token_tags = tree.tokens.items(.tag);
    var result: full.SwitchCase = .{
        .ast = info,
        .payload_token = null,
    };
    if (token_tags[info.arrow_token + 1] == .pipe) {
        result.payload_token = info.arrow_token + 2;
    }
    return result;
}

/// Splits the asm items into outputs and inputs and locates the optional
/// `volatile` token and the first clobber string literal.
fn fullAsm(tree: Tree, info: full.Asm.Components) full.Asm {
    const token_tags = tree.tokens.items(.tag);
    const node_tags = tree.nodes.items(.tag);
    var result: full.Asm = .{
        .ast = info,
        .volatile_token = null,
        .inputs = &.{},
        .outputs = &.{},
        .first_clobber = null,
    };
    if (token_tags[info.asm_token + 1] == .keyword_volatile) {
        result.volatile_token = info.asm_token + 1;
    }
    // Outputs always come first; the items slice is outputs then inputs.
    const outputs_end: usize = for (info.items) |item, i| {
        switch (node_tags[item]) {
            .asm_output => continue,
            else => break i,
        }
    } else info.items.len;

    result.outputs = info.items[0..outputs_end];
    result.inputs = info.items[outputs_end..];

    if (info.items.len == 0) {
        // asm ("foo" ::: "a", "b");
        const template_token = tree.lastToken(info.template);
        if (token_tags[template_token + 1] == .colon and
            token_tags[template_token + 2] == .colon and
            token_tags[template_token + 3] == .colon and
            token_tags[template_token + 4] == .string_literal)
        {
            result.first_clobber = template_token + 4;
        }
    } else if (result.inputs.len != 0) {
        // asm ("foo" :: [_] "" (y) : "a", "b");
        const last_input = result.inputs[result.inputs.len - 1];
        const rparen = tree.lastToken(last_input);
        var i = rparen + 1;
        // Allow a (useless) comma right after the closing parenthesis.
        if (token_tags[i] == .comma) i += 1;
        if (token_tags[i] == .colon and
            token_tags[i + 1] == .string_literal)
        {
            result.first_clobber = i + 1;
        }
    } else {
        // asm ("foo" : [_] "" (x) :: "a", "b");
        const last_output = result.outputs[result.outputs.len - 1];
        const rparen = tree.lastToken(last_output);
        var i = rparen + 1;
        // Allow a (useless) comma right after the closing parenthesis.
        if (token_tags[i] == .comma) i += 1;
        if (token_tags[i] == .colon and
            token_tags[i + 1] == .colon and
            token_tags[i + 2] == .string_literal)
        {
            result.first_clobber = i + 2;
        }
    }

    return result;
}

/// Locates the optional inline/label/payload/error tokens of a while or
/// for loop by inspecting the surrounding tokens.
fn fullWhile(tree: Tree, info: full.While.Components) full.While {
    const token_tags = tree.tokens.items(.tag);
    var result: full.While = .{
        .ast = info,
        .inline_token = null,
        .label_token = null,
        .payload_token = null,
        .else_token = undefined,
        .error_token = null,
    };
    var tok_i = info.while_token - 1;
    if (token_tags[tok_i] == .keyword_inline) {
        result.inline_token = tok_i;
        tok_i -= 1;
    }
    if (token_tags[tok_i] == .colon and
        token_tags[tok_i - 1] == .identifier)
    {
        result.label_token = tok_i - 1;
    }
    const last_cond_token = tree.lastToken(info.cond_expr);
    if (token_tags[last_cond_token + 2] == .pipe) {
        result.payload_token = last_cond_token + 3;
    }
    if (info.else_expr != 0) {
        // then_expr else |x|
        //           ^    ^
        result.else_token = tree.lastToken(info.then_expr) + 1;
        if (token_tags[result.else_token + 1] == .pipe) {
            result.error_token = result.else_token + 2;
        }
    }
    return result;
}

/// Locates the optional `async` token immediately preceding the callee
/// expression.
fn fullCall(tree: Tree, info: full.Call.Components) full.Call {
    const token_tags = tree.tokens.items(.tag);
    var result: full.Call = .{
        .ast = info,
        .async_token = null,
    };
    const maybe_async_token = tree.firstToken(info.fn_expr) - 1;
    if (token_tags[maybe_async_token] == .keyword_async) {
        result.async_token = maybe_async_token;
    }
    return result;
}

/// Fully assembled AST node information.
pub const full = struct {
    pub const VarDecl = struct {
        visib_token: ?TokenIndex,
        extern_export_token: ?TokenIndex,
        lib_name: ?TokenIndex,
        threadlocal_token: ?TokenIndex,
        comptime_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            mut_token: TokenIndex,
            type_node: Node.Index,
            align_node: Node.Index,
            addrspace_node: Node.Index,
            section_node: Node.Index,
            init_node: Node.Index,
        };
    };

    pub const If = struct {
        /// Points to the first token after the `|`. Will either be an identifier or
        /// a `*` (with an identifier immediately after it).
        payload_token: ?TokenIndex,
        /// Points to the identifier after the `|`.
        error_token: ?TokenIndex,
        /// Populated only if else_expr != 0.
        else_token: TokenIndex,
        ast: Components,

        pub const Components = struct {
            if_token: TokenIndex,
            cond_expr: Node.Index,
            then_expr: Node.Index,
            else_expr: Node.Index,
        };
    };

    pub const While = struct {
        ast: Components,
        inline_token: ?TokenIndex,
        label_token: ?TokenIndex,
        payload_token: ?TokenIndex,
        error_token: ?TokenIndex,
        /// Populated only if else_expr != 0.
        else_token: TokenIndex,

        pub const Components = struct {
            while_token: TokenIndex,
            cond_expr: Node.Index,
            cont_expr: Node.Index,
            then_expr: Node.Index,
            else_expr: Node.Index,
        };
    };

    pub const ContainerField = struct {
        comptime_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            name_token: TokenIndex,
            type_expr: Node.Index,
            value_expr: Node.Index,
            align_expr: Node.Index,
        };
    };

    pub const FnProto = struct {
        visib_token: ?TokenIndex,
        extern_export_inline_token: ?TokenIndex,
        lib_name: ?TokenIndex,
        name_token: ?TokenIndex,
        lparen: TokenIndex,
        ast: Components,

        pub const Components = struct {
            proto_node: Node.Index,
            fn_token: TokenIndex,
            return_type: Node.Index,
            params: []const Node.Index,
            align_expr: Node.Index,
            addrspace_expr: Node.Index,
            section_expr: Node.Index,
            callconv_expr: Node.Index,
        };

        pub const Param = struct {
            first_doc_comment: ?TokenIndex,
            name_token: ?TokenIndex,
            comptime_noalias: ?TokenIndex,
            anytype_ellipsis3: ?TokenIndex,
            type_expr: Node.Index,
        };

        /// Abstracts over the fact that anytype and ... are not included
        /// in the params slice, since they are simple identifiers and
        /// not sub-expressions.
        pub const Iterator = struct {
            tree: *const Tree,
            fn_proto: *const FnProto,
            param_i: usize,
            tok_i: TokenIndex,
            tok_flag: bool,

            pub fn next(it: *Iterator) ?Param {
                const token_tags = it.tree.tokens.items(.tag);
                while (true) {
                    var first_doc_comment: ?TokenIndex = null;
                    var comptime_noalias: ?TokenIndex = null;
                    var name_token: ?TokenIndex = null;
                    if (!it.tok_flag) {
                        if (it.param_i >= it.fn_proto.ast.params.len) {
                            return null;
                        }
                        const param_type = it.fn_proto.ast.params[it.param_i];
                        // Walk backwards from the type expression to collect
                        // the name, doc comments, and comptime/noalias tokens.
                        var tok_i = it.tree.firstToken(param_type) - 1;
                        while (true) : (tok_i -= 1) switch (token_tags[tok_i]) {
                            .colon => continue,
                            .identifier => name_token = tok_i,
                            .doc_comment => first_doc_comment = tok_i,
                            .keyword_comptime, .keyword_noalias => comptime_noalias = tok_i,
                            else => break,
                        };
                        it.param_i += 1;
                        it.tok_i = it.tree.lastToken(param_type) + 1;
                        // Look for anytype and ... params afterwards.
                        if (token_tags[it.tok_i] == .comma) {
                            it.tok_i += 1;
                        }
                        it.tok_flag = true;
                        return Param{
                            .first_doc_comment = first_doc_comment,
                            .comptime_noalias = comptime_noalias,
                            .name_token = name_token,
                            .anytype_ellipsis3 = null,
                            .type_expr = param_type,
                        };
                    }
                    if (token_tags[it.tok_i] == .comma) {
                        it.tok_i += 1;
                    }
                    if (token_tags[it.tok_i] == .r_paren) {
                        return null;
                    }
                    if (token_tags[it.tok_i] == .doc_comment) {
                        first_doc_comment = it.tok_i;
                        while (token_tags[it.tok_i] == .doc_comment) {
                            it.tok_i += 1;
                        }
                    }
                    switch (token_tags[it.tok_i]) {
                        .ellipsis3 => {
                            it.tok_flag = false; // Next iteration should return null.
                            return Param{
                                .first_doc_comment = first_doc_comment,
                                .comptime_noalias = null,
                                .name_token = null,
                                .anytype_ellipsis3 = it.tok_i,
                                .type_expr = 0,
                            };
                        },
                        .keyword_noalias, .keyword_comptime => {
                            comptime_noalias = it.tok_i;
                            it.tok_i += 1;
                        },
                        else => {},
                    }
                    if (token_tags[it.tok_i] == .identifier and
                        token_tags[it.tok_i + 1] == .colon)
                    {
                        name_token = it.tok_i;
                        it.tok_i += 2;
                    }
                    if (token_tags[it.tok_i] == .keyword_anytype) {
                        it.tok_i += 1;
                        return Param{
                            .first_doc_comment = first_doc_comment,
                            .comptime_noalias = comptime_noalias,
                            .name_token = name_token,
                            .anytype_ellipsis3 = it.tok_i - 1,
                            .type_expr = 0,
                        };
                    }
                    it.tok_flag = false;
                }
            }
        };

        pub fn iterate(fn_proto: FnProto, tree: Tree) Iterator {
            return .{
                .tree = &tree,
                .fn_proto = &fn_proto,
                .param_i = 0,
                .tok_i = fn_proto.lparen + 1,
                .tok_flag = true,
            };
        }
    };

    pub const StructInit = struct {
        ast: Components,

        pub const Components = struct {
            lbrace: TokenIndex,
            fields: []const Node.Index,
            type_expr: Node.Index,
        };
    };

    pub const ArrayInit = struct {
        ast: Components,

        pub const Components = struct {
            lbrace: TokenIndex,
            elements: []const Node.Index,
            type_expr: Node.Index,
        };
    };

    pub const ArrayType = struct {
        ast: Components,

        pub const Components = struct {
            lbracket: TokenIndex,
            elem_count: Node.Index,
            sentinel: Node.Index,
            elem_type: Node.Index,
        };
    };

    pub const PtrType = struct {
        size: std.builtin.TypeInfo.Pointer.Size,
        allowzero_token: ?TokenIndex,
        const_token: ?TokenIndex,
        volatile_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            main_token: TokenIndex,
            align_node: Node.Index,
2346 addrspace_node: Node.Index, 2347 sentinel: Node.Index, 2348 bit_range_start: Node.Index, 2349 bit_range_end: Node.Index, 2350 child_type: Node.Index, 2351 }; 2352 }; 2353 2354 pub const Slice = struct { 2355 ast: Components, 2356 2357 pub const Components = struct { 2358 sliced: Node.Index, 2359 lbracket: TokenIndex, 2360 start: Node.Index, 2361 end: Node.Index, 2362 sentinel: Node.Index, 2363 }; 2364 }; 2365 2366 pub const ContainerDecl = struct { 2367 layout_token: ?TokenIndex, 2368 ast: Components, 2369 2370 pub const Components = struct { 2371 main_token: TokenIndex, 2372 /// Populated when main_token is Keyword_union. 2373 enum_token: ?TokenIndex, 2374 members: []const Node.Index, 2375 arg: Node.Index, 2376 }; 2377 }; 2378 2379 pub const SwitchCase = struct { 2380 /// Points to the first token after the `|`. Will either be an identifier or 2381 /// a `*` (with an identifier immediately after it). 2382 payload_token: ?TokenIndex, 2383 ast: Components, 2384 2385 pub const Components = struct { 2386 /// If empty, this is an else case 2387 values: []const Node.Index, 2388 arrow_token: TokenIndex, 2389 target_expr: Node.Index, 2390 }; 2391 }; 2392 2393 pub const Asm = struct { 2394 ast: Components, 2395 volatile_token: ?TokenIndex, 2396 first_clobber: ?TokenIndex, 2397 outputs: []const Node.Index, 2398 inputs: []const Node.Index, 2399 2400 pub const Components = struct { 2401 asm_token: TokenIndex, 2402 template: Node.Index, 2403 items: []const Node.Index, 2404 rparen: TokenIndex, 2405 }; 2406 }; 2407 2408 pub const Call = struct { 2409 ast: Components, 2410 async_token: ?TokenIndex, 2411 2412 pub const Components = struct { 2413 lparen: TokenIndex, 2414 fn_expr: Node.Index, 2415 params: []const Node.Index, 2416 }; 2417 }; 2418}; 2419 2420pub const Error = struct { 2421 tag: Tag, 2422 token: TokenIndex, 2423 extra: union { 2424 none: void, 2425 expected_tag: Token.Tag, 2426 } = .{ .none = {} }, 2427 2428 pub const Tag = enum { 2429 asterisk_after_ptr_deref, 
2430 chained_comparison_operators, 2431 decl_between_fields, 2432 expected_block, 2433 expected_block_or_assignment, 2434 expected_block_or_expr, 2435 expected_block_or_field, 2436 expected_container_members, 2437 expected_expr, 2438 expected_expr_or_assignment, 2439 expected_fn, 2440 expected_inlinable, 2441 expected_labelable, 2442 expected_param_list, 2443 expected_prefix_expr, 2444 expected_primary_type_expr, 2445 expected_pub_item, 2446 expected_return_type, 2447 expected_semi_or_else, 2448 expected_semi_or_lbrace, 2449 expected_statement, 2450 expected_string_literal, 2451 expected_suffix_op, 2452 expected_type_expr, 2453 expected_var_decl, 2454 expected_var_decl_or_fn, 2455 expected_loop_payload, 2456 expected_container, 2457 extern_fn_body, 2458 extra_addrspace_qualifier, 2459 extra_align_qualifier, 2460 extra_allowzero_qualifier, 2461 extra_const_qualifier, 2462 extra_volatile_qualifier, 2463 ptr_mod_on_array_child_type, 2464 invalid_bit_range, 2465 invalid_token, 2466 same_line_doc_comment, 2467 unattached_doc_comment, 2468 varargs_nonfinal, 2469 2470 /// `expected_tag` is populated. 2471 expected_token, 2472 }; 2473}; 2474 2475pub const Node = struct { 2476 tag: Tag, 2477 main_token: TokenIndex, 2478 data: Data, 2479 2480 pub const Index = u32; 2481 2482 comptime { 2483 // Goal is to keep this under one byte for efficiency. 2484 assert(@sizeOf(Tag) == 1); 2485 } 2486 2487 /// Note: The FooComma/FooSemicolon variants exist to ease the implementation of 2488 /// Tree.lastToken() 2489 pub const Tag = enum { 2490 /// sub_list[lhs...rhs] 2491 root, 2492 /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`. 2493 @"usingnamespace", 2494 /// lhs is test name token (must be string literal), if any. 2495 /// rhs is the body node. 2496 test_decl, 2497 /// lhs is the index into extra_data. 2498 /// rhs is the initialization expression, if any. 2499 /// main_token is `var` or `const`. 
2500 global_var_decl, 2501 /// `var a: x align(y) = rhs` 2502 /// lhs is the index into extra_data. 2503 /// main_token is `var` or `const`. 2504 local_var_decl, 2505 /// `var a: lhs = rhs`. lhs and rhs may be unused. 2506 /// Can be local or global. 2507 /// main_token is `var` or `const`. 2508 simple_var_decl, 2509 /// `var a align(lhs) = rhs`. lhs and rhs may be unused. 2510 /// Can be local or global. 2511 /// main_token is `var` or `const`. 2512 aligned_var_decl, 2513 /// lhs is the identifier token payload if any, 2514 /// rhs is the deferred expression. 2515 @"errdefer", 2516 /// lhs is unused. 2517 /// rhs is the deferred expression. 2518 @"defer", 2519 /// lhs catch rhs 2520 /// lhs catch |err| rhs 2521 /// main_token is the `catch` keyword. 2522 /// payload is determined by looking at the next token after the `catch` keyword. 2523 @"catch", 2524 /// `lhs.a`. main_token is the dot. rhs is the identifier token index. 2525 field_access, 2526 /// `lhs.?`. main_token is the dot. rhs is the `?` token index. 2527 unwrap_optional, 2528 /// `lhs == rhs`. main_token is op. 2529 equal_equal, 2530 /// `lhs != rhs`. main_token is op. 2531 bang_equal, 2532 /// `lhs < rhs`. main_token is op. 2533 less_than, 2534 /// `lhs > rhs`. main_token is op. 2535 greater_than, 2536 /// `lhs <= rhs`. main_token is op. 2537 less_or_equal, 2538 /// `lhs >= rhs`. main_token is op. 2539 greater_or_equal, 2540 /// `lhs *= rhs`. main_token is op. 2541 assign_mul, 2542 /// `lhs /= rhs`. main_token is op. 2543 assign_div, 2544 /// `lhs *= rhs`. main_token is op. 2545 assign_mod, 2546 /// `lhs += rhs`. main_token is op. 2547 assign_add, 2548 /// `lhs -= rhs`. main_token is op. 2549 assign_sub, 2550 /// `lhs <<= rhs`. main_token is op. 2551 assign_shl, 2552 /// `lhs <<|= rhs`. main_token is op. 2553 assign_shl_sat, 2554 /// `lhs >>= rhs`. main_token is op. 2555 assign_shr, 2556 /// `lhs &= rhs`. main_token is op. 2557 assign_bit_and, 2558 /// `lhs ^= rhs`. main_token is op. 
2559 assign_bit_xor, 2560 /// `lhs |= rhs`. main_token is op. 2561 assign_bit_or, 2562 /// `lhs *%= rhs`. main_token is op. 2563 assign_mul_wrap, 2564 /// `lhs +%= rhs`. main_token is op. 2565 assign_add_wrap, 2566 /// `lhs -%= rhs`. main_token is op. 2567 assign_sub_wrap, 2568 /// `lhs *|= rhs`. main_token is op. 2569 assign_mul_sat, 2570 /// `lhs +|= rhs`. main_token is op. 2571 assign_add_sat, 2572 /// `lhs -|= rhs`. main_token is op. 2573 assign_sub_sat, 2574 /// `lhs = rhs`. main_token is op. 2575 assign, 2576 /// `lhs || rhs`. main_token is the `||`. 2577 merge_error_sets, 2578 /// `lhs * rhs`. main_token is the `*`. 2579 mul, 2580 /// `lhs / rhs`. main_token is the `/`. 2581 div, 2582 /// `lhs % rhs`. main_token is the `%`. 2583 mod, 2584 /// `lhs ** rhs`. main_token is the `**`. 2585 array_mult, 2586 /// `lhs *% rhs`. main_token is the `*%`. 2587 mul_wrap, 2588 /// `lhs *| rhs`. main_token is the `*|`. 2589 mul_sat, 2590 /// `lhs + rhs`. main_token is the `+`. 2591 add, 2592 /// `lhs - rhs`. main_token is the `-`. 2593 sub, 2594 /// `lhs ++ rhs`. main_token is the `++`. 2595 array_cat, 2596 /// `lhs +% rhs`. main_token is the `+%`. 2597 add_wrap, 2598 /// `lhs -% rhs`. main_token is the `-%`. 2599 sub_wrap, 2600 /// `lhs +| rhs`. main_token is the `+|`. 2601 add_sat, 2602 /// `lhs -| rhs`. main_token is the `-|`. 2603 sub_sat, 2604 /// `lhs << rhs`. main_token is the `<<`. 2605 shl, 2606 /// `lhs <<| rhs`. main_token is the `<<|`. 2607 shl_sat, 2608 /// `lhs >> rhs`. main_token is the `>>`. 2609 shr, 2610 /// `lhs & rhs`. main_token is the `&`. 2611 bit_and, 2612 /// `lhs ^ rhs`. main_token is the `^`. 2613 bit_xor, 2614 /// `lhs | rhs`. main_token is the `|`. 2615 bit_or, 2616 /// `lhs orelse rhs`. main_token is the `orelse`. 2617 @"orelse", 2618 /// `lhs and rhs`. main_token is the `and`. 2619 bool_and, 2620 /// `lhs or rhs`. main_token is the `or`. 2621 bool_or, 2622 /// `op lhs`. rhs unused. main_token is op. 2623 bool_not, 2624 /// `op lhs`. 
rhs unused. main_token is op. 2625 negation, 2626 /// `op lhs`. rhs unused. main_token is op. 2627 bit_not, 2628 /// `op lhs`. rhs unused. main_token is op. 2629 negation_wrap, 2630 /// `op lhs`. rhs unused. main_token is op. 2631 address_of, 2632 /// `op lhs`. rhs unused. main_token is op. 2633 @"try", 2634 /// `op lhs`. rhs unused. main_token is op. 2635 @"await", 2636 /// `?lhs`. rhs unused. main_token is the `?`. 2637 optional_type, 2638 /// `[lhs]rhs`. 2639 array_type, 2640 /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`. 2641 array_type_sentinel, 2642 /// `[*]align(lhs) rhs`. lhs can be omitted. 2643 /// `*align(lhs) rhs`. lhs can be omitted. 2644 /// `[]rhs`. 2645 /// main_token is the asterisk if a pointer or the lbracket if a slice 2646 /// main_token might be a ** token, which is shared with a parent/child 2647 /// pointer type and may require special handling. 2648 ptr_type_aligned, 2649 /// `[*:lhs]rhs`. lhs can be omitted. 2650 /// `*rhs`. 2651 /// `[:lhs]rhs`. 2652 /// main_token is the asterisk if a pointer or the lbracket if a slice 2653 /// main_token might be a ** token, which is shared with a parent/child 2654 /// pointer type and may require special handling. 2655 ptr_type_sentinel, 2656 /// lhs is index into ptr_type. rhs is the element type expression. 2657 /// main_token is the asterisk if a pointer or the lbracket if a slice 2658 /// main_token might be a ** token, which is shared with a parent/child 2659 /// pointer type and may require special handling. 2660 ptr_type, 2661 /// lhs is index into ptr_type_bit_range. rhs is the element type expression. 2662 /// main_token is the asterisk if a pointer or the lbracket if a slice 2663 /// main_token might be a ** token, which is shared with a parent/child 2664 /// pointer type and may require special handling. 2665 ptr_type_bit_range, 2666 /// `lhs[rhs..]` 2667 /// main_token is the lbracket. 2668 slice_open, 2669 /// `lhs[b..c]`. rhs is index into Slice 2670 /// main_token is the lbracket. 
2671 slice, 2672 /// `lhs[b..c :d]`. rhs is index into SliceSentinel 2673 /// main_token is the lbracket. 2674 slice_sentinel, 2675 /// `lhs.*`. rhs is unused. 2676 deref, 2677 /// `lhs[rhs]`. 2678 array_access, 2679 /// `lhs{rhs}`. rhs can be omitted. 2680 array_init_one, 2681 /// `lhs{rhs,}`. rhs can *not* be omitted 2682 array_init_one_comma, 2683 /// `.{lhs, rhs}`. lhs and rhs can be omitted. 2684 array_init_dot_two, 2685 /// Same as `array_init_dot_two` except there is known to be a trailing comma 2686 /// before the final rbrace. 2687 array_init_dot_two_comma, 2688 /// `.{a, b}`. `sub_list[lhs..rhs]`. 2689 array_init_dot, 2690 /// Same as `array_init_dot` except there is known to be a trailing comma 2691 /// before the final rbrace. 2692 array_init_dot_comma, 2693 /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. 2694 array_init, 2695 /// Same as `array_init` except there is known to be a trailing comma 2696 /// before the final rbrace. 2697 array_init_comma, 2698 /// `lhs{.a = rhs}`. rhs can be omitted making it empty. 2699 /// main_token is the lbrace. 2700 struct_init_one, 2701 /// `lhs{.a = rhs,}`. rhs can *not* be omitted. 2702 /// main_token is the lbrace. 2703 struct_init_one_comma, 2704 /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. 2705 /// main_token is the lbrace. 2706 /// No trailing comma before the rbrace. 2707 struct_init_dot_two, 2708 /// Same as `struct_init_dot_two` except there is known to be a trailing comma 2709 /// before the final rbrace. 2710 struct_init_dot_two_comma, 2711 /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. 2712 /// main_token is the lbrace. 2713 struct_init_dot, 2714 /// Same as `struct_init_dot` except there is known to be a trailing comma 2715 /// before the final rbrace. 2716 struct_init_dot_comma, 2717 /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. 2718 /// lhs can be omitted which means `.{.a = b, .c = d}`. 2719 /// main_token is the lbrace. 
2720 struct_init, 2721 /// Same as `struct_init` except there is known to be a trailing comma 2722 /// before the final rbrace. 2723 struct_init_comma, 2724 /// `lhs(rhs)`. rhs can be omitted. 2725 /// main_token is the lparen. 2726 call_one, 2727 /// `lhs(rhs,)`. rhs can be omitted. 2728 /// main_token is the lparen. 2729 call_one_comma, 2730 /// `async lhs(rhs)`. rhs can be omitted. 2731 async_call_one, 2732 /// `async lhs(rhs,)`. 2733 async_call_one_comma, 2734 /// `lhs(a, b, c)`. `SubRange[rhs]`. 2735 /// main_token is the `(`. 2736 call, 2737 /// `lhs(a, b, c,)`. `SubRange[rhs]`. 2738 /// main_token is the `(`. 2739 call_comma, 2740 /// `async lhs(a, b, c)`. `SubRange[rhs]`. 2741 /// main_token is the `(`. 2742 async_call, 2743 /// `async lhs(a, b, c,)`. `SubRange[rhs]`. 2744 /// main_token is the `(`. 2745 async_call_comma, 2746 /// `switch(lhs) {}`. `SubRange[rhs]`. 2747 @"switch", 2748 /// Same as switch except there is known to be a trailing comma 2749 /// before the final rbrace 2750 switch_comma, 2751 /// `lhs => rhs`. If lhs is omitted it means `else`. 2752 /// main_token is the `=>` 2753 switch_case_one, 2754 /// `a, b, c => rhs`. `SubRange[lhs]`. 2755 /// main_token is the `=>` 2756 switch_case, 2757 /// `lhs...rhs`. 2758 switch_range, 2759 /// `while (lhs) rhs`. 2760 /// `while (lhs) |x| rhs`. 2761 while_simple, 2762 /// `while (lhs) : (a) b`. `WhileCont[rhs]`. 2763 /// `while (lhs) : (a) b`. `WhileCont[rhs]`. 2764 while_cont, 2765 /// `while (lhs) : (a) b else c`. `While[rhs]`. 2766 /// `while (lhs) |x| : (a) b else c`. `While[rhs]`. 2767 /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`. 2768 @"while", 2769 /// `for (lhs) rhs`. 2770 for_simple, 2771 /// `for (lhs) a else b`. `if_list[rhs]`. 2772 @"for", 2773 /// `if (lhs) rhs`. 2774 /// `if (lhs) |a| rhs`. 2775 if_simple, 2776 /// `if (lhs) a else b`. `If[rhs]`. 2777 /// `if (lhs) |x| a else b`. `If[rhs]`. 2778 /// `if (lhs) |x| a else |y| b`. `If[rhs]`. 2779 @"if", 2780 /// `suspend lhs`. 
lhs can be omitted. rhs is unused. 2781 @"suspend", 2782 /// `resume lhs`. rhs is unused. 2783 @"resume", 2784 /// `continue`. lhs is token index of label if any. rhs is unused. 2785 @"continue", 2786 /// `break :lhs rhs` 2787 /// both lhs and rhs may be omitted. 2788 @"break", 2789 /// `return lhs`. lhs can be omitted. rhs is unused. 2790 @"return", 2791 /// `fn(a: lhs) rhs`. lhs can be omitted. 2792 /// anytype and ... parameters are omitted from the AST tree. 2793 /// main_token is the `fn` keyword. 2794 /// extern function declarations use this tag. 2795 fn_proto_simple, 2796 /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. 2797 /// anytype and ... parameters are omitted from the AST tree. 2798 /// main_token is the `fn` keyword. 2799 /// extern function declarations use this tag. 2800 fn_proto_multi, 2801 /// `fn(a: b) rhs addrspace(e) linksection(f) callconv(g)`. `FnProtoOne[lhs]`. 2802 /// zero or one parameters. 2803 /// anytype and ... parameters are omitted from the AST tree. 2804 /// main_token is the `fn` keyword. 2805 /// extern function declarations use this tag. 2806 fn_proto_one, 2807 /// `fn(a: b, c: d) rhs addrspace(e) linksection(f) callconv(g)`. `FnProto[lhs]`. 2808 /// anytype and ... parameters are omitted from the AST tree. 2809 /// main_token is the `fn` keyword. 2810 /// extern function declarations use this tag. 2811 fn_proto, 2812 /// lhs is the fn_proto. 2813 /// rhs is the function body block. 2814 /// Note that extern function declarations use the fn_proto tags rather 2815 /// than this one. 2816 fn_decl, 2817 /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. 2818 anyframe_type, 2819 /// Both lhs and rhs unused. 2820 anyframe_literal, 2821 /// Both lhs and rhs unused. 2822 char_literal, 2823 /// Both lhs and rhs unused. 2824 integer_literal, 2825 /// Both lhs and rhs unused. 2826 float_literal, 2827 /// Both lhs and rhs unused. 2828 unreachable_literal, 2829 /// Both lhs and rhs unused. 
2830 /// Most identifiers will not have explicit AST nodes, however for expressions 2831 /// which could be one of many different kinds of AST nodes, there will be an 2832 /// identifier AST node for it. 2833 identifier, 2834 /// lhs is the dot token index, rhs unused, main_token is the identifier. 2835 enum_literal, 2836 /// main_token is the string literal token 2837 /// Both lhs and rhs unused. 2838 string_literal, 2839 /// main_token is the first token index (redundant with lhs) 2840 /// lhs is the first token index; rhs is the last token index. 2841 /// Could be a series of multiline_string_literal_line tokens, or a single 2842 /// string_literal token. 2843 multiline_string_literal, 2844 /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. 2845 grouped_expression, 2846 /// `@a(lhs, rhs)`. lhs and rhs may be omitted. 2847 /// main_token is the builtin token. 2848 builtin_call_two, 2849 /// Same as builtin_call_two but there is known to be a trailing comma before the rparen. 2850 builtin_call_two_comma, 2851 /// `@a(b, c)`. `sub_list[lhs..rhs]`. 2852 /// main_token is the builtin token. 2853 builtin_call, 2854 /// Same as builtin_call but there is known to be a trailing comma before the rparen. 2855 builtin_call_comma, 2856 /// `error{a, b}`. 2857 /// rhs is the rbrace, lhs is unused. 2858 error_set_decl, 2859 /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. 2860 /// main_token is `struct`, `union`, `opaque`, `enum` keyword. 2861 container_decl, 2862 /// Same as ContainerDecl but there is known to be a trailing comma 2863 /// or semicolon before the rbrace. 2864 container_decl_trailing, 2865 /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`. 2866 /// lhs or rhs can be omitted. 2867 /// main_token is `struct`, `union`, `opaque`, `enum` keyword. 
2868 container_decl_two, 2869 /// Same as ContainerDeclTwo except there is known to be a trailing comma 2870 /// or semicolon before the rbrace. 2871 container_decl_two_trailing, 2872 /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`. 2873 container_decl_arg, 2874 /// Same as container_decl_arg but there is known to be a trailing 2875 /// comma or semicolon before the rbrace. 2876 container_decl_arg_trailing, 2877 /// `union(enum) {}`. `sub_list[lhs..rhs]`. 2878 /// Note that tagged unions with explicitly provided enums are represented 2879 /// by `container_decl_arg`. 2880 tagged_union, 2881 /// Same as tagged_union but there is known to be a trailing comma 2882 /// or semicolon before the rbrace. 2883 tagged_union_trailing, 2884 /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted. 2885 /// Note that tagged unions with explicitly provided enums are represented 2886 /// by `container_decl_arg`. 2887 tagged_union_two, 2888 /// Same as tagged_union_two but there is known to be a trailing comma 2889 /// or semicolon before the rbrace. 2890 tagged_union_two_trailing, 2891 /// `union(enum(lhs)) {}`. `SubRange[rhs]`. 2892 tagged_union_enum_tag, 2893 /// Same as tagged_union_enum_tag but there is known to be a trailing comma 2894 /// or semicolon before the rbrace. 2895 tagged_union_enum_tag_trailing, 2896 /// `a: lhs = rhs,`. lhs and rhs can be omitted. 2897 /// main_token is the field name identifier. 2898 /// lastToken() does not include the possible trailing comma. 2899 container_field_init, 2900 /// `a: lhs align(rhs),`. rhs can be omitted. 2901 /// main_token is the field name identifier. 2902 /// lastToken() does not include the possible trailing comma. 2903 container_field_align, 2904 /// `a: lhs align(c) = d,`. `container_field_list[rhs]`. 2905 /// main_token is the field name identifier. 2906 /// lastToken() does not include the possible trailing comma. 2907 container_field, 2908 /// `anytype`. both lhs and rhs unused. 2909 /// Used by `ContainerField`. 
2910 @"anytype", 2911 /// `comptime lhs`. rhs unused. 2912 @"comptime", 2913 /// `nosuspend lhs`. rhs unused. 2914 @"nosuspend", 2915 /// `{lhs rhs}`. rhs or lhs can be omitted. 2916 /// main_token points at the lbrace. 2917 block_two, 2918 /// Same as block_two but there is known to be a semicolon before the rbrace. 2919 block_two_semicolon, 2920 /// `{}`. `sub_list[lhs..rhs]`. 2921 /// main_token points at the lbrace. 2922 block, 2923 /// Same as block but there is known to be a semicolon before the rbrace. 2924 block_semicolon, 2925 /// `asm(lhs)`. rhs is the token index of the rparen. 2926 asm_simple, 2927 /// `asm(lhs, a)`. `Asm[rhs]`. 2928 @"asm", 2929 /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen. 2930 /// `[a] "b" (-> lhs)`. rhs is token index of the rparen. 2931 /// main_token is `a`. 2932 asm_output, 2933 /// `[a] "b" (lhs)`. rhs is token index of the rparen. 2934 /// main_token is `a`. 2935 asm_input, 2936 /// `error.a`. lhs is token index of `.`. rhs is token index of `a`. 2937 error_value, 2938 /// `lhs!rhs`. main_token is the `!`. 2939 error_union, 2940 2941 pub fn isContainerField(tag: Tag) bool { 2942 return switch (tag) { 2943 .container_field_init, 2944 .container_field_align, 2945 .container_field, 2946 => true, 2947 2948 else => false, 2949 }; 2950 } 2951 }; 2952 2953 pub const Data = struct { 2954 lhs: Index, 2955 rhs: Index, 2956 }; 2957 2958 pub const LocalVarDecl = struct { 2959 type_node: Index, 2960 align_node: Index, 2961 }; 2962 2963 pub const ArrayTypeSentinel = struct { 2964 elem_type: Index, 2965 sentinel: Index, 2966 }; 2967 2968 pub const PtrType = struct { 2969 sentinel: Index, 2970 align_node: Index, 2971 addrspace_node: Index, 2972 }; 2973 2974 pub const PtrTypeBitRange = struct { 2975 sentinel: Index, 2976 align_node: Index, 2977 addrspace_node: Index, 2978 bit_range_start: Index, 2979 bit_range_end: Index, 2980 }; 2981 2982 pub const SubRange = struct { 2983 /// Index into sub_list. 
2984 start: Index, 2985 /// Index into sub_list. 2986 end: Index, 2987 }; 2988 2989 pub const If = struct { 2990 then_expr: Index, 2991 else_expr: Index, 2992 }; 2993 2994 pub const ContainerField = struct { 2995 value_expr: Index, 2996 align_expr: Index, 2997 }; 2998 2999 pub const GlobalVarDecl = struct { 3000 /// Populated if there is an explicit type ascription. 3001 type_node: Index, 3002 /// Populated if align(A) is present. 3003 align_node: Index, 3004 /// Populated if addrspace(A) is present. 3005 addrspace_node: Index, 3006 /// Populated if linksection(A) is present. 3007 section_node: Index, 3008 }; 3009 3010 pub const Slice = struct { 3011 start: Index, 3012 end: Index, 3013 }; 3014 3015 pub const SliceSentinel = struct { 3016 start: Index, 3017 /// May be 0 if the slice is "open" 3018 end: Index, 3019 sentinel: Index, 3020 }; 3021 3022 pub const While = struct { 3023 cont_expr: Index, 3024 then_expr: Index, 3025 else_expr: Index, 3026 }; 3027 3028 pub const WhileCont = struct { 3029 cont_expr: Index, 3030 then_expr: Index, 3031 }; 3032 3033 pub const FnProtoOne = struct { 3034 /// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters. 3035 param: Index, 3036 /// Populated if align(A) is present. 3037 align_expr: Index, 3038 /// Populated if addrspace(A) is present. 3039 addrspace_expr: Index, 3040 /// Populated if linksection(A) is present. 3041 section_expr: Index, 3042 /// Populated if callconv(A) is present. 3043 callconv_expr: Index, 3044 }; 3045 3046 pub const FnProto = struct { 3047 params_start: Index, 3048 params_end: Index, 3049 /// Populated if align(A) is present. 3050 align_expr: Index, 3051 /// Populated if addrspace(A) is present. 3052 addrspace_expr: Index, 3053 /// Populated if linksection(A) is present. 3054 section_expr: Index, 3055 /// Populated if callconv(A) is present. 
3056 callconv_expr: Index, 3057 }; 3058 3059 pub const Asm = struct { 3060 items_start: Index, 3061 items_end: Index, 3062 /// Needed to make lastToken() work. 3063 rparen: TokenIndex, 3064 }; 3065}; 3066