diff --git a/src/analysis.zig b/src/analysis.zig
index 17df144..d9c8118 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -4,7 +4,7 @@ const ast = std.zig.ast;
 const types = @import("types.zig");
 const offsets = @import("offsets.zig");
 const log = std.log.scoped(.analysis);
-const lastToken = offsets.lastToken;
+usingnamespace @import("ast.zig");
 
 /// Get a declaration's doc comment token index
 pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex {
@@ -2819,7 +2819,7 @@ fn makeScopeInternal(
         .if_simple,
         => {
             const if_node: ast.full.If = if (node_tag == .@"if")
-                tree.ifFull(node_idx)
+                ifFull(tree, node_idx)
             else
                 tree.ifSimple(node_idx);
 
diff --git a/src/ast.zig b/src/ast.zig
new file mode 100644
index 0000000..80f387f
--- /dev/null
+++ b/src/ast.zig
@@ -0,0 +1,560 @@
+//! Collection of functions from std.zig.ast that we need
+//! and may hit undefined in the standard library implementation
+//! when there are parser errors.
+
+const std = @import("std");
+const ast = std.zig.ast;
+const Tree = ast.Tree;
+const Node = ast.Node;
+const full = ast.full;
+const assert = std.debug.assert;
+
+fn fullIf(tree: Tree, info: full.If.Ast) full.If {
+    const token_tags = tree.tokens.items(.tag);
+    var result: full.If = .{
+        .ast = info,
+        .payload_token = null,
+        .error_token = null,
+        .else_token = undefined,
+    };
+    // if (cond_expr) |x|
+    //                ^ ^
+    const payload_pipe = lastToken(tree, info.cond_expr) + 2;
+    if (token_tags[payload_pipe] == .pipe) {
+        result.payload_token = payload_pipe + 1;
+    }
+    if (info.else_expr != 0) {
+        // then_expr else |x|
+        //           ^    ^
+        result.else_token = lastToken(tree, info.then_expr) + 1;
+        if (token_tags[result.else_token + 1] == .pipe) {
+            result.error_token = result.else_token + 2;
+        }
+    }
+    return result;
+}
+
+pub fn ifFull(tree: Tree, node: Node.Index) full.If {
+    assert(tree.nodes.items(.tag)[node] == .@"if");
+    const data = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(data.rhs, Node.If);
+    return fullIf(tree, .{
+        .cond_expr = data.lhs,
+        .then_expr = extra.then_expr,
+        .else_expr = extra.else_expr,
+        .if_token = tree.nodes.items(.main_token)[node],
+    });
+}
+
+pub fn lastToken(tree: ast.Tree, node: ast.Node.Index) ast.TokenIndex {
+    const TokenIndex = ast.TokenIndex;
+    const tags = tree.nodes.items(.tag);
+    const datas = tree.nodes.items(.data);
+    const main_tokens = tree.nodes.items(.main_token);
+    const token_starts = tree.tokens.items(.start);
+    const token_tags = tree.tokens.items(.tag);
+    var n = node;
+    var end_offset: TokenIndex = 0;
+    while (true) switch (tags[n]) {
+        .root => return @intCast(TokenIndex, tree.tokens.len - 1),
+
+        .@"usingnamespace",
+        .bool_not,
+        .negation,
+        .bit_not,
+        .negation_wrap,
+        .address_of,
+        .@"try",
+        .@"await",
+        .optional_type,
+        .@"resume",
+        .@"nosuspend",
+        .@"comptime",
+        => n = datas[n].lhs,
+
+        .test_decl,
+        .@"errdefer",
+        .@"defer",
+        .@"catch",
+        .equal_equal,
+        .bang_equal,
+        .less_than,
+        .greater_than,
+        .less_or_equal,
+        .greater_or_equal,
+        .assign_mul,
+        .assign_div,
+        .assign_mod,
+        .assign_add,
+        .assign_sub,
+        .assign_bit_shift_left,
+        .assign_bit_shift_right,
+        .assign_bit_and,
+        .assign_bit_xor,
+        .assign_bit_or,
+        .assign_mul_wrap,
+        .assign_add_wrap,
+        .assign_sub_wrap,
+        .assign,
+        .merge_error_sets,
+        .mul,
+        .div,
+        .mod,
+        .array_mult,
+        .mul_wrap,
+        .add,
+        .sub,
+        .array_cat,
+        .add_wrap,
+        .sub_wrap,
+        .bit_shift_left,
+        .bit_shift_right,
+        .bit_and,
+        .bit_xor,
+        .bit_or,
+        .@"orelse",
+        .bool_and,
+        .bool_or,
+        .anyframe_type,
+        .error_union,
+
.if_simple, + .while_simple, + .for_simple, + .fn_proto_simple, + .fn_proto_multi, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, + .array_type, + .switch_case_one, + .switch_case, + .switch_range, + => n = datas[n].rhs, + + .field_access, + .unwrap_optional, + .grouped_expression, + .multiline_string_literal, + .error_set_decl, + .asm_simple, + .asm_output, + .asm_input, + .error_value, + => return datas[n].rhs + end_offset, + + .@"anytype", + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .identifier, + .deref, + .enum_literal, + .string_literal, + => return main_tokens[n] + end_offset, + + .@"return" => if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + }, + + .call, .async_call => { + end_offset += 1; // for the rparen + const params = tree.extraData(datas[n].rhs, Node.SubRange); + if (params.end - params.start == 0) { + return main_tokens[n] + end_offset; + } + n = tree.extra_data[params.end - 1]; // last parameter + }, + .tagged_union_enum_tag => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + if (members.end - members.start == 0) { + end_offset += 4; // for the rparen + rparen + lbrace + rbrace + n = datas[n].lhs; + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[members.end - 1]; // last parameter + } + }, + .call_comma, + .async_call_comma, + .tagged_union_enum_tag_trailing, + => { + end_offset += 2; // for the comma/semicolon + rparen/rbrace + const params = tree.extraData(datas[n].rhs, Node.SubRange); + std.debug.assert(params.end > params.start); + n = tree.extra_data[params.end - 1]; // last parameter + }, + .@"switch" => { + const cases = tree.extraData(datas[n].rhs, Node.SubRange); + if (cases.end - cases.start == 0) { + end_offset += 3; // rparen, lbrace, rbrace + n = datas[n].lhs; // condition expression + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[cases.end - 1]; // last case + } + }, + .container_decl_arg => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + if (members.end - members.start == 0) { + end_offset += 3; // for the rparen + lbrace + rbrace + n = datas[n].lhs; + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[members.end - 1]; // last parameter + } + }, + .@"asm" => { + const extra = tree.extraData(datas[n].rhs, Node.Asm); + return extra.rparen + end_offset; + }, + .array_init, + .struct_init, + => { + const elements = tree.extraData(datas[n].rhs, Node.SubRange); + std.debug.assert(elements.end - elements.start > 0); + end_offset += 1; // for the rbrace + n = tree.extra_data[elements.end - 1]; // last element + }, + .array_init_comma, + .struct_init_comma, + .container_decl_arg_trailing, + .switch_comma, + => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + std.debug.assert(members.end - members.start > 0); + end_offset += 2; // for the comma + rbrace + n = tree.extra_data[members.end - 1]; // last parameter + }, + .array_init_dot, + .struct_init_dot, + .block, + .container_decl, + .tagged_union, + .builtin_call, + => { + std.debug.assert(datas[n].rhs - datas[n].lhs > 0); + end_offset += 1; // for the rbrace + n = tree.extra_data[datas[n].rhs - 1]; // last statement + }, + .array_init_dot_comma, + .struct_init_dot_comma, + .block_semicolon, + .container_decl_trailing, + .tagged_union_trailing, + .builtin_call_comma, + => { + 
std.debug.assert(datas[n].rhs - datas[n].lhs > 0); + end_offset += 2; // for the comma/semicolon + rbrace/rparen + n = tree.extra_data[datas[n].rhs - 1]; // last member + }, + .call_one, + .async_call_one, + .array_access, + => { + end_offset += 1; // for the rparen/rbracket + if (datas[n].rhs == 0) { + return main_tokens[n] + end_offset; + } + n = datas[n].rhs; + }, + .array_init_dot_two, + .block_two, + .builtin_call_two, + .struct_init_dot_two, + .container_decl_two, + .tagged_union_two, + => { + if (datas[n].rhs != 0) { + end_offset += 1; // for the rparen/rbrace + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + end_offset += 1; // for the rparen/rbrace + n = datas[n].lhs; + } else { + switch (tags[n]) { + .array_init_dot_two, + .block_two, + .struct_init_dot_two, + => end_offset += 1, // rbrace + .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace + .container_decl_two => { + var i: u32 = 2; // lbrace + rbrace + while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; + end_offset += i; + }, + .tagged_union_two => { + var i: u32 = 5; // (enum) {} + while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; + end_offset += i; + }, + else => unreachable, + } + return main_tokens[n] + end_offset; + } + }, + .array_init_dot_two_comma, + .builtin_call_two_comma, + .block_two_semicolon, + .struct_init_dot_two_comma, + .container_decl_two_trailing, + .tagged_union_two_trailing, + => { + end_offset += 2; // for the comma/semicolon + rbrace/rparen + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; // returns { } + } + }, + .simple_var_decl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + }, + .aligned_var_decl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + end_offset += 1; // for the rparen + n = datas[n].lhs; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + }, + .global_var_decl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl); + if (extra.section_node != 0) { + end_offset += 1; // for the rparen + n = extra.section_node; + } else if (extra.align_node != 0) { + end_offset += 1; // for the rparen + n = extra.align_node; + } else if (extra.type_node != 0) { + n = extra.type_node; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + } + }, + .local_var_decl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl); + if (extra.align_node != 0) { + end_offset += 1; // for the rparen + n = extra.align_node; + } else if (extra.type_node != 0) { + n = extra.type_node; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + } + }, + .container_field_init => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .container_field_align => { + if (datas[n].rhs != 0) { + end_offset += 1; // for the rparen + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .container_field => { + const 
extra = tree.extraData(datas[n].rhs, Node.ContainerField); + if (extra.value_expr != 0) { + n = extra.value_expr; + } else if (extra.align_expr != 0) { + end_offset += 1; // for the rparen + n = extra.align_expr; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + + .array_init_one, + .struct_init_one, + => { + end_offset += 1; // rbrace + if (datas[n].rhs == 0) { + return main_tokens[n] + end_offset; + } else { + n = datas[n].rhs; + } + }, + .slice_open, + .call_one_comma, + .async_call_one_comma, + .array_init_one_comma, + .struct_init_one_comma, + => { + end_offset += 2; // ellipsis2 + rbracket, or comma + rparen + n = datas[n].rhs; + std.debug.assert(n != 0); + }, + .slice => { + const extra = tree.extraData(datas[n].rhs, Node.Slice); + std.debug.assert(extra.end != 0); // should have used slice_open + end_offset += 1; // rbracket + n = extra.end; + }, + .slice_sentinel => { + const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel); + std.debug.assert(extra.sentinel != 0); // should have used slice + end_offset += 1; // rbracket + n = extra.sentinel; + }, + + .@"continue" => { + if (datas[n].lhs != 0) { + return datas[n].lhs + end_offset; + } else { + return main_tokens[n] + end_offset; + } + }, + .@"break" => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + return datas[n].lhs + end_offset; + } else { + return main_tokens[n] + end_offset; + } + }, + .fn_decl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + n = datas[n].lhs; + } + }, + .fn_proto_one => { + const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); + // linksection, callconv, align can appear in any order, so we + // find the last one here. + var max_node: Node.Index = datas[n].rhs; + var max_start = token_starts[main_tokens[max_node]]; + var max_offset: TokenIndex = 0; + if (extra.align_expr != 0) { + const start = token_starts[main_tokens[extra.align_expr]]; + if (start > max_start) { + max_node = extra.align_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.section_expr != 0) { + const start = token_starts[main_tokens[extra.section_expr]]; + if (start > max_start) { + max_node = extra.section_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.callconv_expr != 0) { + const start = token_starts[main_tokens[extra.callconv_expr]]; + if (start > max_start) { + max_node = extra.callconv_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + n = max_node; + end_offset += max_offset; + }, + .fn_proto => { + const extra = tree.extraData(datas[n].lhs, Node.FnProto); + // linksection, callconv, align can appear in any order, so we + // find the last one here. 
+ var max_node: Node.Index = datas[n].rhs; + var max_start = token_starts[main_tokens[max_node]]; + var max_offset: TokenIndex = 0; + if (extra.align_expr != 0) { + const start = token_starts[main_tokens[extra.align_expr]]; + if (start > max_start) { + max_node = extra.align_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.section_expr != 0) { + const start = token_starts[main_tokens[extra.section_expr]]; + if (start > max_start) { + max_node = extra.section_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.callconv_expr != 0) { + const start = token_starts[main_tokens[extra.callconv_expr]]; + if (start > max_start) { + max_node = extra.callconv_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + n = max_node; + end_offset += max_offset; + }, + .while_cont => { + const extra = tree.extraData(datas[n].rhs, Node.WhileCont); + std.debug.assert(extra.then_expr != 0); + n = extra.then_expr; + }, + .@"while" => { + const extra = tree.extraData(datas[n].rhs, Node.While); + std.debug.assert(extra.else_expr != 0); + n = extra.else_expr; + }, + .@"if", .@"for" => { + const extra = tree.extraData(datas[n].rhs, Node.If); + std.debug.assert(extra.else_expr != 0); + n = extra.else_expr; + }, + .@"suspend" => { + if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .array_type_sentinel => { + const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel); + n = extra.elem_type; + }, + }; +} diff --git a/src/offsets.zig b/src/offsets.zig index 54d9488..12c6471 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -218,520 +218,3 @@ pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range { }; } } - -// Updated version from std that allows for failures -// by removing the unreachables and returning up to that point -// so that we can always provide information while the user is still typing -pub fn lastToken(tree: ast.Tree, node: ast.Node.Index) ast.TokenIndex { - const Node = ast.Node; - const TokenIndex = ast.TokenIndex; - const tags = tree.nodes.items(.tag); - const datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const token_starts = tree.tokens.items(.start); - const token_tags = tree.tokens.items(.tag); - var n = node; - var end_offset: TokenIndex = 0; - while (true) switch (tags[n]) { - .root => return @intCast(TokenIndex, tree.tokens.len - 1), - - .@"usingnamespace", - .bool_not, - .negation, - .bit_not, - .negation_wrap, - .address_of, - .@"try", - .@"await", - .optional_type, - .@"resume", - .@"nosuspend", - .@"comptime", - => n = datas[n].lhs, - - .test_decl, - .@"errdefer", - .@"defer", - .@"catch", - .equal_equal, - .bang_equal, - .less_than, - .greater_than, - .less_or_equal, - .greater_or_equal, - .assign_mul, - .assign_div, - .assign_mod, - .assign_add, - .assign_sub, - .assign_bit_shift_left, - .assign_bit_shift_right, - .assign_bit_and, - .assign_bit_xor, - .assign_bit_or, - .assign_mul_wrap, - .assign_add_wrap, - .assign_sub_wrap, - .assign, - .merge_error_sets, - .mul, - .div, - .mod, - .array_mult, - .mul_wrap, - .add, - .sub, - .array_cat, - .add_wrap, - .sub_wrap, - .bit_shift_left, - .bit_shift_right, - .bit_and, - .bit_xor, - .bit_or, - .@"orelse", - .bool_and, - .bool_or, - .anyframe_type, - .error_union, - .if_simple, - .while_simple, - .for_simple, - .fn_proto_simple, - .fn_proto_multi, - .ptr_type_aligned, - .ptr_type_sentinel, - .ptr_type, - .ptr_type_bit_range, - 
.array_type, - .switch_case_one, - .switch_case, - .switch_range, - => n = datas[n].rhs, - - .field_access, - .unwrap_optional, - .grouped_expression, - .multiline_string_literal, - .error_set_decl, - .asm_simple, - .asm_output, - .asm_input, - .error_value, - => return datas[n].rhs + end_offset, - - .@"anytype", - .anyframe_literal, - .char_literal, - .integer_literal, - .float_literal, - .false_literal, - .true_literal, - .null_literal, - .undefined_literal, - .unreachable_literal, - .identifier, - .deref, - .enum_literal, - .string_literal, - => return main_tokens[n] + end_offset, - - .@"return" => if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; - }, - - .call, .async_call => { - end_offset += 1; // for the rparen - const params = tree.extraData(datas[n].rhs, Node.SubRange); - if (params.end - params.start == 0) { - return main_tokens[n] + end_offset; - } - n = tree.extra_data[params.end - 1]; // last parameter - }, - .tagged_union_enum_tag => { - const members = tree.extraData(datas[n].rhs, Node.SubRange); - if (members.end - members.start == 0) { - end_offset += 4; // for the rparen + rparen + lbrace + rbrace - n = datas[n].lhs; - } else { - end_offset += 1; // for the rbrace - n = tree.extra_data[members.end - 1]; // last parameter - } - }, - .call_comma, - .async_call_comma, - .tagged_union_enum_tag_trailing, - => { - end_offset += 2; // for the comma/semicolon + rparen/rbrace - const params = tree.extraData(datas[n].rhs, Node.SubRange); - std.debug.assert(params.end > params.start); - n = tree.extra_data[params.end - 1]; // last parameter - }, - .@"switch" => { - const cases = tree.extraData(datas[n].rhs, Node.SubRange); - if (cases.end - cases.start == 0) { - end_offset += 3; // rparen, lbrace, rbrace - n = datas[n].lhs; // condition expression - } else { - end_offset += 1; // for the rbrace - n = tree.extra_data[cases.end - 1]; // last case - } - }, - .container_decl_arg => { - const members = tree.extraData(datas[n].rhs, Node.SubRange); - if (members.end - members.start == 0) { - end_offset += 3; // for the rparen + lbrace + rbrace - n = datas[n].lhs; - } else { - end_offset += 1; // for the rbrace - n = tree.extra_data[members.end - 1]; // last parameter - } - }, - .@"asm" => { - const extra = tree.extraData(datas[n].rhs, Node.Asm); - return extra.rparen + end_offset; - }, - .array_init, - .struct_init, - => { - const elements = tree.extraData(datas[n].rhs, Node.SubRange); - std.debug.assert(elements.end - elements.start > 0); - end_offset += 1; // for the rbrace - n = tree.extra_data[elements.end - 1]; // last element - }, - .array_init_comma, - .struct_init_comma, - .container_decl_arg_trailing, - .switch_comma, - => { - const members = tree.extraData(datas[n].rhs, Node.SubRange); - std.debug.assert(members.end - members.start > 0); - end_offset += 2; // for the comma + rbrace - n = tree.extra_data[members.end - 1]; // last parameter - }, - .array_init_dot, - .struct_init_dot, - .block, - .container_decl, - .tagged_union, - .builtin_call, - => { - std.debug.assert(datas[n].rhs - datas[n].lhs > 0); - end_offset += 1; // for the rbrace - n = tree.extra_data[datas[n].rhs - 1]; // last statement - }, - .array_init_dot_comma, - .struct_init_dot_comma, - .block_semicolon, - .container_decl_trailing, - .tagged_union_trailing, - .builtin_call_comma, - => { - std.debug.assert(datas[n].rhs - datas[n].lhs > 0); - end_offset += 2; // for the comma/semicolon + rbrace/rparen - n = tree.extra_data[datas[n].rhs - 1]; // last member - }, - 
.call_one, - .async_call_one, - .array_access, - => { - end_offset += 1; // for the rparen/rbracket - if (datas[n].rhs == 0) { - return main_tokens[n] + end_offset; - } - n = datas[n].rhs; - }, - .array_init_dot_two, - .block_two, - .builtin_call_two, - .struct_init_dot_two, - .container_decl_two, - .tagged_union_two, - => { - if (datas[n].rhs != 0) { - end_offset += 1; // for the rparen/rbrace - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - end_offset += 1; // for the rparen/rbrace - n = datas[n].lhs; - } else { - switch (tags[n]) { - .array_init_dot_two, - .block_two, - .struct_init_dot_two, - => end_offset += 1, // rbrace - .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace - .container_decl_two => { - var i: u32 = 2; // lbrace + rbrace - while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; - end_offset += i; - }, - .tagged_union_two => { - var i: u32 = 5; // (enum) {} - while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; - end_offset += i; - }, - else => unreachable, - } - return main_tokens[n] + end_offset; - } - }, - .array_init_dot_two_comma, - .builtin_call_two_comma, - .block_two_semicolon, - .struct_init_dot_two_comma, - .container_decl_two_trailing, - .tagged_union_two_trailing, - => { - end_offset += 2; // for the comma/semicolon + rbrace/rparen - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; // returns { } - } - }, - .simple_var_decl => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - end_offset += 1; // from mut token to name - return main_tokens[n] + end_offset; - } - }, - .aligned_var_decl => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - end_offset += 1; // for the rparen - n = datas[n].lhs; - } else { - end_offset += 1; // from mut token to name - return main_tokens[n] + end_offset; - } - }, - .global_var_decl => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else { - const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl); - if (extra.section_node != 0) { - end_offset += 1; // for the rparen - n = extra.section_node; - } else if (extra.align_node != 0) { - end_offset += 1; // for the rparen - n = extra.align_node; - } else if (extra.type_node != 0) { - n = extra.type_node; - } else { - end_offset += 1; // from mut token to name - return main_tokens[n] + end_offset; - } - } - }, - .local_var_decl => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else { - const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl); - if (extra.align_node != 0) { - end_offset += 1; // for the rparen - n = extra.align_node; - } else if (extra.type_node != 0) { - n = extra.type_node; - } else { - end_offset += 1; // from mut token to name - return main_tokens[n] + end_offset; - } - } - }, - .container_field_init => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; - } - }, - .container_field_align => { - if (datas[n].rhs != 0) { - end_offset += 1; // for the rparen - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; - } - }, - .container_field => { - const extra = tree.extraData(datas[n].rhs, Node.ContainerField); - if (extra.value_expr != 0) { - n = extra.value_expr; - } else if (extra.align_expr != 0) { - end_offset += 1; // for 
the rparen - n = extra.align_expr; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; - } - }, - - .array_init_one, - .struct_init_one, - => { - end_offset += 1; // rbrace - if (datas[n].rhs == 0) { - return main_tokens[n] + end_offset; - } else { - n = datas[n].rhs; - } - }, - .slice_open, - .call_one_comma, - .async_call_one_comma, - .array_init_one_comma, - .struct_init_one_comma, - => { - end_offset += 2; // ellipsis2 + rbracket, or comma + rparen - n = datas[n].rhs; - std.debug.assert(n != 0); - }, - .slice => { - const extra = tree.extraData(datas[n].rhs, Node.Slice); - std.debug.assert(extra.end != 0); // should have used slice_open - end_offset += 1; // rbracket - n = extra.end; - }, - .slice_sentinel => { - const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel); - std.debug.assert(extra.sentinel != 0); // should have used slice - end_offset += 1; // rbracket - n = extra.sentinel; - }, - - .@"continue" => { - if (datas[n].lhs != 0) { - return datas[n].lhs + end_offset; - } else { - return main_tokens[n] + end_offset; - } - }, - .@"break" => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - return datas[n].lhs + end_offset; - } else { - return main_tokens[n] + end_offset; - } - }, - .fn_decl => { - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else { - n = datas[n].lhs; - } - }, - .fn_proto_one => { - const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); - // linksection, callconv, align can appear in any order, so we - // find the last one here. - var max_node: Node.Index = datas[n].rhs; - var max_start = token_starts[main_tokens[max_node]]; - var max_offset: TokenIndex = 0; - if (extra.align_expr != 0) { - const start = token_starts[main_tokens[extra.align_expr]]; - if (start > max_start) { - max_node = extra.align_expr; - max_start = start; - max_offset = 1; // for the rparen - } - } - if (extra.section_expr != 0) { - const start = token_starts[main_tokens[extra.section_expr]]; - if (start > max_start) { - max_node = extra.section_expr; - max_start = start; - max_offset = 1; // for the rparen - } - } - if (extra.callconv_expr != 0) { - const start = token_starts[main_tokens[extra.callconv_expr]]; - if (start > max_start) { - max_node = extra.callconv_expr; - max_start = start; - max_offset = 1; // for the rparen - } - } - n = max_node; - end_offset += max_offset; - }, - .fn_proto => { - const extra = tree.extraData(datas[n].lhs, Node.FnProto); - // linksection, callconv, align can appear in any order, so we - // find the last one here. 
-            var max_node: Node.Index = datas[n].rhs;
-            var max_start = token_starts[main_tokens[max_node]];
-            var max_offset: TokenIndex = 0;
-            if (extra.align_expr != 0) {
-                const start = token_starts[main_tokens[extra.align_expr]];
-                if (start > max_start) {
-                    max_node = extra.align_expr;
-                    max_start = start;
-                    max_offset = 1; // for the rparen
-                }
-            }
-            if (extra.section_expr != 0) {
-                const start = token_starts[main_tokens[extra.section_expr]];
-                if (start > max_start) {
-                    max_node = extra.section_expr;
-                    max_start = start;
-                    max_offset = 1; // for the rparen
-                }
-            }
-            if (extra.callconv_expr != 0) {
-                const start = token_starts[main_tokens[extra.callconv_expr]];
-                if (start > max_start) {
-                    max_node = extra.callconv_expr;
-                    max_start = start;
-                    max_offset = 1; // for the rparen
-                }
-            }
-            n = max_node;
-            end_offset += max_offset;
-        },
-        .while_cont => {
-            const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
-            std.debug.assert(extra.then_expr != 0);
-            n = extra.then_expr;
-        },
-        .@"while" => {
-            const extra = tree.extraData(datas[n].rhs, Node.While);
-            std.debug.assert(extra.else_expr != 0);
-            n = extra.else_expr;
-        },
-        .@"if", .@"for" => {
-            const extra = tree.extraData(datas[n].rhs, Node.If);
-            std.debug.assert(extra.else_expr != 0);
-            n = extra.else_expr;
-        },
-        .@"suspend" => {
-            if (datas[n].lhs != 0) {
-                n = datas[n].lhs;
-            } else {
-                return main_tokens[n] + end_offset;
-            }
-        },
-        .array_type_sentinel => {
-            const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
-            n = extra.elem_type;
-        },
-    };
-}
diff --git a/src/references.zig b/src/references.zig
index 37ad26b..a5a48f9 100644
--- a/src/references.zig
+++ b/src/references.zig
@@ -4,6 +4,7 @@ const analysis = @import("analysis.zig");
 const types = @import("types.zig");
 const offsets = @import("offsets.zig");
 const log = std.log.scoped(.references);
+usingnamespace @import("ast.zig");
 
 const ast = std.zig.ast;
 
@@ -250,7 +251,7 @@ fn symbolReferencesInternal(
         .@"if",
         .if_simple,
         => {
-            const if_node: ast.full.If = if (node_tags[node] == .@"if") tree.ifFull(node) else tree.ifSimple(node);
+            const if_node: ast.full.If = if (node_tags[node] == .@"if") ifFull(tree, node) else tree.ifSimple(node);
 
             try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler);
             try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.then_expr, .handle = handle }, decl, encoding, context, handler);
diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig
index e5b7b1d..72cecd0 100644
--- a/src/semantic_tokens.zig
+++ b/src/semantic_tokens.zig
@@ -3,7 +3,7 @@ const offsets = @import("offsets.zig");
 const DocumentStore = @import("document_store.zig");
 const analysis = @import("analysis.zig");
 const ast = std.zig.ast;
-const lastToken = offsets.lastToken;
+usingnamespace @import("ast.zig");
 
 pub const TokenType = enum(u32) {
     type,
@@ -700,7 +700,7 @@ fn writeNodeTokens(
         .@"if",
         .if_simple,
         => {
-            const if_node: ast.full.If = if (tag == .@"if") tree.ifFull(node) else tree.ifSimple(node);
+            const if_node: ast.full.If = if (tag == .@"if") ifFull(tree, node) else tree.ifSimple(node);
 
             try writeToken(builder, if_node.ast.if_token, .keyword);
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });
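The references.zig and semantic_tokens.zig hunks above show the usage pattern this patch establishes: call sites pull the fault-tolerant helpers in with usingnamespace @import("ast.zig") and call them as free functions (ifFull(tree, node), lastToken(tree, node)) instead of the std.zig.ast.Tree methods, so a tree containing parse errors still yields a best-effort token index rather than hitting the unreachables removed from the old offsets.zig copy. A minimal sketch of such a caller follows; nodeSourceSlice is a hypothetical helper written for illustration and is not part of this patch:

const std = @import("std");
const ast = std.zig.ast;
usingnamespace @import("ast.zig"); // brings the error-tolerant ifFull/lastToken into scope

/// Best-effort source slice covered by `node`; `tree` may contain parse errors.
/// Hypothetical helper for illustration only, not part of the patch.
fn nodeSourceSlice(tree: ast.Tree, node: ast.Node.Index) []const u8 {
    const token_starts = tree.tokens.items(.start);
    const first = tree.firstToken(node);
    const last = lastToken(tree, node); // free function from ast.zig, not tree.lastToken()
    const end = token_starts[last] + tree.tokenSlice(last).len;
    return tree.source[token_starts[first]..end];
}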