diff --git a/src/analysis.zig b/src/analysis.zig
index 435814d..dc3dcb7 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -4,6 +4,7 @@ const ast = std.zig.ast;
 const types = @import("types.zig");
 const offsets = @import("offsets.zig");
 const log = std.log.scoped(.analysis);
+const lastToken = offsets.lastToken;
 
 /// Get a declaration's doc comment token index
 pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex {
@@ -99,7 +100,7 @@ pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 {
     // return type can be 0 when user wrote incorrect fn signature
     // to ensure we don't break, just end the signature at end of fn token
     if (func.ast.return_type == 0) return tree.source[start.start..start.end];
-    const end = offsets.tokenLocation(tree, tree.lastToken(func.ast.return_type)).end;
+    const end = offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type)).end;
     return tree.source[start.start..end];
 }
 
@@ -152,7 +153,7 @@ pub fn getFunctionSnippet(
             try buffer.appendSlice("...");
         } else if (param.type_expr != 0) {
             var curr_token = tree.firstToken(param.type_expr);
-            var end_token = tree.lastToken(param.type_expr);
+            var end_token = lastToken(tree, param.type_expr);
             while (curr_token <= end_token) : (curr_token += 1) {
                 const tag = token_tags[curr_token];
                 const is_comma = tag == .comma;
@@ -173,7 +174,7 @@
 /// Gets a function signature (keywords, name, return value)
 pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 {
     const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
-    const end = offsets.tokenLocation(tree, tree.lastToken(var_decl.ast.init_node)).end;
+    const end = offsets.tokenLocation(tree, lastToken(tree, var_decl.ast.init_node)).end;
     return tree.source[start..end];
 }
 
@@ -181,7 +182,7 @@ pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const
 pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 {
     const start = offsets.tokenLocation(tree, field.ast.name_token).start;
     const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
-    const end = offsets.tokenLocation(tree, tree.lastToken(end_node)).end;
+    const end = offsets.tokenLocation(tree, lastToken(tree, end_node)).end;
     return tree.source[start..end];
 }
 
@@ -1448,7 +1449,7 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 {
 
 fn nodeContainsSourceIndex(tree: ast.Tree, node: ast.Node.Index, source_index: usize) bool {
     const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start;
-    const last_token = offsets.tokenLocation(tree, tree.lastToken(node)).end;
+    const last_token = offsets.tokenLocation(tree, lastToken(tree, node)).end;
     return source_index >= first_token and source_index <= last_token;
 }
 
@@ -1839,7 +1840,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod
         const end_loc = start_loc.add(try offsets.tokenRelativeLocation(
             tree,
             start_loc.offset,
-            starts[tree.lastToken(node)],
+            starts[lastToken(tree, node)],
             context.encoding,
         ));
         context.prev_loc = end_loc;
@@ -2525,7 +2526,8 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen
 
 fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange {
     const loc_start = offsets.tokenLocation(tree, tree.firstToken(node));
-    const loc_end = offsets.tokenLocation(tree, tree.lastToken(node));
+    const loc_end = offsets.tokenLocation(tree, lastToken(tree, node));
+
     return SourceRange{
         .start = loc_start.start,
         .end = loc_end.end,
@@ -2746,7 +2748,7 @@ fn makeScopeInternal(
         .block_two_semicolon,
         => {
             const first_token = tree.firstToken(node_idx);
-            const last_token = tree.lastToken(node_idx);
+            const last_token = lastToken(tree, node_idx);
 
             // if labeled block
             if (token_tags[first_token] == .identifier) {
@@ -2827,7 +2829,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, payload).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, if_node.ast.then_expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
@@ -2857,7 +2859,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, err_token).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.else_expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, if_node.ast.else_expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
@@ -2895,7 +2897,7 @@
             scope.* = .{
                 .range = .{
                     .start = offsets.tokenLocation(tree, while_node.ast.while_token).start,
-                    .end = offsets.tokenLocation(tree, tree.lastToken(node_idx)).end,
+                    .end = offsets.tokenLocation(tree, lastToken(tree, node_idx)).end,
                 },
                 .decls = std.StringHashMap(Declaration).init(allocator),
                 .uses = &.{},
@@ -2912,7 +2914,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, payload).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, while_node.ast.then_expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
@@ -2958,7 +2960,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, err_token).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.else_expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, while_node.ast.else_expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
@@ -2992,7 +2994,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, payload).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, switch_case.ast.target_expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
@@ -3183,7 +3185,7 @@
                 scope.* = .{
                     .range = .{
                         .start = offsets.tokenLocation(tree, payload_token).start,
-                        .end = offsets.tokenLocation(tree, tree.lastToken(expr)).end,
+                        .end = offsets.tokenLocation(tree, lastToken(tree, expr)).end,
                     },
                     .decls = std.StringHashMap(Declaration).init(allocator),
                     .uses = &.{},
diff --git a/src/offsets.zig b/src/offsets.zig
index 8f38f56..64a3b89 100644
--- a/src/offsets.zig
+++ b/src/offsets.zig
@@ -196,3 +196,520 @@ pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range {
         };
     }
 }
+
+// Updated version from std that allows for failures
+// by removing the unreachables and returning up to that point
+// so that we can always provide information while the user is still typing
+pub fn lastToken(tree: ast.Tree, node: ast.Node.Index) ast.TokenIndex {
+    const Node = ast.Node;
+    const TokenIndex = ast.TokenIndex;
+    const tags = tree.nodes.items(.tag);
+    const datas = tree.nodes.items(.data);
+    const main_tokens = tree.nodes.items(.main_token);
+    const token_starts = tree.tokens.items(.start);
+    const token_tags = tree.tokens.items(.tag);
+    var n = node;
+    var end_offset: TokenIndex = 0;
+    while (true) switch (tags[n]) {
+        .root => return @intCast(TokenIndex, tree.tokens.len - 1),
+
+        .@"usingnamespace",
+        .bool_not,
+        .negation,
+        .bit_not,
+        .negation_wrap,
+        .address_of,
+        .@"try",
+        .@"await",
+        .optional_type,
+        .@"resume",
+        .@"nosuspend",
+        .@"comptime",
+        => n = datas[n].lhs,
+
+        .test_decl,
+        .@"errdefer",
+        .@"defer",
+        .@"catch",
+        .equal_equal,
+        .bang_equal,
+        .less_than,
+        .greater_than,
+        .less_or_equal,
+        .greater_or_equal,
+        .assign_mul,
+        .assign_div,
+        .assign_mod,
+        .assign_add,
+        .assign_sub,
+        .assign_bit_shift_left,
+        .assign_bit_shift_right,
+        .assign_bit_and,
+        .assign_bit_xor,
+        .assign_bit_or,
+        .assign_mul_wrap,
+        .assign_add_wrap,
+        .assign_sub_wrap,
+        .assign,
+        .merge_error_sets,
+        .mul,
+        .div,
+        .mod,
+        .array_mult,
+        .mul_wrap,
+        .add,
+        .sub,
+        .array_cat,
+        .add_wrap,
+        .sub_wrap,
+        .bit_shift_left,
+        .bit_shift_right,
+        .bit_and,
+        .bit_xor,
+        .bit_or,
+        .@"orelse",
+        .bool_and,
+        .bool_or,
+        .anyframe_type,
+        .error_union,
+        .if_simple,
+        .while_simple,
+        .for_simple,
+        .fn_proto_simple,
+        .fn_proto_multi,
+        .ptr_type_aligned,
+        .ptr_type_sentinel,
+        .ptr_type,
+        .ptr_type_bit_range,
+        .array_type,
+        .switch_case_one,
+        .switch_case,
+        .switch_range,
+        => n = datas[n].rhs,
+
+        .field_access,
+        .unwrap_optional,
+        .grouped_expression,
+        .multiline_string_literal,
+        .error_set_decl,
+        .asm_simple,
+        .asm_output,
+        .asm_input,
+        .error_value,
+        => return datas[n].rhs + end_offset,
+
+        .@"anytype",
+        .anyframe_literal,
+        .char_literal,
+        .integer_literal,
+        .float_literal,
+        .false_literal,
+        .true_literal,
+        .null_literal,
+        .undefined_literal,
+        .unreachable_literal,
+        .identifier,
+        .deref,
+        .enum_literal,
+        .string_literal,
+        => return main_tokens[n] + end_offset,
+
+        .@"return" => if (datas[n].lhs != 0) {
+            n = datas[n].lhs;
+        } else {
+            return main_tokens[n] + end_offset;
+        },
+
+        .call, .async_call => {
+            end_offset += 1; // for the rparen
+            const params = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (params.end - params.start == 0) {
+                return main_tokens[n] + end_offset;
+            }
+            n = tree.extra_data[params.end - 1]; // last parameter
+        },
+        .tagged_union_enum_tag => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (members.end - members.start == 0) {
+                end_offset += 4; // for the rparen + rparen + lbrace + rbrace
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[members.end - 1]; // last parameter
+            }
+        },
+        .call_comma,
+        .async_call_comma,
+        .tagged_union_enum_tag_trailing,
+        => {
+            end_offset += 2; // for the comma/semicolon + rparen/rbrace
+            const params = tree.extraData(datas[n].rhs, Node.SubRange);
+            std.debug.assert(params.end > params.start);
+            n = tree.extra_data[params.end - 1]; // last parameter
+        },
+        .@"switch" => {
+            const cases = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (cases.end - cases.start == 0) {
+                end_offset += 3; // rparen, lbrace, rbrace
+                n = datas[n].lhs; // condition expression
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[cases.end - 1]; // last case
+            }
+        },
+        .container_decl_arg => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (members.end - members.start == 0) {
+                end_offset += 3; // for the rparen + lbrace + rbrace
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[members.end - 1]; // last parameter
+            }
+        },
+        .@"asm" => {
+            const extra = tree.extraData(datas[n].rhs, Node.Asm);
+            return extra.rparen + end_offset;
+        },
+        .array_init,
+        .struct_init,
+        => {
+            const elements = tree.extraData(datas[n].rhs, Node.SubRange);
+            std.debug.assert(elements.end - elements.start > 0);
+            end_offset += 1; // for the rbrace
+            n = tree.extra_data[elements.end - 1]; // last element
+        },
+        .array_init_comma,
+        .struct_init_comma,
+        .container_decl_arg_trailing,
+        .switch_comma,
+        => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            std.debug.assert(members.end - members.start > 0);
+            end_offset += 2; // for the comma + rbrace
+            n = tree.extra_data[members.end - 1]; // last parameter
+        },
+        .array_init_dot,
+        .struct_init_dot,
+        .block,
+        .container_decl,
+        .tagged_union,
+        .builtin_call,
+        => {
+            std.debug.assert(datas[n].rhs - datas[n].lhs > 0);
+            end_offset += 1; // for the rbrace
+            n = tree.extra_data[datas[n].rhs - 1]; // last statement
+        },
+        .array_init_dot_comma,
+        .struct_init_dot_comma,
+        .block_semicolon,
+        .container_decl_trailing,
+        .tagged_union_trailing,
+        .builtin_call_comma,
+        => {
+            std.debug.assert(datas[n].rhs - datas[n].lhs > 0);
+            end_offset += 2; // for the comma/semicolon + rbrace/rparen
+            n = tree.extra_data[datas[n].rhs - 1]; // last member
+        },
+        .call_one,
+        .async_call_one,
+        .array_access,
+        => {
+            end_offset += 1; // for the rparen/rbracket
+            if (datas[n].rhs == 0) {
+                return main_tokens[n] + end_offset;
+            }
+            n = datas[n].rhs;
+        },
+        .array_init_dot_two,
+        .block_two,
+        .builtin_call_two,
+        .struct_init_dot_two,
+        .container_decl_two,
+        .tagged_union_two,
+        => {
+            if (datas[n].rhs != 0) {
+                end_offset += 1; // for the rparen/rbrace
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                end_offset += 1; // for the rparen/rbrace
+                n = datas[n].lhs;
+            } else {
+                switch (tags[n]) {
+                    .array_init_dot_two,
+                    .block_two,
+                    .struct_init_dot_two,
+                    => end_offset += 1, // rbrace
+                    .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
+                    .container_decl_two => {
+                        var i: u32 = 2; // lbrace + rbrace
+                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+                        end_offset += i;
+                    },
+                    .tagged_union_two => {
+                        var i: u32 = 5; // (enum) {}
+                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+                        end_offset += i;
+                    },
+                    else => unreachable,
+                }
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .array_init_dot_two_comma,
+        .builtin_call_two_comma,
+        .block_two_semicolon,
+        .struct_init_dot_two_comma,
+        .container_decl_two_trailing,
+        .tagged_union_two_trailing,
+        => {
+            end_offset += 2; // for the comma/semicolon + rbrace/rparen
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset; // returns { }
+            }
+        },
+        .simple_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // from mut token to name
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .aligned_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                end_offset += 1; // for the rparen
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // from mut token to name
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .global_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
+                if (extra.section_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.section_node;
+                } else if (extra.align_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.align_node;
+                } else if (extra.type_node != 0) {
+                    n = extra.type_node;
+                } else {
+                    end_offset += 1; // from mut token to name
+                    return main_tokens[n] + end_offset;
+                }
+            }
+        },
+        .local_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
+                if (extra.align_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.align_node;
+                } else if (extra.type_node != 0) {
+                    n = extra.type_node;
+                } else {
+                    end_offset += 1; // from mut token to name
+                    return main_tokens[n] + end_offset;
+                }
+            }
+        },
+        .container_field_init => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .container_field_align => {
+            if (datas[n].rhs != 0) {
+                end_offset += 1; // for the rparen
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .container_field => {
+            const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
+            if (extra.value_expr != 0) {
+                n = extra.value_expr;
+            } else if (extra.align_expr != 0) {
+                end_offset += 1; // for the rparen
+                n = extra.align_expr;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+
+        .array_init_one,
+        .struct_init_one,
+        => {
+            end_offset += 1; // rbrace
+            if (datas[n].rhs == 0) {
+                return main_tokens[n] + end_offset;
+            } else {
+                n = datas[n].rhs;
+            }
+        },
+        .slice_open,
+        .call_one_comma,
+        .async_call_one_comma,
+        .array_init_one_comma,
+        .struct_init_one_comma,
+        => {
+            end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
+            n = datas[n].rhs;
+            std.debug.assert(n != 0);
+        },
+        .slice => {
+            const extra = tree.extraData(datas[n].rhs, Node.Slice);
+            std.debug.assert(extra.end != 0); // should have used slice_open
+            end_offset += 1; // rbracket
+            n = extra.end;
+        },
+        .slice_sentinel => {
+            const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
+            std.debug.assert(extra.sentinel != 0); // should have used slice
+            end_offset += 1; // rbracket
+            n = extra.sentinel;
+        },
+
+        .@"continue" => {
+            if (datas[n].lhs != 0) {
+                return datas[n].lhs + end_offset;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .@"break" => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                return datas[n].lhs + end_offset;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .fn_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                n = datas[n].lhs;
+            }
+        },
+        .fn_proto_one => {
+            const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne);
+            // linksection, callconv, align can appear in any order, so we
+            // find the last one here.
+            var max_node: Node.Index = datas[n].rhs;
+            var max_start = token_starts[main_tokens[max_node]];
+            var max_offset: TokenIndex = 0;
+            if (extra.align_expr != 0) {
+                const start = token_starts[main_tokens[extra.align_expr]];
+                if (start > max_start) {
+                    max_node = extra.align_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.section_expr != 0) {
+                const start = token_starts[main_tokens[extra.section_expr]];
+                if (start > max_start) {
+                    max_node = extra.section_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.callconv_expr != 0) {
+                const start = token_starts[main_tokens[extra.callconv_expr]];
+                if (start > max_start) {
+                    max_node = extra.callconv_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            n = max_node;
+            end_offset += max_offset;
+        },
+        .fn_proto => {
+            const extra = tree.extraData(datas[n].lhs, Node.FnProto);
+            // linksection, callconv, align can appear in any order, so we
+            // find the last one here.
+            var max_node: Node.Index = datas[n].rhs;
+            var max_start = token_starts[main_tokens[max_node]];
+            var max_offset: TokenIndex = 0;
+            if (extra.align_expr != 0) {
+                const start = token_starts[main_tokens[extra.align_expr]];
+                if (start > max_start) {
+                    max_node = extra.align_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.section_expr != 0) {
+                const start = token_starts[main_tokens[extra.section_expr]];
+                if (start > max_start) {
+                    max_node = extra.section_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.callconv_expr != 0) {
+                const start = token_starts[main_tokens[extra.callconv_expr]];
+                if (start > max_start) {
+                    max_node = extra.callconv_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            n = max_node;
+            end_offset += max_offset;
+        },
+        .while_cont => {
+            const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
+            std.debug.assert(extra.then_expr != 0);
+            n = extra.then_expr;
+        },
+        .@"while" => {
+            const extra = tree.extraData(datas[n].rhs, Node.While);
+            std.debug.assert(extra.else_expr != 0);
+            n = extra.else_expr;
+        },
+        .@"if", .@"for" => {
+            const extra = tree.extraData(datas[n].rhs, Node.If);
+            std.debug.assert(extra.else_expr != 0);
+            n = extra.else_expr;
+        },
+        .@"suspend" => {
+            if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .array_type_sentinel => {
+            const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
+            n = extra.elem_type;
+        },
+    };
+}
diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig
index 58df675..679626b 100644
--- a/src/semantic_tokens.zig
+++ b/src/semantic_tokens.zig
@@ -3,6 +3,7 @@ const offsets = @import("offsets.zig");
 const DocumentStore = @import("document_store.zig");
 const analysis = @import("analysis.zig");
 const ast = std.zig.ast;
+const lastToken = offsets.lastToken;
 
 pub const TokenType = enum(u32) {
     type,
@@ -219,7 +220,7 @@ const GapHighlighter = struct {
         while (i < tree.firstToken(node)) : (i += 1) {
             try self.handleTok(i);
         }
-        self.current_idx = tree.lastToken(node) + 1;
+        self.current_idx = lastToken(tree, node) + 1;
     }
 
     fn end(self: *GapHighlighter, last: ast.TokenIndex) !void {
@@ -284,9 +285,9 @@ fn writeContainerField(
 
     if (container_field.ast.value_expr != 0) block: {
         const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
-            builder.handle.tree.lastToken(container_field.ast.type_expr) + 1
+            lastToken(builder.handle.tree, container_field.ast.type_expr) + 1
         else if (container_field.ast.align_expr != 0)
-            builder.handle.tree.lastToken(container_field.ast.align_expr) + 1
+            lastToken(builder.handle.tree, container_field.ast.align_expr) + 1
         else
             break :block; // Check this, I believe it is correct.
 
@@ -374,7 +375,7 @@ fn writeNodeTokens(
             }
         }
 
-        try gap_highlighter.end(tree.lastToken(node));
+        try gap_highlighter.end(lastToken(tree, node));
     },
     .global_var_decl,
     .local_var_decl,
@@ -458,7 +459,7 @@ fn writeNodeTokens(
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
             }
         }
-        try gap_highlighter.end(tree.lastToken(node));
+        try gap_highlighter.end(lastToken(tree, node));
     },
     .error_value => {
         // if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
@@ -566,12 +567,12 @@ fn writeNodeTokens(
             const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
             const cases = tree.extra_data[extra.start..extra.end];
 
-            var gap_highlighter = GapHighlighter.init(builder, tree.lastToken(datas[node].lhs) + 1);
+            var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, datas[node].lhs) + 1);
             for (cases) |case_node| {
                 try gap_highlighter.next(case_node);
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node });
             }
-            try gap_highlighter.end(tree.lastToken(node));
+            try gap_highlighter.end(lastToken(tree, node));
         },
         .switch_case_one,
         .switch_case,
@@ -727,7 +728,7 @@ fn writeNodeTokens(
             try writeToken(builder, init_token - 1, .operator); // '='
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
         }
-        try gap_highlighter.end(tree.lastToken(node));
+        try gap_highlighter.end(lastToken(tree, node));
     },
     .call,
    .call_comma,
@@ -749,8 +750,8 @@ fn writeNodeTokens(
         try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });
 
         if (builder.current_token) |curr_tok| {
-            if (curr_tok != tree.lastToken(call.ast.fn_expr) and token_tags[tree.lastToken(call.ast.fn_expr)] == .identifier) {
-                try writeToken(builder, tree.lastToken(call.ast.fn_expr), .function);
+            if (curr_tok != lastToken(tree, call.ast.fn_expr) and token_tags[lastToken(tree, call.ast.fn_expr)] == .identifier) {
+                try writeToken(builder, lastToken(tree, call.ast.fn_expr), .function);
             }
         }
         for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
@@ -768,7 +769,7 @@
 
         try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced });
         try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start });
-        try writeToken(builder, tree.lastToken(slice.ast.start) + 1, .operator);
+        try writeToken(builder, lastToken(tree, slice.ast.start) + 1, .operator);
 
         if (slice.ast.end != 0)
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end });
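For reviewers, here is a minimal sketch of the failure mode this patch targets. It is not part of the diff; the file layout (a sibling `offsets.zig`) and the Zig 0.8-era `std.zig.parse` API used throughout the patch are assumed:

```zig
const std = @import("std");
const offsets = @import("offsets.zig");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = &gpa.allocator;

    // The user is mid-edit: the declaration and the struct are unterminated.
    var tree = try std.zig.parse(allocator, "const S = struct { a: u32,");
    defer tree.deinit(allocator);

    const decls = tree.rootDecls();
    if (decls.len != 0) {
        // std's tree.lastToken() can trip an `unreachable` on a node this
        // broken; the vendored offsets.lastToken() walks as far as it can
        // and returns the best-known token index instead.
        const last = offsets.lastToken(tree, decls[0]);
        std.debug.print("last token: {s}\n", .{tree.tokenSlice(last)});
    }
}
```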
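The `end_offset` bookkeeping in `lastToken` counts the closing tokens that sit between a child node's last token and its parent's own last token. A hypothetical walk of the `.call_comma` case (token names follow `std.zig.Token.Tag`):

```zig
// Source:       foo(a, b,)
// Token stream: .identifier .l_paren .identifier .comma .identifier .comma .r_paren
//                                                       ^ last token of the final
//                                                         parameter `b`
// .call_comma adds 2 to end_offset (trailing .comma + .r_paren) and recurses
// into the last parameter; `b` is an .identifier, so the loop returns
// main_tokens[b] + 2, which lands on the call's own closing .r_paren.
```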