From 3d8a9732fcb43f6fdb02b6a485d4ae4a3fd99211 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Fri, 5 Mar 2021 22:38:42 +0100 Subject: [PATCH] Calculate correct token locations and ensure all semantic highlighting matches --- src/analysis.zig | 136 +++++++++++++++++++++++----------------- src/main.zig | 22 +++++-- src/offsets.zig | 35 +++++++++-- src/semantic_tokens.zig | 71 +++++++++++++-------- 4 files changed, 170 insertions(+), 94 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 8c2e2be..f2e6fbb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -14,42 +14,48 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI var idx = current; if (idx == 0) return null; switch (tags[node]) { - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { + .fn_proto, + .fn_proto_one, + .fn_proto_simple, + .fn_proto_multi, + .fn_decl, + => { idx -= 1; if (tokens[idx] == .keyword_extern and idx > 0) idx -= 1; if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, - .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { + .local_var_decl, + .global_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { idx -= 1; if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, - .container_field, .container_field_init, .container_field_align => { - idx -= 2; // skip '.' token - }, + .error_value => idx -= 1, + .container_field, + .container_field_init, + .container_field_align, + => idx -= 1, + .test_decl => idx -= 1, else => { - if (isContainer(tags[node])) { - idx -= 1; // go to '=' - idx -= 1; // mutability - idx -= 1; // possible 'pub' - if (tokens[idx] == .keyword_pub and idx > 0) - idx -= 1; - } else log.debug("Doc comment check for tag: {s}", .{tags[node]}); + log.debug("Doc comment check for tag: {s}", .{tags[node]}); }, } // Find first doc comment token if (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) { - while ((tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) and idx > 0) : (idx -= 1) {} - return idx + 1; + while (idx > 0 and + (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment)) + { + idx -= 1; + } + return idx + @boolToInt(tokens[idx] != .doc_comment and tokens[idx] != .container_doc_comment); } - // @TODO: Implement doc comments for tags - // } else if (node.castTag(.ErrorTag)) |tag| { - // return tag.doc_comments; - // } return null; } @@ -97,9 +103,9 @@ pub fn collectDocComments( /// Gets a function signature (keywords, name, return value) pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { - const start = tree.tokenLocation(0, func.ast.fn_token).line_start; - const end = tree.tokenLocation(0, tree.nodes.items(.main_token)[func.ast.return_type]).line_end; - return tree.source[start .. 
end - 1];
+    const start = offsets.tokenLocation(tree, func.ast.fn_token).start;
+    const end = offsets.tokenLocation(tree, tree.nodes.items(.main_token)[func.ast.return_type]).end;
+    return tree.source[start..end];
 }
 
 /// Gets a function snippet insert text
@@ -161,17 +167,17 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a
 
 /// Gets a variable signature (keywords, name, value)
 pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 {
-    const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start;
-    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end;
+    const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
+    const end = offsets.tokenLocation(tree, tree.lastToken(var_decl.ast.init_node)).end;
     return tree.source[start..end];
 }
 
 // analysis.getContainerFieldSignature(handle.tree, field)
 pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 {
-    const start = tree.tokenLocation(0, field.ast.name_token).line_start;
+    const start = offsets.tokenLocation(tree, field.ast.name_token).start;
     const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
-    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(end_node)).line_end;
-    return tree.source[start .. end - 1];
+    const end = offsets.tokenLocation(tree, tree.lastToken(end_node)).end;
+    return tree.source[start..end];
 }
 
 /// The type node is "type"
@@ -610,7 +616,11 @@ pub fn resolveTypeOfNodeInternal(
     const starts = tree.tokens.items(.start);
 
     switch (node_tags[node]) {
-        .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
+        .global_var_decl,
+        .local_var_decl,
+        .simple_var_decl,
+        .aligned_var_decl,
+        => {
             const var_decl = varDecl(tree, node).?;
             if (var_decl.ast.type_node != 0) block: {
                 return ((try resolveTypeOfNodeInternal(
@@ -647,7 +657,10 @@ pub fn resolveTypeOfNodeInternal(
             }
             return null;
         },
-        .container_field, .container_field_init, .container_field_align => |c| {
+        .container_field,
+        .container_field_init,
+        .container_field_align,
+        => |c| {
             const field: ast.full.ContainerField = switch (c) {
                 .container_field => tree.containerField(node),
                 .container_field_align => tree.containerFieldAlign(node),
@@ -721,10 +734,17 @@ pub fn resolveTypeOfNodeInternal(
             }
             return null;
         },
-        .@"comptime", .@"nosuspend", .grouped_expression => {
+        .@"comptime",
+        .@"nosuspend",
+        .grouped_expression,
+        => {
             return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params);
         },
-        .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => {
+        .struct_init,
+        .struct_init_comma,
+        .struct_init_one,
+        .struct_init_one_comma,
+        => {
             return ((try resolveTypeOfNodeInternal(
                 store,
                 arena,
@@ -735,14 +755,19 @@ pub fn resolveTypeOfNodeInternal(
         .error_set_decl => {
             return TypeWithHandle.typeVal(node_handle);
         },
-        .slice, .slice_sentinel, .slice_open => {
+        .slice,
+        .slice_sentinel,
+        .slice_open,
+        => {
             const left_type = (try resolveTypeOfNodeInternal(store, arena, .{
                 .node = datas[node].lhs,
                 .handle = handle,
             }, bound_type_params)) orelse return null;
             return try resolveBracketAccessType(store, arena, left_type, .Range, bound_type_params);
         },
-        .deref, .unwrap_optional => {
+        .deref,
+        .unwrap_optional,
+        => {
             const left_type = (try resolveTypeOfNodeInternal(store, arena, .{
                 .node = datas[node].lhs,
                 .handle = handle,
@@ -931,10 +956,7 @@ pub fn 
resolveTypeOfNodeInternal( .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, - .root => return TypeWithHandle.typeVal(node_handle), - else => { - // log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]}); - }, + else => {}, } return null; } @@ -1319,6 +1341,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { .async_call_one, .async_call_one_comma, => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), + .test_decl => if (data[node].lhs != 0) + return tree.tokenSlice(data[node].lhs), else => |tag| log.debug("INVALID: {}", .{tag}), } @@ -1326,8 +1350,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { } fn nodeContainsSourceIndex(tree: ast.Tree, node: ast.Node.Index, source_index: usize) bool { - const first_token = tree.tokenLocation(0, tree.firstToken(node)).line_start; - const last_token = tree.tokenLocation(@truncate(u32, first_token), tree.lastToken(node)).line_end; + const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start; + const last_token = offsets.tokenLocation(tree, tree.lastToken(node)).end; return source_index >= first_token and source_index <= last_token; } @@ -2332,11 +2356,11 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen } fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { - const loc_start = tree.tokenLocation(0, tree.firstToken(node)); - const loc_end = tree.tokenLocation(@truncate(u32, loc_start.line_start), tree.lastToken(node)); + const loc_start = offsets.tokenLocation(tree, tree.firstToken(node)); + const loc_end = offsets.tokenLocation(tree, tree.lastToken(node)); return SourceRange{ - .start = loc_start.line_start, - .end = loc_end.line_end, + .start = loc_start.start, + .end = loc_end.end, }; } @@ -2554,8 +2578,8 @@ fn makeScopeInternal( const scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, - .end = tree.tokenLocation(0, last_token).line_start, + .start = offsets.tokenLocation(tree, main_tokens[node_idx]).start, + .end = offsets.tokenLocation(tree, last_token).start, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2627,8 +2651,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, payload).line_start, - .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.then_expr)).line_end, + .start = offsets.tokenLocation(tree, payload).start, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2657,8 +2681,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, err_token).line_start, - .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.else_expr)).line_end, + .start = offsets.tokenLocation(tree, err_token).start, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.else_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2687,8 +2711,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, while_node.ast.while_token).line_start, - .end = tree.tokenLocation(0, tree.lastToken(node_idx)).line_end, + .start = offsets.tokenLocation(tree, while_node.ast.while_token).start, + .end = 
offsets.tokenLocation(tree, tree.lastToken(node_idx)).end,
             },
             .decls = std.StringHashMap(Declaration).init(allocator),
             .uses = &.{},
@@ -2704,8 +2728,8 @@ fn makeScopeInternal(
         var scope = try scopes.addOne(allocator);
         scope.* = .{
             .range = .{
-                .start = tree.tokenLocation(0, payload).line_start,
-                .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end,
+                .start = offsets.tokenLocation(tree, payload).start,
+                .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end,
             },
             .decls = std.StringHashMap(Declaration).init(allocator),
             .uses = &.{},
@@ -2733,8 +2757,8 @@ fn makeScopeInternal(
         var scope = try scopes.addOne(allocator);
         scope.* = .{
             .range = .{
-                .start = tree.tokenLocation(0, err_token).line_start,
-                .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.else_expr)).line_end,
+                .start = offsets.tokenLocation(tree, err_token).start,
+                .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.else_expr)).end,
             },
             .decls = std.StringHashMap(Declaration).init(allocator),
             .uses = &.{},
@@ -2760,8 +2784,8 @@ fn makeScopeInternal(
         var scope = try scopes.addOne(allocator);
         scope.* = .{
             .range = .{
-                .start = tree.tokenLocation(0, payload).line_start,
-                .end = tree.tokenLocation(0, tree.lastToken(switch_case.ast.target_expr)).line_end,
+                .start = offsets.tokenLocation(tree, payload).start,
+                .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end,
             },
             .decls = std.StringHashMap(Declaration).init(allocator),
             .uses = &.{},
diff --git a/src/main.zig b/src/main.zig
index a81fd5e..10d85bb 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -220,7 +220,12 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han
     for (tree.rootDecls()) |decl_idx| {
         const decl = tree.nodes.items(.tag)[decl_idx];
         switch (decl) {
-            .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => blk: {
+            .fn_proto,
+            .fn_proto_multi,
+            .fn_proto_one,
+            .fn_proto_simple,
+            .fn_decl,
+            => blk: {
                 var buf: [1]std.zig.ast.Node.Index = undefined;
                 const func = analysis.fnProto(tree, decl_idx, &buf).?;
                 if (func.extern_export_token != null) break :blk;
@@ -367,7 +372,12 @@ fn nodeToCompletion(
     if (is_type_val) return;
 
     switch (node_tags[node]) {
-        .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_decl => {
+        .fn_proto,
+        .fn_proto_multi,
+        .fn_proto_one,
+        .fn_proto_simple,
+        .fn_decl,
+        => {
             var buf: [1]std.zig.ast.Node.Index = undefined;
             const func = analysis.fnProto(tree, node, &buf).?;
             if (func.name_token) |name_token| {
@@ -376,7 +386,7 @@ fn nodeToCompletion(
             const insert_text = if (use_snippets) blk: {
                 // TODO Also check if we are dot accessing from a type val and don't skip in that case. 
const skip_self_param = if (func.ast.params.len > 0) param_check: { - const in_container = analysis.innermostContainer(handle, tree.tokenLocation(0, func.ast.fn_token).line_start); + const in_container = analysis.innermostContainer(handle, tree.tokens.items(.start)[func.ast.fn_token]); var it = func.iterate(tree); const param = it.next().?; @@ -603,7 +613,9 @@ fn hoverSymbol( tree.firstToken(param.type_expr); const last_token = tree.lastToken(param.type_expr); - const signature_str = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end]; + const start = offsets.tokenLocation(tree, first_token).start; + const end = offsets.tokenLocation(tree, last_token).end; + const signature_str = tree.source[start..end]; break :param_decl if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```\n{s}", .{ signature_str, doc_str }) else @@ -895,7 +907,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .label = tree.tokenSlice(param.name_token.?), .kind = .Constant, .documentation = doc, - .detail = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end], + .detail = tree.source[offsets.tokenLocation(tree, first_token).start..offsets.tokenLocation(tree, last_token).end], }); }, .pointer_payload => |payload| { diff --git a/src/offsets.zig b/src/offsets.zig index 40d40e0..418fe2f 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -1,5 +1,6 @@ const std = @import("std"); const types = @import("types.zig"); +const ast = std.zig.ast; pub const Encoding = enum { utf8, @@ -70,7 +71,7 @@ pub const TokenLocation = struct { } }; -pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation { +pub fn tokenRelativeLocation(tree: ast.Tree, start_index: usize, token: ast.TokenIndex, encoding: Encoding) !TokenLocation { const start = tree.tokens.items(.start)[token]; var loc = TokenLocation{ @@ -108,14 +109,14 @@ pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: } /// Asserts the token is comprised of valid utf8 -pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize { - const token_loc = tree.tokenLocation(0, token); +pub fn tokenLength(tree: ast.Tree, token: ast.TokenIndex, encoding: Encoding) usize { + const token_loc = tokenLocation(tree, token); if (encoding == .utf8) - return token_loc.line_end - token_loc.line_start; + return token_loc.end - token_loc.start; - var i: usize = token_loc.line_start; + var i: usize = token_loc.start; var utf16_len: usize = 0; - while (i < token_loc.line_end) { + while (i < token_loc.end) { const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable; const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable; if (codepoint < 0x10000) { @@ -128,6 +129,28 @@ pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encodi return utf16_len; } +/// Token location inside source +pub const Loc = struct { + start: usize, + end: usize, +}; + +pub fn tokenLocation(tree: ast.Tree, token_index: ast.TokenIndex) Loc { + const start = tree.tokens.items(.start)[token_index]; + const tag = tree.tokens.items(.tag)[token_index]; + + // For some tokens, re-tokenization is needed to find the end. 
+ var tokenizer: std.zig.Tokenizer = .{ + .buffer = tree.source, + .index = start, + .pending_invalid_token = null, + }; + + const token = tokenizer.next(); + std.debug.assert(token.tag == tag); + return .{ .start = token.loc.start, .end = token.loc.end }; +} + pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range { var line_idx: i64 = 0; var curr_line: []const u8 = doc.text; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 61594ed..87eaed9 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -64,12 +64,13 @@ const Builder = struct { } fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { + const starts = self.handle.tree.tokens.items(.start); const start_idx = if (self.current_token) |current_token| - self.handle.tree.tokenLocation(0, current_token).line_start + starts[current_token] else 0; - if (start_idx > self.handle.tree.tokenLocation(0, token).line_start) + if (start_idx > starts[token]) return; const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return; @@ -268,6 +269,8 @@ fn writeNodeTokens( maybe_node: ?ast.Node.Index, ) error{OutOfMemory}!void { if (maybe_node == null) return; + const node = maybe_node.?; + if (node == 0) return; const handle = builder.handle; const tree = handle.tree; @@ -275,9 +278,8 @@ fn writeNodeTokens( const token_tags = tree.tokens.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + if (node > datas.len) return; - const node = maybe_node.?; - if (node > node_tags.len) return; const tag = node_tags[node]; const main_token = main_tokens[node]; @@ -286,18 +288,16 @@ fn writeNodeTokens( defer arena.child_allocator.free(child_frame); switch (tag) { - .root => { - var gap_highlighter = GapHighlighter.init(builder, 0); - var buf: [2]ast.Node.Index = undefined; - for (analysis.declMembers(tree, .root, 0, &buf)) |child| { - try gap_highlighter.next(child); - if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, child, .field, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - try gap_highlighter.end(@truncate(u32, tree.tokens.len) - 1); + .root => unreachable, + .container_field, + .container_field_align, + .container_field_init, + => try writeContainerField(builder, arena, store, node, .field, child_frame), + .@"errdefer" => { + if (datas[node].lhs != 0) + try writeToken(builder, datas[node].lhs, .variable); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, .block, .block_semicolon, @@ -381,12 +381,21 @@ fn writeNodeTokens( .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .tagged_union_two, + .tagged_union_two_trailing, => { var buf: [2]ast.Node.Index = undefined; const decl: ast.full.ContainerDecl = switch (tag) { .container_decl, .container_decl_trailing => tree.containerDecl(node), .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node), .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node), + .tagged_union, .tagged_union_trailing => tree.taggedUnion(node), + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node), + .tagged_union_two, 
.tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node),
                else => unreachable,
            };
 
@@ -632,7 +641,7 @@ fn writeNodeTokens(
                 .handle = handle,
             })) |struct_type| switch (struct_type.type.data) {
                 .other => |type_node| if (analysis.isContainer(struct_type.handle.tree.nodes.items(.tag)[type_node]))
-                    fieldTokenType(type_node, handle)
+                    fieldTokenType(type_node, struct_type.handle)
                 else
                     null,
                 else => null,
@@ -644,11 +653,9 @@ fn writeNodeTokens(
                 try gap_highlighter.next(field_init);
 
                 const init_token = tree.firstToken(field_init);
-                if (field_token_type) |tok_type| {
-                    try writeToken(builder, init_token - 3, tok_type);
-                    try writeToken(builder, init_token - 2, tok_type);
-                }
-                try writeToken(builder, init_token - 1, .operator);
+                try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
+                try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
+                try writeToken(builder, init_token - 1, .operator); // '='
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
             }
             try gap_highlighter.end(tree.lastToken(node));
@@ -712,7 +719,6 @@ fn writeNodeTokens(
         .grouped_expression => {
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
         },
-        .@"return",
         .@"break",
         .@"continue",
         => {
@@ -722,7 +728,7 @@ fn writeNodeTokens(
             if (datas[node].rhs != 0)
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
         },
-        .@"suspend" => {
+        .@"suspend", .@"return" => {
             try writeToken(builder, main_token, .keyword);
             if (datas[node].lhs != 0)
                 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
@@ -872,6 +878,8 @@ fn writeNodeTokens(
             if (data.rhs == 0) return;
             const rhs_str = tree.tokenSlice(data.rhs);
 
+            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });
+
             // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
             // writeToken code.
             // Maybe we can hook into it instead? 
Also applies to Identifier and VarDecl
@@ -935,7 +943,7 @@ fn writeNodeTokens(
             });
         }
 
-        try writeToken(builder, main_token, .operator);
+        if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
         if (ptr_type.ast.sentinel != 0) {
             return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
                 builder,
@@ -995,7 +1003,7 @@ fn writeNodeTokens(
             try writeToken(builder, main_token, .keyword);
             try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
         },
-        else => std.log.scoped(.semantic_tokens).debug("TODO: {s}", .{tag}),
+        .anyframe_literal => try writeToken(builder, main_token, .keyword),
    }
}
 
@@ -1003,7 +1011,16 @@ pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
    var builder = Builder.init(arena.child_allocator, handle, encoding);
 
-    // pass root node, which always has index '0'
-    try writeNodeTokens(&builder, arena, store, 0);
+    // traverse the ast from the root declarations
+    var gap_highlighter = GapHighlighter.init(&builder, 0);
+    var buf: [2]ast.Node.Index = undefined;
+    for (analysis.declMembers(handle.tree, .root, 0, &buf)) |child| {
+        try gap_highlighter.next(child);
+        try writeNodeTokens(&builder, arena, store, child);
+    }
+
+    try gap_highlighter.end(@truncate(u32, handle.tree.tokens.len) - 1);
    return builder.toOwnedSlice();
}
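
A note on the new helper in src/offsets.zig: `tokenLocation` re-tokenizes from the
token's stored start offset and returns absolute byte offsets into `tree.source`
(the `Loc` struct added above), replacing the line-oriented `ast.Tree.tokenLocation`.
A minimal sketch of the resulting call pattern; the `tokenText` wrapper below is
hypothetical, for illustration only, not part of this patch:

    const std = @import("std");
    const ast = std.zig.ast;
    const offsets = @import("offsets.zig");

    /// Hypothetical helper: returns the exact source bytes of a single token.
    fn tokenText(tree: ast.Tree, token: ast.TokenIndex) []const u8 {
        // Loc carries absolute byte offsets, so slicing needs no line math
        // and no `end - 1` off-by-one corrections as before.
        const loc = offsets.tokenLocation(tree, token);
        return tree.source[loc.start..loc.end];
    }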