From 3fefcfb398f58eba0b6a85440c2f5c8ca510b03b Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 31 Mar 2023 20:48:47 +0200 Subject: [PATCH 1/8] simplify semantic token comment handling --- src/features/semantic_tokens.zig | 123 ++++--------------------- tests/lsp_features/semantic_tokens.zig | 9 +- 2 files changed, 27 insertions(+), 105 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index d030535..41546bd 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -49,83 +49,22 @@ const Builder = struct { encoding: offsets.Encoding, limited: bool, - fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { - switch (token_type) { - .type, - .parameter, - .variable, - .enumMember, - .field, - .errorTag, - .function, - .label, - => {}, - else => if (self.limited) return, - } + fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) error{OutOfMemory}!void { const tree = self.handle.tree; const starts = tree.tokens.items(.start); - if (starts[token] < self.previous_source_index) return; - - if (self.previous_token) |prev| { - // Highlight gaps between AST nodes. These can contain comments or malformed code. - var i = prev + 1; - while (i < token) : (i += 1) { - try handleComments(self, starts[i - 1], starts[i]); - } - } - self.previous_token = token; - try self.handleComments(starts[token -| 1], starts[token]); - - const length = offsets.tokenLength(tree, token, self.encoding); - try self.addDirect(token_type, token_modifiers, starts[token], length); + try self.handleComments(self.previous_source_index, starts[token]); + try self.addDirect(token_type, token_modifiers, offsets.tokenToLoc(tree, token)); } fn finish(self: *Builder) error{OutOfMemory}!types.SemanticTokens { - const starts = self.handle.tree.tokens.items(.start); - - const last_token = self.previous_token orelse 0; - var i = last_token + 1; - while (i < starts.len) : (i += 1) { - try handleComments(self, starts[i - 1], starts[i]); - } - try self.handleComments(starts[starts.len - 1], self.handle.tree.source.len); - + try self.handleComments(self.previous_source_index, self.handle.tree.source.len); return .{ .data = try self.token_buffer.toOwnedSlice(self.arena) }; } - /// Highlight a token without semantic context. - fn handleToken(self: *Builder, tok: Ast.TokenIndex) !void { - const tree = self.handle.tree; - // TODO More highlighting here - const tok_id = tree.tokens.items(.tag)[tok]; - const tok_type: TokenType = switch (tok_id) { - .keyword_unreachable => .keywordLiteral, - .number_literal => .number, - .string_literal, .multiline_string_literal_line, .char_literal => .string, - .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon => return, - - else => blk: { - const id = @enumToInt(tok_id); - if (id >= @enumToInt(std.zig.Token.Tag.keyword_align) and - id <= @enumToInt(std.zig.Token.Tag.keyword_while)) - break :blk TokenType.keyword; - if (id >= @enumToInt(std.zig.Token.Tag.bang) and - id <= @enumToInt(std.zig.Token.Tag.tilde)) - break :blk TokenType.operator; - - return; - }, - }; - const start = tree.tokens.items(.start)[tok]; - const length = offsets.tokenLength(tree, tok, self.encoding); - try self.addDirect(tok_type, .{}, start, length); - } - /// Highlight normal comments and doc comments. 
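/// For example (hypothetical input, purely illustrative): given
/// `const a = 1; // note`, the scan below finds the `//`, extends the
/// range to the end of the line, and emits one `.comment` token; a `//!`
/// prefix additionally sets the `documentation` modifier, as the tests assert.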
- fn handleComments(self: *Builder, from: usize, to: usize) !void { - if (from == to) return; - std.debug.assert(from < to); + fn handleComments(self: *Builder, from: usize, to: usize) error{OutOfMemory}!void { + if (from >= to) return; const source = self.handle.tree.source; @@ -165,14 +104,14 @@ const Builder = struct { while (i < to and source[i] != '\n') : (i += 1) {} - const length = offsets.locLength(self.handle.tree.source, .{ .start = comment_start, .end = i }, self.encoding); - try self.addDirect(TokenType.comment, mods, comment_start, length); + try self.addDirect(.comment, mods, .{ .start = comment_start, .end = i }); } } - fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void { - if (start < self.previous_source_index) return; - switch (tok_type) { + fn addDirect(self: *Builder, token_type: TokenType, token_modifiers: TokenModifiers, loc: offsets.Loc) error{OutOfMemory}!void { + std.debug.assert(loc.start <= loc.end); + if (loc.start < self.previous_source_index) return; + switch (token_type) { .type, .parameter, .variable, @@ -185,17 +124,18 @@ const Builder = struct { else => if (self.limited) return, } - const text = self.handle.tree.source[self.previous_source_index..start]; - const delta = offsets.indexToPosition(text, text.len, self.encoding); + const delta_text = self.handle.tree.source[self.previous_source_index..loc.start]; + const delta = offsets.indexToPosition(delta_text, delta_text.len, self.encoding); + const length = offsets.locLength(self.handle.tree.source, loc, self.encoding); try self.token_buffer.appendSlice(self.arena, &.{ @truncate(u32, delta.line), @truncate(u32, delta.character), @truncate(u32, length), - @enumToInt(tok_type), - @bitCast(u16, tok_mod), + @enumToInt(token_type), + @bitCast(u16, token_modifiers), }); - self.previous_source_index = start; + self.previous_source_index = loc.start; } }; @@ -326,9 +266,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .aligned_var_decl, => { const var_decl = tree.fullVarDecl(node).?; - if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx| - try writeDocComments(builder, tree, comment_idx); - try writeToken(builder, var_decl.visib_token, .keyword); try writeToken(builder, var_decl.extern_export_token, .keyword); try writeToken(builder, var_decl.threadlocal_token, .keyword); @@ -447,8 +384,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v => { var buf: [1]Ast.Node.Index = undefined; const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?; - if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |docs| - try writeDocComments(builder, tree, docs); try writeToken(builder, fn_proto.visib_token, .keyword); try writeToken(builder, fn_proto.extern_export_inline_token, .keyword); @@ -487,22 +422,10 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v if (tag == .fn_decl) try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, - .anyframe_type => { + .anyframe_type, .@"defer" => { try writeToken(builder, main_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, - .@"defer" => { - try writeToken(builder, main_token, .keyword); - try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); - }, - .@"comptime", - .@"nosuspend", - => { - if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc| - try writeDocComments(builder, tree, doc); - try 
writeToken(builder, main_token, .keyword); - try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); - }, .@"switch", .switch_comma, => { @@ -712,7 +635,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v if (node_data[node].lhs != 0) try writeToken(builder, node_data[node].lhs, .label); }, - .@"suspend", .@"return" => { + .@"comptime", .@"nosuspend", .@"suspend", .@"return" => { try writeToken(builder, main_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); }, @@ -797,9 +720,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .asm_input, => unreachable, .test_decl => { - if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc| - try writeDocComments(builder, tree, doc); - try writeToken(builder, main_token, .keyword); switch (token_tags[node_data[node].lhs]) { .string_literal => try writeToken(builder, node_data[node].lhs, .string), @@ -996,14 +916,9 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void { const tree = builder.handle.tree; const container_field = tree.fullContainerField(node).?; - const base = tree.nodes.items(.main_token)[node]; - const tokens = tree.tokens.items(.tag); var allocator = builder.arena; - if (Analyser.getDocCommentTokenIndex(tokens, base)) |docs| - try writeDocComments(builder, tree, docs); - try writeToken(builder, container_field.comptime_token, .keyword); if (!container_field.ast.tuple_like) { if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.main_token, tok_type); diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index f090f3a..5200431 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -15,7 +15,6 @@ test "semantic tokens - empty" { } test "semantic tokens - comment" { - if (true) return error.SkipZigTest; // TODO try testSemanticTokens( \\// hello world , &.{ @@ -27,6 +26,14 @@ test "semantic tokens - comment" { , &.{ .{ "//! hello world", .comment, .{ .documentation = true } }, }); + try testSemanticTokens( + \\//! first line + \\//! second line + \\ + , &.{ + .{ "//! first line", .comment, .{ .documentation = true } }, + .{ "//! 
second line", .comment, .{ .documentation = true } }, + }); try testSemanticTokens( \\/// hello world \\const a; From ae5fa110b5b81abf2fbdf7b93569a861a66e7728 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 31 Mar 2023 20:49:25 +0200 Subject: [PATCH 2/8] test and simplify semantic tokens on function call --- src/features/semantic_tokens.zig | 6 ----- tests/lsp_features/semantic_tokens.zig | 36 ++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index 41546bd..0e138d2 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -44,7 +44,6 @@ const Builder = struct { analyser: *Analyser, handle: *const DocumentStore.Handle, previous_source_index: usize = 0, - previous_token: ?Ast.TokenIndex = null, token_buffer: std.ArrayListUnmanaged(u32) = .{}, encoding: offsets.Encoding, limited: bool, @@ -595,11 +594,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v try writeToken(builder, call.async_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr }); - if (builder.previous_token) |prev| { - if (prev != ast.lastToken(tree, call.ast.fn_expr) and token_tags[ast.lastToken(tree, call.ast.fn_expr)] == .identifier) { - try writeToken(builder, ast.lastToken(tree, call.ast.fn_expr), .function); - } - } for (call.ast.params) |param| try callWriteNodeTokens(allocator, .{ builder, param }); }, .slice, diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 5200431..303e118 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -226,6 +226,42 @@ test "semantic tokens - field access" { }); } +test "semantic tokens - call" { + try testSemanticTokens( + \\fn foo() void {} + \\const alpha = foo(); + , &.{ + .{ "fn", .keyword, .{} }, + .{ "foo", .function, .{ .declaration = true } }, + .{ "void", .type, .{} }, + + .{ "const", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "foo", .function, .{} }, + }); + try testSemanticTokens( + \\const ns = struct { + \\ fn foo() void {} + \\}; + \\const alpha = ns.foo(); + , &.{ + .{ "const", .keyword, .{} }, + .{ "ns", .type, .{ .namespace = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "struct", .keyword, .{} }, + .{ "fn", .keyword, .{} }, + .{ "foo", .function, .{ .declaration = true } }, + .{ "void", .type, .{} }, + + .{ "const", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "ns", .type, .{ .namespace = true } }, + .{ "foo", .function, .{} }, + }); +} + test "semantic tokens - catch" { try testSemanticTokens( \\var alpha = a catch b; From 6836f9361d35b8470aee5eaa027b2c306545fffb Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Sun, 26 Mar 2023 00:50:43 +0100 Subject: [PATCH 3/8] use `std.meta.fieldNames` instead of manually re-implementing it --- src/Server.zig | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/src/Server.zig b/src/Server.zig index 213b87e..cbb9d1c 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -562,22 +562,8 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ .full = .{ .bool = true }, .range = .{ .bool = true }, .legend = .{ - .tokenTypes = comptime block: { - const tokTypeFields = 
std.meta.fields(semantic_tokens.TokenType); - var names: [tokTypeFields.len][]const u8 = undefined; - for (tokTypeFields, &names) |field, *name| { - name.* = field.name; - } - break :block &names; - }, - .tokenModifiers = comptime block: { - const tokModFields = std.meta.fields(semantic_tokens.TokenModifiers); - var names: [tokModFields.len][]const u8 = undefined; - for (tokModFields, &names) |field, *name| { - name.* = field.name; - } - break :block &names; - }, + .tokenTypes = std.meta.fieldNames(semantic_tokens.TokenType), + .tokenModifiers = std.meta.fieldNames(semantic_tokens.TokenModifiers), }, }, }, From 498517ba715f7c65c4beb247abb2c9a0d6f12c96 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 31 Mar 2023 17:38:17 +0200 Subject: [PATCH 4/8] replace semantic token type .field with .property --- src/features/semantic_tokens.zig | 17 +++++++++-------- tests/lsp_features/semantic_tokens.zig | 16 ++++++++-------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index 0e138d2..a22c305 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -13,7 +13,7 @@ pub const TokenType = enum(u32) { parameter, variable, enumMember, - field, + property, errorTag, function, keyword, @@ -115,7 +115,7 @@ const Builder = struct { .parameter, .variable, .enumMember, - .field, + .property, .errorTag, .function, .label, @@ -162,8 +162,9 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H const main_token = handle.tree.nodes.items(.main_token)[container_decl]; if (main_token > handle.tree.tokens.len) return null; return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) { - .keyword_struct => .field, + .keyword_struct => .property, .keyword_union, .keyword_enum => .enumMember, + .keyword_error => .errorTag, else => null, }); } @@ -229,7 +230,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .container_field, .container_field_align, .container_field_init, - => try writeContainerField(builder, node, .field), + => try writeContainerField(builder, node, .property), .@"errdefer" => { try writeToken(builder, main_token, .keyword); @@ -253,7 +254,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v for (statements) |child| { if (node_tags[child].isContainerField()) { - try writeContainerField(builder, child, .field); + try writeContainerField(builder, child, .property); } else { try callWriteNodeTokens(allocator, .{ builder, child }); } @@ -573,8 +574,8 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v for (struct_init.ast.fields) |field_init| { const init_token = tree.firstToken(field_init); - try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.' - try writeToken(builder, init_token - 2, field_token_type orelse .field); // name + try writeToken(builder, init_token - 3, field_token_type orelse .property); // '.' 
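+        // ('.' sits at init_token - 3, the field name at - 2, and '=' at - 1,
+        // since firstToken(field_init) is the first token of the value expression)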
+ try writeToken(builder, init_token - 2, field_token_type orelse .property); // name try writeToken(builder, init_token - 1, .operator); // '=' try callWriteNodeTokens(allocator, .{ builder, field_init }); } @@ -823,7 +824,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v const tok_type: ?TokenType = if (ast.isContainer(lhs_type.handle.tree, left_type_node)) fieldTokenType(decl_node, lhs_type.handle) else if (left_type_node == 0) - TokenType.field + .property else null; diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 303e118..4363ec7 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -516,9 +516,9 @@ test "semantic tokens - struct" { .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, - .{ "alpha", .field, .{} }, + .{ "alpha", .property, .{} }, .{ "u32", .type, .{} }, - .{ "beta", .field, .{} }, + .{ "beta", .property, .{} }, .{ "void", .type, .{} }, }); try testSemanticTokens( @@ -531,12 +531,12 @@ test "semantic tokens - struct" { .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, - .{ "alpha", .field, .{} }, + .{ "alpha", .property, .{} }, .{ "u32", .type, .{} }, .{ "=", .operator, .{} }, .{ "3", .number, .{} }, .{ "comptime", .keyword, .{} }, - .{ "beta", .field, .{} }, + .{ "beta", .property, .{} }, .{ "void", .type, .{} }, .{ "=", .operator, .{} }, }); @@ -592,7 +592,7 @@ test "semantic tokens - union" { .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, - .{ "alpha", .field, .{} }, + .{ "alpha", .property, .{} }, }); try testSemanticTokens( \\const Foo = union(E) { @@ -605,8 +605,8 @@ test "semantic tokens - union" { .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, - .{ "alpha", .field, .{} }, - .{ "beta", .field, .{} }, + .{ "alpha", .property, .{} }, + .{ "beta", .property, .{} }, .{ "void", .keyword, .{} }, }); try testSemanticTokens( @@ -619,7 +619,7 @@ test "semantic tokens - union" { .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, - .{ "alpha", .field, .{} }, + .{ "alpha", .property, .{} }, .{ "void", .keyword, .{} }, .{ "align", .keyword, .{} }, .{ "2", .number, .{} }, From d9965c78345ee914427557cd5c7aceea66aea7d8 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Thu, 30 Mar 2023 14:00:09 +0200 Subject: [PATCH 5/8] fix semantic tokens on if capture by ref --- src/features/semantic_tokens.zig | 6 ++++-- tests/lsp_features/semantic_tokens.zig | 10 ++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index a22c305..410111c 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -516,8 +516,10 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v try writeToken(builder, if_node.ast.if_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr }); - if (if_node.payload_token) |payload| { - try writeTokenMod(builder, payload, .variable, .{ .declaration = true }); + if (if_node.payload_token) |payload_token| { + const capture_is_ref = token_tags[payload_token] == .asterisk; + const actual_payload = payload_token + @boolToInt(capture_is_ref); + try writeTokenMod(builder, actual_payload, .variable, .{ 
.declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, if_node.ast.then_expr }); diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 4363ec7..9977509 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -808,6 +808,16 @@ test "semantic tokens - if" { .{ "err", .variable, .{ .declaration = true } }, .{ "err", .variable, .{} }, }); + try testSemanticTokens( + \\const foo = if (null) |*value| {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "if", .keyword, .{} }, + .{ "null", .keywordLiteral, .{} }, + .{ "value", .variable, .{ .declaration = true } }, + }); } test "semantic tokens - while" { From cde544125abbe72dec6b3eb3bccc5832ea5b34a5 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 31 Mar 2023 18:16:46 +0200 Subject: [PATCH 6/8] bring semantic token types and modifiers closer to predefined ones --- src/features/semantic_tokens.zig | 46 ++++++++++++++++---------- tests/lsp_features/semantic_tokens.zig | 44 ++++++++++++------------ 2 files changed, 51 insertions(+), 39 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index 410111c..afec1bf 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -24,19 +24,27 @@ pub const TokenType = enum(u32) { builtin, label, keywordLiteral, + namespace, + @"struct", + @"enum", + @"union", + @"opaque", }; pub const TokenModifiers = packed struct(u16) { - namespace: bool = false, - @"struct": bool = false, - @"enum": bool = false, - @"union": bool = false, - @"opaque": bool = false, declaration: bool = false, + definition: bool = false, + readonly: bool = false, + static: bool = false, + deprecated: bool = false, + abstract: bool = false, @"async": bool = false, + modification: bool = false, documentation: bool = false, + defaultLibrary: bool = false, + generic: bool = false, - _: u7 = 0, + _: u5 = 0, }; const Builder = struct { @@ -172,18 +180,22 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H fn colorIdentifierBasedOnType(builder: *Builder, type_node: Analyser.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void { if (type_node.type.is_type_val) { var new_tok_mod = tok_mod; - if (type_node.isNamespace()) - new_tok_mod.namespace = true - else if (type_node.isStructType()) - new_tok_mod.@"struct" = true - else if (type_node.isEnumType()) - new_tok_mod.@"enum" = true - else if (type_node.isUnionType()) - new_tok_mod.@"union" = true - else if (type_node.isOpaqueType()) - new_tok_mod.@"opaque" = true; - try writeTokenMod(builder, target_tok, .type, new_tok_mod); + const token_type: TokenType = + if (type_node.isNamespace()) + .namespace + else if (type_node.isStructType()) + .@"struct" + else if (type_node.isEnumType()) + .@"enum" + else if (type_node.isUnionType()) + .@"union" + else if (type_node.isOpaqueType()) + .@"opaque" + else + .type; + + try writeTokenMod(builder, target_tok, token_type, new_tok_mod); } else if (type_node.isTypeFunc()) { try writeTokenMod(builder, target_tok, .type, tok_mod); } else if (type_node.isFunc()) { diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 9977509..8d94837 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -202,7 +202,7 @@ test "semantic tokens - field 
access" { \\const std = @import("std"); , &.{ .{ "const", .keyword, .{} }, - .{ "std", .type, .{ .namespace = true, .declaration = true } }, + .{ "std", .namespace, .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "@import", .builtin, .{} }, .{ "\"std\"", .string, .{} }, @@ -212,17 +212,17 @@ test "semantic tokens - field access" { \\const Ast = std.zig.Ast; , &.{ .{ "const", .keyword, .{} }, - .{ "std", .type, .{ .namespace = true, .declaration = true } }, + .{ "std", .namespace, .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "@import", .builtin, .{} }, .{ "\"std\"", .string, .{} }, .{ "const", .keyword, .{} }, - .{ "Ast", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "Ast", .@"struct", .{ .declaration = true } }, .{ "=", .operator, .{} }, - .{ "std", .type, .{ .namespace = true } }, - .{ "zig", .type, .{ .namespace = true } }, - .{ "Ast", .type, .{ .@"struct" = true } }, + .{ "std", .namespace, .{} }, + .{ "zig", .namespace, .{} }, + .{ "Ast", .@"struct", .{} }, }); } @@ -247,7 +247,7 @@ test "semantic tokens - call" { \\const alpha = ns.foo(); , &.{ .{ "const", .keyword, .{} }, - .{ "ns", .type, .{ .namespace = true, .declaration = true } }, + .{ "ns", .namespace, .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, .{ "fn", .keyword, .{} }, @@ -257,7 +257,7 @@ test "semantic tokens - call" { .{ "const", .keyword, .{} }, .{ "alpha", .variable, .{ .declaration = true } }, .{ "=", .operator, .{} }, - .{ "ns", .type, .{ .namespace = true } }, + .{ "ns", .namespace, .{} }, .{ "foo", .function, .{} }, }); } @@ -492,7 +492,7 @@ test "semantic tokens - struct" { \\const Foo = struct {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .namespace = true, .declaration = true } }, + .{ "Foo", .namespace, .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, }); @@ -500,7 +500,7 @@ test "semantic tokens - struct" { \\const Foo = packed struct(u32) {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .namespace = true, .declaration = true } }, + .{ "Foo", .namespace, .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "packed", .keyword, .{} }, .{ "struct", .keyword, .{} }, @@ -513,7 +513,7 @@ test "semantic tokens - struct" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "Foo", .@"struct", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, .{ "alpha", .property, .{} }, @@ -528,7 +528,7 @@ test "semantic tokens - struct" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "Foo", .@"struct", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, .{ "alpha", .property, .{} }, @@ -552,7 +552,7 @@ test "semantic tokens - struct" { .{ "=", .operator, .{} }, .{ "u32", .type, .{} }, .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "Foo", .@"struct", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "struct", .keyword, .{} }, .{ "u32", .type, .{} }, @@ -567,7 +567,7 @@ test "semantic tokens - union" { \\const Foo = union {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "Foo", .@"union", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, }); @@ -575,7 +575,7 @@ test "semantic tokens - union" { \\const Foo = packed union(enum) {}; , &.{ .{ "const", .keyword, .{} }, 
- .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "Foo", .@"union", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "packed", .keyword, .{} }, .{ "union", .keyword, .{} }, @@ -588,7 +588,7 @@ test "semantic tokens - union" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "Foo", .@"union", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, @@ -601,7 +601,7 @@ test "semantic tokens - union" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "Foo", .@"union", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, @@ -615,7 +615,7 @@ test "semantic tokens - union" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "Foo", .@"union", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, @@ -632,7 +632,7 @@ test "semantic tokens - enum" { \\const Foo = enum {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } }, + .{ "Foo", .@"enum", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "enum", .keyword, .{} }, }); @@ -643,7 +643,7 @@ test "semantic tokens - enum" { \\}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } }, + .{ "Foo", .@"enum", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "enum", .keyword, .{} }, .{ "alpha", .enumMember, .{} }, @@ -653,7 +653,7 @@ test "semantic tokens - enum" { \\const Foo = enum(u4) {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } }, + .{ "Foo", .@"enum", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "enum", .keyword, .{} }, .{ "u4", .type, .{} }, @@ -687,7 +687,7 @@ test "semantic tokens - opaque" { \\const Foo = opaque {}; , &.{ .{ "const", .keyword, .{} }, - .{ "Foo", .type, .{ .@"opaque" = true, .declaration = true } }, + .{ "Foo", .@"opaque", .{ .declaration = true } }, .{ "=", .operator, .{} }, .{ "opaque", .keyword, .{} }, }); From 6f0907bc1610feedcda63905e5b59b78e11272f6 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 31 Mar 2023 20:44:01 +0200 Subject: [PATCH 7/8] fix semantic tokens on union and enum container fields --- src/features/semantic_tokens.zig | 35 ++++++++++++++------------ tests/lsp_features/semantic_tokens.zig | 10 ++++---- 2 files changed, 24 insertions(+), 21 deletions(-) diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index afec1bf..b79b090 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -170,8 +170,8 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H const main_token = handle.tree.nodes.items(.main_token)[container_decl]; if (main_token > handle.tree.tokens.len) return null; return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) { - .keyword_struct => .property, - .keyword_union, .keyword_enum => .enumMember, + .keyword_struct, .keyword_union => .property, + .keyword_enum => .enumMember, .keyword_error => .errorTag, else => null, }); @@ -242,7 +242,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .container_field, .container_field_align, .container_field_init, - => try 
writeContainerField(builder, node, .property), + => try writeContainerField(builder, node, 0), .@"errdefer" => { try writeToken(builder, main_token, .keyword); @@ -265,11 +265,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v const statements = ast.blockStatements(tree, node, &buffer).?; for (statements) |child| { - if (node_tags[child].isContainerField()) { - try writeContainerField(builder, child, .property); - } else { - try callWriteNodeTokens(allocator, .{ builder, child }); - } + try callWriteNodeTokens(allocator, .{ builder, child }); } }, .global_var_decl, @@ -336,10 +332,9 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v try writeToken(builder, enum_token, .keyword); } else try callWriteNodeTokens(allocator, .{ builder, decl.ast.arg }); - const field_token_type = fieldTokenType(node, handle); for (decl.ast.members) |child| { if (node_tags[child].isContainerField()) { - try writeContainerField(builder, child, field_token_type); + try writeContainerField(builder, child, node); } else { try callWriteNodeTokens(allocator, .{ builder, child }); } @@ -922,15 +917,23 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v } } -fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void { +fn writeContainerField(builder: *Builder, node: Ast.Node.Index, container_decl: Ast.Node.Index) !void { const tree = builder.handle.tree; - const container_field = tree.fullContainerField(node).?; - var allocator = builder.arena; + var container_field = tree.fullContainerField(node).?; + const field_token_type = fieldTokenType(container_decl, builder.handle) orelse .property; + + const token_tags = tree.tokens.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + + if (container_decl != 0 and token_tags[main_tokens[container_decl]] != .keyword_struct) { + container_field.convertToNonTupleLike(tree.nodes); + } + try writeToken(builder, container_field.comptime_token, .keyword); if (!container_field.ast.tuple_like) { - if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.main_token, tok_type); + try writeToken(builder, container_field.ast.main_token, field_token_type); } if (container_field.ast.type_expr != 0) { @@ -941,13 +944,13 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type } } - if (container_field.ast.value_expr != 0) block: { + if (container_field.ast.value_expr != 0) { const eq_tok: Ast.TokenIndex = if (container_field.ast.align_expr != 0) ast.lastToken(tree, container_field.ast.align_expr) + 2 else if (container_field.ast.type_expr != 0) ast.lastToken(tree, container_field.ast.type_expr) + 1 else - break :block; + container_field.ast.main_token + 1; try writeToken(builder, eq_tok, .operator); try callWriteNodeTokens(allocator, .{ builder, container_field.ast.value_expr }); diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 8d94837..95ff282 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -581,7 +581,6 @@ test "semantic tokens - union" { .{ "union", .keyword, .{} }, .{ "enum", .keyword, .{} }, }); - if (true) return error.SkipZigTest; // TODO try testSemanticTokens( \\const Foo = union(E) { \\ alpha, @@ -607,7 +606,7 @@ test "semantic tokens - union" { .{ "E", .variable, .{} }, .{ "alpha", .property, .{} }, .{ "beta", .property, .{} }, - .{ "void", .keyword, .{} }, + .{ "void", .type, .{} }, 
}); try testSemanticTokens( \\const Foo = union(E) { @@ -620,14 +619,13 @@ test "semantic tokens - union" { .{ "union", .keyword, .{} }, .{ "E", .variable, .{} }, .{ "alpha", .property, .{} }, - .{ "void", .keyword, .{} }, + .{ "void", .type, .{} }, .{ "align", .keyword, .{} }, .{ "2", .number, .{} }, }); } test "semantic tokens - enum" { - if (true) return error.SkipZigTest; // TODO try testSemanticTokens( \\const Foo = enum {}; , &.{ @@ -638,7 +636,7 @@ test "semantic tokens - enum" { }); try testSemanticTokens( \\const Foo = enum { - \\ alpha, + \\ alpha = 3, \\ beta, \\}; , &.{ @@ -647,6 +645,8 @@ test "semantic tokens - enum" { .{ "=", .operator, .{} }, .{ "enum", .keyword, .{} }, .{ "alpha", .enumMember, .{} }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, .{ "beta", .enumMember, .{} }, }); try testSemanticTokens( From 5cc7b1341e62a5cdb5d03ad55974210f0f787349 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Tue, 9 May 2023 19:00:16 +0200 Subject: [PATCH 8/8] skip std module resolution test on webassembly target --- tests/lsp_features/semantic_tokens.zig | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 95ff282..567af1c 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -197,6 +197,7 @@ test "semantic tokens - operators" { } test "semantic tokens - field access" { + if (builtin.target.isWasm()) return error.SkipZigTest; // this will make sure that the std module can be resolved try testSemanticTokens( \\const std = @import("std");
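
For reference, the five-u32-per-token stream that `addDirect` appends follows the LSP semantic-tokens wire format: line and start-character are delta-encoded against the previous token, and the `token_type` index refers into the `tokenTypes` legend that `initializeHandler` now builds with `std.meta.fieldNames`. Below is a minimal decoder sketch, written against the same Zig version as these patches; `decode` and `Token` are invented names for illustration, not part of ZLS:

    // illustrative sketch, not part of the patch series
    const std = @import("std");

    const Token = struct { line: u32, character: u32, length: u32, token_type: u32, modifiers: u16 };

    /// Turn the flat, delta-encoded `data` array back into absolute positions.
    /// `deltaLine` is relative to the previous token; `deltaStartChar` is
    /// relative to the previous token's start only when both share a line.
    fn decode(allocator: std.mem.Allocator, data: []const u32) ![]Token {
        var tokens = std.ArrayList(Token).init(allocator);
        errdefer tokens.deinit();
        var line: u32 = 0;
        var character: u32 = 0;
        var i: usize = 0;
        while (i + 5 <= data.len) : (i += 5) {
            line += data[i];
            character = if (data[i] == 0) character + data[i + 1] else data[i + 1];
            try tokens.append(.{
                .line = line,
                .character = character,
                .length = data[i + 2],
                .token_type = data[i + 3],
                .modifiers = @intCast(u16, data[i + 4]),
            });
        }
        return tokens.toOwnedSlice();
    }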