Merge pull request #1103 from Techatrix/refactor-semantic-tokens
provide more semantic token information + refactor and fixes
Commit af65197cc2
@@ -562,22 +562,8 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
 .full = .{ .bool = true },
 .range = .{ .bool = true },
 .legend = .{
-    .tokenTypes = comptime block: {
-        const tokTypeFields = std.meta.fields(semantic_tokens.TokenType);
-        var names: [tokTypeFields.len][]const u8 = undefined;
-        for (tokTypeFields, &names) |field, *name| {
-            name.* = field.name;
-        }
-        break :block &names;
-    },
-    .tokenModifiers = comptime block: {
-        const tokModFields = std.meta.fields(semantic_tokens.TokenModifiers);
-        var names: [tokModFields.len][]const u8 = undefined;
-        for (tokModFields, &names) |field, *name| {
-            name.* = field.name;
-        }
-        break :block &names;
-    },
+    .tokenTypes = std.meta.fieldNames(semantic_tokens.TokenType),
+    .tokenModifiers = std.meta.fieldNames(semantic_tokens.TokenModifiers),
 },
 },
 },
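Note (illustration, not part of the diff): the hunk above collapses the hand-written comptime loops into `std.meta.fieldNames`, which yields the same array of field-name strings for the LSP legend. A minimal standalone sketch of that equivalence, assuming the Zig 0.11-dev std library this PR targets and a stand-in `TokenType` enum:

```zig
const std = @import("std");

// Stand-in enum; the real TokenType lives in semantic_tokens.zig.
const TokenType = enum { namespace, type, function };

test "fieldNames matches the removed comptime loop" {
    // What the removed block built by hand:
    const manual = comptime blk: {
        const fields = std.meta.fields(TokenType);
        var names: [fields.len][]const u8 = undefined;
        for (fields, &names) |field, *name| name.* = field.name;
        break :blk names;
    };
    // What the new capability legend uses:
    const generated = std.meta.fieldNames(TokenType);
    for (manual, generated) |a, b| {
        try std.testing.expectEqualStrings(a, b);
    }
}
```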
@@ -13,7 +13,7 @@ pub const TokenType = enum(u32) {
     parameter,
     variable,
     enumMember,
-    field,
+    property,
     errorTag,
     function,
     keyword,
@@ -24,19 +24,27 @@ pub const TokenType = enum(u32) {
     builtin,
     label,
     keywordLiteral,
+    namespace,
+    @"struct",
+    @"enum",
+    @"union",
+    @"opaque",
 };
 
 pub const TokenModifiers = packed struct(u16) {
-    namespace: bool = false,
-    @"struct": bool = false,
-    @"enum": bool = false,
-    @"union": bool = false,
-    @"opaque": bool = false,
     declaration: bool = false,
+    definition: bool = false,
+    readonly: bool = false,
+    static: bool = false,
+    deprecated: bool = false,
+    abstract: bool = false,
     @"async": bool = false,
+    modification: bool = false,
     documentation: bool = false,
+    defaultLibrary: bool = false,
 
     generic: bool = false,
-    _: u7 = 0,
+    _: u5 = 0,
 };
 
 const Builder = struct {
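Note (illustration, not part of the diff): the container-kind flags move out of `TokenModifiers` and become `TokenType` members, so the padding shrinks from `u7` to `u5` while the struct stays a `packed struct(u16)`. The bit position of each field must line up with the order of `.tokenModifiers` in the legend, which is what makes the later `@bitCast(u16, ...)` valid. A small sketch of that mapping, using the two-argument `@bitCast` builtin syntax that appears elsewhere in this diff:

```zig
const std = @import("std");

// Copy of the new TokenModifiers layout from this PR.
const TokenModifiers = packed struct(u16) {
    declaration: bool = false,
    definition: bool = false,
    readonly: bool = false,
    static: bool = false,
    deprecated: bool = false,
    abstract: bool = false,
    @"async": bool = false,
    modification: bool = false,
    documentation: bool = false,
    defaultLibrary: bool = false,
    generic: bool = false,
    _: u5 = 0,
};

test "modifier flags map to legend bit positions" {
    // Packed struct fields start at the least significant bit:
    // field 0 (declaration) -> bit 0, field 8 (documentation) -> bit 8.
    const mods = TokenModifiers{ .declaration = true, .documentation = true };
    try std.testing.expectEqual(@as(u16, (1 << 0) | (1 << 8)), @bitCast(u16, mods));
}
```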
@@ -44,88 +52,26 @@ const Builder = struct {
     analyser: *Analyser,
     handle: *const DocumentStore.Handle,
     previous_source_index: usize = 0,
-    previous_token: ?Ast.TokenIndex = null,
     token_buffer: std.ArrayListUnmanaged(u32) = .{},
     encoding: offsets.Encoding,
     limited: bool,
 
-    fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
-        switch (token_type) {
-            .type,
-            .parameter,
-            .variable,
-            .enumMember,
-            .field,
-            .errorTag,
-            .function,
-            .label,
-            => {},
-            else => if (self.limited) return,
-        }
+    fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) error{OutOfMemory}!void {
         const tree = self.handle.tree;
         const starts = tree.tokens.items(.start);
 
-        if (starts[token] < self.previous_source_index) return;
-        if (self.previous_token) |prev| {
-            // Highlight gaps between AST nodes. These can contain comments or malformed code.
-            var i = prev + 1;
-            while (i < token) : (i += 1) {
-                try handleComments(self, starts[i - 1], starts[i]);
-            }
-        }
-        self.previous_token = token;
-        try self.handleComments(starts[token -| 1], starts[token]);
-
-        const length = offsets.tokenLength(tree, token, self.encoding);
-        try self.addDirect(token_type, token_modifiers, starts[token], length);
+        try self.handleComments(self.previous_source_index, starts[token]);
+        try self.addDirect(token_type, token_modifiers, offsets.tokenToLoc(tree, token));
     }
 
     fn finish(self: *Builder) error{OutOfMemory}!types.SemanticTokens {
-        const starts = self.handle.tree.tokens.items(.start);
-
-        const last_token = self.previous_token orelse 0;
-        var i = last_token + 1;
-        while (i < starts.len) : (i += 1) {
-            try handleComments(self, starts[i - 1], starts[i]);
-        }
-        try self.handleComments(starts[starts.len - 1], self.handle.tree.source.len);
+        try self.handleComments(self.previous_source_index, self.handle.tree.source.len);
 
         return .{ .data = try self.token_buffer.toOwnedSlice(self.arena) };
     }
 
-    /// Highlight a token without semantic context.
-    fn handleToken(self: *Builder, tok: Ast.TokenIndex) !void {
-        const tree = self.handle.tree;
-        // TODO More highlighting here
-        const tok_id = tree.tokens.items(.tag)[tok];
-        const tok_type: TokenType = switch (tok_id) {
-            .keyword_unreachable => .keywordLiteral,
-            .number_literal => .number,
-            .string_literal, .multiline_string_literal_line, .char_literal => .string,
-            .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon => return,
-
-            else => blk: {
-                const id = @enumToInt(tok_id);
-                if (id >= @enumToInt(std.zig.Token.Tag.keyword_align) and
-                    id <= @enumToInt(std.zig.Token.Tag.keyword_while))
-                    break :blk TokenType.keyword;
-                if (id >= @enumToInt(std.zig.Token.Tag.bang) and
-                    id <= @enumToInt(std.zig.Token.Tag.tilde))
-                    break :blk TokenType.operator;
-
-                return;
-            },
-        };
-        const start = tree.tokens.items(.start)[tok];
-        const length = offsets.tokenLength(tree, tok, self.encoding);
-        try self.addDirect(tok_type, .{}, start, length);
-    }
-
     /// Highlight normal comments and doc comments.
-    fn handleComments(self: *Builder, from: usize, to: usize) !void {
-        if (from == to) return;
-        std.debug.assert(from < to);
+    fn handleComments(self: *Builder, from: usize, to: usize) error{OutOfMemory}!void {
+        if (from >= to) return;
 
         const source = self.handle.tree.source;
 
@@ -165,19 +111,19 @@ const Builder = struct {
 
             while (i < to and source[i] != '\n') : (i += 1) {}
 
-            const length = offsets.locLength(self.handle.tree.source, .{ .start = comment_start, .end = i }, self.encoding);
-            try self.addDirect(TokenType.comment, mods, comment_start, length);
+            try self.addDirect(.comment, mods, .{ .start = comment_start, .end = i });
         }
     }
 
-    fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void {
-        if (start < self.previous_source_index) return;
-        switch (tok_type) {
+    fn addDirect(self: *Builder, token_type: TokenType, token_modifiers: TokenModifiers, loc: offsets.Loc) error{OutOfMemory}!void {
+        std.debug.assert(loc.start <= loc.end);
+        if (loc.start < self.previous_source_index) return;
+        switch (token_type) {
             .type,
             .parameter,
             .variable,
             .enumMember,
-            .field,
+            .property,
             .errorTag,
             .function,
             .label,
@@ -185,17 +131,18 @@ const Builder = struct {
             else => if (self.limited) return,
         }
 
-        const text = self.handle.tree.source[self.previous_source_index..start];
-        const delta = offsets.indexToPosition(text, text.len, self.encoding);
+        const delta_text = self.handle.tree.source[self.previous_source_index..loc.start];
+        const delta = offsets.indexToPosition(delta_text, delta_text.len, self.encoding);
+        const length = offsets.locLength(self.handle.tree.source, loc, self.encoding);
 
         try self.token_buffer.appendSlice(self.arena, &.{
             @truncate(u32, delta.line),
             @truncate(u32, delta.character),
             @truncate(u32, length),
-            @enumToInt(tok_type),
-            @bitCast(u16, tok_mod),
+            @enumToInt(token_type),
+            @bitCast(u16, token_modifiers),
         });
-        self.previous_source_index = start;
+        self.previous_source_index = loc.start;
     }
 };
 
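Note (illustration, not part of the diff): `addDirect` appends one group of five `u32`s per token — delta line, delta start character, length, token type index, modifier bit set — which is the LSP `SemanticTokens.data` encoding; tracking `previous_source_index` is what makes the positions relative. A sketch of the client-side decoding under that assumption, with placeholder legend indices (keyword = 0, variable = 1) and modifier bit 0 = declaration:

```zig
const std = @import("std");

/// Recover the absolute column of the n-th token from delta-encoded data,
/// the way an LSP client would consume the Builder's token_buffer.
fn absoluteColumn(data: []const u32, token_index: usize) u32 {
    var character: u32 = 0;
    var i: usize = 0;
    while (i <= token_index) : (i += 1) {
        const delta_line = data[i * 5 + 0];
        const delta_char = data[i * 5 + 1];
        if (delta_line != 0) character = 0; // the column delta restarts on a new line
        character += delta_char;
    }
    return character;
}

test "clients accumulate deltaLine/deltaStartChar" {
    // Source line "const std": `const` (keyword) at col 0, `std` (variable) at col 6.
    const data = [_]u32{
        0, 0, 5, 0, 0, // `const`: first token, absolute position
        0, 6, 3, 1, 1, // `std`: same line, 6 columns after `const`, declaration bit set
    };
    try std.testing.expectEqual(@as(u32, 6), absoluteColumn(&data, 1));
}
```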
@@ -223,8 +170,9 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H
     const main_token = handle.tree.nodes.items(.main_token)[container_decl];
     if (main_token > handle.tree.tokens.len) return null;
     return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) {
-        .keyword_struct => .field,
-        .keyword_union, .keyword_enum => .enumMember,
+        .keyword_struct, .keyword_union => .property,
+        .keyword_enum => .enumMember,
+        .keyword_error => .errorTag,
         else => null,
     });
 }
@@ -232,18 +180,22 @@ fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.H
 fn colorIdentifierBasedOnType(builder: *Builder, type_node: Analyser.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
     if (type_node.type.is_type_val) {
         var new_tok_mod = tok_mod;
-        if (type_node.isNamespace())
-            new_tok_mod.namespace = true
-        else if (type_node.isStructType())
-            new_tok_mod.@"struct" = true
-        else if (type_node.isEnumType())
-            new_tok_mod.@"enum" = true
-        else if (type_node.isUnionType())
-            new_tok_mod.@"union" = true
-        else if (type_node.isOpaqueType())
-            new_tok_mod.@"opaque" = true;
 
-        try writeTokenMod(builder, target_tok, .type, new_tok_mod);
+        const token_type: TokenType =
+            if (type_node.isNamespace())
+                .namespace
+            else if (type_node.isStructType())
+                .@"struct"
+            else if (type_node.isEnumType())
+                .@"enum"
+            else if (type_node.isUnionType())
+                .@"union"
+            else if (type_node.isOpaqueType())
+                .@"opaque"
+            else
+                .type;
+
+        try writeTokenMod(builder, target_tok, token_type, new_tok_mod);
     } else if (type_node.isTypeFunc()) {
         try writeTokenMod(builder, target_tok, .type, tok_mod);
     } else if (type_node.isFunc()) {
@@ -290,7 +242,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
         .container_field,
         .container_field_align,
         .container_field_init,
-        => try writeContainerField(builder, node, .field),
+        => try writeContainerField(builder, node, 0),
         .@"errdefer" => {
             try writeToken(builder, main_token, .keyword);
 
@@ -313,12 +265,8 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             const statements = ast.blockStatements(tree, node, &buffer).?;
 
             for (statements) |child| {
-                if (node_tags[child].isContainerField()) {
-                    try writeContainerField(builder, child, .field);
-                } else {
                 try callWriteNodeTokens(allocator, .{ builder, child });
             }
-            }
         },
         .global_var_decl,
         .local_var_decl,
@@ -326,9 +274,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
         .aligned_var_decl,
         => {
             const var_decl = tree.fullVarDecl(node).?;
-            if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
-                try writeDocComments(builder, tree, comment_idx);
-
             try writeToken(builder, var_decl.visib_token, .keyword);
             try writeToken(builder, var_decl.extern_export_token, .keyword);
             try writeToken(builder, var_decl.threadlocal_token, .keyword);
@@ -387,10 +332,9 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
                 try writeToken(builder, enum_token, .keyword);
             } else try callWriteNodeTokens(allocator, .{ builder, decl.ast.arg });
 
-            const field_token_type = fieldTokenType(node, handle);
             for (decl.ast.members) |child| {
                 if (node_tags[child].isContainerField()) {
-                    try writeContainerField(builder, child, field_token_type);
+                    try writeContainerField(builder, child, node);
                 } else {
                     try callWriteNodeTokens(allocator, .{ builder, child });
                 }
@@ -447,8 +391,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
         => {
             var buf: [1]Ast.Node.Index = undefined;
             const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
-            if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |docs|
-                try writeDocComments(builder, tree, docs);
 
             try writeToken(builder, fn_proto.visib_token, .keyword);
             try writeToken(builder, fn_proto.extern_export_inline_token, .keyword);
@@ -487,22 +429,10 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             if (tag == .fn_decl)
                 try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs });
         },
-        .anyframe_type => {
+        .anyframe_type, .@"defer" => {
             try writeToken(builder, main_token, .keyword);
             try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs });
         },
-        .@"defer" => {
-            try writeToken(builder, main_token, .keyword);
-            try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs });
-        },
-        .@"comptime",
-        .@"nosuspend",
-        => {
-            if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-                try writeDocComments(builder, tree, doc);
-            try writeToken(builder, main_token, .keyword);
-            try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
-        },
         .@"switch",
         .switch_comma,
         => {
@@ -593,8 +523,10 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             try writeToken(builder, if_node.ast.if_token, .keyword);
             try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr });
 
-            if (if_node.payload_token) |payload| {
-                try writeTokenMod(builder, payload, .variable, .{ .declaration = true });
+            if (if_node.payload_token) |payload_token| {
+                const capture_is_ref = token_tags[payload_token] == .asterisk;
+                const actual_payload = payload_token + @boolToInt(capture_is_ref);
+                try writeTokenMod(builder, actual_payload, .variable, .{ .declaration = true });
             }
             try callWriteNodeTokens(allocator, .{ builder, if_node.ast.then_expr });
 
@@ -651,8 +583,8 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
 
             for (struct_init.ast.fields) |field_init| {
                 const init_token = tree.firstToken(field_init);
-                try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
-                try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
+                try writeToken(builder, init_token - 3, field_token_type orelse .property); // '.'
+                try writeToken(builder, init_token - 2, field_token_type orelse .property); // name
                 try writeToken(builder, init_token - 1, .operator); // '='
                 try callWriteNodeTokens(allocator, .{ builder, field_init });
             }
@@ -672,11 +604,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             try writeToken(builder, call.async_token, .keyword);
             try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr });
 
-            if (builder.previous_token) |prev| {
-                if (prev != ast.lastToken(tree, call.ast.fn_expr) and token_tags[ast.lastToken(tree, call.ast.fn_expr)] == .identifier) {
-                    try writeToken(builder, ast.lastToken(tree, call.ast.fn_expr), .function);
-                }
-            }
             for (call.ast.params) |param| try callWriteNodeTokens(allocator, .{ builder, param });
         },
         .slice,
@@ -712,7 +639,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             if (node_data[node].lhs != 0)
                 try writeToken(builder, node_data[node].lhs, .label);
         },
-        .@"suspend", .@"return" => {
+        .@"comptime", .@"nosuspend", .@"suspend", .@"return" => {
             try writeToken(builder, main_token, .keyword);
             try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
         },
@@ -797,9 +724,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
         .asm_input,
         => unreachable,
         .test_decl => {
-            if (Analyser.getDocCommentTokenIndex(token_tags, main_token)) |doc|
-                try writeDocComments(builder, tree, doc);
-
             try writeToken(builder, main_token, .keyword);
             switch (token_tags[node_data[node].lhs]) {
                 .string_literal => try writeToken(builder, node_data[node].lhs, .string),
@@ -909,7 +833,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
             const tok_type: ?TokenType = if (ast.isContainer(lhs_type.handle.tree, left_type_node))
                 fieldTokenType(decl_node, lhs_type.handle)
             else if (left_type_node == 0)
-                TokenType.field
+                .property
             else
                 null;
 
@@ -993,20 +917,23 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
     }
 }
 
-fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void {
+fn writeContainerField(builder: *Builder, node: Ast.Node.Index, container_decl: Ast.Node.Index) !void {
     const tree = builder.handle.tree;
-    const container_field = tree.fullContainerField(node).?;
-    const base = tree.nodes.items(.main_token)[node];
-    const tokens = tree.tokens.items(.tag);
-
     var allocator = builder.arena;
 
-    if (Analyser.getDocCommentTokenIndex(tokens, base)) |docs|
-        try writeDocComments(builder, tree, docs);
+    var container_field = tree.fullContainerField(node).?;
+    const field_token_type = fieldTokenType(container_decl, builder.handle) orelse .property;
 
+    const token_tags = tree.tokens.items(.tag);
+    const main_tokens = tree.nodes.items(.main_token);
+
+    if (container_decl != 0 and token_tags[main_tokens[container_decl]] != .keyword_struct) {
+        container_field.convertToNonTupleLike(tree.nodes);
+    }
+
     try writeToken(builder, container_field.comptime_token, .keyword);
     if (!container_field.ast.tuple_like) {
-        if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.main_token, tok_type);
+        try writeToken(builder, container_field.ast.main_token, field_token_type);
     }
 
     if (container_field.ast.type_expr != 0) {
@@ -1017,13 +944,13 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
         }
     }
 
-    if (container_field.ast.value_expr != 0) block: {
+    if (container_field.ast.value_expr != 0) {
         const eq_tok: Ast.TokenIndex = if (container_field.ast.align_expr != 0)
             ast.lastToken(tree, container_field.ast.align_expr) + 2
         else if (container_field.ast.type_expr != 0)
             ast.lastToken(tree, container_field.ast.type_expr) + 1
         else
-            break :block;
+            container_field.ast.main_token + 1;
 
         try writeToken(builder, eq_tok, .operator);
         try callWriteNodeTokens(allocator, .{ builder, container_field.ast.value_expr });
@@ -15,7 +15,6 @@ test "semantic tokens - empty" {
 }
 
 test "semantic tokens - comment" {
-    if (true) return error.SkipZigTest; // TODO
     try testSemanticTokens(
         \\// hello world
     , &.{
@@ -27,6 +26,14 @@ test "semantic tokens - comment" {
     , &.{
         .{ "//! hello world", .comment, .{ .documentation = true } },
     });
+    try testSemanticTokens(
+        \\//! first line
+        \\//! second line
+        \\
+    , &.{
+        .{ "//! first line", .comment, .{ .documentation = true } },
+        .{ "//! second line", .comment, .{ .documentation = true } },
+    });
     try testSemanticTokens(
         \\/// hello world
         \\const a;
@@ -190,12 +197,13 @@ test "semantic tokens - operators" {
 }
 
 test "semantic tokens - field access" {
+    if (builtin.target.isWasm()) return error.SkipZigTest;
     // this will make sure that the std module can be resolved
     try testSemanticTokens(
         \\const std = @import("std");
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "std", .type, .{ .namespace = true, .declaration = true } },
+        .{ "std", .namespace, .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "@import", .builtin, .{} },
         .{ "\"std\"", .string, .{} },
@@ -205,17 +213,53 @@ test "semantic tokens - field access" {
         \\const Ast = std.zig.Ast;
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "std", .type, .{ .namespace = true, .declaration = true } },
+        .{ "std", .namespace, .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "@import", .builtin, .{} },
         .{ "\"std\"", .string, .{} },
 
         .{ "const", .keyword, .{} },
-        .{ "Ast", .type, .{ .@"struct" = true, .declaration = true } },
+        .{ "Ast", .@"struct", .{ .declaration = true } },
         .{ "=", .operator, .{} },
-        .{ "std", .type, .{ .namespace = true } },
-        .{ "zig", .type, .{ .namespace = true } },
-        .{ "Ast", .type, .{ .@"struct" = true } },
+        .{ "std", .namespace, .{} },
+        .{ "zig", .namespace, .{} },
+        .{ "Ast", .@"struct", .{} },
+    });
+}
+
+test "semantic tokens - call" {
+    try testSemanticTokens(
+        \\fn foo() void {}
+        \\const alpha = foo();
+    , &.{
+        .{ "fn", .keyword, .{} },
+        .{ "foo", .function, .{ .declaration = true } },
+        .{ "void", .type, .{} },
+
+        .{ "const", .keyword, .{} },
+        .{ "alpha", .variable, .{ .declaration = true } },
+        .{ "=", .operator, .{} },
+        .{ "foo", .function, .{} },
+    });
+    try testSemanticTokens(
+        \\const ns = struct {
+        \\ fn foo() void {}
+        \\};
+        \\const alpha = ns.foo();
+    , &.{
+        .{ "const", .keyword, .{} },
+        .{ "ns", .namespace, .{ .declaration = true } },
+        .{ "=", .operator, .{} },
+        .{ "struct", .keyword, .{} },
+        .{ "fn", .keyword, .{} },
+        .{ "foo", .function, .{ .declaration = true } },
+        .{ "void", .type, .{} },
+
+        .{ "const", .keyword, .{} },
+        .{ "alpha", .variable, .{ .declaration = true } },
+        .{ "=", .operator, .{} },
+        .{ "ns", .namespace, .{} },
+        .{ "foo", .function, .{} },
     });
 }
 
@@ -449,7 +493,7 @@ test "semantic tokens - struct" {
         \\const Foo = struct {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .namespace = true, .declaration = true } },
+        .{ "Foo", .namespace, .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "struct", .keyword, .{} },
     });
@@ -457,7 +501,7 @@ test "semantic tokens - struct" {
         \\const Foo = packed struct(u32) {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .namespace = true, .declaration = true } },
+        .{ "Foo", .namespace, .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "packed", .keyword, .{} },
         .{ "struct", .keyword, .{} },
@@ -470,12 +514,12 @@ test "semantic tokens - struct" {
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } },
+        .{ "Foo", .@"struct", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "struct", .keyword, .{} },
-        .{ "alpha", .field, .{} },
+        .{ "alpha", .property, .{} },
         .{ "u32", .type, .{} },
-        .{ "beta", .field, .{} },
+        .{ "beta", .property, .{} },
         .{ "void", .type, .{} },
     });
     try testSemanticTokens(
@@ -485,15 +529,15 @@ test "semantic tokens - struct" {
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } },
+        .{ "Foo", .@"struct", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "struct", .keyword, .{} },
-        .{ "alpha", .field, .{} },
+        .{ "alpha", .property, .{} },
         .{ "u32", .type, .{} },
         .{ "=", .operator, .{} },
         .{ "3", .number, .{} },
         .{ "comptime", .keyword, .{} },
-        .{ "beta", .field, .{} },
+        .{ "beta", .property, .{} },
         .{ "void", .type, .{} },
         .{ "=", .operator, .{} },
     });
@@ -509,7 +553,7 @@ test "semantic tokens - struct" {
         .{ "=", .operator, .{} },
         .{ "u32", .type, .{} },
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } },
+        .{ "Foo", .@"struct", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "struct", .keyword, .{} },
         .{ "u32", .type, .{} },
@@ -524,7 +568,7 @@ test "semantic tokens - union" {
         \\const Foo = union {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+        .{ "Foo", .@"union", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "union", .keyword, .{} },
     });
@@ -532,24 +576,23 @@ test "semantic tokens - union" {
         \\const Foo = packed union(enum) {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+        .{ "Foo", .@"union", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "packed", .keyword, .{} },
         .{ "union", .keyword, .{} },
         .{ "enum", .keyword, .{} },
     });
-    if (true) return error.SkipZigTest; // TODO
     try testSemanticTokens(
         \\const Foo = union(E) {
         \\ alpha,
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+        .{ "Foo", .@"union", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "union", .keyword, .{} },
         .{ "E", .variable, .{} },
-        .{ "alpha", .field, .{} },
+        .{ "alpha", .property, .{} },
     });
     try testSemanticTokens(
         \\const Foo = union(E) {
@@ -558,13 +601,13 @@ test "semantic tokens - union" {
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+        .{ "Foo", .@"union", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "union", .keyword, .{} },
         .{ "E", .variable, .{} },
-        .{ "alpha", .field, .{} },
-        .{ "beta", .field, .{} },
-        .{ "void", .keyword, .{} },
+        .{ "alpha", .property, .{} },
+        .{ "beta", .property, .{} },
+        .{ "void", .type, .{} },
     });
     try testSemanticTokens(
         \\const Foo = union(E) {
@@ -572,45 +615,46 @@ test "semantic tokens - union" {
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+        .{ "Foo", .@"union", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "union", .keyword, .{} },
         .{ "E", .variable, .{} },
-        .{ "alpha", .field, .{} },
-        .{ "void", .keyword, .{} },
+        .{ "alpha", .property, .{} },
+        .{ "void", .type, .{} },
         .{ "align", .keyword, .{} },
         .{ "2", .number, .{} },
     });
 }
 
 test "semantic tokens - enum" {
-    if (true) return error.SkipZigTest; // TODO
     try testSemanticTokens(
         \\const Foo = enum {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+        .{ "Foo", .@"enum", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "enum", .keyword, .{} },
     });
     try testSemanticTokens(
         \\const Foo = enum {
-        \\ alpha,
+        \\ alpha = 3,
         \\ beta,
         \\};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+        .{ "Foo", .@"enum", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "enum", .keyword, .{} },
         .{ "alpha", .enumMember, .{} },
+        .{ "=", .operator, .{} },
+        .{ "3", .number, .{} },
         .{ "beta", .enumMember, .{} },
     });
     try testSemanticTokens(
         \\const Foo = enum(u4) {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+        .{ "Foo", .@"enum", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "enum", .keyword, .{} },
         .{ "u4", .type, .{} },
@@ -644,7 +688,7 @@ test "semantic tokens - opaque" {
         \\const Foo = opaque {};
     , &.{
         .{ "const", .keyword, .{} },
-        .{ "Foo", .type, .{ .@"opaque" = true, .declaration = true } },
+        .{ "Foo", .@"opaque", .{ .declaration = true } },
         .{ "=", .operator, .{} },
         .{ "opaque", .keyword, .{} },
     });
@@ -765,6 +809,16 @@ test "semantic tokens - if" {
         .{ "err", .variable, .{ .declaration = true } },
         .{ "err", .variable, .{} },
     });
+    try testSemanticTokens(
+        \\const foo = if (null) |*value| {};
+    , &.{
+        .{ "const", .keyword, .{} },
+        .{ "foo", .variable, .{ .declaration = true } },
+        .{ "=", .operator, .{} },
+        .{ "if", .keyword, .{} },
+        .{ "null", .keywordLiteral, .{} },
+        .{ "value", .variable, .{ .declaration = true } },
+    });
 }
 
 test "semantic tokens - while" {