zls/src/semantic_tokens.zig

const std = @import("std");
const offsets = @import("offsets.zig");
const DocumentStore = @import("document_store.zig");
const analysis = @import("analysis.zig");
const ast = std.zig.ast;
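/// Semantic token types reported to the client.
/// Per the LSP spec, the u32 value of each tag is an index into the token-type
/// legend the server advertises, so the order here has to match that legend.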
pub const TokenType = enum(u32) {
type,
parameter,
variable,
enumMember,
field,
errorTag,
function,
keyword,
comment,
string,
number,
operator,
builtin,
label,
keywordLiteral,
};
pub const TokenModifiers = packed struct {
namespace: bool = false,
@"struct": bool = false,
@"enum": bool = false,
@"union": bool = false,
@"opaque": bool = false,
declaration: bool = false,
@"async": bool = false,
documentation: bool = false,
generic: bool = false,
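// Packs the active modifiers into the LSP bit set: bit i corresponds to the
// i-th field of this struct (and to the i-th entry of the advertised modifier legend).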
fn toInt(self: TokenModifiers) u32 {
var res: u32 = 0;
inline for (std.meta.fields(TokenModifiers)) |field, i| {
if (@field(self, field.name)) {
res |= 1 << i;
}
}
return res;
}
fn set(self: *TokenModifiers, comptime field: []const u8) callconv(.Inline) void {
@field(self, field) = true;
}
};
const Comment = struct {
/// Length of the comment
length: u32,
/// Source index of the comment
start: u32,
};
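/// Line comments collected up-front by `findComments`, in source order;
/// `Builder.add` interleaves them into the emitted token stream.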
const CommentList = std.ArrayList(Comment);
const Builder = struct {
handle: *DocumentStore.Handle,
current_token: ?ast.TokenIndex,
arr: std.ArrayList(u32),
encoding: offsets.Encoding,
comments: CommentList,
fn init(allocator: *std.mem.Allocator, handle: *DocumentStore.Handle, encoding: offsets.Encoding) Builder {
return Builder{
.handle = handle,
.current_token = null,
.arr = std.ArrayList(u32).init(allocator),
.encoding = encoding,
.comments = CommentList.init(allocator),
};
}
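/// Appends one semantic token entry for `token`, first emitting entries for any
/// line comments that precede it. Per the LSP spec each entry is five u32s:
/// { deltaLine, deltaStartChar, length, tokenType, tokenModifiers }, with the
/// position encoded relative to the previously written token. For example, a
/// 3-character keyword on the same line as the previous token and 4 columns to
/// its right becomes { 0, 4, 3, @enumToInt(TokenType.keyword), 0 }.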
fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
const starts = self.handle.tree.tokens.items(.start);
var start_idx = if (self.current_token) |current_token|
starts[current_token]
else
0;
if (start_idx > starts[token])
return;
const delta_loc = while (self.findCommentBetween(start_idx, starts[token])) |comment| {
const old_loc = self.handle.tree.tokenLocation(0, self.current_token orelse 0);
const comment_delta = offsets.tokenRelativeLocation(self.handle.tree, start_idx, comment.start, self.encoding) catch return;
try self.arr.appendSlice(&[_]u32{
@truncate(u32, comment_delta.line),
@truncate(u32, comment_delta.column),
comment.length,
@enumToInt(TokenType.comment),
0,
});
start_idx = comment.start;
} else offsets.tokenRelativeLocation(self.handle.tree, start_idx, starts[token], self.encoding) catch return;
try self.arr.appendSlice(&[_]u32{
@truncate(u32, delta_loc.line),
@truncate(u32, delta_loc.column),
@truncate(u32, offsets.tokenLength(self.handle.tree, token, self.encoding)),
@enumToInt(token_type),
token_modifiers.toInt(),
});
self.current_token = token;
}
fn toOwnedSlice(self: *Builder) []u32 {
return self.arr.toOwnedSlice();
}
/// Based on a given start and end index, returns a `Comment` between the positions
/// Returns `null` if none was found
fn findCommentBetween(self: Builder, from: u32, to: u32) ?Comment {
return for (self.comments.items) |comment| {
if (comment.start > from and comment.start < to)
break comment;
} else null;
}
};
fn writeToken(
builder: *Builder,
token_idx: ?ast.TokenIndex,
tok_type: TokenType,
) callconv(.Inline) !void {
return try writeTokenMod(builder, token_idx, tok_type, .{});
}
fn writeTokenMod(
builder: *Builder,
token_idx: ?ast.TokenIndex,
tok_type: TokenType,
tok_mod: TokenModifiers,
) callconv(.Inline) !void {
if (token_idx) |ti| {
try builder.add(ti, tok_type, tok_mod);
}
}
fn writeDocComments(builder: *Builder, tree: ast.Tree, doc: ast.TokenIndex) !void {
const token_tags = tree.tokens.items(.tag);
var tok_idx = doc;
while (token_tags[tok_idx] == .doc_comment or
token_tags[tok_idx] == .container_doc_comment) : (tok_idx += 1)
{
var tok_mod = TokenModifiers{};
tok_mod.set("documentation");
try builder.add(tok_idx, .comment, tok_mod);
}
}
fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType {
const main_token = handle.tree.nodes.items(.main_token)[container_decl];
if (main_token > handle.tree.tokens.len) return null;
return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) {
.keyword_struct => .field,
.keyword_union, .keyword_enum => .enumMember,
else => null,
});
}
/// This is used to highlight gaps between AST nodes.
/// These gaps can be plain gaps between statements/declarations that contain comments,
/// or malformed code.
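/// It scans the raw tokens between the last highlighted node and the next one and
/// emits best-effort tokens for keywords, literals, operators and doc comments.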
const GapHighlighter = struct {
builder: *Builder,
current_idx: ast.TokenIndex,
// TODO More highlighting here
fn handleTok(self: *GapHighlighter, tok: ast.TokenIndex) !void {
const tok_id = self.builder.handle.tree.tokens.items(.tag)[tok];
if (tok_id == .container_doc_comment or tok_id == .doc_comment) {
try writeTokenMod(self.builder, tok, .comment, .{ .documentation = true });
} else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Tag.keyword_align) and
@enumToInt(tok_id) <= @enumToInt(std.zig.Token.Tag.keyword_while))
{
const tok_type: TokenType = switch (tok_id) {
.keyword_true,
.keyword_false,
.keyword_null,
.keyword_undefined,
.keyword_unreachable,
=> .keywordLiteral,
else => .keyword,
};
try writeToken(self.builder, tok, tok_type);
} else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Tag.bang) and
@enumToInt(tok_id) <= @enumToInt(std.zig.Token.Tag.tilde) and
tok_id != .period and tok_id != .comma and tok_id != .r_paren and
tok_id != .l_paren and tok_id != .r_brace and tok_id != .l_brace and
tok_id != .semicolon and tok_id != .colon)
{
try writeToken(self.builder, tok, .operator);
} else if (tok_id == .integer_literal or tok_id == .float_literal) {
try writeToken(self.builder, tok, .number);
} else if (tok_id == .string_literal or tok_id == .multiline_string_literal_line or tok_id == .char_literal) {
try writeToken(self.builder, tok, .string);
}
}
fn init(builder: *Builder, start: ast.TokenIndex) GapHighlighter {
return .{ .builder = builder, .current_idx = start };
}
fn next(self: *GapHighlighter, node: ast.Node.Index) !void {
const tree = self.builder.handle.tree;
if (self.current_idx > 0 and tree.tokens.items(.tag)[self.current_idx - 1] == .container_doc_comment) {
try self.handleTok(self.current_idx - 1);
}
var i = self.current_idx;
while (i < tree.firstToken(node)) : (i += 1) {
try self.handleTok(i);
}
self.current_idx = tree.lastToken(node) + 1;
}
fn end(self: *GapHighlighter, last: ast.TokenIndex) !void {
var i = self.current_idx;
while (i < last) : (i += 1) {
try self.handleTok(i);
}
}
};
fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
const tree = builder.handle.tree;
if (type_node.type.is_type_val) {
var new_tok_mod = tok_mod;
if (type_node.isNamespace())
new_tok_mod.set("namespace")
else if (type_node.isStructType())
new_tok_mod.set("struct")
else if (type_node.isEnumType())
new_tok_mod.set("enum")
else if (type_node.isUnionType())
new_tok_mod.set("union")
else if (type_node.isOpaqueType())
new_tok_mod.set("opaque");
try writeTokenMod(builder, target_tok, .type, new_tok_mod);
} else if (type_node.isTypeFunc()) {
try writeTokenMod(builder, target_tok, .type, tok_mod);
} else if (type_node.isFunc()) {
var new_tok_mod = tok_mod;
if (type_node.isGenericFunc()) {
new_tok_mod.set("generic");
}
try writeTokenMod(builder, target_tok, .function, new_tok_mod);
} else {
try writeTokenMod(builder, target_tok, .variable, tok_mod);
}
}
fn writeContainerField(
builder: *Builder,
arena: *std.heap.ArenaAllocator,
store: *DocumentStore,
node: ast.Node.Index,
field_token_type: ?TokenType,
child_frame: anytype,
) !void {
const container_field = analysis.containerField(builder.handle.tree, node).?;
if (analysis.getDocCommentTokenIndex(builder.handle.tree, node)) |docs|
try writeDocComments(builder, builder.handle.tree, docs);
try writeToken(builder, container_field.comptime_token, .keyword);
if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);
if (container_field.ast.type_expr != 0) {
if (container_field.ast.align_expr != 0) {
try writeToken(builder, builder.handle.tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr });
}
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr });
}
if (container_field.ast.value_expr != 0) block: {
const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0)
builder.handle.tree.lastToken(container_field.ast.type_expr) + 1
else if (container_field.ast.align_expr != 0)
builder.handle.tree.lastToken(container_field.ast.align_expr) + 1
else
break :block; // Check this, I believe it is correct.
try writeToken(builder, eq_tok, .operator);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr });
}
}
// TODO This is very slow and does a lot of extra work, improve in the future.
fn writeNodeTokens(
builder: *Builder,
arena: *std.heap.ArenaAllocator,
store: *DocumentStore,
maybe_node: ?ast.Node.Index,
) error{OutOfMemory}!void {
if (maybe_node == null) return;
const node = maybe_node.?;
if (node == 0) return;
const handle = builder.handle;
const tree = handle.tree;
const node_tags = tree.nodes.items(.tag);
const token_tags = tree.tokens.items(.tag);
const datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
if (node > datas.len) return;
const tag = node_tags[node];
const main_token = main_tokens[node];
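// Recursion below goes through @asyncCall into an explicitly allocated frame:
// writeNodeTokens awaits itself, and a direct recursive async call would make the
// frame size impossible to compute, so each child call gets a heap-allocated frame.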
const FrameSize = @sizeOf(@Frame(writeNodeTokens));
var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
defer arena.child_allocator.free(child_frame);
switch (tag) {
.root => unreachable,
.container_field,
.container_field_align,
.container_field_init,
=> try writeContainerField(builder, arena, store, node, .field, child_frame),
.@"errdefer" => {
if (datas[node].lhs != 0)
try writeToken(builder, datas[node].lhs, .variable);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.block,
.block_semicolon,
.block_two,
.block_two_semicolon,
=> {
const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: {
try writeToken(builder, main_token - 2, .label);
break :block main_token + 1;
} else 0;
var gap_highlighter = GapHighlighter.init(builder, first_tok);
const statements: []const ast.Node.Index = switch (tag) {
.block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs],
.block_two, .block_two_semicolon => blk: {
const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
const len: usize = if (datas[node].lhs == 0)
@as(usize, 0)
else if (datas[node].rhs == 0)
@as(usize, 1)
else
@as(usize, 2);
break :blk statements[0..len];
},
else => unreachable,
};
for (statements) |child| {
try gap_highlighter.next(child);
if (node_tags[child].isContainerField()) {
try writeContainerField(builder, arena, store, child, .field, child_frame);
} else {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
}
}
try gap_highlighter.end(tree.lastToken(node));
},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = analysis.varDecl(tree, node).?;
if (analysis.getDocCommentTokenIndex(tree, node)) |comment_idx|
try writeDocComments(builder, handle.tree, comment_idx);
try writeToken(builder, var_decl.visib_token, .keyword);
try writeToken(builder, var_decl.extern_export_token, .keyword);
try writeToken(builder, var_decl.threadlocal_token, .keyword);
try writeToken(builder, var_decl.comptime_token, .keyword);
try writeToken(builder, var_decl.ast.mut_token, .keyword);
if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| {
try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true });
} else {
try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
}
if (var_decl.ast.type_node != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node });
if (var_decl.ast.align_node != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node });
if (var_decl.ast.section_node != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node });
try writeToken(builder, var_decl.ast.mut_token + 2, .operator);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node });
},
.@"usingnamespace" => {
const first_tok = tree.firstToken(node);
if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment)
try writeDocComments(builder, builder.handle.tree, first_tok - 1);
try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword);
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.container_decl,
.container_decl_trailing,
.container_decl_two,
.container_decl_two_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
=> {
var buf: [2]ast.Node.Index = undefined;
const decl: ast.full.ContainerDecl = switch (tag) {
.container_decl, .container_decl_trailing => tree.containerDecl(node),
.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node),
.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
.tagged_union, .tagged_union_trailing => tree.taggedUnion(node),
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node),
.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node),
else => unreachable,
};
try writeToken(builder, decl.layout_token, .keyword);
try writeToken(builder, decl.ast.main_token, .keyword);
if (decl.ast.enum_token) |enum_token| {
if (decl.ast.arg != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg })
else
try writeToken(builder, enum_token, .keyword);
} else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg });
var gap_highlighter = GapHighlighter.init(builder, main_token + 1);
const field_token_type = fieldTokenType(node, handle);
for (decl.ast.members) |child| {
try gap_highlighter.next(child);
if (node_tags[child].isContainerField()) {
try writeContainerField(builder, arena, store, child, field_token_type, child_frame);
} else {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
}
}
try gap_highlighter.end(tree.lastToken(node));
},
.error_value => {
// if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
try writeToken(builder, datas[node].rhs, .errorTag);
},
.identifier => {
if (analysis.isTypeIdent(handle.tree, main_token)) {
return try writeToken(builder, main_token, .type);
}
if (try analysis.lookupSymbolGlobal(
store,
arena,
handle,
handle.tree.getNodeSource(node),
handle.tree.tokens.items(.start)[main_token],
)) |child| {
if (child.decl.* == .param_decl) {
return try writeToken(builder, main_token, .parameter);
}
var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| {
try colorIdentifierBasedOnType(builder, decl_type, main_token, .{});
} else {
try writeTokenMod(builder, main_token, .variable, .{});
}
}
},
.fn_proto,
.fn_proto_one,
.fn_proto_simple,
.fn_proto_multi,
.fn_decl,
=> {
var buf: [1]ast.Node.Index = undefined;
const fn_proto: ast.full.FnProto = analysis.fnProto(tree, node, &buf).?;
if (analysis.getDocCommentTokenIndex(tree, node)) |docs|
try writeDocComments(builder, handle.tree, docs);
try writeToken(builder, fn_proto.visib_token, .keyword);
try writeToken(builder, fn_proto.extern_export_token, .keyword);
try writeToken(builder, fn_proto.lib_name, .string);
try writeToken(builder, fn_proto.ast.fn_token, .keyword);
const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto))
.type
else
.function;
const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto))
TokenModifiers{ .generic = true }
else
TokenModifiers{};
try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod);
var it = fn_proto.iterate(tree);
while (it.next()) |param_decl| {
if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, handle.tree, docs);
try writeToken(builder, param_decl.comptime_noalias, .keyword);
try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true });
if (param_decl.anytype_ellipsis3) |any_token| {
try writeToken(builder, any_token, .type);
} else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr });
}
if (fn_proto.ast.align_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr });
if (fn_proto.ast.section_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr });
if (fn_proto.ast.callconv_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr });
if (fn_proto.ast.return_type != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type });
if (tag == .fn_decl)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.anyframe_type => {
try writeToken(builder, main_token, .type);
if (datas[node].rhs != 0) {
try writeToken(builder, datas[node].lhs, .type);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
}
},
.@"defer" => {
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.@"comptime",
.@"nosuspend",
=> {
if (analysis.getDocCommentTokenIndex(tree, node)) |doc|
try writeDocComments(builder, handle.tree, doc);
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.@"switch",
.switch_comma,
=> {
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
const cases = tree.extra_data[extra.start..extra.end];
var gap_highlighter = GapHighlighter.init(builder, tree.lastToken(datas[node].lhs) + 1);
for (cases) |case_node| {
try gap_highlighter.next(case_node);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node });
}
try gap_highlighter.end(tree.lastToken(node));
},
.switch_case_one,
.switch_case,
=> {
const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node);
for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node });
// check if it's 'else'
if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
try writeToken(builder, switch_case.ast.arrow_token, .operator);
if (switch_case.payload_token) |payload_token| {
const p_token = @boolToInt(token_tags[payload_token] == .asterisk) + payload_token;
try writeToken(builder, p_token, .variable);
}
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr });
},
.@"while",
.while_simple,
.while_cont,
.for_simple,
.@"for",
=> {
const while_node: ast.full.While = switch (tag) {
.@"while" => tree.whileFull(node),
.while_simple => tree.whileSimple(node),
.while_cont => tree.whileCont(node),
.@"for" => tree.forFull(node),
.for_simple => tree.forSimple(node),
else => unreachable,
};
try writeToken(builder, while_node.label_token, .label);
try writeToken(builder, while_node.inline_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr });
try writeToken(builder, while_node.payload_token, .variable);
if (while_node.ast.cont_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr });
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr });
try writeToken(builder, while_node.error_token, .variable);
if (while_node.ast.else_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr });
},
.@"if",
.if_simple,
=> {
const if_node: ast.full.If = if (tag == .@"if") tree.ifFull(node) else tree.ifSimple(node);
try writeToken(builder, if_node.ast.if_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });
try writeToken(builder, if_node.payload_token, .variable);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr });
try writeToken(builder, if_node.error_token, .variable);
if (if_node.ast.else_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr });
},
.array_init,
.array_init_comma,
.array_init_one,
.array_init_one_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
=> {
var buf: [2]ast.Node.Index = undefined;
const array_init: ast.full.ArrayInit = switch (tag) {
.array_init, .array_init_comma => tree.arrayInit(node),
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
else => unreachable,
};
if (array_init.ast.type_expr != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr });
for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem });
},
.struct_init,
.struct_init_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
=> {
var buf: [2]ast.Node.Index = undefined;
const struct_init: ast.full.StructInit = switch (tag) {
.struct_init, .struct_init_comma => tree.structInit(node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
else => unreachable,
};
var field_token_type: ?TokenType = null;
if (struct_init.ast.type_expr != 0) {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr });
field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{
.node = struct_init.ast.type_expr,
.handle = handle,
})) |struct_type| switch (struct_type.type.data) {
.other => |type_node| if (analysis.isContainer(struct_type.handle.tree.nodes.items(.tag)[type_node]))
fieldTokenType(type_node, struct_type.handle)
else
null,
else => null,
} else null;
}
var gap_highlighter = GapHighlighter.init(builder, struct_init.ast.lbrace);
for (struct_init.ast.fields) |field_init| {
try gap_highlighter.next(field_init);
const init_token = tree.firstToken(field_init);
try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
try writeToken(builder, init_token - 1, .operator); // '='
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
}
try gap_highlighter.end(tree.lastToken(node));
},
.call,
.call_comma,
.async_call,
.async_call_comma,
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
=> {
var params: [1]ast.Node.Index = undefined;
const call: ast.full.Call = switch (tag) {
.call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
.call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&params, node),
else => unreachable,
};
try writeToken(builder, call.async_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });
if (builder.current_token) |curr_tok| {
if (curr_tok != tree.lastToken(call.ast.fn_expr) and token_tags[tree.lastToken(call.ast.fn_expr)] == .identifier) {
try writeToken(builder, tree.lastToken(call.ast.fn_expr), .function);
}
}
for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
},
.slice,
.slice_open,
.slice_sentinel,
=> {
const slice: ast.full.Slice = switch (tag) {
.slice => tree.slice(node),
.slice_open => tree.sliceOpen(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced });
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start });
try writeToken(builder, tree.lastToken(slice.ast.start) + 1, .operator);
if (slice.ast.end != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end });
if (slice.ast.sentinel != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel });
},
.array_access => {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.deref,
.unwrap_optional,
=> {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
try writeToken(builder, main_token, .operator);
},
.grouped_expression => {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.@"break",
.@"continue",
=> {
try writeToken(builder, main_token, .keyword);
if (datas[node].lhs != 0)
try writeToken(builder, datas[node].lhs, .label);
if (datas[node].rhs != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.@"suspend", .@"return" => {
try writeToken(builder, main_token, .keyword);
if (datas[node].lhs != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.integer_literal,
.float_literal,
=> {
try writeToken(builder, main_token, .number);
},
.enum_literal => {
try writeToken(builder, main_token - 1, .enumMember);
try writeToken(builder, main_token, .enumMember);
},
.builtin_call,
.builtin_call_comma,
.builtin_call_two,
.builtin_call_two_comma,
=> {
const data = datas[node];
const params = switch (tag) {
.builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
.builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
&[_]ast.Node.Index{}
else if (data.rhs == 0)
&[_]ast.Node.Index{data.lhs}
else
&[_]ast.Node.Index{ data.lhs, data.rhs },
else => unreachable,
};
try writeToken(builder, main_token, .builtin);
for (params) |param|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
},
.string_literal,
.char_literal,
=> {
try writeToken(builder, main_token, .string);
},
.multiline_string_literal => {
var cur_tok = main_token;
const last_tok = datas[node].rhs;
while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string);
},
.true_literal,
.false_literal,
.null_literal,
.undefined_literal,
.unreachable_literal,
=> {
try writeToken(builder, main_token, .keywordLiteral);
},
.error_set_decl => {
try writeToken(builder, main_token, .keyword);
},
.@"asm",
.asm_output,
.asm_input,
.asm_simple,
=> {
const asm_node: ast.full.Asm = switch (tag) {
.@"asm" => tree.asmFull(node),
.asm_simple => tree.asmSimple(node),
else => return, // TODO Inputs, outputs
};
try writeToken(builder, main_token, .keyword);
try writeToken(builder, asm_node.volatile_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template });
// TODO Inputs, outputs.
},
.@"anytype" => {
try writeToken(builder, main_token, .type);
},
.test_decl => {
if (analysis.getDocCommentTokenIndex(handle.tree, node)) |doc|
try writeDocComments(builder, handle.tree, doc);
try writeToken(builder, main_token, .keyword);
if (token_tags[main_token + 1] == .string_literal)
try writeToken(builder, main_token + 1, .string);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.@"catch" => {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
try writeToken(builder, main_token, .keyword);
if (token_tags[main_token + 1] == .pipe)
try writeToken(builder, main_token + 1, .variable);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.add,
.add_wrap,
.array_cat,
.array_mult,
.assign,
.assign_bit_and,
.assign_bit_or,
.assign_bit_shift_left,
.assign_bit_shift_right,
.assign_bit_xor,
.assign_div,
.assign_sub,
.assign_sub_wrap,
.assign_mod,
.assign_add,
.assign_add_wrap,
.assign_mul,
.assign_mul_wrap,
.bang_equal,
.bit_and,
.bit_or,
.bit_shift_left,
.bit_shift_right,
.bit_xor,
.bool_and,
.bool_or,
.div,
.equal_equal,
.error_union,
.greater_or_equal,
.greater_than,
.less_or_equal,
.less_than,
.merge_error_sets,
.mod,
.mul,
.mul_wrap,
.switch_range,
.sub,
.sub_wrap,
.@"orelse",
=> {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
const token_type: TokenType = switch (tag) {
.bool_and, .bool_or => .keyword,
else => .operator,
};
try writeToken(builder, main_token, token_type);
if (datas[node].rhs != 0)
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs });
},
.field_access => {
const data = datas[node];
if (data.rhs == 0) return;
const rhs_str = tree.tokenSlice(data.rhs);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });
// TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
// writeToken code.
// Maybe we can hook into it instead? Also applies to Identifier and VarDecl
var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
const lhs_type = try analysis.resolveFieldAccessLhsType(
store,
arena,
(try analysis.resolveTypeOfNodeInternal(store, arena, .{
.node = data.lhs,
.handle = handle,
}, &bound_type_params)) orelse return,
&bound_type_params,
);
const left_type_node = switch (lhs_type.type.data) {
.other => |n| n,
else => return,
};
if (try analysis.lookupSymbolContainer(store, arena, .{
.node = left_type_node,
.handle = lhs_type.handle,
}, rhs_str, !lhs_type.type.is_type_val)) |decl_type| {
switch (decl_type.decl.*) {
.ast_node => |decl_node| {
if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) {
const tok_type: ?TokenType = if (analysis.isContainer(lhs_type.handle.tree.nodes.items(.tag)[left_type_node]))
fieldTokenType(decl_node, lhs_type.handle)
else if (left_type_node == 0)
TokenType.field
else
null;
if (tok_type) |tt| try writeToken(builder, data.rhs, tt);
return;
} else if (decl_type.handle.tree.nodes.items(.tag)[decl_node] == .error_value) {
try writeToken(builder, data.rhs, .errorTag);
}
},
else => {},
}
if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| {
try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{});
}
}
},
.ptr_type,
.ptr_type_aligned,
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = analysis.ptrType(tree, node).?;
if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
main_token == main_tokens[ptr_type.ast.child_type])
{
return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
builder,
arena,
store,
ptr_type.ast.child_type,
});
}
if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
if (ptr_type.ast.sentinel != 0) {
return try await @asyncCall(child_frame, {}, writeNodeTokens, .{
builder,
arena,
store,
ptr_type.ast.sentinel,
});
}
try writeToken(builder, ptr_type.allowzero_token, .keyword);
if (ptr_type.ast.align_node != 0) {
const first_tok = tree.firstToken(ptr_type.ast.align_node);
try writeToken(builder, first_tok - 2, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node });
if (ptr_type.ast.bit_range_start != 0) {
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start });
try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .operator);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end });
}
}
try writeToken(builder, ptr_type.const_token, .keyword);
try writeToken(builder, ptr_type.volatile_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type });
},
.array_type,
.array_type_sentinel,
=> {
const array_type: ast.full.ArrayType = if (tag == .array_type)
tree.arrayType(node)
else
tree.arrayTypeSentinel(node);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_count });
if (array_type.ast.sentinel) |sentinel|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel });
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type });
},
.address_of,
.bit_not,
.bool_not,
.optional_type,
.negation,
.negation_wrap,
=> {
try writeToken(builder, main_token, .operator);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.@"try",
.@"resume",
.@"await",
=> {
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
},
.anyframe_literal => try writeToken(builder, main_token, .keyword),
}
}
// TODO Range version, edit version.
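// Builds the flat LSP `data` array (groups of five u32s, see Builder.add) for the
// whole document: line comments are collected first, then the root declarations are
// walked with writeNodeTokens, with GapHighlighter covering the tokens in between.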
pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
var builder = Builder.init(arena.child_allocator, handle, encoding);
// as line comments are not nodes, we parse the text then generate the tokens for them
try findComments(&builder, handle.tree.source, encoding);
// traverse the AST from the root declarations
var gap_highlighter = GapHighlighter.init(&builder, 0);
var buf: [2]ast.Node.Index = undefined;
for (analysis.declMembers(handle.tree, .root, 0, &buf)) |child| {
try gap_highlighter.next(child);
try writeNodeTokens(&builder, arena, store, child);
}
try gap_highlighter.end(@truncate(u32, handle.tree.tokens.len) - 1);
return builder.toOwnedSlice();
}
/// As the AST does not contain nodes for line comments,
/// this parses the entire file to find them and records them
/// so that `Builder.add` can emit semantic tokens for them
fn findComments(builder: *Builder, source: []const u8, encoding: offsets.Encoding) !void {
var state: enum { none, comment, doc_comment } = .none;
var prev: u8 = 0;
var start: usize = 0;
for (source) |c, i| {
if (state == .comment and c == '/') {
state = .none;
continue;
}
if (state == .none and c == '/' and prev == '/') {
state = .comment;
start = i - 1;
}
if (c == '\n') {
if (state == .comment) {
state = .none;
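// LSP positions default to UTF-16 code units, so for the utf16 encoding the
// comment length is counted in code units: codepoints at or above U+10000 count as two.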
const len = if (encoding == .utf8)
i - start
else blk: {
var index: usize = start;
var utf16_len: usize = 0;
while (index < i) {
const n = std.unicode.utf8ByteSequenceLength(source[index]) catch unreachable;
const codepoint = std.unicode.utf8Decode(source[index .. index + n]) catch unreachable;
if (codepoint < 0x10000) {
utf16_len += 1;
} else {
utf16_len += 2;
}
index += n;
}
break :blk utf16_len;
};
try builder.comments.append(.{
.length = @truncate(u32, len),
.start = @truncate(u32, start),
});
}
}
prev = c;
}
}