const std = @import("std");
const offsets = @import("./offsets.zig");
const DocumentStore = @import("./document_store.zig");
const analysis = @import("./analysis.zig");
const Ast = std.zig.Ast;
const log = std.log.scoped(.semantic_tokens);
const SemanticToken = @This();
usingnamespace @import("./ast.zig");

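// The integer value of each variant below is sent to the client verbatim, so the
// order has to stay in sync with the semantic token legend the server advertises.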
pub const TokenType = enum(u32) {
    type,
    parameter,
    variable,
    enumMember,
    field,
    errorTag,
    function,
    keyword,
    comment,
    string,
    number,
    operator,
    builtin,
    label,
    keywordLiteral,
};

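// Each modifier is one bit in a token's `tokenModifiers` bitset; the field order
// below fixes the bit positions (see `toInt`).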
pub const TokenModifiers = packed struct {
    namespace: bool = false,
    @"struct": bool = false,
    @"enum": bool = false,
    @"union": bool = false,
    @"opaque": bool = false,
    declaration: bool = false,
    @"async": bool = false,
    documentation: bool = false,
    generic: bool = false,

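    /// Packs the set modifiers into the u32 bitset sent on the wire;
    /// bit i corresponds to the i-th field of this struct.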
    fn toInt(self: TokenModifiers) u32 {
        var res: u32 = 0;
        inline for (std.meta.fields(TokenModifiers)) |field, i| {
            if (@field(self, field.name)) {
                res |= 1 << i;
            }
        }
        return res;
    }

    inline fn set(self: *TokenModifiers, comptime field: []const u8) void {
        @field(self, field) = true;
    }
};

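/// Accumulates the flat semantic token data array for one document. Tokens must
/// be appended in source order (see `add`), since the output uses the LSP delta
/// encoding relative to the previously emitted token.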
const Builder = struct {
    handle: *DocumentStore.Handle,
    previous_position: usize = 0,
    previous_token: ?Ast.TokenIndex = null,
    arr: std.ArrayList(u32),
    encoding: offsets.Encoding,

    fn init(allocator: *std.mem.Allocator, handle: *DocumentStore.Handle, encoding: offsets.Encoding) Builder {
        return Builder{
            .handle = handle,
            .arr = std.ArrayList(u32).init(allocator),
            .encoding = encoding,
        };
    }

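    /// Appends the given token, first highlighting any tokens and comments that
    /// were skipped between the previously added token and this one.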
    fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
        const tree = self.handle.tree;
        const starts = tree.tokens.items(.start);
        const next_start = starts[token];

        if (next_start < self.previous_position) {
            return error.MovedBackwards;
        }

        if (self.previous_token) |prev| {
            // Highlight gaps between AST nodes. These can contain comments or malformed code.
            var i = prev + 1;
            while (i < token) : (i += 1) {
                try handleComments(self, starts[i - 1], starts[i]);
                try handleToken(self, i);
            }
        }
        self.previous_token = token;
        try self.handleComments(if (token > 0) starts[token - 1] else 0, next_start);

        const length = offsets.tokenLength(tree, token, self.encoding);
        try self.addDirect(token_type, token_modifiers, next_start, length);
    }

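    /// Highlights whatever remains after the last token visited by the AST walk,
    /// up to the end of the source (trailing tokens and comments).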
    fn finish(self: *Builder) !void {
        const starts = self.handle.tree.tokens.items(.start);

        const last_token = self.previous_token orelse 0;
        var i = last_token + 1;
        while (i < starts.len) : (i += 1) {
            try handleComments(self, starts[i - 1], starts[i]);
            try handleToken(self, i);
        }
        try self.handleComments(starts[starts.len - 1], self.handle.tree.source.len);
    }

    /// Highlight a token without semantic context.
    fn handleToken(self: *Builder, tok: Ast.TokenIndex) !void {
        const tree = self.handle.tree;
        // TODO More highlighting here
        const tok_id = tree.tokens.items(.tag)[tok];
        const tok_type: TokenType = switch (tok_id) {
            .keyword_unreachable => .keywordLiteral,
            .integer_literal, .float_literal => .number,
            .string_literal, .multiline_string_literal_line, .char_literal => .string,
            .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon => return,

            else => blk: {
                const id = @enumToInt(tok_id);
                if (id >= @enumToInt(std.zig.Token.Tag.keyword_align) and
                    id <= @enumToInt(std.zig.Token.Tag.keyword_while))
                    break :blk TokenType.keyword;
                if (id >= @enumToInt(std.zig.Token.Tag.bang) and
                    id <= @enumToInt(std.zig.Token.Tag.tilde))
                    break :blk TokenType.operator;

                return;
            },
        };
        const start = tree.tokens.items(.start)[tok];
        const length = offsets.tokenLength(tree, tok, self.encoding);
        try self.addDirect(tok_type, .{}, start, length);
    }

    /// Highlight normal comments and doc comments.
    fn handleComments(self: *Builder, from: usize, to: usize) !void {
        if (from == to) return;
        std.debug.assert(from < to);

        const source = self.handle.tree.source;

        var i: usize = from;
        while (i < to - 1) : (i += 1) {
            if (source[i] != '/' or source[i + 1] != '/')
                continue;

            const comment_start = i;
            var mods = TokenModifiers{};
            if (i + 2 < to and (source[i + 2] == '!' or source[i + 2] == '/'))
                mods.documentation = true;

            while (i < to - 1 and source[i] != '\n') : (i += 1) {}

            const length = try offsets.lineSectionLength(self.handle.tree, comment_start, i, self.encoding);
            try self.addDirect(TokenType.comment, mods, comment_start, length);
        }
    }

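    /// Appends one token using the relative encoding the LSP expects:
    /// { delta line, delta start column, length, token type, token modifiers }.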
    fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void {
        const delta = offsets.tokenRelativeLocation(
            self.handle.tree,
            self.previous_position,
            start,
            self.encoding,
        ) catch return;

        try self.arr.appendSlice(&.{
            @truncate(u32, delta.line),
            @truncate(u32, delta.column),
            @truncate(u32, length),
            @enumToInt(tok_type),
            tok_mod.toInt(),
        });
        self.previous_position = start;
    }

    fn toOwnedSlice(self: *Builder) []u32 {
        return self.arr.toOwnedSlice();
    }
};

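// Thin wrappers over Builder.add that silently ignore a null token index, so call
// sites can pass optional tokens (e.g. `visib_token`) without unwrapping them.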
inline fn writeToken(builder: *Builder, token_idx: ?Ast.TokenIndex, tok_type: TokenType) !void {
    return try writeTokenMod(builder, token_idx, tok_type, .{});
}

inline fn writeTokenMod(builder: *Builder, token_idx: ?Ast.TokenIndex, tok_type: TokenType, tok_mod: TokenModifiers) !void {
    if (token_idx) |ti| {
        try builder.add(ti, tok_type, tok_mod);
    }
}

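/// Marks a run of `doc_comment`/`container_doc_comment` tokens starting at `doc`
/// as comments carrying the `documentation` modifier.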
fn writeDocComments(builder: *Builder, tree: SemanticToken.Tree, doc: Ast.TokenIndex) !void {
    const token_tags = tree.tokens.items(.tag);
    var tok_idx = doc;
    while (token_tags[tok_idx] == .doc_comment or
        token_tags[tok_idx] == .container_doc_comment) : (tok_idx += 1)
    {
        var tok_mod = TokenModifiers{};
        tok_mod.set("documentation");

        try builder.add(tok_idx, .comment, tok_mod);
    }
}

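/// How fields of the given container are highlighted: struct fields as `.field`,
/// union and enum members as `.enumMember`, anything else not at all.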
fn fieldTokenType(container_decl: Ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType {
    const main_token = handle.tree.nodes.items(.main_token)[container_decl];
    if (main_token > handle.tree.tokens.len) return null;
    return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) {
        .keyword_struct => .field,
        .keyword_union, .keyword_enum => .enumMember,
        else => null,
    });
}

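/// Highlights an identifier according to its resolved type: type values become
/// `.type` (with a container-kind modifier), functions become `.function`
/// (optionally `.generic`), and everything else falls back to `.variable`.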
fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
    if (type_node.type.is_type_val) {
        var new_tok_mod = tok_mod;
        if (type_node.isNamespace())
            new_tok_mod.set("namespace")
        else if (type_node.isStructType())
            new_tok_mod.set("struct")
        else if (type_node.isEnumType())
            new_tok_mod.set("enum")
        else if (type_node.isUnionType())
            new_tok_mod.set("union")
        else if (type_node.isOpaqueType())
            new_tok_mod.set("opaque");

        try writeTokenMod(builder, target_tok, .type, new_tok_mod);
    } else if (type_node.isTypeFunc()) {
        try writeTokenMod(builder, target_tok, .type, tok_mod);
    } else if (type_node.isFunc()) {
        var new_tok_mod = tok_mod;
        if (type_node.isGenericFunc()) {
            new_tok_mod.set("generic");
        }
        try writeTokenMod(builder, target_tok, .function, new_tok_mod);
    } else {
        try writeTokenMod(builder, target_tok, .variable, tok_mod);
    }
}

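// Explicit error set for writeNodeTokens; the recursive @asyncCall below prevents
// using an inferred error set.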
const WriteTokensError = error{
    OutOfMemory,
    Utf8InvalidStartByte,
    CodepointTooLong,
    Utf8ExpectedContinuation,
    Utf8OverlongEncoding,
    Utf8EncodesSurrogateHalf,
    Utf8CodepointTooLarge,
    MovedBackwards,
};

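/// Emits semantic tokens for one AST node and its children. Recursive calls go
/// through `@asyncCall` with frames allocated from the arena's child allocator,
/// which keeps deep recursion off the native stack.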
fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?Ast.Node.Index) WriteTokensError!void {
    const node = maybe_node orelse return;

    const handle = builder.handle;
    const tree = handle.tree;
    const node_tags = tree.nodes.items(.tag);
    const token_tags = tree.tokens.items(.tag);
    const node_data = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);
    if (node == 0 or node > node_data.len) return;

    const FrameSize = @sizeOf(@Frame(writeNodeTokens));
    var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
    defer arena.child_allocator.free(child_frame);

    const tag = node_tags[node];
    const main_token = main_tokens[node];

    switch (tag) {
        .root => unreachable,
        .container_field,
        .container_field_align,
        .container_field_init,
        => try writeContainerField(builder, arena, store, node, .field, child_frame),
        .@"errdefer" => {
            try writeToken(builder, main_token, .keyword);

            if (node_data[node].lhs != 0) {
                const payload_tok = node_data[node].lhs;
                try writeToken(builder, payload_tok - 1, .operator);
                try writeToken(builder, payload_tok, .variable);
                try writeToken(builder, payload_tok + 1, .operator);
            }

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
        },
        .block,
        .block_semicolon,
        .block_two,
        .block_two_semicolon,
        => {
            if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) {
                try writeToken(builder, main_token - 2, .label);
            }

            const statements: []const Ast.Node.Index = switch (tag) {
                .block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
                .block_two, .block_two_semicolon => blk: {
                    const statements = &[_]Ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
                    const len: usize = if (node_data[node].lhs == 0)
                        @as(usize, 0)
                    else if (node_data[node].rhs == 0)
                        @as(usize, 1)
                    else
                        @as(usize, 2);
                    break :blk statements[0..len];
                },
                else => unreachable,
            };

            for (statements) |child| {
                if (node_tags[child].isContainerField()) {
                    try writeContainerField(builder, arena, store, child, .field, child_frame);
                } else {
                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
                }
            }
        },
        .global_var_decl,
        .local_var_decl,
        .simple_var_decl,
        .aligned_var_decl,
        => {
            const var_decl = SemanticToken.varDecl(tree, node).?;
            if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
                try writeDocComments(builder, tree, comment_idx);

            try writeToken(builder, var_decl.visib_token, .keyword);
            try writeToken(builder, var_decl.extern_export_token, .keyword);
            try writeToken(builder, var_decl.threadlocal_token, .keyword);
            try writeToken(builder, var_decl.comptime_token, .keyword);
            try writeToken(builder, var_decl.ast.mut_token, .keyword);

            if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| {
                try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true });
            } else {
                try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
            }
            try writeToken(builder, var_decl.ast.mut_token + 2, .operator);

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node });

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node });
        },
        .@"usingnamespace" => {
            const first_tok = tree.firstToken(node);
            if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment)
                try writeDocComments(builder, tree, first_tok - 1);
            try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword);
            try writeToken(builder, main_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
        },
        .container_decl,
        .container_decl_trailing,
        .container_decl_two,
        .container_decl_two_trailing,
        .container_decl_arg,
        .container_decl_arg_trailing,
        .tagged_union,
        .tagged_union_trailing,
        .tagged_union_enum_tag,
        .tagged_union_enum_tag_trailing,
        .tagged_union_two,
        .tagged_union_two_trailing,
        => {
            var buf: [2]Ast.Node.Index = undefined;
            const decl: Ast.full.ContainerDecl = switch (tag) {
                .container_decl, .container_decl_trailing => tree.containerDecl(node),
                .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node),
                .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
                .tagged_union, .tagged_union_trailing => tree.taggedUnion(node),
                .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node),
                .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node),
                else => unreachable,
            };

            try writeToken(builder, decl.layout_token, .keyword);
            try writeToken(builder, decl.ast.main_token, .keyword);
            if (decl.ast.enum_token) |enum_token| {
                if (decl.ast.arg != 0)
                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg })
                else
                    try writeToken(builder, enum_token, .keyword);
            } else try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg });

            const field_token_type = fieldTokenType(node, handle);
            for (decl.ast.members) |child| {
                if (node_tags[child].isContainerField()) {
                    try writeContainerField(builder, arena, store, child, field_token_type, child_frame);
                } else {
                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child });
                }
            }
        },
        .error_value => {
            if (node_data[node].lhs > 0) {
                try writeToken(builder, node_data[node].lhs - 1, .keyword);
            }
            try writeToken(builder, node_data[node].rhs, .errorTag);
        },
        .identifier => {
            if (analysis.isTypeIdent(tree, main_token)) {
                return try writeToken(builder, main_token, .type);
            }

            if (try analysis.lookupSymbolGlobal(
                store,
                arena,
                handle,
                tree.getNodeSource(node),
                tree.tokens.items(.start)[main_token],
            )) |child| {
                if (child.decl.* == .param_decl) {
                    return try writeToken(builder, main_token, .parameter);
                }
                var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
                if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| {
                    try colorIdentifierBasedOnType(builder, decl_type, main_token, .{});
                } else {
                    try writeTokenMod(builder, main_token, .variable, .{});
                }
            }
        },
        .fn_proto,
        .fn_proto_one,
        .fn_proto_simple,
        .fn_proto_multi,
        .fn_decl,
        => {
            var buf: [1]Ast.Node.Index = undefined;
            const fn_proto: Ast.full.FnProto = SemanticToken.fnProto(tree, node, &buf).?;
            if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
                try writeDocComments(builder, tree, docs);

            try writeToken(builder, fn_proto.visib_token, .keyword);
            try writeToken(builder, fn_proto.extern_export_inline_token, .keyword);
            try writeToken(builder, fn_proto.lib_name, .string);
            try writeToken(builder, fn_proto.ast.fn_token, .keyword);

            const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
                .type
            else
                .function;

            const tok_mod = if (analysis.isGenericFunction(tree, fn_proto))
                TokenModifiers{ .generic = true }
            else
                TokenModifiers{};

            try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod);

            var it = fn_proto.iterate(tree);
            while (it.next()) |param_decl| {
                if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, tree, docs);

                try writeToken(builder, param_decl.comptime_noalias, .keyword);
                try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true });
                if (param_decl.anytype_ellipsis3) |any_token| {
                    try writeToken(builder, any_token, .type);
                } else try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr });
            }

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr });

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type });

            if (tag == .fn_decl)
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
        },
        .anyframe_type => {
            try writeToken(builder, main_token, .type);
            if (node_data[node].rhs != 0) {
                try writeToken(builder, node_data[node].lhs, .type);
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
            }
        },
        .@"defer" => {
            try writeToken(builder, main_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
        },
        .@"comptime",
        .@"nosuspend",
        => {
            if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
                try writeDocComments(builder, tree, doc);
            try writeToken(builder, main_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
        },
        .@"switch",
        .switch_comma,
        => {
            try writeToken(builder, main_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
            const extra = tree.extraData(node_data[node].rhs, Ast.Node.SubRange);
            const cases = tree.extra_data[extra.start..extra.end];

            for (cases) |case_node| {
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node });
            }
        },
        .switch_case_one,
        .switch_case,
        => {
            const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node);
            for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node });
            // check if it's 'else'
            if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
            try writeToken(builder, switch_case.ast.arrow_token, .operator);
            if (switch_case.payload_token) |payload_token| {
                const actual_payload = payload_token + @boolToInt(token_tags[payload_token] == .asterisk);
                try writeToken(builder, actual_payload, .variable);
            }
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr });
        },
.@"while",
|
|
|
|
.while_simple,
|
|
|
|
.while_cont,
|
|
|
|
.for_simple,
|
|
|
|
.@"for",
|
|
|
|
=> {
|
2021-09-04 20:25:35 +01:00
|
|
|
const while_node = SemanticToken.whileAst(tree, node).?;
|
2021-04-19 15:17:46 +01:00
|
|
|
try writeToken(builder, while_node.label_token, .label);
|
|
|
|
try writeToken(builder, while_node.inline_token, .keyword);
|
|
|
|
try writeToken(builder, while_node.ast.while_token, .keyword);
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr });
|
|
|
|
if (while_node.payload_token) |payload| {
|
|
|
|
try writeToken(builder, payload - 1, .operator);
|
|
|
|
try writeToken(builder, payload, .variable);
|
|
|
|
var r_pipe = payload + 1;
|
|
|
|
if (token_tags[r_pipe] == .comma) {
|
|
|
|
r_pipe += 1;
|
|
|
|
try writeToken(builder, r_pipe, .variable);
|
|
|
|
r_pipe += 1;
|
2021-04-15 10:07:43 +01:00
|
|
|
}
|
2021-04-19 15:17:46 +01:00
|
|
|
try writeToken(builder, r_pipe, .operator);
|
|
|
|
}
|
2021-05-02 18:15:31 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr });
|
2021-03-04 21:30:25 +00:00
|
|
|
|
2021-04-19 15:17:46 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr });
|
2021-03-04 21:30:25 +00:00
|
|
|
|
2021-04-19 15:17:46 +01:00
|
|
|
if (while_node.ast.else_expr != 0) {
|
|
|
|
try writeToken(builder, while_node.else_token, .keyword);
|
2021-03-04 21:30:25 +00:00
|
|
|
|
2021-04-19 15:17:46 +01:00
|
|
|
if (while_node.error_token) |err_token| {
|
|
|
|
try writeToken(builder, err_token - 1, .operator);
|
|
|
|
try writeToken(builder, err_token, .variable);
|
|
|
|
try writeToken(builder, err_token + 1, .operator);
|
2021-04-15 10:07:43 +01:00
|
|
|
}
|
2021-04-19 15:17:46 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr });
|
|
|
|
}
|
|
|
|
},
|
|
|
|
.@"if",
|
|
|
|
.if_simple,
|
|
|
|
=> {
|
2021-09-04 20:25:35 +01:00
|
|
|
const if_node = SemanticToken.ifFull(tree, node);
|
2021-04-19 15:17:46 +01:00
|
|
|
|
|
|
|
try writeToken(builder, if_node.ast.if_token, .keyword);
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });
|
|
|
|
|
|
|
|
if (if_node.payload_token) |payload| {
|
|
|
|
// if (?x) |x|
|
|
|
|
try writeToken(builder, payload - 1, .operator); // |
|
|
|
|
try writeToken(builder, payload, .variable); // x
|
|
|
|
try writeToken(builder, payload + 1, .operator); // |
|
|
|
|
}
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr });
|
|
|
|
|
|
|
|
if (if_node.ast.else_expr != 0) {
|
|
|
|
try writeToken(builder, if_node.else_token, .keyword);
|
|
|
|
if (if_node.error_token) |err_token| {
|
|
|
|
// else |err|
|
|
|
|
try writeToken(builder, err_token - 1, .operator); // |
|
|
|
|
try writeToken(builder, err_token, .variable); // err
|
|
|
|
try writeToken(builder, err_token + 1, .operator); // |
|
2021-04-15 10:07:43 +01:00
|
|
|
}
|
2021-04-19 15:17:46 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr });
|
|
|
|
}
|
|
|
|
},
|
|
|
|
        .array_init,
        .array_init_comma,
        .array_init_one,
        .array_init_one_comma,
        .array_init_dot,
        .array_init_dot_comma,
        .array_init_dot_two,
        .array_init_dot_two_comma,
        => {
            var buf: [2]Ast.Node.Index = undefined;
            const array_init: Ast.full.ArrayInit = switch (tag) {
                .array_init, .array_init_comma => tree.arrayInit(node),
                .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
                .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
                .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
                else => unreachable,
            };

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr });
            for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem });
        },
        .struct_init,
        .struct_init_comma,
        .struct_init_dot,
        .struct_init_dot_comma,
        .struct_init_one,
        .struct_init_one_comma,
        .struct_init_dot_two,
        .struct_init_dot_two_comma,
        => {
            var buf: [2]Ast.Node.Index = undefined;
            const struct_init: Ast.full.StructInit = switch (tag) {
                .struct_init, .struct_init_comma => tree.structInit(node),
                .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
                .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
                .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
                else => unreachable,
            };

            var field_token_type: ?TokenType = null;

            if (struct_init.ast.type_expr != 0) {
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr });

                field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{
                    .node = struct_init.ast.type_expr,
                    .handle = handle,
                })) |struct_type| switch (struct_type.type.data) {
                    .other => |type_node| if (SemanticToken.isContainer(struct_type.handle.tree, type_node))
                        fieldTokenType(type_node, struct_type.handle)
                    else
                        null,
                    else => null,
                } else null;
            }

            for (struct_init.ast.fields) |field_init| {
                const init_token = tree.firstToken(field_init);
                try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.'
                try writeToken(builder, init_token - 2, field_token_type orelse .field); // name
                try writeToken(builder, init_token - 1, .operator); // '='
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init });
            }
        },
        .call,
        .call_comma,
        .async_call,
        .async_call_comma,
        .call_one,
        .call_one_comma,
        .async_call_one,
        .async_call_one_comma,
        => {
            var params: [1]Ast.Node.Index = undefined;
            const call: Ast.full.Call = switch (tag) {
                .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
                .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&params, node),
                else => unreachable,
            };

            try writeToken(builder, call.async_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr });

            if (builder.previous_token) |prev| {
                if (prev != SemanticToken.lastToken(tree, call.ast.fn_expr) and token_tags[SemanticToken.lastToken(tree, call.ast.fn_expr)] == .identifier) {
                    try writeToken(builder, SemanticToken.lastToken(tree, call.ast.fn_expr), .function);
                }
            }
            for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
        },
        .slice,
        .slice_open,
        .slice_sentinel,
        => {
            const slice: Ast.full.Slice = switch (tag) {
                .slice => tree.slice(node),
                .slice_open => tree.sliceOpen(node),
                .slice_sentinel => tree.sliceSentinel(node),
                else => unreachable,
            };

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start });
            try writeToken(builder, SemanticToken.lastToken(tree, slice.ast.start) + 1, .operator);

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel });
        },
        .array_access => {
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
        },
        .deref => {
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
            try writeToken(builder, main_token, .operator);
        },
        .unwrap_optional => {
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
            try writeToken(builder, main_token + 1, .operator);
        },
        .grouped_expression => {
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
        },
.@"break",
|
|
|
|
.@"continue",
|
|
|
|
=> {
|
|
|
|
try writeToken(builder, main_token, .keyword);
|
|
|
|
if (node_data[node].lhs != 0)
|
|
|
|
try writeToken(builder, node_data[node].lhs, .label);
|
2021-05-02 18:15:31 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
|
2021-04-19 15:17:46 +01:00
|
|
|
},
|
|
|
|
.@"suspend", .@"return" => {
|
|
|
|
try writeToken(builder, main_token, .keyword);
|
2021-05-02 18:15:31 +01:00
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
|
2021-04-19 15:17:46 +01:00
|
|
|
},
|
|
|
|
.integer_literal,
|
|
|
|
.float_literal,
|
|
|
|
=> {
|
|
|
|
try writeToken(builder, main_token, .number);
|
|
|
|
},
|
|
|
|
.enum_literal => {
|
|
|
|
try writeToken(builder, main_token - 1, .enumMember);
|
|
|
|
try writeToken(builder, main_token, .enumMember);
|
|
|
|
},
|
|
|
|
        .builtin_call,
        .builtin_call_comma,
        .builtin_call_two,
        .builtin_call_two_comma,
        => {
            const data = node_data[node];
            const params = switch (tag) {
                .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
                .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
                    &[_]Ast.Node.Index{}
                else if (data.rhs == 0)
                    &[_]Ast.Node.Index{data.lhs}
                else
                    &[_]Ast.Node.Index{ data.lhs, data.rhs },
                else => unreachable,
            };

            try writeToken(builder, main_token, .builtin);
            for (params) |param|
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param });
        },
        .string_literal,
        .char_literal,
        => {
            try writeToken(builder, main_token, .string);
        },
        .multiline_string_literal => {
            var cur_tok = main_token;
            const last_tok = node_data[node].rhs;

            while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string);
        },
        .unreachable_literal => {
            try writeToken(builder, main_token, .keywordLiteral);
        },
        .error_set_decl => {
            try writeToken(builder, main_token, .keyword);
        },
.@"asm",
|
|
|
|
.asm_output,
|
|
|
|
.asm_input,
|
|
|
|
.asm_simple,
|
|
|
|
=> {
|
2021-10-01 02:44:06 +01:00
|
|
|
const asm_node: Ast.full.Asm = switch (tag) {
|
2021-04-19 15:17:46 +01:00
|
|
|
.@"asm" => tree.asmFull(node),
|
|
|
|
.asm_simple => tree.asmSimple(node),
|
|
|
|
else => return, // TODO Inputs, outputs
|
|
|
|
};
|
|
|
|
|
|
|
|
try writeToken(builder, main_token, .keyword);
|
|
|
|
try writeToken(builder, asm_node.volatile_token, .keyword);
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template });
|
|
|
|
// TODO Inputs, outputs.
|
|
|
|
},
|
|
|
|
.@"anytype" => {
|
|
|
|
try writeToken(builder, main_token, .type);
|
|
|
|
},
|
|
|
|
.test_decl => {
|
|
|
|
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
|
|
|
|
try writeDocComments(builder, tree, doc);
|
|
|
|
|
|
|
|
try writeToken(builder, main_token, .keyword);
|
|
|
|
if (token_tags[main_token + 1] == .string_literal)
|
|
|
|
try writeToken(builder, main_token + 1, .string);
|
|
|
|
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
|
|
|
|
},
|
|
|
|
.@"catch" => {
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
|
|
|
|
try writeToken(builder, main_token, .keyword);
|
|
|
|
if (token_tags[main_token + 1] == .pipe)
|
|
|
|
try writeToken(builder, main_token + 1, .variable);
|
|
|
|
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
|
|
|
|
},
|
|
|
|
        .add,
        .add_wrap,
        .add_sat,
        .array_cat,
        .array_mult,
        .assign,
        .assign_bit_and,
        .assign_bit_or,
        .assign_shl,
        .assign_shl_sat,
        .assign_shr,
        .assign_bit_xor,
        .assign_div,
        .assign_sub,
        .assign_sub_wrap,
        .assign_sub_sat,
        .assign_mod,
        .assign_add,
        .assign_add_wrap,
        .assign_add_sat,
        .assign_mul,
        .assign_mul_wrap,
        .assign_mul_sat,
        .bang_equal,
        .bit_and,
        .bit_or,
        .shl,
        .shl_sat,
        .shr,
        .bit_xor,
        .bool_and,
        .bool_or,
        .div,
        .equal_equal,
        .error_union,
        .greater_or_equal,
        .greater_than,
        .less_or_equal,
        .less_than,
        .merge_error_sets,
        .mod,
        .mul,
        .mul_wrap,
        .mul_sat,
        .switch_range,
        .sub,
        .sub_wrap,
        .sub_sat,
        .@"orelse",
        => {
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
            const token_type: TokenType = switch (tag) {
                .bool_and, .bool_or => .keyword,
                else => .operator,
            };

            try writeToken(builder, main_token, token_type);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].rhs });
        },
        .field_access => {
            const data = node_data[node];
            if (data.rhs == 0) return;
            const rhs_str = tree.tokenSlice(data.rhs);

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs });

            // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
            // writeToken code.
            // Maybe we can hook into it instead? Also applies to Identifier and VarDecl
            var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
            const lhs_type = try analysis.resolveFieldAccessLhsType(
                store,
                arena,
                (try analysis.resolveTypeOfNodeInternal(store, arena, .{
                    .node = data.lhs,
                    .handle = handle,
                }, &bound_type_params)) orelse return,
                &bound_type_params,
            );
            const left_type_node = switch (lhs_type.type.data) {
                .other => |n| n,
                else => return,
            };
            if (try analysis.lookupSymbolContainer(store, arena, .{
                .node = left_type_node,
                .handle = lhs_type.handle,
            }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| {
                switch (decl_type.decl.*) {
                    .ast_node => |decl_node| {
                        if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) {
                            const tok_type: ?TokenType = if (SemanticToken.isContainer(lhs_type.handle.tree, left_type_node))
                                fieldTokenType(decl_node, lhs_type.handle)
                            else if (left_type_node == 0)
                                TokenType.field
                            else
                                null;

                            if (tok_type) |tt| try writeToken(builder, data.rhs, tt);
                            return;
                        } else if (decl_type.handle.tree.nodes.items(.tag)[decl_node] == .error_value) {
                            try writeToken(builder, data.rhs, .errorTag);
                        }
                    },
                    else => {},
                }

                if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| {
                    try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{});
                }
            }
        },
        .ptr_type,
        .ptr_type_aligned,
        .ptr_type_bit_range,
        .ptr_type_sentinel,
        => {
            const ptr_type = SemanticToken.ptrType(tree, node).?;

            if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
                main_token == main_tokens[ptr_type.ast.child_type])
            {
                return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type });
            }

            if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
            if (ptr_type.ast.sentinel != 0) {
                return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.sentinel });
            }

            try writeToken(builder, ptr_type.allowzero_token, .keyword);

            if (ptr_type.ast.align_node != 0) {
                const first_tok = tree.firstToken(ptr_type.ast.align_node);
                try writeToken(builder, first_tok - 2, .keyword);
                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node });

                if (ptr_type.ast.bit_range_start != 0) {
                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start });
                    try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator);
                    try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end });
                }
            }

            try writeToken(builder, ptr_type.const_token, .keyword);
            try writeToken(builder, ptr_type.volatile_token, .keyword);

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type });
        },
        .array_type,
        .array_type_sentinel,
        => {
            const array_type: Ast.full.ArrayType = if (tag == .array_type)
                tree.arrayType(node)
            else
                tree.arrayTypeSentinel(node);

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_count });
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.sentinel });

            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type });
        },
        .address_of,
        .bit_not,
        .bool_not,
        .optional_type,
        .negation,
        .negation_wrap,
        => {
            try writeToken(builder, main_token, .operator);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
        },
        .@"try",
        .@"resume",
        .@"await",
        => {
            try writeToken(builder, main_token, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
        },
        .anyframe_literal => try writeToken(builder, main_token, .keyword),
    }
}

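/// Highlights a single container field: doc comments, the `comptime` keyword,
/// the field name, its type/alignment expressions, and any default value.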
fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, node: Ast.Node.Index, field_token_type: ?TokenType, child_frame: anytype) !void {
    const tree = builder.handle.tree;
    const container_field = SemanticToken.containerField(tree, node).?;
    const base = tree.nodes.items(.main_token)[node];
    const tokens = tree.tokens.items(.tag);

    if (analysis.getDocCommentTokenIndex(tokens, base)) |docs|
        try writeDocComments(builder, tree, docs);

    try writeToken(builder, container_field.comptime_token, .keyword);
    if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type);

    if (container_field.ast.type_expr != 0) {
        try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr });
        if (container_field.ast.align_expr != 0) {
            try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword);
            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr });
        }
    }

    if (container_field.ast.value_expr != 0) block: {
        const eq_tok: Ast.TokenIndex = if (container_field.ast.align_expr != 0)
            SemanticToken.lastToken(tree, container_field.ast.align_expr) + 2
        else if (container_field.ast.type_expr != 0)
            SemanticToken.lastToken(tree, container_field.ast.type_expr) + 1
        else
            break :block;

        try writeToken(builder, eq_tok, .operator);
        try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr });
    }
}

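// Entry point for a full-document semantic tokens request: root declarations are
// walked one by one, so a MovedBackwards error only stops the walk early and the
// tokens collected so far are still returned.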
// TODO Range version, edit version.
pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
    var builder = Builder.init(arena.child_allocator, handle, encoding);
    errdefer builder.arr.deinit();

    // traverse the AST from the root declarations
    var buf: [2]Ast.Node.Index = undefined;
    for (SemanticToken.declMembers(handle.tree, 0, &buf)) |child| {
        writeNodeTokens(&builder, arena, store, child) catch |err| switch (err) {
            error.MovedBackwards => break,
            else => |e| return e,
        };
    }
    try builder.finish();
    return builder.toOwnedSlice();
}