Merge pull request #121 from alexnask/master
Prototype semantic tokens implementation with actual semantic analysis
commit 9d1fdb420b
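Background, not part of the diff: the token data built below follows the LSP semantic tokens wire format, where the server returns one flat array of unsigned integers, five per token (delta line, delta start column, length, token type index, modifier bit set), with positions relative to the previous token. A minimal client-side decoding sketch, with hypothetical names, that the delta computation in Builder.add has to round-trip with:

const std = @import("std");

// Hypothetical decoder for the []u32 produced by writeAllSemanticTokens.
fn decodeSemanticTokens(data: []const u32) void {
    var line: u32 = 0;
    var col: u32 = 0;
    var i: usize = 0;
    while (i + 5 <= data.len) : (i += 5) {
        if (data[i] != 0) col = 0; // moving to a new line resets the column delta
        line += data[i];
        col += data[i + 1];
        const length = data[i + 2]; // token length in characters
        const token_type = data[i + 3]; // index into the legend's tokenTypes
        const modifiers = data[i + 4]; // bit set over the legend's tokenModifiers
        std.debug.print("{d}:{d} len={d} type={d} mods={b}\n", .{ line, col, length, token_type, modifiers });
    }
}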
analysis.zig

@@ -403,7 +403,7 @@ fn resolveUnwrapErrorType(
         .type = .{ .data = .{ .other = n }, .is_type_val = rhs.type.is_type_val },
         .handle = rhs.handle,
     },
-    .slice => return null,
+    .primitive, .slice => return null,
 };

 if (rhs_node.cast(ast.Node.InfixOp)) |infix_op| {
@@ -514,7 +514,38 @@ fn resolveFieldAccessLhsType(
     return (try resolveDerefType(store, arena, lhs, bound_type_params)) orelse lhs;
 }

-const BoundTypeParams = std.AutoHashMap(*const ast.Node.FnProto.ParamDecl, TypeWithHandle);
+pub const BoundTypeParams = std.AutoHashMap(*const ast.Node.FnProto.ParamDecl, TypeWithHandle);
+
+fn allDigits(str: []const u8) bool {
+    for (str) |c| {
+        if (!std.ascii.isDigit(c)) return false;
+    }
+    return true;
+}
+
+pub fn isTypeIdent(tree: *ast.Tree, token_idx: ast.TokenIndex) bool {
+    const PrimitiveTypes = std.ComptimeStringMap(void, .{
+        .{"isize"},          .{"usize"},
+        .{"c_short"},        .{"c_ushort"},
+        .{"c_int"},          .{"c_uint"},
+        .{"c_long"},         .{"c_ulong"},
+        .{"c_longlong"},     .{"c_ulonglong"},
+        .{"c_longdouble"},   .{"c_void"},
+        .{"f16"},            .{"f32"},
+        .{"f64"},            .{"f128"},
+        .{"bool"},           .{"void"},
+        .{"noreturn"},       .{"type"},
+        .{"anyerror"},       .{"comptime_int"},
+        .{"comptime_float"}, .{"anyframe"},
+    });
+
+    const text = tree.tokenSlice(token_idx);
+    if (PrimitiveTypes.has(text)) return true;
+    if (text.len > 1 and (text[0] == 'u' or text[0] == 'i') and allDigits(text[1..]))
+        return true;
+
+    return false;
+}

 /// Resolves the type of a node
 fn resolveTypeOfNodeInternal(
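In short, isTypeIdent accepts the fixed primitive type names plus arbitrary-width integer identifiers. A string-level restatement of the same predicate (hypothetical, for illustration; the real function reads the token's slice out of the tree):

// Equivalent check on a raw string, assuming the same PrimitiveTypes
// map and allDigits helper as in the hunk above:
fn looksLikeTypeName(text: []const u8) bool {
    if (PrimitiveTypes.has(text)) return true;
    // accepts "u8", "i128", ...; rejects "u", "x32", "uxx"
    return text.len > 1 and (text[0] == 'u' or text[0] == 'i') and allDigits(text[1..]);
}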
@@ -541,6 +572,13 @@ fn resolveTypeOfNodeInternal(
             return try resolveTypeOfNodeInternal(store, arena, .{ .node = vari.init_node.?, .handle = handle }, bound_type_params);
         },
         .Identifier => {
+            if (isTypeIdent(handle.tree, node.firstToken())) {
+                return TypeWithHandle{
+                    .type = .{ .data = .primitive, .is_type_val = true },
+                    .handle = handle,
+                };
+            }
+
             if (try lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| {
                 return try child.resolveType(store, arena, bound_type_params);
             }
@@ -802,6 +840,7 @@ pub const Type = struct {
         slice: *ast.Node,
         error_union: *ast.Node,
         other: *ast.Node,
+        primitive,
     },
     /// If true, the type is `type`, and the attached data is the value of the type value.
     is_type_val: bool,
@@ -828,6 +867,37 @@ pub const TypeWithHandle = struct {
             .handle = self.handle,
         };
     }
+
+    fn isRoot(self: TypeWithHandle) bool {
+        switch (self.type.data) {
+            .other => |n| return n.id == .Root,
+            else => return false,
+        }
+    }
+
+    fn isContainer(self: TypeWithHandle, container_kind_tok: std.zig.Token.Id) bool {
+        switch (self.type.data) {
+            .other => |n| {
+                if (n.cast(ast.Node.ContainerDecl)) |cont| {
+                    return self.handle.tree.token_ids[cont.kind_token] == container_kind_tok;
+                }
+                return false;
+            },
+            else => return false,
+        }
+    }
+
+    pub fn isStructType(self: TypeWithHandle) bool {
+        return self.isContainer(.Keyword_struct) or self.isRoot();
+    }
+
+    pub fn isEnumType(self: TypeWithHandle) bool {
+        return self.isContainer(.Keyword_enum);
+    }
+
+    pub fn isUnionType(self: TypeWithHandle) bool {
+        return self.isContainer(.Keyword_union);
+    }
 };

 pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator, node_handle: NodeWithHandle) error{OutOfMemory}!?TypeWithHandle {
@@ -1338,7 +1408,7 @@ pub const DeclWithHandle = struct {
         };
     }

-    fn resolveType(self: DeclWithHandle, store: *DocumentStore, arena: *std.heap.ArenaAllocator, bound_type_params: *BoundTypeParams) !?TypeWithHandle {
+    pub fn resolveType(self: DeclWithHandle, store: *DocumentStore, arena: *std.heap.ArenaAllocator, bound_type_params: *BoundTypeParams) !?TypeWithHandle {
         return switch (self.decl.*) {
             .ast_node => |node| try resolveTypeOfNodeInternal(store, arena, .{ .node = node, .handle = self.handle }, bound_type_params),
             .param_decl => |param_decl| switch (param_decl.param_type) {
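resolveType and BoundTypeParams are made public here so that semantic_tokens.zig can resolve a declaration to its type when classifying identifiers. The calling pattern, as used in the Identifier case further down (sketched here with an assumed `decl: analysis.DeclWithHandle` and an `arena: *std.heap.ArenaAllocator` in scope):

// Sketch of the consumer side of the now-public API:
var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
if (try decl.resolveType(store, arena, &bound_type_params)) |decl_type| {
    // decl_type.isStructType() / isEnumType() / isUnionType() decide whether
    // the identifier is highlighted as struct, enum, union or plain type.
}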
main.zig

@@ -28,7 +28,7 @@ const ClientCapabilities = struct {
 var client_capabilities = ClientCapabilities{};

 const initialize_response =
-    \\,"result": {"capabilities": {"signatureHelpProvider": {"triggerCharacters": ["(",","]},"textDocumentSync": 1,"completionProvider": {"resolveProvider": false,"triggerCharacters": [".",":","@"]},"documentHighlightProvider": false,"hoverProvider": true,"codeActionProvider": false,"declarationProvider": true,"definitionProvider": true,"typeDefinitionProvider": true,"implementationProvider": false,"referencesProvider": false,"documentSymbolProvider": true,"colorProvider": false,"documentFormattingProvider": true,"documentRangeFormattingProvider": false,"foldingRangeProvider": false,"selectionRangeProvider": false,"workspaceSymbolProvider": false,"rangeProvider": false,"documentProvider": true,"workspace": {"workspaceFolders": {"supported": true,"changeNotifications": true}},"semanticTokensProvider": {"documentProvider": true,"legend": {"tokenTypes": ["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator","builtin"],"tokenModifiers": ["definition","async","documentation", "generic"]}}}}}
+    \\,"result": {"capabilities": {"signatureHelpProvider": {"triggerCharacters": ["(",","]},"textDocumentSync": 1,"completionProvider": {"resolveProvider": false,"triggerCharacters": [".",":","@"]},"documentHighlightProvider": false,"hoverProvider": true,"codeActionProvider": false,"declarationProvider": true,"definitionProvider": true,"typeDefinitionProvider": true,"implementationProvider": false,"referencesProvider": false,"documentSymbolProvider": true,"colorProvider": false,"documentFormattingProvider": true,"documentRangeFormattingProvider": false,"foldingRangeProvider": false,"selectionRangeProvider": false,"workspaceSymbolProvider": false,"rangeProvider": false,"documentProvider": true,"workspace": {"workspaceFolders": {"supported": true,"changeNotifications": true}},"semanticTokensProvider": {"documentProvider": true,"legend": {"tokenTypes": ["type","struct","enum","union","parameter","variable","tagField","field","errorTag","function","keyword","modifier","comment","string","number","operator","builtin","label"],"tokenModifiers": ["definition","async","documentation", "generic"]}}}}}
 ;

 const not_implemented_response =

@@ -231,6 +231,7 @@ fn typeToCompletion(
             type_handle.type.is_type_val,
             config,
         ),
+        .primitive => {},
     }
 }

@@ -1064,7 +1065,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config, ke
     };

     const semantic_tokens = @import("semantic_tokens.zig");
-    const token_array = try semantic_tokens.writeAllSemanticTokens(allocator, handle.*);
+    const token_array = try semantic_tokens.writeAllSemanticTokens(allocator, &document_store, handle);
     defer allocator.free(token_array);

     return try send(types.Response{
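Note that TokenType in semantic_tokens.zig is an enum(u32) whose raw values are written directly into the token array, so the tokenTypes legend in the initialize response above must list the names in exactly the enum's declaration order; that is why errorTag and label are added to both sides in this change. One way to guard that invariant (hypothetical check, not in the commit):

comptime {
    // "errorTag" is the ninth legend entry, "label" the eighteenth,
    // so the enum values must be 8 and 17 respectively.
    std.debug.assert(@enumToInt(TokenType.errorTag) == 8);
    std.debug.assert(@enumToInt(TokenType.label) == 17);
}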
semantic_tokens.zig

@@ -1,5 +1,6 @@
 const std = @import("std");
+const DocumentStore = @import("document_store.zig");
 const analysis = @import("analysis.zig");
 const ast = std.zig.ast;

 const TokenType = enum(u32) {

@@ -11,6 +12,7 @@ const TokenType = enum(u32) {
     variable,
     tagField,
     field,
+    errorTag,
     function,
     keyword,
     modifier,

@@ -19,6 +21,7 @@ const TokenType = enum(u32) {
     number,
     operator,
     builtin,
+    label,
 };

 const TokenModifiers = packed struct {

@@ -37,13 +40,13 @@
 };

 const Builder = struct {
-    tree: *ast.Tree,
+    handle: *DocumentStore.Handle,
     current_token: ?ast.TokenIndex,
     arr: std.ArrayList(u32),

-    fn init(allocator: *std.mem.Allocator, tree: *ast.Tree) Builder {
+    fn init(allocator: *std.mem.Allocator, handle: *DocumentStore.Handle) Builder {
         return Builder{
-            .tree = tree,
+            .handle = handle,
             .current_token = null,
             .arr = std.ArrayList(u32).init(allocator),
         };
@@ -51,12 +54,12 @@

     fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
         const start_idx = if (self.current_token) |current_token|
-            self.tree.token_locs[current_token].start
+            self.handle.tree.token_locs[current_token].start
         else
             0;

-        const token_loc = self.tree.token_locs[token];
-        const delta_loc = self.tree.tokenLocationLoc(start_idx, token_loc);
+        const token_loc = self.handle.tree.token_locs[token];
+        const delta_loc = self.handle.tree.tokenLocationLoc(start_idx, token_loc);
         try self.arr.appendSlice(&[_]u32{
             @truncate(u32, delta_loc.line),
             @truncate(u32, delta_loc.column),
@@ -72,152 +75,535 @@
     }
 };

-fn isAllDigit(str: []const u8) bool {
-    for (str) |c| {
-        if (!std.ascii.isDigit(c)) return false;
-    }
-    return true;
+inline fn writeToken(builder: *Builder, token_idx: ?ast.TokenIndex, tok_type: TokenType) !void {
+    return try writeTokenMod(builder, token_idx, tok_type, .{});
 }

-fn isTypeIdent(tree: *ast.Tree, token_idx: ast.TokenIndex) bool {
-    const PrimitiveTypes = std.ComptimeStringMap(void, .{
-        .{"isize"},          .{"usize"},
-        .{"c_short"},        .{"c_ushort"},
-        .{"c_int"},          .{"c_uint"},
-        .{"c_long"},         .{"c_ulong"},
-        .{"c_longlong"},     .{"c_ulonglong"},
-        .{"c_longdouble"},   .{"c_void"},
-        .{"f16"},            .{"f32"},
-        .{"f64"},            .{"f128"},
-        .{"bool"},           .{"void"},
-        .{"noreturn"},       .{"type"},
-        .{"anyerror"},       .{"comptime_int"},
-        .{"comptime_float"}, .{"anyframe"},
+inline fn writeTokenMod(builder: *Builder, token_idx: ?ast.TokenIndex, tok_type: TokenType, tok_mod: TokenModifiers) !void {
+    if (token_idx) |ti| {
+        try builder.add(ti, tok_type, tok_mod);
+    }
+}
+
+fn writeDocComments(builder: *Builder, tree: *ast.Tree, doc: *ast.Node.DocComment) !void {
+    var tok_idx = doc.first_line;
+    while (tree.token_ids[tok_idx] == .DocComment or
+        tree.token_ids[tok_idx] == .ContainerDocComment or
+        tree.token_ids[tok_idx] == .LineComment) : (tok_idx += 1)
+    {
+        var tok_mod = TokenModifiers{};
+        if (tree.token_ids[tok_idx] == .DocComment or tree.token_ids[tok_idx] == .ContainerDocComment)
+            tok_mod.set("documentation");
+
+        try builder.add(tok_idx, .comment, tok_mod);
+    }
+}
+
+fn writeTokenResolveType(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, type_node: *ast.Node, tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
+    // Resolve the type of the declaration
+    if (try analysis.resolveTypeOfNode(store, arena, .{ .node = type_node, .handle = builder.handle })) |decl_type| {
+        if (decl_type.type.is_type_val) {
+            const tok_type = if (decl_type.isStructType())
+                .@"struct"
+            else if (decl_type.isEnumType())
+                .@"enum"
+            else if (decl_type.isUnionType())
+                .@"union"
+            else
+                TokenType.type;

+            try writeTokenMod(builder, tok, tok_type, tok_mod);
+        }
+    }
+}
+
+fn fieldTokenType(container_decl: *ast.Node.ContainerDecl, handle: *DocumentStore.Handle) ?TokenType {
+    return @as(?TokenType, switch (handle.tree.token_ids[container_decl.kind_token]) {
+        .Keyword_struct => .field,
+        .Keyword_union, .Keyword_enum => .tagField,
+        else => null,
     });
-
-    const text = tree.tokenSlice(token_idx);
-    if (PrimitiveTypes.has(text)) return true;
-    if (text.len > 1 and (text[0] == 'u' or text[0] == 'i') and isAllDigit(text[1..]))
-        return true;
-
-    return false;
 }

-pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentStore.Handle) ![]u32 {
-    var builder = Builder.init(allocator, handle.tree);
+fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?*ast.Node) error{OutOfMemory}!void {
+    if (maybe_node == null) return;
+    const node = maybe_node.?;
+    const handle = builder.handle;

-    // TODO We only scan tokens for now, we need to actually do semantic analysis
-    for (handle.tree.token_ids) |token_id, token_idx| {
-        var token_mod = TokenModifiers{};
-        const token_type: TokenType = switch (token_id) {
-            .StringLiteral, .MultilineStringLiteralLine, .CharLiteral => .string,
-            .Builtin => .builtin,
-            .IntegerLiteral, .FloatLiteral => .number,
-            .Bang,
-            .Pipe,
-            .PipePipe,
-            .PipeEqual,
-            .Equal,
-            .EqualEqual,
-            .EqualAngleBracketRight,
-            .BangEqual,
-            .Percent,
-            .PercentEqual,
-            .PeriodAsterisk,
-            .Caret,
-            .CaretEqual,
-            .Plus,
-            .PlusPlus,
-            .PlusEqual,
-            .PlusPercent,
-            .PlusPercentEqual,
-            .Minus,
-            .MinusEqual,
-            .MinusPercent,
-            .MinusPercentEqual,
-            .Asterisk,
-            .AsteriskEqual,
-            .AsteriskAsterisk,
-            .AsteriskPercent,
-            .AsteriskPercentEqual,
-            .Arrow,
-            .Slash,
-            .SlashEqual,
-            .Ampersand,
-            .AmpersandEqual,
-            .QuestionMark,
-            .AngleBracketLeft,
-            .AngleBracketLeftEqual,
-            .AngleBracketAngleBracketLeft,
-            .AngleBracketAngleBracketLeftEqual,
-            .AngleBracketRight,
-            .AngleBracketRightEqual,
-            .AngleBracketAngleBracketRight,
-            .AngleBracketAngleBracketRightEqual,
-            .Tilde,
-            => .operator,
-            .LineComment, .ContainerDocComment => .comment,
-            .DocComment => block: {
-                token_mod.set("documentation");
-                break :block .comment;
+    switch (node.id) {
+        .Root, .Block => {
+            if (node.cast(ast.Node.Block)) |block_node| {
+                try writeToken(builder, block_node.label, .label);
+            }
+
+            var previous_end = if (node.id == .Root) 0 else node.firstToken();
+            var child_idx: usize = 0;
+            while (node.iterate(child_idx)) |child| : (child_idx += 1) {
+                var i = previous_end;
+                while (i < child.firstToken()) : (i += 1) {
+                    if (handle.tree.token_ids[i] == .LineComment) {
+                        try writeToken(builder, i, .comment);
+                    }
+                }
+                try writeNodeTokens(builder, arena, store, child);
+                previous_end = child.lastToken();
+            }
+
+            var i = previous_end;
+            while (i < node.lastToken()) : (i += 1) {
+                if (handle.tree.token_ids[i] == .LineComment) {
+                    try writeToken(builder, i, .comment);
+                }
+            }
         },
-            .Keyword_align,
-            .Keyword_allowzero,
-            .Keyword_and,
-            .Keyword_asm,
-            .Keyword_async,
-            .Keyword_await,
-            .Keyword_break,
-            .Keyword_callconv,
-            .Keyword_catch,
-            .Keyword_comptime,
-            .Keyword_const,
-            .Keyword_continue,
-            .Keyword_defer,
-            .Keyword_else,
-            .Keyword_enum,
-            .Keyword_errdefer,
-            .Keyword_error,
-            .Keyword_export,
-            .Keyword_extern,
-            .Keyword_false,
-            .Keyword_fn,
-            .Keyword_for,
-            .Keyword_if,
-            .Keyword_inline,
-            .Keyword_noalias,
-            .Keyword_noinline,
-            .Keyword_nosuspend,
-            .Keyword_null,
-            .Keyword_or,
-            .Keyword_orelse,
-            .Keyword_packed,
-            .Keyword_anyframe,
-            .Keyword_pub,
-            .Keyword_resume,
-            .Keyword_return,
-            .Keyword_linksection,
-            .Keyword_struct,
-            .Keyword_suspend,
-            .Keyword_switch,
-            .Keyword_test,
-            .Keyword_threadlocal,
-            .Keyword_true,
-            .Keyword_try,
-            .Keyword_undefined,
-            .Keyword_union,
-            .Keyword_unreachable,
-            .Keyword_usingnamespace,
-            .Keyword_var,
-            .Keyword_volatile,
-            .Keyword_while,
-            => .keyword,
-            .Identifier => if (isTypeIdent(handle.tree, token_idx)) .type else continue,
-            else => continue,
+        .VarDecl => {
+            const var_decl = node.cast(ast.Node.VarDecl).?;
+            if (var_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc);
+            try writeToken(builder, var_decl.visib_token, .keyword);
+            try writeToken(builder, var_decl.extern_export_token, .keyword);
+            try writeToken(builder, var_decl.thread_local_token, .keyword);
+            try writeToken(builder, var_decl.comptime_token, .keyword);
+            try writeToken(builder, var_decl.mut_token, .keyword);
+            try writeTokenResolveType(builder, arena, store, node, var_decl.name_token, .{ .definition = true });
+            try writeNodeTokens(builder, arena, store, var_decl.type_node);
+            try writeNodeTokens(builder, arena, store, var_decl.align_node);
+            try writeNodeTokens(builder, arena, store, var_decl.section_node);
+            try writeToken(builder, var_decl.eq_token, .operator);
+            try writeNodeTokens(builder, arena, store, var_decl.init_node);
+        },
+        .Use => {
+            const use = node.cast(ast.Node.Use).?;
+            if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs);
+            try writeToken(builder, use.visib_token, .keyword);
+            try writeToken(builder, use.use_token, .keyword);
+            try writeNodeTokens(builder, arena, store, use.expr);
+        },
+        .ErrorSetDecl => {
+            const error_set = node.cast(ast.Node.ErrorSetDecl).?;
+            try writeToken(builder, error_set.error_token, .keyword);
+            for (error_set.declsConst()) |decl| try writeNodeTokens(builder, arena, store, decl);
+        },
+        .ContainerDecl => {
+            const container_decl = node.cast(ast.Node.ContainerDecl).?;
+            try writeToken(builder, container_decl.layout_token, .keyword);
+            try writeToken(builder, container_decl.kind_token, .keyword);
+            switch (container_decl.init_arg_expr) {
+                .None => {},
+                .Enum => |enum_expr| if (enum_expr) |expr|
+                    try writeNodeTokens(builder, arena, store, expr)
+                else
+                    try writeToken(builder, container_decl.kind_token + 2, .keyword),
+                .Type => |type_node| try writeNodeTokens(builder, arena, store, type_node),
+            }
+
+            const field_token_type = fieldTokenType(container_decl, handle);
+            var previous_end = container_decl.firstToken();
+            for (container_decl.fieldsAndDeclsConst()) |child| {
+                var i = previous_end;
+                while (i < child.firstToken()) : (i += 1) {
+                    if (handle.tree.token_ids[i] == .LineComment) {
+                        try writeToken(builder, i, .comment);
+                    }
+                }
+                previous_end = child.lastToken();
+
+                if (child.cast(ast.Node.ContainerField)) |container_field| {
+                    if (container_field.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
+                    try writeToken(builder, container_field.comptime_token, .keyword);
+                    if (field_token_type) |tok_type| try writeToken(builder, container_field.name_token, tok_type);
+                    try writeNodeTokens(builder, arena, store, container_field.align_expr);
+                    try writeNodeTokens(builder, arena, store, container_field.type_expr);
+
+                    if (container_field.value_expr) |value_expr| {
+                        const eq_tok: ast.TokenIndex = if (container_field.type_expr) |type_expr|
+                            type_expr.lastToken() + 1
+                        else if (container_field.align_expr) |align_expr|
+                            align_expr.lastToken() + 1
+                        else
+                            unreachable; // Check this, I believe it is correct.
+
+                        try writeToken(builder, eq_tok, .operator);
+                        try writeNodeTokens(builder, arena, store, value_expr);
+                    }
+                } else {
+                    try writeNodeTokens(builder, arena, store, child);
+                }
+            }
+
+            var i = previous_end;
+            while (i < node.lastToken()) : (i += 1) {
+                if (handle.tree.token_ids[i] == .LineComment) {
+                    try writeToken(builder, i, .comment);
+                }
+            }
+        },
+        .ErrorTag => {
+            const error_tag = node.cast(ast.Node.ErrorTag).?;
+            if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
+            try writeToken(builder, error_tag.firstToken(), .errorTag);
+        },
+        .Identifier => {
+            if (analysis.isTypeIdent(handle.tree, node.firstToken())) {
+                return try writeToken(builder, node.firstToken(), .type);
+            }
+
+            if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| {
+                if (child.decl.* == .param_decl) {
+                    return try writeToken(builder, node.firstToken(), .parameter);
+                }
+                // TODO: Clean this up.
+                var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
+                if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| {
+                    if (decl_type.type.is_type_val) {
+                        const tok_type = if (decl_type.isStructType())
+                            .@"struct"
+                        else if (decl_type.isEnumType())
+                            .@"enum"
+                        else if (decl_type.isUnionType())
+                            .@"union"
+                        else
+                            TokenType.type;
+                        return try writeTokenMod(builder, node.firstToken(), tok_type, .{});
+                    }
+                }
+            }
+        },
+        .FnProto => {
+            const fn_proto = node.cast(ast.Node.FnProto).?;
+            if (fn_proto.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
+            try writeToken(builder, fn_proto.visib_token, .keyword);
+            try writeToken(builder, fn_proto.extern_export_inline_token, .keyword);
+            try writeNodeTokens(builder, arena, store, fn_proto.lib_name);
+            try writeToken(builder, fn_proto.fn_token, .keyword);
+
+            const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto))
+                .type
+            else
+                .function;
+            try writeToken(builder, fn_proto.name_token, func_name_tok_type);
+
+            for (fn_proto.paramsConst()) |param_decl| {
+                if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
+                try writeToken(builder, param_decl.noalias_token, .keyword);
+                try writeToken(builder, param_decl.comptime_token, .keyword);
+                try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .definition = true });
+                switch (param_decl.param_type) {
+                    .var_type => |var_node| try writeToken(builder, var_node.firstToken(), .type),
+                    .var_args => |var_args_tok| try writeToken(builder, var_args_tok, .operator),
+                    .type_expr => |type_expr| try writeNodeTokens(builder, arena, store, type_expr),
+                }
+            }
+            try writeNodeTokens(builder, arena, store, fn_proto.align_expr);
+            try writeNodeTokens(builder, arena, store, fn_proto.section_expr);
+            try writeNodeTokens(builder, arena, store, fn_proto.callconv_expr);
+
+            switch (fn_proto.return_type) {
+                .Explicit => |type_expr| try writeNodeTokens(builder, arena, store, type_expr),
+                .InferErrorSet => |type_expr| {
+                    try writeToken(builder, type_expr.firstToken() - 1, .operator);
+                    try writeNodeTokens(builder, arena, store, type_expr);
+                },
+                .Invalid => {},
+            }
+            try writeNodeTokens(builder, arena, store, fn_proto.body_node);
+        },
+        .AnyFrameType => {
+            const any_frame_type = node.cast(ast.Node.AnyFrameType).?;
+            try writeToken(builder, any_frame_type.anyframe_token, .type);
+            if (any_frame_type.result) |result| {
+                try writeToken(builder, result.arrow_token, .type);
+                try writeNodeTokens(builder, arena, store, result.return_type);
+            }
+        },
+        .Defer => {
+            const defer_node = node.cast(ast.Node.Defer).?;
+            try writeToken(builder, defer_node.defer_token, .keyword);
+            try writeNodeTokens(builder, arena, store, defer_node.payload);
+            try writeNodeTokens(builder, arena, store, defer_node.expr);
+        },
+        .Comptime => {
+            const comptime_node = node.cast(ast.Node.Comptime).?;
+            if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs);
+            try writeToken(builder, comptime_node.comptime_token, .keyword);
+            try writeNodeTokens(builder, arena, store, comptime_node.expr);
+        },
+        .Nosuspend => {
+            const nosuspend_node = node.cast(ast.Node.Nosuspend).?;
+            try writeToken(builder, nosuspend_node.nosuspend_token, .keyword);
+            try writeNodeTokens(builder, arena, store, nosuspend_node.expr);
+        },
+        .Payload => {
+            const payload = node.cast(ast.Node.Payload).?;
+            try writeToken(builder, payload.lpipe, .operator);
+            try writeToken(builder, payload.rpipe, .operator);
+        },
+        .PointerPayload => {
+            const payload = node.cast(ast.Node.PointerPayload).?;
+            try writeToken(builder, payload.lpipe, .operator);
+            try writeToken(builder, payload.ptr_token, .operator);
+            try writeToken(builder, payload.rpipe, .operator);
+        },
+        .PointerIndexPayload => {
+            const payload = node.cast(ast.Node.PointerIndexPayload).?;
+            try writeToken(builder, payload.lpipe, .operator);
+            try writeToken(builder, payload.ptr_token, .operator);
+            try writeToken(builder, payload.rpipe, .operator);
+        },
+        .Else => {
+            const else_node = node.cast(ast.Node.Else).?;
+            try writeToken(builder, else_node.else_token, .keyword);
+            try writeNodeTokens(builder, arena, store, else_node.payload);
+            try writeNodeTokens(builder, arena, store, else_node.body);
+        },
+        .Switch => {
+            const switch_node = node.cast(ast.Node.Switch).?;
+            try writeToken(builder, switch_node.switch_token, .keyword);
+            try writeNodeTokens(builder, arena, store, switch_node.expr);
+
+            var previous_end = switch_node.firstToken();
+            for (switch_node.casesConst()) |case_node| {
+                var i = previous_end;
+                while (i < case_node.firstToken()) : (i += 1) {
+                    if (handle.tree.token_ids[i] == .LineComment) {
+                        try writeToken(builder, i, .comment);
+                    }
+                }
+                previous_end = case_node.lastToken();
+
+                try writeNodeTokens(builder, arena, store, case_node);
+            }
+
+            var i = previous_end;
+            while (i < node.lastToken()) : (i += 1) {
+                if (handle.tree.token_ids[i] == .LineComment) {
+                    try writeToken(builder, i, .comment);
+                }
+            }
+        },
+        .SwitchCase => {
+            const switch_case = node.cast(ast.Node.SwitchCase).?;
+            for (switch_case.itemsConst()) |item_node| try writeNodeTokens(builder, arena, store, item_node);
+            try writeToken(builder, switch_case.arrow_token, .operator);
+            try writeNodeTokens(builder, arena, store, switch_case.payload);
+            try writeNodeTokens(builder, arena, store, switch_case.expr);
+        },
+        .SwitchElse => {
+            const switch_else = node.cast(ast.Node.SwitchElse).?;
+            try writeToken(builder, switch_else.token, .keyword);
+        },
+        .While => {
+            const while_node = node.cast(ast.Node.While).?;
+            try writeToken(builder, while_node.label, .label);
+            try writeToken(builder, while_node.inline_token, .keyword);
+            try writeToken(builder, while_node.while_token, .keyword);
+            try writeNodeTokens(builder, arena, store, while_node.condition);
+            try writeNodeTokens(builder, arena, store, while_node.payload);
+            try writeNodeTokens(builder, arena, store, while_node.continue_expr);
+            try writeNodeTokens(builder, arena, store, while_node.body);
+            if (while_node.@"else") |else_node| try writeNodeTokens(builder, arena, store, &else_node.base);
+        },
+        .For => {
+            const for_node = node.cast(ast.Node.For).?;
+            try writeToken(builder, for_node.label, .label);
+            try writeToken(builder, for_node.inline_token, .keyword);
+            try writeToken(builder, for_node.for_token, .keyword);
+            try writeNodeTokens(builder, arena, store, for_node.array_expr);
+            try writeNodeTokens(builder, arena, store, for_node.payload);
+            try writeNodeTokens(builder, arena, store, for_node.body);
+            if (for_node.@"else") |else_node| try writeNodeTokens(builder, arena, store, &else_node.base);
+        },
+        .If => {
+            const if_node = node.cast(ast.Node.If).?;
+            try writeToken(builder, if_node.if_token, .keyword);
+            try writeNodeTokens(builder, arena, store, if_node.condition);
+            try writeNodeTokens(builder, arena, store, if_node.payload);
+            try writeNodeTokens(builder, arena, store, if_node.body);
+            if (if_node.@"else") |else_node| try writeNodeTokens(builder, arena, store, &else_node.base);
+        },
+        .InfixOp => {
+            const infix_op = node.cast(ast.Node.InfixOp).?;
+            // @TODO Im blowing up my stack!
+            // try writeNodeTokens(builder, arena, store, infix_op.lhs);
+            if (infix_op.op != .Period and infix_op.op != .Catch) {
+                const token_type: TokenType = switch (infix_op.op) {
+                    .BoolAnd, .BoolOr => .keyword,
+                    else => .operator,
+                };
+
-        try builder.add(token_idx, token_type, token_mod);
+                try writeToken(builder, infix_op.op_token, token_type);
+                try writeNodeTokens(builder, arena, store, infix_op.rhs);
+            }
+            if (infix_op.op == .Period) {
+                // @TODO Special case for dot access.
+                const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken());
+            }
+        },
+        .PrefixOp => {
+            const prefix_op = node.cast(ast.Node.PrefixOp).?;
+            const tok_type: TokenType = switch (prefix_op.op) {
+                .Try, .Await, .Resume => .keyword,
+                else => .operator,
+            };
+
+            try writeToken(builder, prefix_op.op_token, tok_type);
+            switch (prefix_op.op) {
+                .ArrayType => |info| {
+                    try writeNodeTokens(builder, arena, store, info.len_expr);
+                    try writeToken(builder, info.len_expr.lastToken() + 1, tok_type);
+                },
+                .SliceType, .PtrType => |info| {
+                    if (prefix_op.op == .SliceType)
+                        try writeToken(builder, prefix_op.op_token + 1, tok_type);
+
+                    if (info.align_info) |align_info| {
+                        try writeToken(builder, align_info.node.firstToken() - 2, .keyword);
+                    }
+                    try writeToken(builder, info.const_token, .keyword);
+                    try writeToken(builder, info.volatile_token, .keyword);
+                    try writeToken(builder, info.allowzero_token, .keyword);
+                },
+                else => {},
+            }
+
+            try writeNodeTokens(builder, arena, store, prefix_op.rhs);
+        },
+        .ArrayInitializer => {
+            const array_initializer = node.cast(ast.Node.ArrayInitializer).?;
+            try writeNodeTokens(builder, arena, store, array_initializer.lhs);
+            for (array_initializer.listConst()) |elem| try writeNodeTokens(builder, arena, store, elem);
+        },
+        .ArrayInitializerDot => {
+            const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?;
+            for (array_initializer.listConst()) |elem| try writeNodeTokens(builder, arena, store, elem);
+        },
+        .StructInitializer => {
+            const struct_initializer = node.cast(ast.Node.StructInitializer).?;
+            try writeNodeTokens(builder, arena, store, struct_initializer.lhs);
+            const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) {
+                .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl|
+                    fieldTokenType(container_decl, handle)
+                else
+                    null,
+                else => null,
+            } else null;
+
+            for (struct_initializer.listConst()) |field_init_node| {
+                std.debug.assert(field_init_node.id == .FieldInitializer);
+                const field_init = field_init_node.cast(ast.Node.FieldInitializer).?;
+                if (field_token_type) |tok_type| {
+                    try writeToken(builder, field_init.period_token, tok_type);
+                    try writeToken(builder, field_init.name_token, tok_type);
+                }
+                try writeToken(builder, field_init.name_token + 1, .operator);
+                try writeNodeTokens(builder, arena, store, field_init.expr);
+            }
+        },
+        .StructInitializerDot => {
+            const struct_initializer = node.cast(ast.Node.StructInitializerDot).?;
+            for (struct_initializer.listConst()) |field_init_node| {
+                std.debug.assert(field_init_node.id == .FieldInitializer);
+                const field_init = field_init_node.cast(ast.Node.FieldInitializer).?;
+                try writeToken(builder, field_init.name_token + 1, .operator);
+                try writeNodeTokens(builder, arena, store, field_init.expr);
+            }
+        },
+        .Call => {
+            const call = node.cast(ast.Node.Call).?;
+            try writeToken(builder, call.async_token, .keyword);
+            try writeNodeTokens(builder, arena, store, call.lhs);
+            for (call.paramsConst()) |param| try writeNodeTokens(builder, arena, store, param);
+        },
+        .SuffixOp => {
+            const suffix_op = node.cast(ast.Node.SuffixOp).?;
+            // @TODO We blow up the stack here as well T_T
+            switch (suffix_op.op) {
+                // .ArrayAccess => |n| try writeNodeTokens(builder, arena, store, n),
+                // .Slice => |s| {
+                //     try writeNodeTokens(builder, arena, store, s.start);
+                //     try writeToken(builder, s.start.lastToken() + 1, .operator);
+                //     try writeNodeTokens(builder, arena, store, s.end);
+                //     try writeNodeTokens(builder, arena, store, s.sentinel);
+                // },
+                else => try writeToken(builder, suffix_op.rtoken, .operator),
+            }
+        },
+        .GroupedExpression => {
+            const grouped_expr = node.cast(ast.Node.GroupedExpression).?;
+            try writeNodeTokens(builder, arena, store, grouped_expr.expr);
+        },
+        .ControlFlowExpression => {
+            const cfe = node.cast(ast.Node.ControlFlowExpression).?;
+            try writeToken(builder, cfe.ltoken, .keyword);
+            switch (cfe.kind) {
+                .Break => |label| if (label) |n| try writeToken(builder, n.firstToken(), .label),
+                .Continue => |label| if (label) |n| try writeToken(builder, n.firstToken(), .label),
+                else => {},
+            }
+            try writeNodeTokens(builder, arena, store, cfe.rhs);
+        },
+        .Suspend => {
+            const suspend_node = node.cast(ast.Node.Suspend).?;
+            try writeToken(builder, suspend_node.suspend_token, .keyword);
+            try writeNodeTokens(builder, arena, store, suspend_node.body);
+        },
+        .IntegerLiteral => {
+            try writeToken(builder, node.firstToken(), .number);
+        },
+        .EnumLiteral => {
+            const enum_literal = node.cast(ast.Node.EnumLiteral).?;
+            try writeToken(builder, enum_literal.dot, .tagField);
+            try writeToken(builder, enum_literal.name, .tagField);
+        },
+        .FloatLiteral => {
+            try writeToken(builder, node.firstToken(), .number);
+        },
+        .BuiltinCall => {
+            const builtin_call = node.cast(ast.Node.BuiltinCall).?;
+            try writeToken(builder, builtin_call.builtin_token, .builtin);
+            for (builtin_call.paramsConst()) |param| try writeNodeTokens(builder, arena, store, param);
+        },
+        .StringLiteral, .CharLiteral => {
+            try writeToken(builder, node.firstToken(), .string);
+        },
+        .MultilineStringLiteral => {
+            const multi_line = node.cast(ast.Node.MultilineStringLiteral).?;
+            for (multi_line.linesConst()) |line| try writeToken(builder, line, .string);
+        },
+        .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable, .ErrorType => {
+            try writeToken(builder, node.firstToken(), .keyword);
+        },
+        .Asm => {
+            const asm_expr = node.cast(ast.Node.Asm).?;
+            try writeToken(builder, asm_expr.asm_token, .keyword);
+            try writeToken(builder, asm_expr.volatile_token, .keyword);
+            try writeNodeTokens(builder, arena, store, asm_expr.template);
+            // TODO Inputs, outputs.
+        },
+        .VarType => {
+            try writeToken(builder, node.firstToken(), .type);
+        },
+        .TestDecl => {
+            const test_decl = node.cast(ast.Node.TestDecl).?;
+            if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc);
+            try writeToken(builder, test_decl.test_token, .keyword);
+            try writeNodeTokens(builder, arena, store, test_decl.name);
+            try writeNodeTokens(builder, arena, store, test_decl.body_node);
+        },
+        // TODO Remove this when we handle all nodes.
+        else => {},
+    }
+
+    // TODO Where we are handling comments, also handle keywords etc.
+    // TODO While editing, the current AST node will be invalid and thus will not exist in the tree at all.
+    // Scan over the tokens we are not covering at all and color the keywords etc.
+}
+
+// TODO Range version, edit version.
+pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, store: *DocumentStore, handle: *DocumentStore.Handle) ![]u32 {
+    var arena = std.heap.ArenaAllocator.init(allocator);
+    defer arena.deinit();
+
+    var builder = Builder.init(allocator, handle);
+    try writeNodeTokens(&builder, &arena, store, &handle.tree.root_node.base);
     return builder.toOwnedSlice();
 }
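The two "@TODO ... blowing up my stack" comments above come from writeNodeTokens recursing once per nested AST node, which overflows the stack on deeply nested expressions (hence the commented-out lhs recursion in InfixOp and the disabled SuffixOp cases). One possible direction, sketched here as an assumption rather than a fix from this PR, is an explicit work list:

// Hypothetical iterative driver using node.iterate, the same child
// accessor writeNodeTokens already uses for Root/Block nodes:
fn writeNodeTokensIterative(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, root: *ast.Node) error{OutOfMemory}!void {
    var stack = std.ArrayList(*ast.Node).init(&arena.allocator);
    defer stack.deinit();
    try stack.append(root);
    while (stack.popOrNull()) |node| {
        // ... emit the tokens owned by `node` itself here ...
        var child_idx: usize = 0;
        while (node.iterate(child_idx)) |child| : (child_idx += 1) {
            try stack.append(child);
        }
    }
}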