Basic semantic tokens implementation

This commit is contained in:
Alexandros Naskos 2020-06-14 01:07:57 +03:00
parent 112f6d735a
commit 0592513da4
2 changed files with 160 additions and 30 deletions

View File

@ -28,7 +28,7 @@ const ClientCapabilities = struct {
var client_capabilities = ClientCapabilities{};
const initialize_response =
\\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"hoverProvider":true,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"implementationProvider":false,"referencesProvider":false,"documentSymbolProvider":true,"colorProvider":false,"documentFormattingProvider":false,"documentRangeFormattingProvider":false,"foldingRangeProvider":false,"selectionRangeProvider":false,"workspaceSymbolProvider":false,"rangeProvider":false,"documentProvider":true},"workspace":{"workspaceFolders":{"supported":true,"changeNotifications":true}},"semanticTokensProvider":{"documentProvider":true, "legend":{"tokenTypes":["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator"],"tokenModifiers":["definition","async","documentation"]}}}}
\\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"hoverProvider":true,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"implementationProvider":false,"referencesProvider":false,"documentSymbolProvider":true,"colorProvider":false,"documentFormattingProvider":false,"documentRangeFormattingProvider":false,"foldingRangeProvider":false,"selectionRangeProvider":false,"workspaceSymbolProvider":false,"rangeProvider":false,"documentProvider":true},"workspace":{"workspaceFolders":{"supported":true,"changeNotifications":true}},"semanticTokensProvider":{"documentProvider":true, "legend":{"tokenTypes":["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator","builtin"],"tokenModifiers":["definition","async","documentation"]}}}}
;
const not_implemented_response =
@ -957,7 +957,6 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
// TODO Actually test this in some editor, VSCode won't send me requests -_-'.
const semantic_tokens = @import("semantic_tokens.zig");
const token_array = try semantic_tokens.writeAllSemanticTokens(allocator, handle.*);
defer allocator.free(token_array);

View File

@ -2,7 +2,6 @@ const std = @import("std");
const DocumentStore = @import("document_store.zig");
const ast = std.zig.ast;
// ["type","struct","enum","union","parameter","variable","tagField","field","function","keyword","modifier","comment","string","number","operator"]
const TokenType = enum(u32) {
type,
@"struct",
@ -19,6 +18,7 @@ const TokenType = enum(u32) {
string,
number,
operator,
builtin,
};
const TokenModifiers = packed struct {
@ -26,12 +26,8 @@ const TokenModifiers = packed struct {
@"async": bool = false,
documentation: bool = false,
pub fn toInt(value: u32) TokenModifiers {
return @bitCast(TokenModifiers, value);
}
fn toInt(self: TokenModifiers) u32 {
return @bitCast(u32, self);
return @as(u32, @bitCast(u3, self));
}
fn with(lhs: TokenModifiers, rhs: TokenModifiers) TokenModifiers {
@ -48,17 +44,7 @@ const Builder = struct {
current_token: ?ast.TokenIndex,
arr: std.ArrayList(u32),
fn printToken(start_idx: usize, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
const delta_loc = self.tree.tokenLocationLoc(start_idx, token_loc);
try out_stream.print(prefix ++ "{},{},{},{},{}", .{
// TODO Is +1 on the column here correct? I think so.
delta_loc.line, delta_loc.column + 1,
token_loc.end - token_loc.start, @enumToInt(token_type),
token_modifiers.toInt(),
});
}
fn create(allocator: *std.mem.Allocator, tree: *ast.Tree) Builder {
fn init(allocator: *std.mem.Allocator, tree: *ast.Tree) Builder {
return Builder{
.tree = tree,
.current_token = null,
@ -66,25 +52,170 @@ const Builder = struct {
};
}
fn add(self: *Builder, out_stream: var, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
if (self.current_token) |current_token| {
std.debug.assert(token > current_token);
try out_stream.print(",");
try printToken(self.tree.token_locs[current_token].end, token, token_type, token_modifiers);
} else {
try printToken(0, token, token_type, token_modifiers);
}
fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
const start_idx = if (self.current_token) |current_token|
self.tree.token_locs[current_token].start + 1
else
0;
const token_loc = self.tree.token_locs[token];
const delta_loc = self.tree.tokenLocationLoc(start_idx, token_loc);
try self.arr.appendSlice(&[_]u32{
@truncate(u32, if (self.current_token == null) delta_loc.line + 1 else delta_loc.line),
@truncate(u32, delta_loc.column + 1),
@truncate(u32, token_loc.end - token_loc.start),
@enumToInt(token_type),
token_modifiers.toInt(),
});
self.current_token = token;
}
pub fn toOwnedSlice(self: *Builder) []u32 {
fn toOwnedSlice(self: *Builder) []u32 {
return self.arr.toOwnedSlice();
}
};
/// Returns true if every byte of `str` is an ASCII digit ('0'-'9').
/// Note: an empty slice vacuously passes and yields `true`.
fn isAllDigit(str: []const u8) bool {
    var i: usize = 0;
    while (i < str.len) : (i += 1) {
        if (!std.ascii.isDigit(str[i])) return false;
    }
    return true;
}
/// Heuristically decides whether the identifier token at `token_idx`
/// names a primitive type: either one of the named primitives below,
/// or an arbitrary-width integer of the form `iN` / `uN` (e.g. `u7`, `i128`).
fn isTypeIdent(tree: *ast.Tree, token_idx: ast.TokenIndex) bool {
    const PrimitiveTypes = std.ComptimeStringMap(void, .{
        .{ .@"0" = "isize" },         .{ .@"0" = "usize" },
        .{ .@"0" = "c_short" },       .{ .@"0" = "c_ushort" },
        .{ .@"0" = "c_int" },         .{ .@"0" = "c_uint" },
        .{ .@"0" = "c_long" },        .{ .@"0" = "c_ulong" },
        .{ .@"0" = "c_longlong" },    .{ .@"0" = "c_ulonglong" },
        .{ .@"0" = "c_longdouble" },  .{ .@"0" = "c_void" },
        .{ .@"0" = "f16" },           .{ .@"0" = "f32" },
        .{ .@"0" = "f64" },           .{ .@"0" = "f128" },
        .{ .@"0" = "bool" },          .{ .@"0" = "void" },
        .{ .@"0" = "noreturn" },      .{ .@"0" = "type" },
        .{ .@"0" = "anyerror" },      .{ .@"0" = "comptime_int" },
        .{ .@"0" = "comptime_float" }, .{ .@"0" = "anyframe" },
    });

    const ident = tree.tokenSlice(token_idx);
    // `iN` / `uN`: a sign prefix followed entirely by digits.
    if (ident.len > 1 and (ident[0] == 'u' or ident[0] == 'i') and isAllDigit(ident[1..]))
        return true;
    return PrimitiveTypes.has(ident);
}
/// Builds the LSP `textDocument/semanticTokens` data for a whole document.
/// The returned slice is a flat run of 5-tuples per token:
/// (delta line, delta start column, length, token type index, modifier bitset).
/// Caller owns the returned slice and must free it with `allocator`.
///
/// Fix: diff residue had left a stale duplicate declaration
/// (`var builder = Builder.create(...)` alongside `Builder.init(...)`),
/// which cannot compile; only the `init` form is kept.
pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentStore.Handle) ![]u32 {
    var builder = Builder.init(allocator, handle.tree);

    // TODO We only scan tokens for now, we need to actually do semantic analysis
    for (handle.tree.token_ids) |token_id, token_idx| {
        // Map the lexical token id onto a semantic token type; tokens with no
        // semantic classification (punctuation, plain identifiers, ...) are skipped.
        const token_type: TokenType = switch (token_id) {
            .StringLiteral, .MultilineStringLiteralLine, .CharLiteral => .string,
            .Builtin => .builtin,
            .IntegerLiteral, .FloatLiteral => .number,
            .Bang,
            .Pipe,
            .PipePipe,
            .PipeEqual,
            .Equal,
            .EqualEqual,
            .EqualAngleBracketRight,
            .BangEqual,
            .Percent,
            .PercentEqual,
            .PeriodAsterisk,
            .Caret,
            .CaretEqual,
            .Plus,
            .PlusPlus,
            .PlusEqual,
            .PlusPercent,
            .PlusPercentEqual,
            .Minus,
            .MinusEqual,
            .MinusPercent,
            .MinusPercentEqual,
            .Asterisk,
            .AsteriskEqual,
            .AsteriskAsterisk,
            .AsteriskPercent,
            .AsteriskPercentEqual,
            .Arrow,
            .Slash,
            .SlashEqual,
            .Ampersand,
            .AmpersandEqual,
            .QuestionMark,
            .AngleBracketLeft,
            .AngleBracketLeftEqual,
            .AngleBracketAngleBracketLeft,
            .AngleBracketAngleBracketLeftEqual,
            .AngleBracketRight,
            .AngleBracketRightEqual,
            .AngleBracketAngleBracketRight,
            .AngleBracketAngleBracketRightEqual,
            .Tilde,
            => .operator,
            .LineComment, .DocComment, .ContainerDocComment => .comment,
            .Keyword_align,
            .Keyword_allowzero,
            .Keyword_and,
            .Keyword_asm,
            .Keyword_async,
            .Keyword_await,
            .Keyword_break,
            .Keyword_callconv,
            .Keyword_catch,
            .Keyword_comptime,
            .Keyword_const,
            .Keyword_continue,
            .Keyword_defer,
            .Keyword_else,
            .Keyword_enum,
            .Keyword_errdefer,
            .Keyword_error,
            .Keyword_export,
            .Keyword_extern,
            .Keyword_false,
            .Keyword_fn,
            .Keyword_for,
            .Keyword_if,
            .Keyword_inline,
            .Keyword_noalias,
            .Keyword_noinline,
            .Keyword_nosuspend,
            .Keyword_null,
            .Keyword_or,
            .Keyword_orelse,
            .Keyword_packed,
            .Keyword_anyframe,
            .Keyword_pub,
            .Keyword_resume,
            .Keyword_return,
            .Keyword_linksection,
            .Keyword_struct,
            .Keyword_suspend,
            .Keyword_switch,
            .Keyword_test,
            .Keyword_threadlocal,
            .Keyword_true,
            .Keyword_try,
            .Keyword_undefined,
            .Keyword_union,
            .Keyword_unreachable,
            .Keyword_usingnamespace,
            .Keyword_var,
            .Keyword_volatile,
            .Keyword_while,
            => .keyword,
            // Identifiers only get a type when they look like a primitive type name.
            .Identifier => if (isTypeIdent(handle.tree, token_idx)) .type else continue,
            else => continue,
        };
        try builder.add(token_idx, token_type, TokenModifiers{});
    }
    return builder.toOwnedSlice();
}