const std = @import("std");
const DocumentStore = @import("document_store.zig");
const ast = std.zig.ast;
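
/// The semantic token types we can emit. The numeric value of each tag
/// is an index into the semantic token legend the server advertises, so
/// the order here must match that legend.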
const TokenType = enum(u32) {
    type,
    @"struct",
    @"enum",
    @"union",
    parameter,
    variable,
    tagField,
    field,
    function,
    keyword,
    modifier,
    comment,
    string,
    number,
    operator,
    builtin,
};
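
/// Token modifiers are transmitted as a bit set: each field below is one
/// bit of the serialized u32, lowest bit first in declaration order.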
const TokenModifiers = packed struct {
    definition: bool = false,
    @"async": bool = false,
    documentation: bool = false,

    fn toInt(self: TokenModifiers) u32 {
        return @as(u32, @bitCast(u3, self));
    }

    // Inverse of `toInt`; `with` and `intersect` below rely on it.
    fn fromInt(num: u32) TokenModifiers {
        return @bitCast(TokenModifiers, @truncate(u3, num));
    }

    fn with(lhs: TokenModifiers, rhs: TokenModifiers) TokenModifiers {
        return fromInt(toInt(lhs) | toInt(rhs));
    }

    fn intersect(lhs: TokenModifiers, rhs: TokenModifiers) TokenModifiers {
        return fromInt(toInt(lhs) & toInt(rhs));
    }
};
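
// A minimal sanity check of the modifier bit layout; the expected values
// assume the field order above (definition = bit 0, async = bit 1,
// documentation = bit 2).
test "TokenModifiers packing" {
    const def = TokenModifiers{ .definition = true };
    std.testing.expectEqual(@as(u32, 0b001), def.toInt());
    const both = def.with(TokenModifiers{ .documentation = true });
    std.testing.expectEqual(@as(u32, 0b101), both.toInt());
}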
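
/// Accumulates the flat u32 array the protocol expects: five integers
/// per token, with positions delta-encoded against the previous token.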
const Builder = struct {
    tree: *ast.Tree,
    current_token: ?ast.TokenIndex,
    arr: std.ArrayList(u32),

    fn init(allocator: *std.mem.Allocator, tree: *ast.Tree) Builder {
        return Builder{
            .tree = tree,
            .current_token = null,
            .arr = std.ArrayList(u32).init(allocator),
        };
    }
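
    /// Appends one token in the protocol's relative encoding: delta line,
    /// delta start column, token length, token type index, modifier bits.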
    fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
        // Deltas are measured from the start of the previously emitted
        // token, or from offset 0 for the very first token.
        const start_idx = if (self.current_token) |current_token|
            self.tree.token_locs[current_token].start
        else
            0;

        const token_loc = self.tree.token_locs[token];
        const delta_loc = self.tree.tokenLocationLoc(start_idx, token_loc);
        try self.arr.appendSlice(&[_]u32{
            @truncate(u32, delta_loc.line),
            @truncate(u32, delta_loc.column),
            @truncate(u32, token_loc.end - token_loc.start),
            @enumToInt(token_type),
            token_modifiers.toInt(),
        });
        self.current_token = token;
    }

    fn toOwnedSlice(self: *Builder) []u32 {
        return self.arr.toOwnedSlice();
    }
};

fn isAllDigit(str: []const u8) bool {
    for (str) |c| {
        if (!std.ascii.isDigit(c)) return false;
    }
    return true;
}
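
// A couple of quick, illustrative checks of the digit scan.
test "isAllDigit" {
    std.testing.expect(isAllDigit("128"));
    std.testing.expect(!isAllDigit("12a"));
}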
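
/// Whether an identifier token names a primitive type: one of the names
/// listed below, or an arbitrary-width integer type such as u7 or i128.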
fn isTypeIdent(tree: *ast.Tree, token_idx: ast.TokenIndex) bool {
    const PrimitiveTypes = std.ComptimeStringMap(void, .{
        .{"isize"},          .{"usize"},
        .{"c_short"},        .{"c_ushort"},
        .{"c_int"},          .{"c_uint"},
        .{"c_long"},         .{"c_ulong"},
        .{"c_longlong"},     .{"c_ulonglong"},
        .{"c_longdouble"},   .{"c_void"},
        .{"f16"},            .{"f32"},
        .{"f64"},            .{"f128"},
        .{"bool"},           .{"void"},
        .{"noreturn"},       .{"type"},
        .{"anyerror"},       .{"comptime_int"},
        .{"comptime_float"}, .{"anyframe"},
    });

    const text = tree.tokenSlice(token_idx);
    if (PrimitiveTypes.has(text)) return true;
    if (text.len > 1 and (text[0] == 'u' or text[0] == 'i') and isAllDigit(text[1..]))
        return true;

    return false;
}
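
/// Produces the encoded semantic token stream for a whole document.
/// The caller owns the returned slice.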
pub fn writeAllSemanticTokens(allocator: *std.mem.Allocator, handle: DocumentStore.Handle) ![]u32 {
    var builder = Builder.init(allocator, handle.tree);

    // TODO: We only scan tokens for now; we still need to do actual
    // semantic analysis.
    for (handle.tree.token_ids) |token_id, token_idx| {
        const token_type: TokenType = switch (token_id) {
            .StringLiteral, .MultilineStringLiteralLine, .CharLiteral => .string,
            .Builtin => .builtin,
            .IntegerLiteral, .FloatLiteral => .number,
            .Bang,
            .Pipe,
            .PipePipe,
            .PipeEqual,
            .Equal,
            .EqualEqual,
            .EqualAngleBracketRight,
            .BangEqual,
            .Percent,
            .PercentEqual,
            .PeriodAsterisk,
            .Caret,
            .CaretEqual,
            .Plus,
            .PlusPlus,
            .PlusEqual,
            .PlusPercent,
            .PlusPercentEqual,
            .Minus,
            .MinusEqual,
            .MinusPercent,
            .MinusPercentEqual,
            .Asterisk,
            .AsteriskEqual,
            .AsteriskAsterisk,
            .AsteriskPercent,
            .AsteriskPercentEqual,
            .Arrow,
            .Slash,
            .SlashEqual,
            .Ampersand,
            .AmpersandEqual,
            .QuestionMark,
            .AngleBracketLeft,
            .AngleBracketLeftEqual,
            .AngleBracketAngleBracketLeft,
            .AngleBracketAngleBracketLeftEqual,
            .AngleBracketRight,
            .AngleBracketRightEqual,
            .AngleBracketAngleBracketRight,
            .AngleBracketAngleBracketRightEqual,
            .Tilde,
            => .operator,
            .LineComment, .DocComment, .ContainerDocComment => .comment,
            .Keyword_align,
            .Keyword_allowzero,
            .Keyword_and,
            .Keyword_asm,
            .Keyword_async,
            .Keyword_await,
            .Keyword_break,
            .Keyword_callconv,
            .Keyword_catch,
            .Keyword_comptime,
            .Keyword_const,
            .Keyword_continue,
            .Keyword_defer,
            .Keyword_else,
            .Keyword_enum,
            .Keyword_errdefer,
            .Keyword_error,
            .Keyword_export,
            .Keyword_extern,
            .Keyword_false,
            .Keyword_fn,
            .Keyword_for,
            .Keyword_if,
            .Keyword_inline,
            .Keyword_noalias,
            .Keyword_noinline,
            .Keyword_nosuspend,
            .Keyword_null,
            .Keyword_or,
            .Keyword_orelse,
            .Keyword_packed,
            .Keyword_anyframe,
            .Keyword_pub,
            .Keyword_resume,
            .Keyword_return,
            .Keyword_linksection,
            .Keyword_struct,
            .Keyword_suspend,
            .Keyword_switch,
            .Keyword_test,
            .Keyword_threadlocal,
            .Keyword_true,
            .Keyword_try,
            .Keyword_undefined,
            .Keyword_union,
            .Keyword_unreachable,
            .Keyword_usingnamespace,
            .Keyword_var,
            .Keyword_volatile,
            .Keyword_while,
            => .keyword,
            .Identifier => if (isTypeIdent(handle.tree, token_idx)) .type else continue,
            else => continue,
        };

        try builder.add(token_idx, token_type, TokenModifiers{});
    }

    return builder.toOwnedSlice();
}