Undo stupidity & run zig fmt
parent 807bd6a571
commit e1c8e5d407
@@ -80,7 +80,7 @@ pub fn lineSectionLength(tree: ast.Tree, start_index: usize, end_index: usize, e
 
         const codepoint = try std.unicode.utf8Decode(source[i .. i + n]);
 
-        result += 1 + @boolToInt(codepoint >= 0x10000);
+        result += 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
         i += n;
     }
     return result;
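Note on the hunk above: `@boolToInt` yields a `u1`, so `1 + @boolToInt(...)` is evaluated in `u1` and can overflow at runtime whenever the codepoint is astral (>= 0x10000); widening with `@as(usize, ...)` moves the addition into `usize`. A minimal standalone sketch of the pattern, assuming 0.8-era builtins; this is not zls code:

const std = @import("std");

pub fn main() void {
    const codepoint: u21 = 0x1F600; // example astral-plane codepoint
    var result: usize = 0;
    // Without the cast, `1 + @boolToInt(...)` is computed in u1, where 1 + 1
    // overflows; @as(usize, ...) performs the addition in usize instead.
    result += 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
    std.debug.print("UTF-16 code units for this codepoint: {}\n", .{result});
}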
@@ -72,8 +72,10 @@ const Builder = struct {
         const next_start = starts[token];
 
         if (next_start < self.previous_position) {
-            log.err("Moved backwards from {} at position {} to {} at {}.",
-                .{ tags[self.previous_token orelse 0], self.previous_position, tags[token], next_start });
+            log.err(
+                "Moved backwards from {} at position {} to {} at {}.",
+                .{ tags[self.previous_token orelse 0], self.previous_position, tags[token], next_start },
+            );
             return;
         }
 
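Note on the reflow above: zig fmt keys off the trailing comma, so closing the argument tuple with `,` keeps one argument per line, while omitting it lets zig fmt join the call onto a single line when it fits. The trailing comma added to `encoding: offsets.Encoding,` in the last hunk follows the same rule for parameter lists. A small illustration with a hypothetical scoped logger, not zls code:

const std = @import("std");
const log = std.log.scoped(.example);

pub fn main() void {
    // The trailing comma after the argument tuple makes zig fmt keep this
    // call expanded, one argument per line.
    log.err(
        "Moved backwards from {} to {}.",
        .{ 1, 2 },
    );
}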
@@ -100,18 +102,10 @@ const Builder = struct {
         // TODO More highlighting here
         const tok_id = tree.tokens.items(.tag)[tok];
         const tok_type: TokenType = switch (tok_id) {
-            .keyword_true,
-            .keyword_false,
-            .keyword_null,
-            .keyword_undefined,
-            .keyword_unreachable,
-            => .keywordLiteral,
-            .integer_literal, .float_literal,
-            => .number,
-            .string_literal, .multiline_string_literal_line, .char_literal,
-            => .string,
-            .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon,
-            => return,
+            .keyword_true, .keyword_false, .keyword_null, .keyword_undefined, .keyword_unreachable => .keywordLiteral,
+            .integer_literal, .float_literal => .number,
+            .string_literal, .multiline_string_literal_line, .char_literal => .string,
+            .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon => return,
 
             else => blk: {
                 const id = @enumToInt(tok_id);
@@ -123,7 +117,7 @@ const Builder = struct {
                     break :blk TokenType.operator;
 
                 return;
-            }
+            },
         };
         const start = tree.tokens.items(.start)[tok];
         const length = offsets.tokenLength(tree, tok, self.encoding);
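Note on the switch above: several tags can share one prong and map to the same `TokenType`, and the punctuation prong can `return` so no semantic token is emitted at all. A standalone sketch with made-up stand-ins for `Token.Tag` and `TokenType` (classify is hypothetical, not a zls function):

const std = @import("std");

const Tag = enum { keyword_true, keyword_false, integer_literal, float_literal, period, comma };
const TokenType = enum { keywordLiteral, number };

// Grouped tags share a prong; punctuation yields null instead of a token.
fn classify(tag: Tag) ?TokenType {
    return switch (tag) {
        .keyword_true, .keyword_false => .keywordLiteral,
        .integer_literal, .float_literal => .number,
        .period, .comma => null,
    };
}

pub fn main() void {
    std.debug.assert(classify(.keyword_true).? == .keywordLiteral);
    std.debug.assert(classify(.period) == null);
}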
@@ -136,15 +130,15 @@ const Builder = struct {
 
         var i: usize = from;
         while (i < to - 1) : (i += 1) {
 
             if (source[i] != '/' or source[i + 1] != '/')
                 continue;
 
             const comment_start = i;
             var mods = TokenModifiers{};
             if (i + 2 < to and (source[i + 2] == '!' or source[i + 2] == '/'))
                 mods.documentation = true;
 
-            while (i < to and source[i] != '\n') { i += 1; }
+            while (i < to and source[i] != '\n') : (i += 1) {}
 
             const length = try offsets.lineSectionLength(self.handle.tree, comment_start, i, self.encoding);
             try self.addDirect(TokenType.comment, mods, comment_start, length);
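Note on the loop rewrite above: with an empty body, `while (cond) : (i += 1) {}` advances `i` exactly as `while (cond) { i += 1; }` does; the increment just moves into the loop's continue expression. A short sketch of the pattern (skipToLineEnd is a hypothetical helper, not zls code):

const std = @import("std");

fn skipToLineEnd(source: []const u8, start: usize) usize {
    var i = start;
    // Empty body; the continue expression runs `i += 1` after each iteration.
    while (i < source.len and source[i] != '\n') : (i += 1) {}
    return i;
}

pub fn main() void {
    const src = "// a doc comment\nconst x = 1;";
    std.debug.print("line ends at byte {}\n", .{skipToLineEnd(src, 0)});
}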
@@ -216,7 +210,6 @@ fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle)
     });
 }
 
-
 fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
     const tree = builder.handle.tree;
     if (type_node.type.is_type_val) {
@@ -1036,7 +1029,7 @@ pub fn writeAllSemanticTokens(
     arena: *std.heap.ArenaAllocator,
     store: *DocumentStore,
     handle: *DocumentStore.Handle,
-    encoding: offsets.Encoding
+    encoding: offsets.Encoding,
 ) ![]u32 {
     var builder = Builder.init(arena.child_allocator, handle, encoding);
     errdefer builder.arr.deinit();