Undo stupidity & run zig fmt

Jonathan Hähne 2021-05-02 20:14:38 +02:00
parent 807bd6a571
commit e1c8e5d407
2 changed files with 19 additions and 26 deletions

View File

@@ -80,7 +80,7 @@ pub fn lineSectionLength(tree: ast.Tree, start_index: usize, end_index: usize, e
         const codepoint = try std.unicode.utf8Decode(source[i .. i + n]);
-        result += 1 + @boolToInt(codepoint >= 0x10000);
+        result += 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
         i += n;
     }
     return result;
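The substantive change in this hunk is the `@as(usize, ...)` widening: `@boolToInt` returns a `u1`, so without the cast the `1 + ...` addition is carried out in one-bit arithmetic and can overflow whenever the codepoint needs a UTF-16 surrogate pair. A minimal standalone sketch of the widened form (not taken from the repository, written against the pre-0.10 builtin names used in the diff):

const std = @import("std");

pub fn main() !void {
    const bytes = "\u{1F600}"; // one codepoint, four UTF-8 bytes, two UTF-16 code units
    const codepoint = try std.unicode.utf8Decode(bytes);
    // @boolToInt yields a u1; widening it to usize before the addition
    // keeps the arithmetic in usize instead of overflowing a 1-bit int.
    const utf16_units: usize = 1 + @as(usize, @boolToInt(codepoint >= 0x10000));
    std.debug.print("UTF-16 code units: {}\n", .{utf16_units}); // prints 2
}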

View File

@@ -72,8 +72,10 @@ const Builder = struct {
        const next_start = starts[token];
        if (next_start < self.previous_position) {
-           log.err("Moved backwards from {} at position {} to {} at {}.",
-               .{ tags[self.previous_token orelse 0], self.previous_position, tags[token], next_start });
+           log.err(
+               "Moved backwards from {} at position {} to {} at {}.",
+               .{ tags[self.previous_token orelse 0], self.previous_position, tags[token], next_start },
+           );
            return;
        }
@@ -81,13 +83,13 @@ const Builder = struct {
            // Highlight gaps between AST nodes. These can contain comments or malformed code.
            var i = prev + 1;
            while (i < token) : (i += 1) {
-               try handleComments(self, starts[i-1], starts[i]);
+               try handleComments(self, starts[i - 1], starts[i]);
                try handleToken(self, i);
            }
        }
        self.previous_token = token;
        if (token > 0) {
-           try handleComments(self, starts[token-1], next_start);
+           try handleComments(self, starts[token - 1], next_start);
        }
        const length = offsets.tokenLength(tree, token, self.encoding);
@@ -100,18 +102,10 @@ const Builder = struct {
        // TODO More highlighting here
        const tok_id = tree.tokens.items(.tag)[tok];
        const tok_type: TokenType = switch (tok_id) {
-           .keyword_true,
-           .keyword_false,
-           .keyword_null,
-           .keyword_undefined,
-           .keyword_unreachable,
-           => .keywordLiteral,
-           .integer_literal, .float_literal,
-           => .number,
-           .string_literal, .multiline_string_literal_line, .char_literal,
-           => .string,
-           .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon,
-           => return,
+           .keyword_true, .keyword_false, .keyword_null, .keyword_undefined, .keyword_unreachable => .keywordLiteral,
+           .integer_literal, .float_literal => .number,
+           .string_literal, .multiline_string_literal_line, .char_literal => .string,
+           .period, .comma, .r_paren, .l_paren, .r_brace, .l_brace, .semicolon, .colon => return,
            else => blk: {
                const id = @enumToInt(tok_id);
@@ -123,7 +117,7 @@ const Builder = struct {
                    break :blk TokenType.operator;
                return;
-           }
+           },
        };
        const start = tree.tokens.items(.start)[tok];
        const length = offsets.tokenLength(tree, tok, self.encoding);
@@ -136,15 +130,15 @@ const Builder = struct {
        var i: usize = from;
        while (i < to - 1) : (i += 1) {
-           if (source[i] != '/' or source[i+1] != '/')
+           if (source[i] != '/' or source[i + 1] != '/')
                continue;
            const comment_start = i;
            var mods = TokenModifiers{};
-           if (i+2 < to and (source[i+2] == '!' or source[i+2] == '/'))
+           if (i + 2 < to and (source[i + 2] == '!' or source[i + 2] == '/'))
                mods.documentation = true;
-           while (i < to and source[i] != '\n') { i += 1; }
+           while (i < to and source[i] != '\n') : (i += 1) {}
            const length = try offsets.lineSectionLength(self.handle.tree, comment_start, i, self.encoding);
            try self.addDirect(TokenType.comment, mods, comment_start, length);
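The comment scanner's inner loop also moves the `i += 1` from the loop body into Zig's while continue expression; the two spellings behave identically here because the body never executes a `continue`. A small standalone illustration with an invented `source` slice:

const std = @import("std");

pub fn main() void {
    const source = "// a line comment\nconst x = 1;";
    var i: usize = 0;
    // The continue expression `(i += 1)` runs after every iteration,
    // so the body can stay empty while i advances to the newline.
    while (i < source.len and source[i] != '\n') : (i += 1) {}
    std.debug.print("comment spans bytes 0..{}\n", .{i});
}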
@@ -216,7 +210,6 @@ fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle)
    });
}
-
fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
    const tree = builder.handle.tree;
    if (type_node.type.is_type_val) {
@@ -1036,7 +1029,7 @@ pub fn writeAllSemanticTokens(
    arena: *std.heap.ArenaAllocator,
    store: *DocumentStore,
    handle: *DocumentStore.Handle,
-   encoding: offsets.Encoding
+   encoding: offsets.Encoding,
) ![]u32 {
    var builder = Builder.init(arena.child_allocator, handle, encoding);
    errdefer builder.arr.deinit();
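Most of the remaining churn, including the trailing comma added after `encoding: offsets.Encoding` and the reflowed `log.err` call above, follows zig fmt's trailing-comma rule: a trailing comma after the last element keeps a list one-element-per-line, while omitting it lets fmt join the list onto a single line. A hypothetical pair of signatures (names invented) showing both layouts fmt preserves:

const std = @import("std");

// Trailing comma after the last parameter: zig fmt keeps the
// one-parameter-per-line layout.
fn describeMultiline(
    name: []const u8,
    count: usize,
) void {
    std.debug.print("{s}: {}\n", .{ name, count });
}

// No trailing comma: zig fmt joins the parameters onto one line.
fn describeSingleLine(name: []const u8, count: usize) void {
    std.debug.print("{s}: {}\n", .{ name, count });
}

pub fn main() void {
    describeMultiline("tokens", 3);
    describeSingleLine("tokens", 3);
}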