From 72605c75495c4c1b6b02875177961f05ba42338f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sat, 27 Feb 2021 16:38:06 +0100 Subject: [PATCH] A lot of fixes --- src/analysis.zig | 268 ++++++---- src/semantic_tokens.zig | 1082 ++++++++++++++++++++------------------- 2 files changed, 724 insertions(+), 626 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 60e9c3d..d93538e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -5,17 +5,33 @@ const types = @import("types.zig"); const offsets = @import("offsets.zig"); const log = std.log.scoped(.analysis); -/// Get a declaration's doc comment node -pub fn getDocCommentNode(tree: ast.Tree, node: *ast.Node) ?*ast.Node.DocComment { - if (node.castTag(.FnProto)) |func| { - return func.getDocComments(); - } else if (node.castTag(.VarDecl)) |var_decl| { - return var_decl.getDocComments(); - } else if (node.castTag(.ContainerField)) |field| { - return field.doc_comments; - } else if (node.castTag(.ErrorTag)) |tag| { - return tag.doc_comments; +/// Get a declaration's doc comment token index +pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { + const tags = tree.nodes.items(.tag); + const tokens = tree.tokens.items(.tag); + const current = tree.nodes.items(.main_token)[node]; + + switch (tags[node]) { + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi => { + var idx = current - 1; + idx -= @boolToInt(tokens[idx] == .keyword_extern); + idx -= @boolToInt(tokens[idx] == .keyword_pub); + return if (tokens[idx] == .doc_comment) idx else null; + }, + .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { + return if (tokens[current - 1] == .doc_comment) current - 1 else null; + }, + .container_field, .container_field_init, .container_field_align => { + var idx = current - 2; // skip '.' 
+            return if (tokens[idx] == .doc_comment) idx else null;
+        },
+        else => return null,
     }
+
+    // @TODO: Implement doc comments for tags
+    // } else if (node.castTag(.ErrorTag)) |tag| {
+    //     return tag.doc_comments;
+    // }
     return null;
 }
 
@@ -28,11 +44,11 @@ pub fn getDocCommentNode(tree: ast.Tree, node: *ast.Node) ?*ast.Node.DocComment
 pub fn getDocComments(
     allocator: *std.mem.Allocator,
     tree: ast.Tree,
-    node: *ast.Node,
+    node: ast.Node.Index,
     format: types.MarkupContent.Kind,
 ) !?[]const u8 {
-    if (getDocCommentNode(tree, node)) |doc_comment_node| {
-        return try collectDocComments(allocator, tree, doc_comment_node, format);
+    if (getDocCommentTokenIndex(tree, node)) |doc_comment_index| {
+        return try collectDocComments(allocator, tree, doc_comment_index, format);
     }
     return null;
 }
 
@@ -40,17 +56,19 @@ pub fn getDocComments(
 pub fn collectDocComments(
     allocator: *std.mem.Allocator,
     tree: ast.Tree,
-    doc_comments: *ast.Node.DocComment,
+    doc_comments: ast.TokenIndex,
     format: types.MarkupContent.Kind,
 ) ![]const u8 {
     var lines = std.ArrayList([]const u8).init(allocator);
     defer lines.deinit();
 
-    var curr_line_tok = doc_comments.first_line;
+    const token_tags = tree.tokens.items(.tag);
+    const loc = tree.tokenLocation(0, doc_comments);
+
+    var curr_line_tok = doc_comments;
     while (true) : (curr_line_tok += 1) {
-        switch (tree.token_ids[curr_line_tok]) {
-            .LineComment => continue,
-            .DocComment, .ContainerDocComment => {
+        switch (token_tags[curr_line_tok]) {
+            .doc_comment, .container_doc_comment => {
                 try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces));
             },
             else => break,
@@ -61,20 +79,15 @@ pub fn collectDocComments(
 }
 
 /// Gets a function signature (keywords, name, return value)
-pub fn getFunctionSignature(tree: ast.Tree, func: *ast.Node.FnProto) []const u8 {
-    const start = tree.token_locs[func.firstToken()].start;
-    const end = tree.token_locs[
-        switch (func.return_type) {
-            .Explicit, .InferErrorSet => |node| node.lastToken(),
-            .Invalid => |r_paren| r_paren,
-        }
-    ].end;
+pub fn getFunctionSignature(tree: ast.Tree, func: *ast.full.FnProto) []const u8 {
+    const start = tree.tokenLocation(0, func.ast.fn_token).line_start;
+    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(func.ast.return_type)).line_end;
     return tree.source[start..end];
 }
 
 /// Gets a function snippet insert text
-pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.Node.FnProto, skip_self_param: bool) ![]const u8 {
-    const name_tok = func.getNameToken() orelse unreachable;
+pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.full.FnProto, skip_self_param: bool) ![]const u8 {
+    const name_index = func.name_token orelse unreachable;
 
     var buffer = std.ArrayList(u8).init(allocator);
     try buffer.ensureCapacity(128);
@@ -84,18 +97,20 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *
 
     var buf_stream = buffer.writer();
 
-    for (func.paramsConst()) |param, param_num| {
-        if (skip_self_param and param_num == 0) continue;
-        if (param_num != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${");
+    const token_tags = tree.tokens.items(.tag);
 
-        try buf_stream.print("{}:", .{param_num + 1});
+    var it = func.iterate(tree);
+    while (it.next()) |param| {
+        if (skip_self_param and it.param_i == 0) continue;
+        if (it.param_i != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${");
 
-        if (param.comptime_token) |_| {
-            try buffer.appendSlice("comptime ");
-        }
+        try buf_stream.print("{d}:", .{it.param_i + 1});
 
-        if (param.noalias_token) |_| {
-            try buffer.appendSlice("noalias ");
+        if (param.comptime_noalias) |token_index| {
+            if (token_tags[token_index] == .keyword_comptime)
+                try buffer.appendSlice("comptime ")
+            else
+                try buffer.appendSlice("noalias ");
         }
 
         if (param.name_token) |name_token| {
@@ -103,23 +118,23 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *
             try buffer.appendSlice(": ");
         }
 
-        switch (param.param_type) {
-            .any_type => try buffer.appendSlice("anytype"),
-            .type_expr => |type_expr| {
-                var curr_tok = type_expr.firstToken();
-                var end_tok = type_expr.lastToken();
-                while (curr_tok <= end_tok) : (curr_tok += 1) {
-                    const id = tree.token_ids[curr_tok];
-                    const is_comma = id == .Comma;
+        if (param.anytype_ellipsis3) |token_index| {
+            if (token_tags[token_index] == .keyword_anytype)
+                try buffer.appendSlice("anytype")
+            else
+                try buffer.appendSlice("...");
+        } else {
+            var curr_token = param.type_expr;
+            var end_token = tree.lastToken(func.ast.params[it.param_i]);
+            while (curr_token <= end_token) : (curr_token += 1) {
+                const tag = token_tags[curr_token];
+                const is_comma = tag == .comma;
 
-                    if (curr_tok == end_tok and is_comma) continue;
-
-                    try buffer.appendSlice(tree.tokenSlice(curr_tok));
-                    if (is_comma or id == .Keyword_const) try buffer.append(' ');
-                }
-            },
+                if (curr_token == end_token and is_comma) continue;
+                try buffer.appendSlice(tree.tokenSlice(curr_token));
+                if (is_comma or tag == .keyword_const) try buffer.append(' ');
+            }
         }
-
         try buffer.append('}');
     }
     try buffer.append(')');
@@ -128,16 +143,16 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *
 }
 
 /// Gets a function signature (keywords, name, return value)
-pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 {
-    const start = tree.token_locs[var_decl.firstToken()].start;
-    const end = tree.token_locs[var_decl.semicolon_token].start;
+pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.full.VarDecl) []const u8 {
+    const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start;
+    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end;
     return tree.source[start..end];
 }
 
 // analysis.getContainerFieldSignature(handle.tree, field)
-pub fn getContainerFieldSignature(tree: ast.Tree, field: *ast.Node.ContainerField) []const u8 {
-    const start = tree.token_locs[field.firstToken()].start;
-    const end = tree.token_locs[field.lastToken()].end;
+pub fn getContainerFieldSignature(tree: ast.Tree, field: *ast.full.ContainerField) []const u8 {
+    const start = tree.tokenLocation(0, field.ast.name_token).line_start;
+    const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(field.ast.value_expr)).line_end;
     return tree.source[start..end];
 }
 
@@ -150,7 +165,7 @@ fn typeIsType(tree: ast.Tree, node: ast.Node.Index) bool {
 }
 
 pub fn isTypeFunction(tree: ast.Tree, func: ast.full.FnProto) bool {
-    return typeIsType(func.ast.return_type);
+    return typeIsType(tree, func.ast.return_type);
 }
 
 pub fn isGenericFunction(tree: ast.Tree, func: *ast.full.FnProto) bool {
@@ -1570,7 +1585,9 @@ pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: ast.Tree, encodin
 }
 
 pub const Declaration = union(enum) {
-    ast_node: ast.Node,
+    /// Index of the ast node
+    ast_node: ast.Node.Index,
+    /// Function parameter
     param_decl: ast.full.FnProto.Param,
     pointer_payload: struct {
         node: ast.full.PtrType,
@@ -2062,15 +2079,15 @@ pub const DocumentScope = struct {
     pub const Scope = struct {
         pub const Data = union(enum) {
-            container: *ast.Node, // .id is ContainerDecl or Root or ErrorSetDecl
-            function: *ast.Node, // .id is FnProto
-            block: *ast.Node, // .id is Block
+            container: ast.Node.Index, // .tag is ContainerDecl or Root or ErrorSetDecl
+            function: ast.Node.Index, // .tag is FnProto
+            block: ast.Node.Index, // .tag is Block
             other,
         };
 
         range: SourceRange,
         decls: std.StringHashMap(Declaration),
-        tests: []const *ast.Node,
+        tests: []const ast.Node.Index,
         // uses: []const *ast.Node.Data,
         data: Data,
@@ -2088,8 +2105,8 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen
         for (enum_completions.items) |item| if (item.documentation) |doc| allocator.free(doc.value);
         enum_completions.deinit(allocator);
     }
-
-    try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree);
+    // pass root node index ('0')
+    try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree, 0);
     return DocumentScope{
        .scopes = scopes.toOwnedSlice(allocator),
        .error_completions = error_completions.toOwnedSlice(allocator),
@@ -2105,6 +2122,51 @@ fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange {
     };
 }
 
+fn isContainer(tag: ast.Node.Tag) bool {
+    return switch (tag) {
+        .container_decl,
+        .container_decl_trailing,
+        .container_decl_arg,
+        .container_decl_arg_trailing,
+        .container_decl_two,
+        .container_decl_two_trailing,
+        .tagged_union,
+        .tagged_union_trailing,
+        .tagged_union_two,
+        .tagged_union_two_trailing,
+        .tagged_union_enum_tag,
+        .tagged_union_enum_tag_trailing,
+        .root,
+        .error_set_decl,
+        => true,
+        else => false,
+    };
+}
+
+/// Returns the member indices of a given declaration container.
+/// Asserts given `tag` is a container node
+fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index) []const ast.Node.Index {
+    std.debug.assert(isContainer(tag));
+    return switch (tag) {
+        .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
+        .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members,
+        .container_decl_two, .container_decl_two_trailing => blk: {
+            var buffer: [2]ast.Node.Index = undefined;
+            break :blk tree.containerDeclTwo(&buffer, node_idx).ast.members;
+        },
+        .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members,
+        .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members,
+        .tagged_union_two, .tagged_union_two_trailing => blk: {
+            var buffer: [2]ast.Node.Index = undefined;
+            break :blk tree.taggedUnionTwo(&buffer, node_idx).ast.members;
+        },
+        .root => tree.rootDecls(),
+        // @TODO: Fix error set declarations
+        .error_set_decl => &[_]ast.Node.Index{},
+        else => unreachable,
+    };
+}
+
 // TODO Possibly collect all imports to diff them on changes
 // as well
 fn makeScopeInternal(
@@ -2115,27 +2177,23 @@ fn makeScopeInternal(
     tree: ast.Tree,
     node_idx: ast.Node.Index,
 ) error{OutOfMemory}!void {
-    const nodes = tree.nodes.items(.tag);
-    const node = nodes[node_idx];
-    if (node == .root or node == .container_decl or node == .error_set_decl) {
-        const ast_decls = switch (node) {
-            .container_decl => tree.containerDecl(node_idx).ast.members,
-            .root => tree.rootDecls(),
-            // @TODO: Fix error set declarations
-            // .error_set_decl => node.castTag(.ErrorSetDecl).?.declsConst(),
-            else => unreachable,
-        };
+    const tags = tree.nodes.items(.tag);
+    const token_tags = tree.tokens.items(.tag);
+    const node = tags[node_idx];
+
+    if (isContainer(node)) {
+        const ast_decls = declMembers(tree, node, node_idx);
 
         (try scopes.addOne(allocator)).* = .{
-            .range = nodeSourceRange(tree, node),
+            .range = nodeSourceRange(tree, node_idx),
             .decls = std.StringHashMap(Declaration).init(allocator),
             // .uses = &[0]*ast.Node.Use{},
             .tests = &[0]*ast.Node{},
-            .data = .{ .container = node },
+            .data = .{ .container = node_idx },
         };
         const scope_idx = scopes.items.len - 1;
         // var uses = std.ArrayList(*ast.Node.Use).init(allocator);
-        var tests = std.ArrayList(*ast.Node).init(allocator);
+        var tests = std.ArrayList(ast.Node.Index).init(allocator);
 
         errdefer {
             scopes.items[scope_idx].decls.deinit();
@@ -2144,6 +2202,7 @@ fn makeScopeInternal(
        }
 
        for (ast_decls) |decl| {
+            // @TODO: Implement using namespace
            // if (decl.castTag(.Use)) |use| {
            //     try uses.append(use);
            //     continue;
@@ -2151,12 +2210,13 @@ fn makeScopeInternal(
            try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, decl);
            const name = getDeclName(tree, decl) orelse continue;
 
-            if (decl.tag == .TestDecl) {
-                try tests.append(decl);
-                continue;
-            }
+            // @TODO: implement tests
+            // if (decl.tag == .TestDecl) {
+            //     try tests.append(decl);
+            //     continue;
+            // }
 
-            if (node.tag == .ErrorSetDecl) {
+            if (tags[decl] == .error_set_decl) {
                (try error_completions.addOne(allocator)).* = .{
                    .label = name,
                    .kind = .Constant,
@@ -2167,15 +2227,39 @@ fn makeScopeInternal(
                };
            }
 
-            if (decl.castTag(.ContainerField)) |field| {
-                const empty_field = field.type_expr == null and field.value_expr == null;
-                if (empty_field and node.tag == .Root) {
+            const container_field: ?ast.full.ContainerField = switch (tags[decl]) {
+                .container_field => tree.containerField(decl),
+                .container_field_align => tree.containerFieldAlign(decl),
+                .container_field_init => tree.containerFieldInit(decl),
+                else => null,
+            };
+
+            if (container_field) |field| {
+                const empty_field = field.type_expr == 0 and field.value_expr == 0;
+                if (empty_field and node == .root) {
                    continue;
                }
 
-                if (node.castTag(.ContainerDecl)) |container| {
-                    const kind = tree.token_ids[container.kind_token];
-                    if (empty_field and (kind == .Keyword_struct or (kind == .Keyword_union and container.init_arg_expr == .None))) {
+                // @TODO: We can probably just use node_idx directly instead of first transforming to container
+                const container_decl: ?ast.full.ContainerDecl = switch (node) {
+                    .container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
+                    .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
+                    .container_decl_two, .container_decl_two_trailing => blk: {
+                        var buffer: [2]ast.Node.Index = undefined;
+                        break :blk tree.containerDeclTwo(&buffer, node_idx);
+                    },
+                    .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
+                    .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
+                    .tagged_union_two, .tagged_union_two_trailing => blk: {
+                        var buffer: [2]ast.Node.Index = undefined;
+                        break :blk tree.taggedUnionTwo(&buffer, node_idx);
+                    },
+                    else => null,
+                };
+
+                if (container_decl) |container| {
+                    const kind = token_tags[container.ast.main_token];
+                    if (empty_field and (kind == .keyword_struct or (kind == .keyword_union and container.ast.arg == 0))) {
                        continue;
                    }
 
@@ -2183,7 +2267,7 @@ fn makeScopeInternal(
                    (try enum_completions.addOne(allocator)).* = .{
                        .label = name,
                        .kind = .Constant,
-                        .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
+                        .documentation = if (try getDocComments(allocator, tree, 
node_idx, .Markdown)) |docs| .{ .kind = .Markdown, .value = docs } else null, @@ -2202,7 +2286,7 @@ fn makeScopeInternal( return; } - switch (node.tag) { + switch (node) { .FnProto => { const func = node.castTag(.FnProto).?; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 2479c8e..68c16df 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -254,568 +254,582 @@ fn writeContainerField( } } +// @TODO: Fix semantic tokens // TODO This is very slow and does a lot of extra work, improve in the future. -fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?*ast.Node) error{OutOfMemory}!void { - if (maybe_node == null) return; - const node = maybe_node.?; - const handle = builder.handle; +// fn writeNodeTokens( +// builder: *Builder, +// arena: *std.heap.ArenaAllocator, +// store: *DocumentStore, +// maybe_node: ?ast.Node.Index, +// tree: ast.Tree, +// ) error{OutOfMemory}!void { +// if (maybe_node == null) return; - const FrameSize = @sizeOf(@Frame(writeNodeTokens)); - var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); - defer arena.child_allocator.free(child_frame); +// const node_tags = tree.nodes.items(.tag); +// const token_tags = tree.tokens.items(.tag); +// const nodes_data = tree.nodes.items(.data); +// const main_tokens = tree.nodes.items(.main_token); - switch (node.tag) { - .Root, .Block, .LabeledBlock => { - const first_tok = if (node.castTag(.LabeledBlock)) |block_node| block: { - try writeToken(builder, block_node.label, .label); - break :block block_node.lbrace + 1; - } else if (node.castTag(.Block)) |block_node| - block_node.lbrace + 1 - else - 0; +// const node = maybe_node.?; +// const handle = builder.handle; - var gap_highlighter = GapHighlighter.init(builder, first_tok); - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child| : (child_idx += 1) { - try gap_highlighter.next(child); - if (child.cast(ast.Node.ContainerField)) |container_field| { - try writeContainerField(builder, arena, store, container_field, .field, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } +// const FrameSize = @sizeOf(@Frame(writeNodeTokens)); +// var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); +// defer arena.child_allocator.free(child_frame); - if (node.tag == .Root) { - try gap_highlighter.end(handle.tree.token_ids.len - 1); - } else { - try gap_highlighter.end(node.lastToken()); - } - }, - .VarDecl => { - const var_decl = node.cast(ast.Node.VarDecl).?; - if (var_decl.getDocComments()) |doc| try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, var_decl.getVisibToken(), .keyword); - try writeToken(builder, var_decl.getExternExportToken(), .keyword); - try writeToken(builder, var_decl.getThreadLocalToken(), .keyword); - try writeToken(builder, var_decl.getComptimeToken(), .keyword); - try writeToken(builder, var_decl.mut_token, .keyword); - if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, var_decl.name_token, .{ .declaration = true }); - } else { - try writeTokenMod(builder, var_decl.name_token, .variable, .{ .declaration = true }); - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getTypeNode() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ 
builder, arena, store, var_decl.getAlignNode() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getSectionNode() }); - try writeToken(builder, var_decl.getEqToken(), .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getInitNode() }); - }, - .Use => { - const use = node.cast(ast.Node.Use).?; - if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); - try writeToken(builder, use.visib_token, .keyword); - try writeToken(builder, use.use_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, use.expr }); - }, - .ErrorSetDecl => { - const error_set = node.cast(ast.Node.ErrorSetDecl).?; - try writeToken(builder, error_set.error_token, .keyword); - for (error_set.declsConst()) |decl| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl }); - }, - .ContainerDecl => { - const container_decl = node.cast(ast.Node.ContainerDecl).?; - try writeToken(builder, container_decl.layout_token, .keyword); - try writeToken(builder, container_decl.kind_token, .keyword); - switch (container_decl.init_arg_expr) { - .None => {}, - .Enum => |enum_expr| if (enum_expr) |expr| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, expr }) - else - try writeToken(builder, container_decl.kind_token + 2, .keyword), - .Type => |type_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_node }), - } +// switch (node_tags[node]) { +// .root, .block, .block_semicolon => |tag| { +// const first_tok = if (tag != block_semicolon) block: { +// const lbrace = main_tokens[node]; +// if (token_tags[lbrace - 1] == .colon and token_tags[lbrace - 2] == .identifier) +// try writeToken(builder, lbrace - 2, .label); - var gap_highlighter = GapHighlighter.init(builder, container_decl.lbrace_token + 1); - const field_token_type = fieldTokenType(container_decl, handle); - for (container_decl.fieldsAndDeclsConst()) |child| { - try gap_highlighter.next(child); - if (child.cast(ast.Node.ContainerField)) |container_field| { - try writeContainerField(builder, arena, store, container_field, field_token_type, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - try gap_highlighter.end(node.lastToken()); - }, - .ErrorTag => { - const error_tag = node.cast(ast.Node.ErrorTag).?; - if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, error_tag.firstToken(), .errorTag); - }, - .Identifier => { - if (analysis.isTypeIdent(handle.tree, node.firstToken())) { - return try writeToken(builder, node.firstToken(), .type); - } +// break :block lbrace + 1; +// } else 0; - if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { - if (child.decl.* == .param_decl) { - return try writeToken(builder, node.firstToken(), .parameter); - } - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, node.firstToken(), .{}); - } else { - try writeTokenMod(builder, node.firstToken(), .variable, .{}); - } - } - }, - .FnProto => { - const fn_proto = node.cast(ast.Node.FnProto).?; - if (fn_proto.getDocComments()) |docs| try 
writeDocComments(builder, handle.tree, docs); - try writeToken(builder, fn_proto.getVisibToken(), .keyword); - try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); - try writeToken(builder, fn_proto.fn_token, .keyword); +// var gap_highlighter = GapHighlighter.init(builder, first_tok); +// var child_idx: usize = 0; +// while (node.iterate(child_idx)) |child| : (child_idx += 1) { +// try gap_highlighter.next(child); +// if (child.cast(ast.Node.ContainerField)) |container_field| { +// try writeContainerField(builder, arena, store, container_field, .field, child_frame); +// } else { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); +// } +// } - const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) - .type - else - .function; +// if (node.tag == .Root) { +// try gap_highlighter.end(handle.tree.token_ids.len - 1); +// } else { +// try gap_highlighter.end(node.lastToken()); +// } +// }, +// .VarDecl => { +// const var_decl = node.cast(ast.Node.VarDecl).?; +// if (var_decl.getDocComments()) |doc| try writeDocComments(builder, handle.tree, doc); +// try writeToken(builder, var_decl.getVisibToken(), .keyword); +// try writeToken(builder, var_decl.getExternExportToken(), .keyword); +// try writeToken(builder, var_decl.getThreadLocalToken(), .keyword); +// try writeToken(builder, var_decl.getComptimeToken(), .keyword); +// try writeToken(builder, var_decl.mut_token, .keyword); +// if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { +// try colorIdentifierBasedOnType(builder, decl_type, var_decl.name_token, .{ .declaration = true }); +// } else { +// try writeTokenMod(builder, var_decl.name_token, .variable, .{ .declaration = true }); +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getTypeNode() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getAlignNode() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getSectionNode() }); +// try writeToken(builder, var_decl.getEqToken(), .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getInitNode() }); +// }, +// .Use => { +// const use = node.cast(ast.Node.Use).?; +// if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); +// try writeToken(builder, use.visib_token, .keyword); +// try writeToken(builder, use.use_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, use.expr }); +// }, +// .ErrorSetDecl => { +// const error_set = node.cast(ast.Node.ErrorSetDecl).?; +// try writeToken(builder, error_set.error_token, .keyword); +// for (error_set.declsConst()) |decl| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl }); +// }, +// .ContainerDecl => { +// const container_decl = node.cast(ast.Node.ContainerDecl).?; +// try writeToken(builder, container_decl.layout_token, .keyword); +// try writeToken(builder, container_decl.kind_token, .keyword); +// switch (container_decl.init_arg_expr) { +// .None => {}, +// .Enum => |enum_expr| if (enum_expr) |expr| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, expr }) +// else +// try writeToken(builder, 
container_decl.kind_token + 2, .keyword), +// .Type => |type_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_node }), +// } - const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) - TokenModifiers{ .generic = true } - else - TokenModifiers{}; +// var gap_highlighter = GapHighlighter.init(builder, container_decl.lbrace_token + 1); +// const field_token_type = fieldTokenType(container_decl, handle); +// for (container_decl.fieldsAndDeclsConst()) |child| { +// try gap_highlighter.next(child); +// if (child.cast(ast.Node.ContainerField)) |container_field| { +// try writeContainerField(builder, arena, store, container_field, field_token_type, child_frame); +// } else { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); +// } +// } +// try gap_highlighter.end(node.lastToken()); +// }, +// .ErrorTag => { +// const error_tag = node.cast(ast.Node.ErrorTag).?; +// if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, error_tag.firstToken(), .errorTag); +// }, +// .Identifier => { +// if (analysis.isTypeIdent(handle.tree, node.firstToken())) { +// return try writeToken(builder, node.firstToken(), .type); +// } - try writeTokenMod(builder, fn_proto.getNameToken(), func_name_tok_type, tok_mod); +// if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { +// if (child.decl.* == .param_decl) { +// return try writeToken(builder, node.firstToken(), .parameter); +// } +// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); +// if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { +// try colorIdentifierBasedOnType(builder, decl_type, node.firstToken(), .{}); +// } else { +// try writeTokenMod(builder, node.firstToken(), .variable, .{}); +// } +// } +// }, +// .FnProto => { +// const fn_proto = node.cast(ast.Node.FnProto).?; +// if (fn_proto.getDocComments()) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, fn_proto.getVisibToken(), .keyword); +// try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); +// try writeToken(builder, fn_proto.fn_token, .keyword); - for (fn_proto.paramsConst()) |param_decl| { - if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, param_decl.noalias_token, .keyword); - try writeToken(builder, param_decl.comptime_token, .keyword); - try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); - switch (param_decl.param_type) { - .any_type => |var_node| try writeToken(builder, var_node.firstToken(), .type), - .type_expr => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), - } - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getAlignExpr() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getSectionExpr() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getCallconvExpr() }); +// const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) +// .type +// else +// .function; - switch (fn_proto.return_type) { - .Explicit => 
|type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), - .InferErrorSet => |type_expr| { - try writeToken(builder, type_expr.firstToken() - 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }); - }, - .Invalid => {}, - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getBodyNode() }); - }, - .AnyFrameType => { - const any_frame_type = node.cast(ast.Node.AnyFrameType).?; - try writeToken(builder, any_frame_type.anyframe_token, .type); - if (any_frame_type.result) |any_frame_result| { - try writeToken(builder, any_frame_result.arrow_token, .type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, any_frame_result.return_type }); - } - }, - .Defer => { - const defer_node = node.cast(ast.Node.Defer).?; - try writeToken(builder, defer_node.defer_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.expr }); - }, - .Comptime => { - const comptime_node = node.cast(ast.Node.Comptime).?; - if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, comptime_node.comptime_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, comptime_node.expr }); - }, - .Nosuspend => { - const nosuspend_node = node.cast(ast.Node.Nosuspend).?; - try writeToken(builder, nosuspend_node.nosuspend_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, nosuspend_node.expr }); - }, - .Payload => { - const payload = node.cast(ast.Node.Payload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.error_symbol.firstToken(), .variable); - try writeToken(builder, payload.rpipe, .operator); - }, - .PointerPayload => { - const payload = node.cast(ast.Node.PointerPayload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.ptr_token, .operator); - try writeToken(builder, payload.value_symbol.firstToken(), .variable); - try writeToken(builder, payload.rpipe, .operator); - }, - .PointerIndexPayload => { - const payload = node.cast(ast.Node.PointerIndexPayload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.ptr_token, .operator); - try writeToken(builder, payload.value_symbol.firstToken(), .variable); - if (payload.index_symbol) |index_symbol| try writeToken(builder, index_symbol.firstToken(), .variable); - try writeToken(builder, payload.rpipe, .operator); - }, - .Else => { - const else_node = node.cast(ast.Node.Else).?; - try writeToken(builder, else_node.else_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.body }); - }, - .Switch => { - const switch_node = node.cast(ast.Node.Switch).?; - try writeToken(builder, switch_node.switch_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_node.expr }); +// const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) +// TokenModifiers{ .generic = true } +// else +// TokenModifiers{}; - var gap_highlighter = GapHighlighter.init(builder, 
switch_node.expr.lastToken() + 3); - for (switch_node.casesConst()) |case_node| { - try gap_highlighter.next(case_node); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); - } - try gap_highlighter.end(node.lastToken()); - }, - .SwitchCase => { - const switch_case = node.cast(ast.Node.SwitchCase).?; - for (switch_case.itemsConst()) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); - try writeToken(builder, switch_case.arrow_token, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.expr }); - }, - .SwitchElse => { - const switch_else = node.cast(ast.Node.SwitchElse).?; - try writeToken(builder, switch_else.token, .keyword); - }, - .While => { - const while_node = node.cast(ast.Node.While).?; - try writeToken(builder, while_node.label, .label); - try writeToken(builder, while_node.inline_token, .keyword); - try writeToken(builder, while_node.while_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.continue_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); - if (while_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .For => { - const for_node = node.cast(ast.Node.For).?; - try writeToken(builder, for_node.label, .label); - try writeToken(builder, for_node.inline_token, .keyword); - try writeToken(builder, for_node.for_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); - if (for_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .If => { - const if_node = node.cast(ast.Node.If).?; - try writeToken(builder, if_node.if_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); - if (if_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .ArrayInitializer => { - const array_initializer = node.cast(ast.Node.ArrayInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .ArrayInitializerDot => { - const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - 
.StructInitializer => { - const struct_initializer = node.cast(ast.Node.StructInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); - const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { - .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| - fieldTokenType(container_decl, handle) - else - null, - else => null, - } else null; +// try writeTokenMod(builder, fn_proto.getNameToken(), func_name_tok_type, tok_mod); - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; - if (field_token_type) |tok_type| { - try writeToken(builder, field_init.period_token, tok_type); - try writeToken(builder, field_init.name_token, tok_type); - } - try writeToken(builder, field_init.name_token + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); - } - try gap_highlighter.end(struct_initializer.rtoken); - }, - .StructInitializerDot => { - const struct_initializer = node.castTag(.StructInitializerDot).?; +// for (fn_proto.paramsConst()) |param_decl| { +// if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, param_decl.noalias_token, .keyword); +// try writeToken(builder, param_decl.comptime_token, .keyword); +// try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); +// switch (param_decl.param_type) { +// .any_type => |var_node| try writeToken(builder, var_node.firstToken(), .type), +// .type_expr => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), +// } +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getAlignExpr() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getSectionExpr() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getCallconvExpr() }); - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.castTag(.FieldInitializer).?; - try writeToken(builder, field_init.period_token, .field); - try writeToken(builder, field_init.name_token, .field); - try writeToken(builder, field_init.name_token + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); - } - try gap_highlighter.end(struct_initializer.rtoken); - }, - .Call => { - const call = node.cast(ast.Node.Call).?; - try writeToken(builder, call.async_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); - if (builder.current_token) |curr_tok| { - if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { - try writeToken(builder, call.lhs.lastToken(), .function); - } - } - for (call.paramsConst()) |param| try 
await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .Slice => { - const slice = node.castTag(.Slice).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); - try writeToken(builder, slice.start.lastToken() + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); - }, - .ArrayAccess => { - const arr_acc = node.castTag(.ArrayAccess).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); - }, - .Deref, .UnwrapOptional => { - const suffix = node.cast(ast.Node.SimpleSuffixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); - try writeToken(builder, suffix.rtoken, .operator); - }, - .GroupedExpression => { - const grouped_expr = node.cast(ast.Node.GroupedExpression).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); - }, - .Return, .Break, .Continue => { - const cfe = node.cast(ast.Node.ControlFlowExpression).?; - try writeToken(builder, cfe.ltoken, .keyword); - switch (node.tag) { - .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), - .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), - else => {}, - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); - }, - .Suspend => { - const suspend_node = node.cast(ast.Node.Suspend).?; - try writeToken(builder, suspend_node.suspend_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); - }, - .IntegerLiteral => { - try writeToken(builder, node.firstToken(), .number); - }, - .EnumLiteral => { - const enum_literal = node.cast(ast.Node.EnumLiteral).?; - try writeToken(builder, enum_literal.dot, .enumMember); - try writeToken(builder, enum_literal.name, .enumMember); - }, - .FloatLiteral => { - try writeToken(builder, node.firstToken(), .number); - }, - .BuiltinCall => { - const builtin_call = node.cast(ast.Node.BuiltinCall).?; - try writeToken(builder, builtin_call.builtin_token, .builtin); - for (builtin_call.paramsConst()) |param| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .StringLiteral, .CharLiteral => { - try writeToken(builder, node.firstToken(), .string); - }, - .MultilineStringLiteral => { - const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; - for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); - }, - .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { - try writeToken(builder, node.firstToken(), .keywordLiteral); - }, - .ErrorType => { - try writeToken(builder, node.firstToken(), .keyword); - }, - .Asm => { - const asm_expr = node.cast(ast.Node.Asm).?; - try writeToken(builder, asm_expr.asm_token, .keyword); - try writeToken(builder, asm_expr.volatile_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); - // TODO Inputs, outputs. 
- }, - .AnyType => { - try writeToken(builder, node.firstToken(), .type); - }, - .TestDecl => { - const test_decl = node.cast(ast.Node.TestDecl).?; - if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, test_decl.test_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); - }, - .Catch => { - const catch_expr = node.cast(ast.Node.Catch).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); - try writeToken(builder, catch_expr.op_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); - }, - .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, .BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); - if (node.tag != .Period) { - const token_type: TokenType = switch (node.tag) { - .BoolAnd, .BoolOr, .OrElse => .keyword, - else => .operator, - }; +// switch (fn_proto.return_type) { +// .Explicit => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), +// .InferErrorSet => |type_expr| { +// try writeToken(builder, type_expr.firstToken() - 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }); +// }, +// .Invalid => {}, +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getBodyNode() }); +// }, +// .AnyFrameType => { +// const any_frame_type = node.cast(ast.Node.AnyFrameType).?; +// try writeToken(builder, any_frame_type.anyframe_token, .type); +// if (any_frame_type.result) |any_frame_result| { +// try writeToken(builder, any_frame_result.arrow_token, .type); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, any_frame_result.return_type }); +// } +// }, +// .Defer => { +// const defer_node = node.cast(ast.Node.Defer).?; +// try writeToken(builder, defer_node.defer_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.expr }); +// }, +// .Comptime => { +// const comptime_node = node.cast(ast.Node.Comptime).?; +// if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, comptime_node.comptime_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, comptime_node.expr }); +// }, +// .Nosuspend => { +// const nosuspend_node = node.cast(ast.Node.Nosuspend).?; +// try writeToken(builder, nosuspend_node.nosuspend_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, nosuspend_node.expr }); +// }, +// .Payload => { 
+// const payload = node.cast(ast.Node.Payload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.error_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .PointerPayload => { +// const payload = node.cast(ast.Node.PointerPayload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.ptr_token, .operator); +// try writeToken(builder, payload.value_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .PointerIndexPayload => { +// const payload = node.cast(ast.Node.PointerIndexPayload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.ptr_token, .operator); +// try writeToken(builder, payload.value_symbol.firstToken(), .variable); +// if (payload.index_symbol) |index_symbol| try writeToken(builder, index_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .Else => { +// const else_node = node.cast(ast.Node.Else).?; +// try writeToken(builder, else_node.else_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.body }); +// }, +// .Switch => { +// const switch_node = node.cast(ast.Node.Switch).?; +// try writeToken(builder, switch_node.switch_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_node.expr }); - try writeToken(builder, infix_op.op_token, token_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); - } - switch (node.tag) { - .Period => { - const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); +// var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); +// for (switch_node.casesConst()) |case_node| { +// try gap_highlighter.next(case_node); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); +// } +// try gap_highlighter.end(node.lastToken()); +// }, +// .SwitchCase => { +// const switch_case = node.cast(ast.Node.SwitchCase).?; +// for (switch_case.itemsConst()) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); +// try writeToken(builder, switch_case.arrow_token, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.expr }); +// }, +// .SwitchElse => { +// const switch_else = node.cast(ast.Node.SwitchElse).?; +// try writeToken(builder, switch_else.token, .keyword); +// }, +// .While => { +// const while_node = node.cast(ast.Node.While).?; +// try writeToken(builder, while_node.label, .label); +// try writeToken(builder, while_node.inline_token, .keyword); +// try writeToken(builder, while_node.while_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.continue_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
while_node.body }); +// if (while_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .For => { +// const for_node = node.cast(ast.Node.For).?; +// try writeToken(builder, for_node.label, .label); +// try writeToken(builder, for_node.inline_token, .keyword); +// try writeToken(builder, for_node.for_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); +// if (for_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .If => { +// const if_node = node.cast(ast.Node.If).?; +// try writeToken(builder, if_node.if_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); +// if (if_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .ArrayInitializer => { +// const array_initializer = node.cast(ast.Node.ArrayInitializer).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); +// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); +// }, +// .ArrayInitializerDot => { +// const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; +// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); +// }, +// .StructInitializer => { +// const struct_initializer = node.cast(ast.Node.StructInitializer).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); +// const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { +// .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| +// fieldTokenType(container_decl, handle) +// else +// null, +// else => null, +// } else null; - // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added - // writeToken code. - // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - const lhs_type = try analysis.resolveFieldAccessLhsType( - store, - arena, - (try analysis.resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, - .handle = handle, - }, &bound_type_params)) orelse return, - &bound_type_params, - ); - const left_type_node = switch (lhs_type.type.data) { - .other => |n| n, - else => return, - }; - if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { - switch (decl_type.decl.*) { - .ast_node => |decl_node| { - if (decl_node.tag == .ContainerField) { - const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| - fieldTokenType(container_decl, lhs_type.handle) - else if (left_type_node.tag == .Root) - TokenType.field - else - null; +// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); +// for (struct_initializer.listConst()) |field_init_node| { +// try gap_highlighter.next(field_init_node); +// std.debug.assert(field_init_node.tag == .FieldInitializer); +// const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; +// if (field_token_type) |tok_type| { +// try writeToken(builder, field_init.period_token, tok_type); +// try writeToken(builder, field_init.name_token, tok_type); +// } +// try writeToken(builder, field_init.name_token + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); +// } +// try gap_highlighter.end(struct_initializer.rtoken); +// }, +// .StructInitializerDot => { +// const struct_initializer = node.castTag(.StructInitializerDot).?; - if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); - return; - } else if (decl_node.tag == .ErrorTag) { - try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); - } - }, - else => {}, - } +// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); +// for (struct_initializer.listConst()) |field_init_node| { +// try gap_highlighter.next(field_init_node); +// std.debug.assert(field_init_node.tag == .FieldInitializer); +// const field_init = field_init_node.castTag(.FieldInitializer).?; +// try writeToken(builder, field_init.period_token, .field); +// try writeToken(builder, field_init.name_token, .field); +// try writeToken(builder, field_init.name_token + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); +// } +// try gap_highlighter.end(struct_initializer.rtoken); +// }, +// .Call => { +// const call = node.cast(ast.Node.Call).?; +// try writeToken(builder, call.async_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); +// if (builder.current_token) |curr_tok| { +// if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { +// try writeToken(builder, call.lhs.lastToken(), .function); +// } +// } +// for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); +// }, +// .Slice => { +// const slice = node.castTag(.Slice).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); +// try 
writeToken(builder, slice.start.lastToken() + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); +// }, +// .ArrayAccess => { +// const arr_acc = node.castTag(.ArrayAccess).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); +// }, +// .Deref, .UnwrapOptional => { +// const suffix = node.cast(ast.Node.SimpleSuffixOp).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); +// try writeToken(builder, suffix.rtoken, .operator); +// }, +// .GroupedExpression => { +// const grouped_expr = node.cast(ast.Node.GroupedExpression).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); +// }, +// .Return, .Break, .Continue => { +// const cfe = node.cast(ast.Node.ControlFlowExpression).?; +// try writeToken(builder, cfe.ltoken, .keyword); +// switch (node.tag) { +// .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), +// .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), +// else => {}, +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); +// }, +// .Suspend => { +// const suspend_node = node.cast(ast.Node.Suspend).?; +// try writeToken(builder, suspend_node.suspend_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); +// }, +// .IntegerLiteral => { +// try writeToken(builder, node.firstToken(), .number); +// }, +// .EnumLiteral => { +// const enum_literal = node.cast(ast.Node.EnumLiteral).?; +// try writeToken(builder, enum_literal.dot, .enumMember); +// try writeToken(builder, enum_literal.name, .enumMember); +// }, +// .FloatLiteral => { +// try writeToken(builder, node.firstToken(), .number); +// }, +// .BuiltinCall => { +// const builtin_call = node.cast(ast.Node.BuiltinCall).?; +// try writeToken(builder, builtin_call.builtin_token, .builtin); +// for (builtin_call.paramsConst()) |param| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); +// }, +// .StringLiteral, .CharLiteral => { +// try writeToken(builder, node.firstToken(), .string); +// }, +// .MultilineStringLiteral => { +// const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; +// for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); +// }, +// .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { +// try writeToken(builder, node.firstToken(), .keywordLiteral); +// }, +// .ErrorType => { +// try writeToken(builder, node.firstToken(), .keyword); +// }, +// .Asm => { +// const asm_expr = node.cast(ast.Node.Asm).?; +// try writeToken(builder, asm_expr.asm_token, .keyword); +// try writeToken(builder, asm_expr.volatile_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); +// // TODO Inputs, outputs. 
+// }, +// .AnyType => { +// try writeToken(builder, node.firstToken(), .type); +// }, +// .TestDecl => { +// const test_decl = node.cast(ast.Node.TestDecl).?; +// if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); +// try writeToken(builder, test_decl.test_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); +// }, +// .Catch => { +// const catch_expr = node.cast(ast.Node.Catch).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); +// try writeToken(builder, catch_expr.op_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); +// }, +// .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, .BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { +// const infix_op = node.cast(ast.Node.SimpleInfixOp).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); +// if (node.tag != .Period) { +// const token_type: TokenType = switch (node.tag) { +// .BoolAnd, .BoolOr, .OrElse => .keyword, +// else => .operator, +// }; - if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { - try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); - } - } - }, - else => {}, - } - }, - .SliceType => { - const slice_type = node.castTag(.SliceType).?; - const ptr_info = slice_type.ptr_info; - if (ptr_info.align_info) |align_info| { - try writeToken(builder, slice_type.op_token + 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); - }, - .PtrType => { - const pointer_type = node.castTag(.PtrType).?; - const tok_ids = builder.handle.tree.token_ids; +// try writeToken(builder, infix_op.op_token, token_type); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); +// } +// switch (node.tag) { +// .Period => { +// const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); - const ptr_info = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, - else => pointer_type.ptr_info, - }; - const rhs = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, - else => pointer_type.rhs, - }; +// // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added +// // writeToken code. +// // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl +// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); +// const lhs_type = try analysis.resolveFieldAccessLhsType( +// store, +// arena, +// (try analysis.resolveTypeOfNodeInternal(store, arena, .{ +// .node = infix_op.lhs, +// .handle = handle, +// }, &bound_type_params)) orelse return, +// &bound_type_params, +// ); +// const left_type_node = switch (lhs_type.type.data) { +// .other => |n| n, +// else => return, +// }; +// if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { +// switch (decl_type.decl.*) { +// .ast_node => |decl_node| { +// if (decl_node.tag == .ContainerField) { +// const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| +// fieldTokenType(container_decl, lhs_type.handle) +// else if (left_type_node.tag == .Root) +// TokenType.field +// else +// null; - const off = switch (tok_ids[pointer_type.op_token]) { - .Asterisk, .AsteriskAsterisk => blk: { - try writeToken(builder, pointer_type.op_token, .operator); - break :blk pointer_type.op_token + 1; - }, - .LBracket => blk: { - try writeToken(builder, pointer_type.op_token + 1, .operator); - const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; +// if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); +// return; +// } else if (decl_node.tag == .ErrorTag) { +// try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); +// } +// }, +// else => {}, +// } - if (is_c_ptr) { - try writeToken(builder, pointer_type.op_token + 2, .operator); - } +// if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { +// try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); +// } +// } +// }, +// else => {}, +// } +// }, +// .SliceType => { +// const slice_type = node.castTag(.SliceType).?; +// const ptr_info = slice_type.ptr_info; +// if (ptr_info.align_info) |align_info| { +// try writeToken(builder, slice_type.op_token + 2, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); +// } +// try writeToken(builder, ptr_info.const_token, .keyword); +// try writeToken(builder, ptr_info.volatile_token, .keyword); +// try writeToken(builder, ptr_info.allowzero_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); +// }, +// .PtrType => { +// const pointer_type = node.castTag(.PtrType).?; +// const tok_ids = builder.handle.tree.token_ids; - if (ptr_info.sentinel) |sentinel| { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); - break :blk sentinel.lastToken() + 2; - } +// const ptr_info = switch (tok_ids[pointer_type.op_token]) { +// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, +// else => pointer_type.ptr_info, +// }; +// const rhs = switch (tok_ids[pointer_type.op_token]) { +// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, +// else => pointer_type.rhs, +// }; - break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); - }, - else => 0, - }; +// const off = switch (tok_ids[pointer_type.op_token]) { +// .Asterisk, .AsteriskAsterisk => blk: { +// try writeToken(builder, pointer_type.op_token, .operator); +// break :blk pointer_type.op_token + 1; +// }, +// .LBracket => blk: { +// try writeToken(builder, pointer_type.op_token + 1, 
.operator); +// const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; - if (ptr_info.align_info) |align_info| { - try writeToken(builder, off, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); - }, - .ArrayType => { - const array_type = node.castTag(.ArrayType).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .ArrayTypeSentinel => { - const array_type = node.castTag(.ArrayTypeSentinel).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; - const tok_type: TokenType = switch (node.tag) { - .Try, .Await, .Resume => .keyword, - else => .operator, - }; - try writeToken(builder, prefix_op.op_token, tok_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); - }, - else => {}, - } -} +// if (is_c_ptr) { +// try writeToken(builder, pointer_type.op_token + 2, .operator); +// } + +// if (ptr_info.sentinel) |sentinel| { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); +// break :blk sentinel.lastToken() + 2; +// } + +// break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); +// }, +// else => 0, +// }; + +// if (ptr_info.align_info) |align_info| { +// try writeToken(builder, off, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); +// } +// try writeToken(builder, ptr_info.const_token, .keyword); +// try writeToken(builder, ptr_info.volatile_token, .keyword); +// try writeToken(builder, ptr_info.allowzero_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); +// }, +// .ArrayType => { +// const array_type = node.castTag(.ArrayType).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); +// }, +// .ArrayTypeSentinel => { +// const array_type = node.castTag(.ArrayTypeSentinel).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); +// }, +// .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { +// const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; +// const tok_type: TokenType = switch (node.tag) { +// .Try, .Await, .Resume => .keyword, +// else => .operator, +// }; 
+// try writeToken(builder, prefix_op.op_token, tok_type);
+// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs });
+// },
+// else => {},
+// }
+// }

 // TODO Range version, edit version.
 pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
     var builder = Builder.init(arena.child_allocator, handle, encoding);
-    try writeNodeTokens(&builder, arena, store, &handle.tree.root_node.base);
+    // Pass the root node, which always has index 0.
+    // try writeNodeTokens(&builder, arena, store, 0, handle.tree);
     return builder.toOwnedSlice();
 }
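Note (not part of the patch): with the writeNodeTokens call commented out above, writeAllSemanticTokens currently returns an empty token list. Below is a minimal sketch of one way the traversal could be re-enabled once writeNodeTokens has been ported to the index-based AST. It assumes the five-argument signature hinted at by the commented-out call (builder, arena, store, node index, tree), which is a placeholder rather than a settled API, and it assumes std.zig.ast's tree.rootDecls() accessor for the top-level declaration indices. Iterating the root declarations is an alternative to passing root index 0 directly, since the root node is typically not a case the per-node switch handles.

    // Sketch only: re-enable the semantic token walk against the new AST.
    // writeNodeTokens' ported signature is assumed, not confirmed by this patch.
    pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
        var builder = Builder.init(arena.child_allocator, handle, encoding);
        const tree = handle.tree;
        // The root node is always index 0; its children are the file's top-level declarations.
        for (tree.rootDecls()) |decl| {
            try writeNodeTokens(&builder, arena, store, decl, tree);
        }
        return builder.toOwnedSlice();
    }

Either shape (passing index 0 or walking rootDecls()) keeps the Builder lifecycle identical to the current function; the choice only affects whether the root node itself must be handled inside writeNodeTokens' switch.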