diff --git a/src/analysis.zig b/src/analysis.zig index 10fd1cc..30686da 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -17,12 +17,7 @@ pub fn deinit() void { resolve_trail.deinit(); } -/// Gets a declaration's doc comments, caller must free memory when a value is returned -/// Like: -///```zig -///var comments = getFunctionDocComments(allocator, tree, func); -///defer if (comments) |comments_pointer| allocator.free(comments_pointer); -///``` +/// Gets a declaration's doc comments. Caller owns returned memory. pub fn getDocComments( allocator: *std.mem.Allocator, tree: ast.Tree, @@ -30,15 +25,30 @@ pub fn getDocComments( format: types.MarkupContent.Kind, ) !?[]const u8 { const base = tree.nodes.items(.main_token)[node]; + const base_kind = tree.nodes.items(.tag)[node]; const tokens = tree.tokens.items(.tag); - if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index| { - return try collectDocComments(allocator, tree, doc_comment_index, format); + switch (base_kind) { + // As far as I know, this does not actually happen yet, but it may come in useful. + .root => + return try collectDocComments(allocator, tree, 0, format, true), + .fn_proto, + .fn_proto_one, + .fn_proto_simple, + .fn_proto_multi, + .fn_decl, + .local_var_decl, + .global_var_decl, + .aligned_var_decl, + .simple_var_decl => + if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index| + return try collectDocComments(allocator, tree, doc_comment_index, format, false), + else => {} } return null; } -/// Get a declaration's doc comment token index +/// Get the first doc comment of a declaration. pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.TokenIndex) ?ast.TokenIndex { var idx = base_token; if (idx == 0) return null; @@ -50,9 +60,9 @@ pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.Toke if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; // Find first doc comment token - if (!(tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment)) + if (!(tokens[idx] == .doc_comment)) return null; - return while (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) { + return while (tokens[idx] == .doc_comment) { if (idx == 0) break 0; idx -= 1; } else idx + 1; @@ -63,6 +73,7 @@ pub fn collectDocComments( tree: ast.Tree, doc_comments: ast.TokenIndex, format: types.MarkupContent.Kind, + container_doc: bool, ) ![]const u8 { var lines = std.ArrayList([]const u8).init(allocator); defer lines.deinit(); @@ -70,28 +81,27 @@ pub fn collectDocComments( var curr_line_tok = doc_comments; while (true) : (curr_line_tok += 1) { - switch (tokens[curr_line_tok]) { - .doc_comment, .container_doc_comment => { + const comm = tokens[curr_line_tok]; + if ((container_doc and comm == .container_doc_comment) + or (!container_doc and comm == .doc_comment)) { try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces)); - }, - else => break, - } + } else break; } return try std.mem.join(allocator, if (format == .Markdown) " \n" else "\n", lines.items); } -/// Gets a function signature (keywords, name, return value) +/// Gets a function's keyword, name, arguments and return value. 
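+///
+/// For example (a sketch; `tree` and `func` are assumed to be in scope):
+///```zig
+///// for source containing "pub fn add(a: i32, b: i32) i32 {}":
+///const signature = getFunctionSignature(tree, func); // "fn add(a: i32, b: i32) i32"
+///```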
pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { const start = offsets.tokenLocation(tree, func.ast.fn_token); - // return type can be 0 when user wrote incorrect fn signature - // to ensure we don't break, just end the signature at end of fn token - if (func.ast.return_type == 0) return tree.source[start.start..start.end]; - const end = offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type)).end; - return tree.source[start.start..end]; + + const end = if (func.ast.return_type != 0) + offsets.tokenLocation(tree, lastToken(tree, func.ast.return_type)) + else start; + return tree.source[start.start..end.end]; } -/// Gets a function snippet insert text +/// Creates snippet insert text for a function. Caller owns returned memory. pub fn getFunctionSnippet( allocator: *std.mem.Allocator, tree: ast.Tree, @@ -197,7 +207,6 @@ pub fn hasSelfParam( return false; } -/// Gets a function signature (keywords, name, return value) pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 { const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start; const end = offsets.tokenLocation(tree, lastToken(tree, var_decl.ast.init_node)).end; @@ -232,14 +241,19 @@ pub fn isGenericFunction(tree: ast.Tree, func: ast.full.FnProto) bool { } return false; } + // STYLE pub fn isCamelCase(name: []const u8) bool { - return !std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null; + return !std.ascii.isUpper(name[0]) and !isSnakeCase(name); } pub fn isPascalCase(name: []const u8) bool { - return std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name[0..(name.len - 1)], "_") == null; + return std.ascii.isUpper(name[0]) and !isSnakeCase(name); +} + +pub fn isSnakeCase(name: []const u8) bool { + return std.mem.indexOf(u8, name, "_") != null; } // ANALYSIS ENGINE @@ -698,8 +712,6 @@ pub fn resolveTypeOfNodeInternal( if (std.meta.eql(i, node_handle)) return null; } - // We use the backing allocator here because the ArrayList expects its - // allocated memory to persist while it is empty. try resolve_trail.append(node_handle); defer _ = resolve_trail.pop(); @@ -2271,8 +2283,6 @@ fn resolveUse( // it is self-referential and we cannot resolve it. if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null) return null; - // We use the backing allocator here because the ArrayList expects its - // allocated memory to persist while it is empty. 
try using_trail.append(symbol.ptr); defer _ = using_trail.pop(); diff --git a/src/main.zig b/src/main.zig index fb633fe..8bd3990 100644 --- a/src/main.zig +++ b/src/main.zig @@ -230,6 +230,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han }); } + // TODO: style warnings for types, values and declarations below root scope if (tree.errors.len == 0) { for (tree.rootDecls()) |decl_idx| { const decl = tree.nodes.items(.tag)[decl_idx]; @@ -643,7 +644,7 @@ fn hoverSymbol( }, .param_decl => |param| def: { if (param.first_doc_comment) |doc_comments| { - doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind); + doc_str = try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind, false); } const first_token = param.first_doc_comment orelse @@ -959,7 +960,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl const doc = if (param.first_doc_comment) |doc_comments| types.MarkupContent{ .kind = doc_kind, - .value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind), + .value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind, false), } else null; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 511fafc..956f5ee 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -321,13 +321,782 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan } } +fn writeNodeTokens( + builder: *Builder, + arena: *std.heap.ArenaAllocator, + store: *DocumentStore, + maybe_node: ?ast.Node.Index, +) error{OutOfMemory}!void { + const start_node = maybe_node orelse return; + + const handle = builder.handle; + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const node_data = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + if (start_node > node_data.len) return; + + + var stack = std.ArrayList(ast.Node.Index).init(arena.child_allocator); + defer stack.deinit(); + + try stack.append(start_node); + + while (stack.popOrNull()) |node| { + if (node == 0 or node > node_data.len) continue; + + const tag = node_tags[node]; + const main_token = main_tokens[node]; + switch (tag) { + .root => unreachable, + .container_field, + .container_field_align, + .container_field_init, + => try writeContainerField(builder, arena, store, node, .field, &stack), + .@"errdefer" => { + try writeToken(builder, main_token, .keyword); + + if (node_data[node].lhs != 0) { + const payload_tok = node_data[node].lhs; + try writeToken(builder, payload_tok - 1, .operator); + try writeToken(builder, payload_tok, .variable); + try writeToken(builder, payload_tok + 1, .operator); + } + + try stack.append(node_data[node].rhs); + }, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + => { + const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: { + try writeToken(builder, main_token - 2, .label); + break :block main_token + 1; + } else 0; + + var gap_highlighter = GapHighlighter.init(builder, first_tok); + const statements: []const ast.Node.Index = switch (tag) { + .block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs], + .block_two, .block_two_semicolon => blk: { + const statements = &[_]ast.Node.Index{ node_data[node].lhs, node_data[node].rhs }; + const len: usize = if (node_data[node].lhs == 
0) + @as(usize, 0) + else if (node_data[node].rhs == 0) + @as(usize, 1) + else + @as(usize, 2); + break :blk statements[0..len]; + }, + else => unreachable, + }; + + for (statements) |child| { + try gap_highlighter.next(child); + if (node_tags[child].isContainerField()) { + try writeContainerField(builder, arena, store, child, .field, &stack); + } else { + try stack.append(child); + } + } + + try gap_highlighter.end(lastToken(tree, node)); + }, + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => { + const var_decl = varDecl(tree, node).?; + if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx| + try writeDocComments(builder, tree, comment_idx); + + try writeToken(builder, var_decl.visib_token, .keyword); + try writeToken(builder, var_decl.extern_export_token, .keyword); + try writeToken(builder, var_decl.threadlocal_token, .keyword); + try writeToken(builder, var_decl.comptime_token, .keyword); + try writeToken(builder, var_decl.ast.mut_token, .keyword); + + if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { + try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true }); + } else { + try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true }); + } + + if (var_decl.ast.type_node != 0) + try stack.append(var_decl.ast.type_node); + if (var_decl.ast.align_node != 0) + try stack.append(var_decl.ast.align_node); + if (var_decl.ast.section_node != 0) + try stack.append(var_decl.ast.section_node); + + try writeToken(builder, var_decl.ast.mut_token + 2, .operator); + try stack.append(var_decl.ast.init_node); + }, + .@"usingnamespace" => { + const first_tok = tree.firstToken(node); + if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment) + try writeDocComments(builder, tree, first_tok - 1); + try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword); + try writeToken(builder, main_token, .keyword); + try stack.append(node_data[node].lhs); + }, + .container_decl, + .container_decl_trailing, + .container_decl_two, + .container_decl_two_trailing, + .container_decl_arg, + .container_decl_arg_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + => { + var buf: [2]ast.Node.Index = undefined; + const decl: ast.full.ContainerDecl = switch (tag) { + .container_decl, .container_decl_trailing => tree.containerDecl(node), + .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node), + .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node), + .tagged_union, .tagged_union_trailing => tree.taggedUnion(node), + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node), + .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node), + else => unreachable, + }; + + try writeToken(builder, decl.layout_token, .keyword); + try writeToken(builder, decl.ast.main_token, .keyword); + if (decl.ast.enum_token) |enum_token| { + if (decl.ast.arg != 0) + try stack.append(decl.ast.arg) + else + try writeToken(builder, enum_token, .keyword); + } else if (decl.ast.arg != 0) try stack.append(decl.ast.arg); + + var gap_highlighter = GapHighlighter.init(builder, main_token + 1); + const field_token_type = fieldTokenType(node, handle); + for (decl.ast.members) |child| { + try 
gap_highlighter.next(child);
+                    if (node_tags[child].isContainerField()) {
+                        try writeContainerField(builder, arena, store, child, field_token_type, &stack);
+                    } else {
+                        try stack.append(child);
+                    }
+                }
+                try gap_highlighter.end(lastToken(tree, node));
+            },
+            .error_value => {
+                if (node_data[node].lhs != 0) {
+                    try writeToken(builder, node_data[node].lhs - 1, .keyword);
+                }
+                try writeToken(builder, node_data[node].rhs, .errorTag);
+            },
+            .identifier => {
+                if (analysis.isTypeIdent(tree, main_token)) {
+                    // `continue`, not `return`: a `return` inside this loop would
+                    // abandon every node still waiting on the stack.
+                    try writeToken(builder, main_token, .type);
+                    continue;
+                }
+
+                if (try analysis.lookupSymbolGlobal(
+                    store,
+                    arena,
+                    handle,
+                    tree.getNodeSource(node),
+                    tree.tokens.items(.start)[main_token],
+                )) |child| {
+                    if (child.decl.* == .param_decl) {
+                        try writeToken(builder, main_token, .parameter);
+                        continue;
+                    }
+                    var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
+                    if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| {
+                        try colorIdentifierBasedOnType(builder, decl_type, main_token, .{});
+                    } else {
+                        try writeTokenMod(builder, main_token, .variable, .{});
+                    }
+                }
+            },
+            .fn_proto,
+            .fn_proto_one,
+            .fn_proto_simple,
+            .fn_proto_multi,
+            .fn_decl,
+            => {
+                var buf: [1]ast.Node.Index = undefined;
+                const fn_proto: ast.full.FnProto = fnProto(tree, node, &buf).?;
+                if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
+                    try writeDocComments(builder, tree, docs);
+
+                try writeToken(builder, fn_proto.visib_token, .keyword);
+                try writeToken(builder, fn_proto.extern_export_token, .keyword);
+                try writeToken(builder, fn_proto.lib_name, .string);
+                try writeToken(builder, fn_proto.ast.fn_token, .keyword);
+
+                const func_name_tok_type: TokenType = if (analysis.isTypeFunction(tree, fn_proto))
+                    .type
+                else
+                    .function;
+
+                const tok_mod = if (analysis.isGenericFunction(tree, fn_proto))
+                    TokenModifiers{ .generic = true }
+                else
+                    TokenModifiers{};
+
+                try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod);
+
+                var it = fn_proto.iterate(tree);
+                while (it.next()) |param_decl| {
+                    if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, tree, docs);
+
+                    try writeToken(builder, param_decl.comptime_noalias, .keyword);
+                    try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true });
+                    if (param_decl.anytype_ellipsis3) |any_token| {
+                        try writeToken(builder, any_token, .type);
+                    } else if (param_decl.type_expr != 0) try stack.append(param_decl.type_expr);
+                }
+
+                if (fn_proto.ast.align_expr != 0)
+                    try stack.append(fn_proto.ast.align_expr);
+                if (fn_proto.ast.section_expr != 0)
+                    try stack.append(fn_proto.ast.section_expr);
+                if (fn_proto.ast.callconv_expr != 0)
+                    try stack.append(fn_proto.ast.callconv_expr);
+
+                if (fn_proto.ast.return_type != 0)
+                    try stack.append(fn_proto.ast.return_type);
+
+                if (tag == .fn_decl)
+                    try stack.append(node_data[node].rhs);
+            },
+            .anyframe_type => {
+                try writeToken(builder, main_token, .type);
+                if (node_data[node].rhs != 0) {
+                    try writeToken(builder, node_data[node].lhs, .type);
+                    try stack.append(node_data[node].rhs);
+                }
+            },
+            .@"defer" => {
+                try writeToken(builder, main_token, .keyword);
+                try stack.append(node_data[node].rhs);
+            },
+            .@"comptime",
+            .@"nosuspend",
+            => {
+                if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+                    try writeDocComments(builder, tree, doc);
+                try writeToken(builder, main_token, .keyword);
+                try stack.append(node_data[node].lhs);
+            },
+            .@"switch",
+            .switch_comma,
+            => {
+                try writeToken(builder, main_token, .keyword);
+                try stack.append(node_data[node].lhs);
+                const extra = tree.extraData(node_data[node].rhs, ast.Node.SubRange);
+                const cases = tree.extra_data[extra.start..extra.end];
+
+                var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, node_data[node].lhs) + 1);
+                for (cases) |case_node| {
+                    try gap_highlighter.next(case_node);
+                    try stack.append(case_node);
+                }
+                try gap_highlighter.end(lastToken(tree, node));
+            },
+            .switch_case_one,
+            .switch_case,
+            => {
+                const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node);
+                for (switch_case.ast.values) |item_node| try stack.append(item_node);
+                // check if it's 'else'
+                if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
+                try writeToken(builder, switch_case.ast.arrow_token, .operator);
+                if (switch_case.payload_token) |payload_token| {
+                    // skip a leading `*` so the capture name itself is colored
+                    const p_token = payload_token + @boolToInt(token_tags[payload_token] == .asterisk);
+                    try writeToken(builder, p_token, .variable);
+                }
+                try stack.append(switch_case.ast.target_expr);
+            },
+            .@"while",
+            .while_simple,
+            .while_cont,
+            .for_simple,
+            .@"for",
+            => {
+                const while_node = whileAst(tree, node).?;
+                try writeToken(builder, while_node.label_token, .label);
+                try writeToken(builder, while_node.inline_token, .keyword);
+                try writeToken(builder, while_node.ast.while_token, .keyword);
+                try stack.append(while_node.ast.cond_expr);
+                if (while_node.payload_token) |payload| {
+                    try writeToken(builder, payload - 1, .operator);
+                    try writeToken(builder, payload, .variable);
+                    var r_pipe = payload + 1;
+                    if (token_tags[r_pipe] == .comma) {
+                        r_pipe += 1;
+                        try writeToken(builder, r_pipe, .variable);
+                        r_pipe += 1;
+                    }
+                    try writeToken(builder, r_pipe, .operator);
+                }
+                if (while_node.ast.cont_expr != 0)
+                    try stack.append(while_node.ast.cont_expr);
+
+                try stack.append(while_node.ast.then_expr);
+
+                if (while_node.ast.else_expr != 0) {
+                    try writeToken(builder, while_node.else_token, .keyword);
+
+                    if (while_node.error_token) |err_token| {
+                        try writeToken(builder, err_token - 1, .operator);
+                        try writeToken(builder, err_token, .variable);
+                        try writeToken(builder, err_token + 1, .operator);
+                    }
+                    try stack.append(while_node.ast.else_expr);
+                }
+            },
+            .@"if",
+            .if_simple,
+            => {
+                const if_node = ifFull(tree, node);
+
+                try writeToken(builder, if_node.ast.if_token, .keyword);
+                try stack.append(if_node.ast.cond_expr);
+
+                if (if_node.payload_token) |payload| {
+                    // if (?x) |x|
+                    try writeToken(builder, payload - 1, .operator); // |
+                    try writeToken(builder, payload, .variable); // x
+                    try writeToken(builder, payload + 1, .operator); // |
+                }
+                try stack.append(if_node.ast.then_expr);
+
+                if (if_node.ast.else_expr != 0) {
+                    try writeToken(builder, if_node.else_token, .keyword);
+                    if (if_node.error_token) |err_token| {
+                        // else |err|
+                        try writeToken(builder, err_token - 1, .operator); // |
+                        try writeToken(builder, err_token, .variable); // err
+                        try writeToken(builder, err_token + 1, .operator); // |
+                    }
+                    try stack.append(if_node.ast.else_expr);
+                }
+            },
+            .array_init,
+            .array_init_comma,
+            .array_init_one,
+            .array_init_one_comma,
+            .array_init_dot,
+            .array_init_dot_comma,
+            .array_init_dot_two,
+            .array_init_dot_two_comma,
+            => {
+                var buf: [2]ast.Node.Index = undefined;
+                const array_init: ast.full.ArrayInit = switch (tag) {
+                    .array_init, .array_init_comma => tree.arrayInit(node),
+                    .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
+                    .array_init_dot,
.array_init_dot_comma => tree.arrayInitDot(node), + .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), + else => unreachable, + }; + + if (array_init.ast.type_expr != 0) + try stack.append(array_init.ast.type_expr); + for (array_init.ast.elements) |elem| try stack.append(elem); + }, + .struct_init, + .struct_init_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + => { + var buf: [2]ast.Node.Index = undefined; + const struct_init: ast.full.StructInit = switch (tag) { + .struct_init, .struct_init_comma => tree.structInit(node), + .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), + .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), + .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), + else => unreachable, + }; + + var field_token_type: ?TokenType = null; + + if (struct_init.ast.type_expr != 0) { + try stack.append(struct_init.ast.type_expr); + + field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ + .node = struct_init.ast.type_expr, + .handle = handle, + })) |struct_type| switch (struct_type.type.data) { + .other => |type_node| if (isContainer(struct_type.handle.tree, type_node)) + fieldTokenType(type_node, struct_type.handle) + else + null, + else => null, + } else null; + } + + var gap_highlighter = GapHighlighter.init(builder, struct_init.ast.lbrace); + for (struct_init.ast.fields) |field_init| { + try gap_highlighter.next(field_init); + + const init_token = tree.firstToken(field_init); + try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.' + try writeToken(builder, init_token - 2, field_token_type orelse .field); // name + try writeToken(builder, init_token - 1, .operator); // '=' + try stack.append(field_init); + } + try gap_highlighter.end(lastToken(tree, node)); + }, + .call, + .call_comma, + .async_call, + .async_call_comma, + .call_one, + .call_one_comma, + .async_call_one, + .async_call_one_comma, + => { + var params: [1]ast.Node.Index = undefined; + const call: ast.full.Call = switch (tag) { + .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node), + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(¶ms, node), + else => unreachable, + }; + + try writeToken(builder, call.async_token, .keyword); + try stack.append(call.ast.fn_expr); + + if (builder.current_token) |curr_tok| { + if (curr_tok != lastToken(tree, call.ast.fn_expr) and token_tags[lastToken(tree, call.ast.fn_expr)] == .identifier) { + try writeToken(builder, lastToken(tree, call.ast.fn_expr), .function); + } + } + for (call.ast.params) |param| try stack.append(param); + }, + .slice, + .slice_open, + .slice_sentinel, + => { + const slice: ast.full.Slice = switch (tag) { + .slice => tree.slice(node), + .slice_open => tree.sliceOpen(node), + .slice_sentinel => tree.sliceSentinel(node), + else => unreachable, + }; + + try stack.append(slice.ast.sliced); + try stack.append(slice.ast.start); + try writeToken(builder, lastToken(tree, slice.ast.start) + 1, .operator); + + if (slice.ast.end != 0) + try stack.append(slice.ast.end); + if (slice.ast.sentinel != 0) + try stack.append(slice.ast.sentinel); + }, + .array_access => { + try stack.append(node_data[node].lhs); + try stack.append(node_data[node].rhs); + }, + .deref => { + try stack.append(node_data[node].lhs); + try writeToken(builder, main_token, .operator); 
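+                // (`main_token` here is the `.*` token itself; its operand was pushed above.)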
+            },
+            .unwrap_optional => {
+                try stack.append(node_data[node].lhs);
+                try writeToken(builder, main_token + 1, .operator);
+            },
+            .grouped_expression => {
+                try stack.append(node_data[node].lhs);
+            },
+            .@"break",
+            .@"continue",
+            => {
+                try writeToken(builder, main_token, .keyword);
+                if (node_data[node].lhs != 0)
+                    try writeToken(builder, node_data[node].lhs, .label);
+                if (node_data[node].rhs != 0)
+                    try stack.append(node_data[node].rhs);
+            },
+            .@"suspend", .@"return" => {
+                try writeToken(builder, main_token, .keyword);
+                if (node_data[node].lhs != 0)
+                    try stack.append(node_data[node].lhs);
+            },
+            .integer_literal,
+            .float_literal,
+            => {
+                try writeToken(builder, main_token, .number);
+            },
+            .enum_literal => {
+                try writeToken(builder, main_token - 1, .enumMember);
+                try writeToken(builder, main_token, .enumMember);
+            },
+            .builtin_call,
+            .builtin_call_comma,
+            .builtin_call_two,
+            .builtin_call_two_comma,
+            => {
+                const data = node_data[node];
+                const params = switch (tag) {
+                    .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
+                    .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
+                        &[_]ast.Node.Index{}
+                    else if (data.rhs == 0)
+                        &[_]ast.Node.Index{data.lhs}
+                    else
+                        &[_]ast.Node.Index{ data.lhs, data.rhs },
+                    else => unreachable,
+                };
+
+                try writeToken(builder, main_token, .builtin);
+                for (params) |param|
+                    try stack.append(param);
+            },
+            .string_literal,
+            .char_literal,
+            => {
+                try writeToken(builder, main_token, .string);
+            },
+            .multiline_string_literal => {
+                var cur_tok = main_token;
+                const last_tok = node_data[node].rhs;
+
+                while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string);
+            },
+            .true_literal,
+            .false_literal,
+            .null_literal,
+            .undefined_literal,
+            .unreachable_literal,
+            => {
+                try writeToken(builder, main_token, .keywordLiteral);
+            },
+            .error_set_decl => {
+                try writeToken(builder, main_token, .keyword);
+            },
+            .@"asm",
+            .asm_output,
+            .asm_input,
+            .asm_simple,
+            => {
+                const asm_node: ast.full.Asm = switch (tag) {
+                    .@"asm" => tree.asmFull(node),
+                    .asm_simple => tree.asmSimple(node),
+                    // `continue` rather than `return` so the rest of the stack is still drained.
+                    else => continue, // TODO Inputs, outputs
+                };
+
+                try writeToken(builder, main_token, .keyword);
+                try writeToken(builder, asm_node.volatile_token, .keyword);
+                try stack.append(asm_node.ast.template);
+                // TODO Inputs, outputs.
+            },
+            .@"anytype" => {
+                try writeToken(builder, main_token, .type);
+            },
+            .test_decl => {
+                if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc|
+                    try writeDocComments(builder, tree, doc);
+
+                try writeToken(builder, main_token, .keyword);
+                if (token_tags[main_token + 1] == .string_literal)
+                    try writeToken(builder, main_token + 1, .string);
+
+                try stack.append(node_data[node].rhs);
+            },
+            .@"catch" => {
+                try stack.append(node_data[node].lhs);
+                try writeToken(builder, main_token, .keyword);
+                if (token_tags[main_token + 1] == .pipe)
+                    try writeToken(builder, main_token + 1, .variable);
+                try stack.append(node_data[node].rhs);
+            },
+            .add,
+            .add_wrap,
+            .array_cat,
+            .array_mult,
+            .assign,
+            .assign_bit_and,
+            .assign_bit_or,
+            .assign_bit_shift_left,
+            .assign_bit_shift_right,
+            .assign_bit_xor,
+            .assign_div,
+            .assign_sub,
+            .assign_sub_wrap,
+            .assign_mod,
+            .assign_add,
+            .assign_add_wrap,
+            .assign_mul,
+            .assign_mul_wrap,
+            .bang_equal,
+            .bit_and,
+            .bit_or,
+            .bit_shift_left,
+            .bit_shift_right,
+            .bit_xor,
+            .bool_and,
+            .bool_or,
+            .div,
+            .equal_equal,
+            .error_union,
+            .greater_or_equal,
+            .greater_than,
+            .less_or_equal,
+            .less_than,
+            .merge_error_sets,
+            .mod,
+            .mul,
+            .mul_wrap,
+            .switch_range,
+            .sub,
+            .sub_wrap,
+            .@"orelse",
+            => {
+                try stack.append(node_data[node].lhs);
+                const token_type: TokenType = switch (tag) {
+                    .bool_and, .bool_or => .keyword,
+                    else => .operator,
+                };
+
+                try writeToken(builder, main_token, token_type);
+                if (node_data[node].rhs != 0)
+                    try stack.append(node_data[node].rhs);
+            },
+            .field_access => {
+                const data = node_data[node];
+                if (data.rhs == 0) continue;
+                const rhs_str = tree.tokenSlice(data.rhs);
+
+                try stack.append(data.lhs);
+
+                // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added
+                // writeToken code.
+                // Maybe we can hook into it instead? Also applies to Identifier and VarDecl
+                var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator);
+                const lhs_type = try analysis.resolveFieldAccessLhsType(
+                    store,
+                    arena,
+                    (try analysis.resolveTypeOfNodeInternal(store, arena, .{
+                        .node = data.lhs,
+                        .handle = handle,
+                    }, &bound_type_params)) orelse continue,
+                    &bound_type_params,
+                );
+                const left_type_node = switch (lhs_type.type.data) {
+                    .other => |n| n,
+                    else => continue,
+                };
+                if (try analysis.lookupSymbolContainer(store, arena, .{
+                    .node = left_type_node,
+                    .handle = lhs_type.handle,
+                }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| {
+                    switch (decl_type.decl.*) {
+                        .ast_node => |decl_node| {
+                            if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) {
+                                const tok_type: ?TokenType = if (isContainer(lhs_type.handle.tree, left_type_node))
+                                    fieldTokenType(decl_node, lhs_type.handle)
+                                else if (left_type_node == 0)
+                                    TokenType.field
+                                else
+                                    null;
+
+                                if (tok_type) |tt| try writeToken(builder, data.rhs, tt);
+                                continue;
+                            } else if (decl_type.handle.tree.nodes.items(.tag)[decl_node] == .error_value) {
+                                try writeToken(builder, data.rhs, .errorTag);
+                            }
+                        },
+                        else => {},
+                    }
+
+                    if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| {
+                        try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{});
+                    }
+                }
+            },
+            .ptr_type,
+            .ptr_type_aligned,
+            .ptr_type_bit_range,
+            .ptr_type_sentinel,
+            => {
+                const ptr_type = ptrType(tree, node).?;
+
+                if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
+                    main_token == main_tokens[ptr_type.ast.child_type])
+                {
+                    try stack.append(ptr_type.ast.child_type);
+                    continue;
+                }
+
+                if (ptr_type.size == .One) try writeToken(builder, main_token, .operator);
+                if (ptr_type.ast.sentinel != 0) {
+                    try stack.append(ptr_type.ast.sentinel);
+                    continue;
+                }
+
+                try writeToken(builder, ptr_type.allowzero_token, .keyword);
+
+                if (ptr_type.ast.align_node != 0) {
+                    const first_tok = tree.firstToken(ptr_type.ast.align_node);
+                    try writeToken(builder, first_tok - 2, .keyword);
+                    try stack.append(ptr_type.ast.align_node);
+
+                    if (ptr_type.ast.bit_range_start != 0) {
+                        try stack.append(ptr_type.ast.bit_range_start);
+                        try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator);
+                        try stack.append(ptr_type.ast.bit_range_end);
+                    }
+                }
+
+                try writeToken(builder, ptr_type.const_token, .keyword);
+                try writeToken(builder, ptr_type.volatile_token, .keyword);
+
+                try stack.append(ptr_type.ast.child_type);
+            },
+            .array_type,
+            .array_type_sentinel,
+            => {
+                const array_type: ast.full.ArrayType = if (tag == .array_type)
+                    tree.arrayType(node)
+                else
+                    tree.arrayTypeSentinel(node);
+
+                try stack.append(array_type.ast.elem_count);
+                if (array_type.ast.sentinel != 0)
+                    try stack.append(array_type.ast.sentinel);
+
+                try stack.append(array_type.ast.elem_type);
+            },
+            .address_of,
+            .bit_not,
+            .bool_not,
+            .optional_type,
+            .negation,
+            .negation_wrap,
+            => {
+                try writeToken(builder, main_token, .operator);
+                try stack.append(node_data[node].lhs);
+            },
+            .@"try",
+            .@"resume",
+            .@"await",
+            => {
+                try writeToken(builder, main_token, .keyword);
+                try stack.append(node_data[node].lhs);
+            },
+            .anyframe_literal => try writeToken(builder, main_token, .keyword),
+        }
+    }
+}
+
 fn writeContainerField(
     builder: *Builder,
     arena: *std.heap.ArenaAllocator,
     store: *DocumentStore,
     node: ast.Node.Index,
     field_token_type: ?TokenType,
-    child_frame: anytype,
+    stack: *std.ArrayList(ast.Node.Index),
 ) !void {
     const tree =
builder.handle.tree; const container_field = containerField(tree, node).?; @@ -343,9 +1112,9 @@ fn writeContainerField( if (container_field.ast.type_expr != 0) { if (container_field.ast.align_expr != 0) { try writeToken(builder, tree.firstToken(container_field.ast.align_expr) - 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr }); + try stack.append(container_field.ast.align_expr); } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr }); + try stack.append(container_field.ast.type_expr); } if (container_field.ast.value_expr != 0) block: { @@ -357,786 +1126,10 @@ fn writeContainerField( break :block; // Check this, I believe it is correct. try writeToken(builder, eq_tok, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr }); + try stack.append(container_field.ast.value_expr); } } -// TODO This is very slow and does a lot of extra work, improve in the future. -fn writeNodeTokens( - builder: *Builder, - arena: *std.heap.ArenaAllocator, - store: *DocumentStore, - maybe_node: ?ast.Node.Index, -) error{OutOfMemory}!void { - if (maybe_node == null) return; - const node = maybe_node.?; - if (node == 0) return; - - const handle = builder.handle; - const tree = handle.tree; - const node_tags = tree.nodes.items(.tag); - const token_tags = tree.tokens.items(.tag); - const datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - if (node > datas.len) return; - - const tag = node_tags[node]; - const main_token = main_tokens[node]; - - const FrameSize = @sizeOf(@Frame(writeNodeTokens)); - var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); - defer arena.child_allocator.free(child_frame); - - switch (tag) { - .root => unreachable, - .container_field, - .container_field_align, - .container_field_init, - => try writeContainerField(builder, arena, store, node, .field, child_frame), - .@"errdefer" => { - try writeToken(builder, main_token, .keyword); - - if (datas[node].lhs != 0) { - const payload_tok = datas[node].lhs; - try writeToken(builder, payload_tok - 1, .operator); - try writeToken(builder, payload_tok, .variable); - try writeToken(builder, payload_tok + 1, .operator); - } - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .block, - .block_semicolon, - .block_two, - .block_two_semicolon, - => { - const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: { - try writeToken(builder, main_token - 2, .label); - break :block main_token + 1; - } else 0; - - var gap_highlighter = GapHighlighter.init(builder, first_tok); - const statements: []const ast.Node.Index = switch (tag) { - .block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs], - .block_two, .block_two_semicolon => blk: { - const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; - const len: usize = if (datas[node].lhs == 0) - @as(usize, 0) - else if (datas[node].rhs == 0) - @as(usize, 1) - else - @as(usize, 2); - break :blk statements[0..len]; - }, - else => unreachable, - }; - - for (statements) |child| { - try gap_highlighter.next(child); - if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, child, .field, child_frame); - } else { - try await @asyncCall(child_frame, 
{}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - - try gap_highlighter.end(lastToken(tree, node)); - }, - .global_var_decl, - .local_var_decl, - .simple_var_decl, - .aligned_var_decl, - => { - const var_decl = varDecl(tree, node).?; - if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx| - try writeDocComments(builder, handle.tree, comment_idx); - - try writeToken(builder, var_decl.visib_token, .keyword); - try writeToken(builder, var_decl.extern_export_token, .keyword); - try writeToken(builder, var_decl.threadlocal_token, .keyword); - try writeToken(builder, var_decl.comptime_token, .keyword); - try writeToken(builder, var_decl.ast.mut_token, .keyword); - - if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true }); - } else { - try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true }); - } - - if (var_decl.ast.type_node != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node }); - if (var_decl.ast.align_node != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node }); - if (var_decl.ast.section_node != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node }); - - try writeToken(builder, var_decl.ast.mut_token + 2, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node }); - }, - .@"usingnamespace" => { - const first_tok = tree.firstToken(node); - if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment) - try writeDocComments(builder, builder.handle.tree, first_tok - 1); - try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword); - try writeToken(builder, main_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - }, - .container_decl, - .container_decl_trailing, - .container_decl_two, - .container_decl_two_trailing, - .container_decl_arg, - .container_decl_arg_trailing, - .tagged_union, - .tagged_union_trailing, - .tagged_union_enum_tag, - .tagged_union_enum_tag_trailing, - .tagged_union_two, - .tagged_union_two_trailing, - => { - var buf: [2]ast.Node.Index = undefined; - const decl: ast.full.ContainerDecl = switch (tag) { - .container_decl, .container_decl_trailing => tree.containerDecl(node), - .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node), - .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node), - .tagged_union, .tagged_union_trailing => tree.taggedUnion(node), - .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node), - .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node), - else => unreachable, - }; - - try writeToken(builder, decl.layout_token, .keyword); - try writeToken(builder, decl.ast.main_token, .keyword); - if (decl.ast.enum_token) |enum_token| { - if (decl.ast.arg != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }) - else - try writeToken(builder, enum_token, .keyword); - } else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }); - - var gap_highlighter = 
GapHighlighter.init(builder, main_token + 1); - const field_token_type = fieldTokenType(node, handle); - for (decl.ast.members) |child| { - try gap_highlighter.next(child); - if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, child, field_token_type, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - try gap_highlighter.end(lastToken(tree, node)); - }, - .error_value => { - if (datas[node].lhs != 0) { - try writeToken(builder, datas[node].lhs - 1, .keyword); - } - try writeToken(builder, datas[node].rhs, .errorTag); - }, - .identifier => { - if (analysis.isTypeIdent(handle.tree, main_token)) { - return try writeToken(builder, main_token, .type); - } - - if (try analysis.lookupSymbolGlobal( - store, - arena, - handle, - handle.tree.getNodeSource(node), - handle.tree.tokens.items(.start)[main_token], - )) |child| { - if (child.decl.* == .param_decl) { - return try writeToken(builder, main_token, .parameter); - } - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, main_token, .{}); - } else { - try writeTokenMod(builder, main_token, .variable, .{}); - } - } - }, - .fn_proto, - .fn_proto_one, - .fn_proto_simple, - .fn_proto_multi, - .fn_decl, - => { - var buf: [1]ast.Node.Index = undefined; - const fn_proto: ast.full.FnProto = fnProto(tree, node, &buf).?; - if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs| - try writeDocComments(builder, handle.tree, docs); - - try writeToken(builder, fn_proto.visib_token, .keyword); - try writeToken(builder, fn_proto.extern_export_token, .keyword); - try writeToken(builder, fn_proto.lib_name, .string); - try writeToken(builder, fn_proto.ast.fn_token, .keyword); - - const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) - .type - else - .function; - - const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) - TokenModifiers{ .generic = true } - else - TokenModifiers{}; - - try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod); - - var it = fn_proto.iterate(tree); - while (it.next()) |param_decl| { - if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, handle.tree, docs); - - try writeToken(builder, param_decl.comptime_noalias, .keyword); - try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); - if (param_decl.anytype_ellipsis3) |any_token| { - try writeToken(builder, any_token, .type); - } else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr }); - } - - if (fn_proto.ast.align_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr }); - if (fn_proto.ast.section_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr }); - if (fn_proto.ast.callconv_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr }); - - if (fn_proto.ast.return_type != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type }); - - if (tag == .fn_decl) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - 
}, - .anyframe_type => { - try writeToken(builder, main_token, .type); - if (datas[node].rhs != 0) { - try writeToken(builder, datas[node].lhs, .type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - } - }, - .@"defer" => { - try writeToken(builder, main_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .@"comptime", - .@"nosuspend", - => { - if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc| - try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, main_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - }, - .@"switch", - .switch_comma, - => { - try writeToken(builder, main_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); - const cases = tree.extra_data[extra.start..extra.end]; - - var gap_highlighter = GapHighlighter.init(builder, lastToken(tree, datas[node].lhs) + 1); - for (cases) |case_node| { - try gap_highlighter.next(case_node); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); - } - try gap_highlighter.end(lastToken(tree, node)); - }, - .switch_case_one, - .switch_case, - => { - const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node); - for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); - // check it it's 'else' - if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword); - try writeToken(builder, switch_case.ast.arrow_token, .operator); - if (switch_case.payload_token) |payload_token| { - const p_token = @boolToInt(token_tags[payload_token] == .asterisk); - try writeToken(builder, p_token, .variable); - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr }); - }, - .@"while", - .while_simple, - .while_cont, - .for_simple, - .@"for", - => { - const while_node = whileAst(tree, node).?; - try writeToken(builder, while_node.label_token, .label); - try writeToken(builder, while_node.inline_token, .keyword); - try writeToken(builder, while_node.ast.while_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr }); - if (while_node.payload_token) |payload| { - try writeToken(builder, payload - 1, .operator); - try writeToken(builder, payload, .variable); - var r_pipe = payload + 1; - if (token_tags[r_pipe] == .comma) { - r_pipe += 1; - try writeToken(builder, r_pipe, .variable); - r_pipe += 1; - } - try writeToken(builder, r_pipe, .operator); - } - if (while_node.ast.cont_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr }); - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr }); - - if (while_node.ast.else_expr != 0) { - try writeToken(builder, while_node.else_token, .keyword); - - if (while_node.error_token) |err_token| { - try writeToken(builder, err_token - 1, .operator); - try writeToken(builder, err_token, .variable); - try writeToken(builder, err_token + 1, .operator); - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ 
builder, arena, store, while_node.ast.else_expr }); - } - }, - .@"if", - .if_simple, - => { - const if_node = ifFull(tree, node); - - try writeToken(builder, if_node.ast.if_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr }); - - if (if_node.payload_token) |payload| { - // if (?x) |x| - try writeToken(builder, payload - 1, .operator); // | - try writeToken(builder, payload, .variable); // x - try writeToken(builder, payload + 1, .operator); // | - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr }); - - if (if_node.ast.else_expr != 0) { - try writeToken(builder, if_node.else_token, .keyword); - if (if_node.error_token) |err_token| { - // else |err| - try writeToken(builder, err_token - 1, .operator); // | - try writeToken(builder, err_token, .variable); // err - try writeToken(builder, err_token + 1, .operator); // | - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr }); - } - }, - .array_init, - .array_init_comma, - .array_init_one, - .array_init_one_comma, - .array_init_dot, - .array_init_dot_comma, - .array_init_dot_two, - .array_init_dot_two_comma, - => { - var buf: [2]ast.Node.Index = undefined; - const array_init: ast.full.ArrayInit = switch (tag) { - .array_init, .array_init_comma => tree.arrayInit(node), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node), - .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), - .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), - else => unreachable, - }; - - if (array_init.ast.type_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr }); - for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .struct_init, - .struct_init_comma, - .struct_init_dot, - .struct_init_dot_comma, - .struct_init_one, - .struct_init_one_comma, - .struct_init_dot_two, - .struct_init_dot_two_comma, - => { - var buf: [2]ast.Node.Index = undefined; - const struct_init: ast.full.StructInit = switch (tag) { - .struct_init, .struct_init_comma => tree.structInit(node), - .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), - .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), - else => unreachable, - }; - - var field_token_type: ?TokenType = null; - - if (struct_init.ast.type_expr != 0) { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr }); - - field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ - .node = struct_init.ast.type_expr, - .handle = handle, - })) |struct_type| switch (struct_type.type.data) { - .other => |type_node| if (isContainer(struct_type.handle.tree, type_node)) - fieldTokenType(type_node, struct_type.handle) - else - null, - else => null, - } else null; - } - - var gap_highlighter = GapHighlighter.init(builder, struct_init.ast.lbrace); - for (struct_init.ast.fields) |field_init| { - try gap_highlighter.next(field_init); - - const init_token = tree.firstToken(field_init); - try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.' 
- try writeToken(builder, init_token - 2, field_token_type orelse .field); // name - try writeToken(builder, init_token - 1, .operator); // '=' - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init }); - } - try gap_highlighter.end(lastToken(tree, node)); - }, - .call, - .call_comma, - .async_call, - .async_call_comma, - .call_one, - .call_one_comma, - .async_call_one, - .async_call_one_comma, - => { - var params: [1]ast.Node.Index = undefined; - const call: ast.full.Call = switch (tag) { - .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node), - .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(¶ms, node), - else => unreachable, - }; - - try writeToken(builder, call.async_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr }); - - if (builder.current_token) |curr_tok| { - if (curr_tok != lastToken(tree, call.ast.fn_expr) and token_tags[lastToken(tree, call.ast.fn_expr)] == .identifier) { - try writeToken(builder, lastToken(tree, call.ast.fn_expr), .function); - } - } - for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .slice, - .slice_open, - .slice_sentinel, - => { - const slice: ast.full.Slice = switch (tag) { - .slice => tree.slice(node), - .slice_open => tree.sliceOpen(node), - .slice_sentinel => tree.sliceSentinel(node), - else => unreachable, - }; - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start }); - try writeToken(builder, lastToken(tree, slice.ast.start) + 1, .operator); - - if (slice.ast.end != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end }); - if (slice.ast.sentinel != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel }); - }, - .array_access => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .deref => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - try writeToken(builder, main_token, .operator); - }, - .unwrap_optional => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - try writeToken(builder, main_token + 1, .operator); - }, - .grouped_expression => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - }, - .@"break", - .@"continue", - => { - try writeToken(builder, main_token, .keyword); - if (datas[node].lhs != 0) - try writeToken(builder, datas[node].lhs, .label); - if (datas[node].rhs != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .@"suspend", .@"return" => { - try writeToken(builder, main_token, .keyword); - if (datas[node].lhs != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - }, - .integer_literal, - .float_literal, - => { - try writeToken(builder, main_token, .number); - }, - .enum_literal => { - try writeToken(builder, main_token - 1, .enumMember); - try writeToken(builder, main_token, .enumMember); - }, - 
.builtin_call, - .builtin_call_comma, - .builtin_call_two, - .builtin_call_two_comma, - => { - const data = datas[node]; - const params = switch (tag) { - .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], - .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) - &[_]ast.Node.Index{} - else if (data.rhs == 0) - &[_]ast.Node.Index{data.lhs} - else - &[_]ast.Node.Index{ data.lhs, data.rhs }, - else => unreachable, - }; - - try writeToken(builder, main_token, .builtin); - for (params) |param| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .string_literal, - .char_literal, - => { - try writeToken(builder, main_token, .string); - }, - .multiline_string_literal => { - var cur_tok = main_token; - const last_tok = datas[node].rhs; - - while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string); - }, - .true_literal, - .false_literal, - .null_literal, - .undefined_literal, - .unreachable_literal, - => { - try writeToken(builder, main_token, .keywordLiteral); - }, - .error_set_decl => { - try writeToken(builder, main_token, .keyword); - }, - .@"asm", - .asm_output, - .asm_input, - .asm_simple, - => { - const asm_node: ast.full.Asm = switch (tag) { - .@"asm" => tree.asmFull(node), - .asm_simple => tree.asmSimple(node), - else => return, // TODO Inputs, outputs - }; - - try writeToken(builder, main_token, .keyword); - try writeToken(builder, asm_node.volatile_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template }); - // TODO Inputs, outputs. - }, - .@"anytype" => { - try writeToken(builder, main_token, .type); - }, - .test_decl => { - if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc| - try writeDocComments(builder, handle.tree, doc); - - try writeToken(builder, main_token, .keyword); - if (token_tags[main_token + 1] == .string_literal) - try writeToken(builder, main_token + 1, .string); - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .@"catch" => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - try writeToken(builder, main_token, .keyword); - if (token_tags[main_token + 1] == .pipe) - try writeToken(builder, main_token + 1, .variable); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .add, - .add_wrap, - .array_cat, - .array_mult, - .assign, - .assign_bit_and, - .assign_bit_or, - .assign_bit_shift_left, - .assign_bit_shift_right, - .assign_bit_xor, - .assign_div, - .assign_sub, - .assign_sub_wrap, - .assign_mod, - .assign_add, - .assign_add_wrap, - .assign_mul, - .assign_mul_wrap, - .bang_equal, - .bit_and, - .bit_or, - .bit_shift_left, - .bit_shift_right, - .bit_xor, - .bool_and, - .bool_or, - .div, - .equal_equal, - .error_union, - .greater_or_equal, - .greater_than, - .less_or_equal, - .less_than, - .merge_error_sets, - .mod, - .mul, - .mul_wrap, - .switch_range, - .sub, - .sub_wrap, - .@"orelse", - => { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); - const token_type: TokenType = switch (tag) { - .bool_and, .bool_or => .keyword, - else => .operator, - }; - - try writeToken(builder, main_token, token_type); - if (datas[node].rhs != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); - }, - .field_access => { - 
const data = datas[node]; - if (data.rhs == 0) return; - const rhs_str = tree.tokenSlice(data.rhs); - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs }); - - // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added - // writeToken code. - // Maybe we can hook into it insead? Also applies to Identifier and VarDecl - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - const lhs_type = try analysis.resolveFieldAccessLhsType( - store, - arena, - (try analysis.resolveTypeOfNodeInternal(store, arena, .{ - .node = data.lhs, - .handle = handle, - }, &bound_type_params)) orelse return, - &bound_type_params, - ); - const left_type_node = switch (lhs_type.type.data) { - .other => |n| n, - else => return, - }; - if (try analysis.lookupSymbolContainer(store, arena, .{ - .node = left_type_node, - .handle = lhs_type.handle, - }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { - switch (decl_type.decl.*) { - .ast_node => |decl_node| { - if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) { - const tok_type: ?TokenType = if (isContainer(lhs_type.handle.tree, left_type_node)) - fieldTokenType(decl_node, lhs_type.handle) - else if (left_type_node == 0) - TokenType.field - else - null; - - if (tok_type) |tt| try writeToken(builder, data.rhs, tt); - return; - } else if (decl_type.handle.tree.nodes.items(.tag)[decl_node] == .error_value) { - try writeToken(builder, data.rhs, .errorTag); - } - }, - else => {}, - } - - if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { - try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{}); - } - } - }, - .ptr_type, - .ptr_type_aligned, - .ptr_type_bit_range, - .ptr_type_sentinel, - => { - const ptr_type = ptrType(tree, node).?; - - if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and - main_token == main_tokens[ptr_type.ast.child_type]) - { - return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ - builder, - arena, - store, - ptr_type.ast.child_type, - }); - } - - if (ptr_type.size == .One) try writeToken(builder, main_token, .operator); - if (ptr_type.ast.sentinel != 0) { - return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ - builder, - arena, - store, - ptr_type.ast.sentinel, - }); - } - - try writeToken(builder, ptr_type.allowzero_token, .keyword); - - if (ptr_type.ast.align_node != 0) { - const first_tok = tree.firstToken(ptr_type.ast.align_node); - try writeToken(builder, first_tok - 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node }); - - if (ptr_type.ast.bit_range_start != 0) { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start }); - try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end }); - } - } - - try writeToken(builder, ptr_type.const_token, .keyword); - try writeToken(builder, ptr_type.volatile_token, .keyword); - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type }); - }, - .array_type, - .array_type_sentinel, - => { - const array_type: ast.full.ArrayType = if (tag == .array_type) - tree.arrayType(node) - else - tree.arrayTypeSentinel(node); - - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ 
-            if (array_type.ast.sentinel != 0)
-                try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.sentinel });
-
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type });
-        },
-        .address_of,
-        .bit_not,
-        .bool_not,
-        .optional_type,
-        .negation,
-        .negation_wrap,
-        => {
-            try writeToken(builder, main_token, .operator);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-        },
-        .@"try",
-        .@"resume",
-        .@"await",
-        => {
-            try writeToken(builder, main_token, .keyword);
-            try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs });
-        },
-        .anyframe_literal => try writeToken(builder, main_token, .keyword),
-    }
-}
 
 // TODO Range version, edit version.
 pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
diff --git a/src/signature_help.zig b/src/signature_help.zig
index 3dabacf..2159ee2 100644
--- a/src/signature_help.zig
+++ b/src/signature_help.zig
@@ -23,15 +23,7 @@ fn fnProtoToSignatureInfo(
     const token_starts = tree.tokens.items(.start);
     const alloc = &arena.allocator;
     const label = analysis.getFunctionSignature(tree, proto);
-    const proto_comments = types.MarkupContent{ .value = if (try analysis.getDocComments(
-        alloc,
-        tree,
-        fn_node,
-        .Markdown,
-    )) |dc|
-        dc
-    else
-        "" };
+    const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .Markdown)) orelse "";
 
     const arg_idx = if (skip_self_param) blk: {
         const has_self_param = try analysis.hasSelfParam(arena, document_store, handle, proto);
@@ -42,14 +34,9 @@ fn fnProtoToSignatureInfo(
     var param_it = proto.iterate(tree);
     while (param_it.next()) |param| {
         const param_comments = if (param.first_doc_comment) |dc|
-            types.MarkupContent{ .value = try analysis.collectDocComments(
-                alloc,
-                tree,
-                dc,
-                .Markdown,
-            ) }
+            try analysis.collectDocComments(alloc, tree, dc, .Markdown, false)
         else
-            null;
+            "";
 
         var param_label_start: usize = 0;
         var param_label_end: usize = 0;
@@ -77,12 +64,12 @@ fn fnProtoToSignatureInfo(
         const param_label = tree.source[param_label_start..param_label_end];
         try params.append(alloc, .{
             .label = param_label,
-            .documentation = param_comments,
+            .documentation = types.MarkupContent{ .value = param_comments },
         });
     }
     return types.SignatureInformation{
         .label = label,
-        .documentation = proto_comments,
+        .documentation = types.MarkupContent{ .value = proto_comments },
         .parameters = params.items,
         .activeParameter = arg_idx,
     };
diff --git a/tests/sessions.zig b/tests/sessions.zig
index 61b6e0a..f32fce3 100644
--- a/tests/sessions.zig
+++ b/tests/sessions.zig
@@ -189,9 +189,76 @@ test "Request completion with no trailing whitespace" {
     );
 }
 
+test "Encoded space in file name and usingnamespace on non-existing symbol" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///%20test.zig","languageId":"zig","version":420,"text":"usingnamespace a.b;\nb."}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///%20test.zig"}, "position":{"line":1,"character":2}}
+    ,
+        \\{"isIncomplete":false,"items":[]}
+    );
+}
+
+test "Self-referential definition" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const h = h(0);\nc"}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"h","kind":21,"textEdit":null,"filterText":null,"insertText":"h","insertTextFormat":1,"detail":"const h = h(0)","documentation":null}]}
+    );
+}
+
+test "Missing return type" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"fn w() {}\nc"}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"w","kind":3,"textEdit":null,"filterText":null,"insertText":"w()","insertTextFormat":2,"detail":"fn","documentation":null}]}
+    );
+}
+
+test "Pointer and optional deref" {
+    var server = try Server.start(initialize_msg, null);
+    defer server.shutdown();
+    try server.request("textDocument/didOpen",
+        \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"var value: ?struct { data: i32 = 5 } = null;const ptr = &value;\nconst a = ptr.*.?."}}
+    , null);
+    try server.request("textDocument/completion",
+        \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":18}}
+    ,
+        \\{"isIncomplete":false,"items":[{"label":"data","kind":5,"textEdit":null,"filterText":null,"insertText":"data","insertTextFormat":1,"detail":"data: i32 = 5","documentation":null}]}
+    );
+}
+
 test "Request utf-8 offset encoding" {
     var server = try Server.start(initialize_msg_offs,
         \\{"offsetEncoding":"utf-8","capabilities":{"signatureHelpProvider":{"triggerCharacters":["("],"retriggerCharacters":[","]},"textDocumentSync":1,"renameProvider":true,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"hoverProvider":true,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"implementationProvider":false,"referencesProvider":true,"documentSymbolProvider":true,"colorProvider":false,"documentFormattingProvider":true,"documentRangeFormattingProvider":false,"foldingRangeProvider":false,"selectionRangeProvider":false,"workspaceSymbolProvider":false,"rangeProvider":false,"documentProvider":true,"workspace":{"workspaceFolders":{"supported":false,"changeNotifications":false}},"semanticTokensProvider":{"full":true,"range":false,"legend":{"tokenTypes":["type","parameter","variable","enumMember","field","errorTag","function","keyword","comment","string","number","operator","builtin","label","keywordLiteral"],"tokenModifiers":["namespace","struct","enum","union","opaque","declaration","async","documentation","generic"]}}},"serverInfo":{"name":"zls","version":"0.1.0"}}
     );
     server.shutdown();
 }
+
+// Not fixed yet! Self-referential imports are still unhandled, so this test stays disabled:
+// test "Self-referential import" {
+//     var server = try Server.start(initialize_msg, null);
+//     defer server.shutdown();
+//     try server.request("textDocument/didOpen",
+//         \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const a = @import(\"test.zig\").a;\nc"}}
+//     , null);
+//     try server.request("textDocument/completion",
+//         \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
+//     ,
+//         \\{"isIncomplete":false,"items":[]}
+//     );
+// }
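+//
+// Why this cycles: resolving `a` requires resolving `@import("test.zig").a`,
+// which is `a` itself, so the resolver must detect the loop rather than
+// recurse forever. A sketch of the offending file, reconstructed from the
+// didOpen text above (completion is requested after the `c` on line 2 and
+// should return no items):
+//
+//     const a = @import("test.zig").a;
+//     c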