From 0133f0d863d500073ac39319eeb9cfa746287e95 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Fri, 26 Feb 2021 21:26:52 +0100 Subject: [PATCH 01/36] First set of changes to work with new std's ast --- src/analysis.zig | 443 ++++++++++++++++++++++------------------ src/document_store.zig | 12 +- src/main.zig | 25 ++- src/offsets.zig | 12 +- src/semantic_tokens.zig | 4 +- 5 files changed, 267 insertions(+), 229 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 975b1f6..60e9c3d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -6,7 +6,7 @@ const offsets = @import("offsets.zig"); const log = std.log.scoped(.analysis); /// Get a declaration's doc comment node -pub fn getDocCommentNode(tree: *ast.Tree, node: *ast.Node) ?*ast.Node.DocComment { +pub fn getDocCommentNode(tree: ast.Tree, node: *ast.Node) ?*ast.Node.DocComment { if (node.castTag(.FnProto)) |func| { return func.getDocComments(); } else if (node.castTag(.VarDecl)) |var_decl| { @@ -27,7 +27,7 @@ pub fn getDocCommentNode(tree: *ast.Tree, node: *ast.Node) ?*ast.Node.DocComment ///``` pub fn getDocComments( allocator: *std.mem.Allocator, - tree: *ast.Tree, + tree: ast.Tree, node: *ast.Node, format: types.MarkupContent.Kind, ) !?[]const u8 { @@ -39,7 +39,7 @@ pub fn getDocComments( pub fn collectDocComments( allocator: *std.mem.Allocator, - tree: *ast.Tree, + tree: ast.Tree, doc_comments: *ast.Node.DocComment, format: types.MarkupContent.Kind, ) ![]const u8 { @@ -61,17 +61,19 @@ pub fn collectDocComments( } /// Gets a function signature (keywords, name, return value) -pub fn getFunctionSignature(tree: *ast.Tree, func: *ast.Node.FnProto) []const u8 { +pub fn getFunctionSignature(tree: ast.Tree, func: *ast.Node.FnProto) []const u8 { const start = tree.token_locs[func.firstToken()].start; - const end = tree.token_locs[switch (func.return_type) { + const end = tree.token_locs[ + switch (func.return_type) { .Explicit, .InferErrorSet => |node| node.lastToken(), .Invalid => |r_paren| r_paren, - 
}].end; + } + ].end; return tree.source[start..end]; } /// Gets a function snippet insert text -pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: *ast.Node.FnProto, skip_self_param: bool) ![]const u8 { +pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.Node.FnProto, skip_self_param: bool) ![]const u8 { const name_tok = func.getNameToken() orelse unreachable; var buffer = std.ArrayList(u8).init(allocator); @@ -126,37 +128,36 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: } /// Gets a function signature (keywords, name, return value) -pub fn getVariableSignature(tree: *ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 { +pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 { const start = tree.token_locs[var_decl.firstToken()].start; const end = tree.token_locs[var_decl.semicolon_token].start; return tree.source[start..end]; } // analysis.getContainerFieldSignature(handle.tree, field) -pub fn getContainerFieldSignature(tree: *ast.Tree, field: *ast.Node.ContainerField) []const u8 { +pub fn getContainerFieldSignature(tree: ast.Tree, field: *ast.Node.ContainerField) []const u8 { const start = tree.token_locs[field.firstToken()].start; const end = tree.token_locs[field.lastToken()].end; return tree.source[start..end]; } /// The type node is "type" -fn typeIsType(tree: *ast.Tree, node: *ast.Node) bool { - if (node.castTag(.Identifier)) |ident| { - return std.mem.eql(u8, tree.tokenSlice(ident.token), "type"); +fn typeIsType(tree: ast.Tree, node: ast.Node.Index) bool { + if (tree.nodes.items(.tag)[node] == .identifier) { + return std.mem.eql(u8, tree.tokenSlice(node), "type"); } return false; } -pub fn isTypeFunction(tree: *ast.Tree, func: *ast.Node.FnProto) bool { - switch (func.return_type) { - .Explicit => |node| return typeIsType(tree, node), - .InferErrorSet, .Invalid => return false, - } +pub fn isTypeFunction(tree: ast.Tree, func: 
ast.full.FnProto) bool { + return typeIsType(func.ast.return_type); } -pub fn isGenericFunction(tree: *ast.Tree, func: *ast.Node.FnProto) bool { - for (func.paramsConst()) |param| { - if (param.param_type == .any_type or param.comptime_token != null) { +pub fn isGenericFunction(tree: ast.Tree, func: *ast.full.FnProto) bool { + var it = func.iterate(); + var slice = tree.nodes.items(.tag); + while (it.next()) |param| { + if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { return true; } } @@ -174,43 +175,50 @@ pub fn isPascalCase(name: []const u8) bool { // ANALYSIS ENGINE -pub fn getDeclNameToken(tree: *ast.Tree, node: *ast.Node) ?ast.TokenIndex { - switch (node.tag) { - .VarDecl => { - const vari = node.castTag(.VarDecl).?; - return vari.name_token; +pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { + const tags = tree.nodes.items(.tag); + switch (tags[node]) { + // regular declaration names. + 1 to mut token because name comes after 'const'/'var' + .local_var_decl => return tree.localVarDecl(node).ast.mut_token + 1, + .global_var_decl => return tree.globalVarDecl(node).ast.mut_token + 1, + .simple_var_decl => return tree.simpleVarDecl(node).ast.mut_token + 1, + .aligned_var_decl => return tree.alignedVarDecl(node).ast.mut_token + 1, + + // function declaration names + .fn_proto => return tree.fnProto(node).name_token, + .fn_proto_simple => { + var params: [1]ast.Node.Index = undefined; + return tree.fnProtoSimple(¶ms, node).name_token; }, - .FnProto => { - const func = node.castTag(.FnProto).?; - return func.getNameToken(); - }, - .ContainerField => { - const field = node.castTag(.ContainerField).?; - return field.name_token; - }, - .ErrorTag => { - const tag = node.castTag(.ErrorTag).?; - return tag.name_token; - }, - // We need identifier for captures and error set tags - .Identifier => { - const ident = node.castTag(.Identifier).?; - return ident.token; - }, - .TestDecl => { - const decl = 
node.castTag(.TestDecl).?; - return ((decl.name orelse return null).castTag(.StringLiteral) orelse return null).token; + .fn_proto_one => { + var params: [1]ast.Node.Index = undefined; + return tree.fnProtoOne(¶ms, node).name_token; }, + .fn_proto_multi => return tree.fnProtoMulti(node).name_token, + + // containers + .container_field => return tree.containerField(node).ast.name_token, + .container_field_init => return tree.containerFieldInit(node).ast.name_token, + .container_field_align => return tree.containerFieldAlign(node).ast.name_token, + + // @TODO: Errors + // .error_=> { + // const tag = node.castTag(.ErrorTag).?; + // return tag.name_token; + // }, + + // lhs of main token is name token, so use `node` - 1 + .test_decl => return getDeclNameToken(tree, node - 1), else => {}, } return null; } -fn getDeclName(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { +fn getDeclName(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { const name = tree.tokenSlice(getDeclNameToken(tree, node) orelse return null); - return switch (node.tag) { - .TestDecl => name[1 .. name.len - 1], + return switch (tree.nodes.items(.tag)[node]) { + .test_decl => name[1 .. 
name.len - 1], else => name, }; } @@ -290,7 +298,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato } fn findReturnStatementInternal( - tree: *ast.Tree, + tree: ast.Tree, fn_decl: *ast.Node.FnProto, base_node: *ast.Node, already_found: *bool, @@ -321,7 +329,7 @@ fn findReturnStatementInternal( return result; } -fn findReturnStatement(tree: *ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.ControlFlowExpression { +fn findReturnStatement(tree: ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.ControlFlowExpression { var already_found = false; return findReturnStatementInternal(tree, fn_decl, fn_decl.getBodyNode().?, &already_found); } @@ -510,7 +518,7 @@ fn allDigits(str: []const u8) bool { return true; } -pub fn isTypeIdent(tree: *ast.Tree, token_idx: ast.TokenIndex) bool { +pub fn isTypeIdent(tree: ast.Tree, token_idx: ast.TokenIndex) bool { const PrimitiveTypes = std.ComptimeStringMap(void, .{ .{"isize"}, .{"usize"}, .{"c_short"}, .{"c_ushort"}, @@ -963,37 +971,49 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator, return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params); } -fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: *std.ArrayList([]const u8)) !void { - if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return; - if (builtin_call.params_len > 1) return; +fn maybeCollectImport(tree: ast.Tree, builtin_call: ast.Node.Index, arr: *std.ArrayList([]const u8)) !void { + const tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); - const import_param = builtin_call.paramsConst()[0]; - if (import_param.tag != .StringLiteral) return; + const builtin_tag = tags[builtin_call]; + const builtin_data = datas[builtin_call]; - const import_str = tree.tokenSlice(import_param.castTag(.StringLiteral).?.token); + std.debug.assert(builtin_tag == .builtin_call); + if (!std.mem.eql(u8, tree.tokenSlice(builtin_call), 
"@import")) return; + const params = tree.extra_data[builtin_data.lhs..builtin_data.rhs]; + if (params.len > 1) return; + + if (tags[params[0]] != .string_literal) return; + + const import_str = tree.tokenSlice(params[0]); try arr.append(import_str[1 .. import_str.len - 1]); } /// Collects all imports we can find into a slice of import paths (without quotes). /// The import paths are valid as long as the tree is. -pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !void { +pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !void { // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;` - for (tree.root_node.decls()) |decl| { - if (decl.tag != .VarDecl) continue; - const var_decl = decl.castTag(.VarDecl).?; - const init_node = var_decl.getInitNode() orelse continue; + const tags = tree.nodes.items(.tag); + for (tree.rootDecls()) |decl_idx| { + const var_decl_maybe: ?ast.full.VarDecl = switch (tags[decl_idx]) { + .global_var_decl => tree.globalVarDecl(decl_idx), + .local_var_decl => tree.localVarDecl(decl_idx), + .simple_var_decl => tree.simpleVarDecl(decl_idx), + else => null, + }; + const var_decl = var_decl_maybe orelse continue; - switch (init_node.tag) { - .BuiltinCall => { - const builtin_call = init_node.castTag(.BuiltinCall).?; - try maybeCollectImport(tree, builtin_call, import_arr); - }, - .Period => { - const infix_op = init_node.cast(ast.Node.SimpleInfixOp).?; + const init_node = var_decl.ast.init_node; + const init_node_tag = tags[init_node]; + switch (init_node_tag) { + .builtin_call => try maybeCollectImport(tree, init_node, import_arr), + // @TODO: FIX ME what is the syntax to support for imports using dot notation? 
+ // .Period => { + // const infix_op = init_node.cast(ast.Node.SimpleInfixOp).?; - if (infix_op.lhs.tag != .BuiltinCall) continue; - try maybeCollectImport(tree, infix_op.lhs.castTag(.BuiltinCall).?, import_arr); - }, + // if (infix_op.lhs.tag != .BuiltinCall) continue; + // try maybeCollectImport(tree, infix_op.lhs.castTag(.BuiltinCall).?, import_arr); + // }, else => {}, } } @@ -1134,7 +1154,7 @@ pub fn getFieldAccessType( }; } -pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool { +pub fn isNodePublic(tree: ast.Tree, node: *ast.Node) bool { switch (node.tag) { .VarDecl => { const var_decl = node.castTag(.VarDecl).?; @@ -1148,7 +1168,7 @@ pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool { } } -pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { +pub fn nodeToString(tree: ast.Tree, node: *ast.Node) ?[]const u8 { switch (node.tag) { .ContainerField => { const field = node.castTag(.ContainerField).?; @@ -1176,13 +1196,13 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { return null; } -fn nodeContainsSourceIndex(tree: *ast.Tree, node: *ast.Node, source_index: usize) bool { +fn nodeContainsSourceIndex(tree: ast.Tree, node: *ast.Node, source_index: usize) bool { const first_token = tree.token_locs[node.firstToken()]; const last_token = tree.token_locs[node.lastToken()]; return source_index >= first_token.start and source_index <= last_token.end; } -pub fn getImportStr(tree: *ast.Tree, source_index: usize) ?[]const u8 { +pub fn getImportStr(tree: ast.Tree, source_index: usize) ?[]const u8 { var node = &tree.root_node.base; var child_idx: usize = 0; @@ -1269,8 +1289,8 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. while (true) { const tok = tokenizer.next(); // Early exits. - switch (tok.id) { - .Invalid, .Invalid_ampersands => { + switch (tok.tag) { + .invalid, .invalid_ampersands => { // Single '@' do not return a builtin token so we check this on our own. 
if (line[doc_position.line_index - 1] == '@') { return PositionContext{ @@ -1282,16 +1302,16 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. } return .other; }, - .LineComment, .DocComment, .ContainerDocComment => return .comment, - .Eof => break, + .line_comment, .doc_comment, .container_doc_comment => return .comment, + .eof => break, else => {}, } // State changes var curr_ctx = try peek(&stack); - switch (tok.id) { - .StringLiteral, .MultilineStringLiteralLine => curr_ctx.ctx = .{ .string_literal = tok.loc }, - .Identifier => switch (curr_ctx.ctx) { + switch (tok.tag) { + .string_literal, .multiline_string_literal_line => curr_ctx.ctx = .{ .string_literal = tok.loc }, + .identifier => switch (curr_ctx.ctx) { .empty, .pre_label => curr_ctx.ctx = .{ .var_access = tok.loc }, .label => |filled| if (!filled) { curr_ctx.ctx = .{ .label = true }; @@ -1300,11 +1320,11 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. }, else => {}, }, - .Builtin => switch (curr_ctx.ctx) { + .builtin => switch (curr_ctx.ctx) { .empty, .pre_label => curr_ctx.ctx = .{ .builtin = tok.loc }, else => {}, }, - .Period, .PeriodAsterisk => switch (curr_ctx.ctx) { + .period, .period_asterisk => switch (curr_ctx.ctx) { .empty, .pre_label => curr_ctx.ctx = .enum_literal, .enum_literal => curr_ctx.ctx = .empty, .field_access => {}, @@ -1314,31 +1334,31 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. 
.field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok), }, }, - .Keyword_break, .Keyword_continue => curr_ctx.ctx = .pre_label, - .Colon => if (curr_ctx.ctx == .pre_label) { + .keyword_break, .keyword_continue => curr_ctx.ctx = .pre_label, + .colon => if (curr_ctx.ctx == .pre_label) { curr_ctx.ctx = .{ .label = false }; } else { curr_ctx.ctx = .empty; }, - .QuestionMark => switch (curr_ctx.ctx) { + .question_mark => switch (curr_ctx.ctx) { .field_access => {}, else => curr_ctx.ctx = .empty, }, - .LParen => try stack.append(.{ .ctx = .empty, .stack_id = .Paren }), - .LBracket => try stack.append(.{ .ctx = .empty, .stack_id = .Bracket }), - .RParen => { + .l_paren => try stack.append(.{ .ctx = .empty, .stack_id = .Paren }), + .l_bracket => try stack.append(.{ .ctx = .empty, .stack_id = .Bracket }), + .r_paren => { _ = stack.pop(); if (curr_ctx.stack_id != .Paren) { (try peek(&stack)).ctx = .empty; } }, - .RBracket => { + .r_bracket => { _ = stack.pop(); if (curr_ctx.stack_id != .Bracket) { (try peek(&stack)).ctx = .empty; } }, - .Keyword_error => curr_ctx.ctx = .global_error_set, + .keyword_error => curr_ctx.ctx = .global_error_set, else => curr_ctx.ctx = .empty, } @@ -1356,8 +1376,8 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. 
}; } -fn addOutlineNodes(allocator: *std.mem.Allocator, tree: *ast.Tree, child: *ast.Node, context: *GetDocumentSymbolsContext) anyerror!void { - switch (child.tag) { +fn addOutlineNodes(allocator: *std.mem.Allocator, tree: ast.Tree, parent: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void { + switch (tree.nodes.items(.tag)[parent]) { .StringLiteral, .IntegerLiteral, .BuiltinCall, @@ -1471,13 +1491,13 @@ const GetDocumentSymbolsContext = struct { encoding: offsets.Encoding, }; -fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node, context: *GetDocumentSymbolsContext) anyerror!void { +fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, node: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void { const name = getDeclName(tree, node) orelse return; if (name.len == 0) return; - const start_loc = context.prev_loc.add(try offsets.tokenRelativeLocation(tree, context.prev_loc.offset, node.firstToken(), context.encoding)); - const end_loc = start_loc.add(try offsets.tokenRelativeLocation(tree, start_loc.offset, node.lastToken(), context.encoding)); + const start_loc = context.prev_loc.add(try offsets.tokenRelativeLocation(tree, context.prev_loc.offset, tree.firstToken(node), context.encoding)); + const end_loc = start_loc.add(try offsets.tokenRelativeLocation(tree, start_loc.offset, tree.lastToken(node), context.encoding)); context.prev_loc = end_loc; const range = types.Range{ .start = .{ @@ -1490,12 +1510,24 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no }, }; + const tags = tree.nodes.items(.tag); (try context.symbols.addOne()).* = .{ .name = name, - .kind = switch (node.tag) { - .FnProto => .Function, - .VarDecl => .Variable, - .ContainerField => .Field, + .kind = switch (tags[node]) { + .fn_proto, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + => .Function, + .local_var_decl, + .global_var_decl, + .aligned_var_decl, + 
.simple_var_decl, + => .Variable, + .container_field, + .container_field_align, + .container_field_init, + => .Field, else => .Variable, }, .range = range, @@ -1511,43 +1543,45 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no }; var index: usize = 0; - while (node.iterate(index)) |child| : (index += 1) { - try addOutlineNodes(allocator, tree, child, &child_context); - } + try addOutlineNodes(allocator, tree, node, &child_context); + + // while (node.iterate(index)) |child| : (index += 1) { + // try addOutlineNodes(allocator, tree, child, &child_context); + // } break :ch children.items; }, }; } -pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: *ast.Tree, encoding: offsets.Encoding) ![]types.DocumentSymbol { - var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.root_node.decls_len); +pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: ast.Tree, encoding: offsets.Encoding) ![]types.DocumentSymbol { + var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.rootDecls().len); var context = GetDocumentSymbolsContext{ .symbols = &symbols, .encoding = encoding, }; - for (tree.root_node.decls()) |node| { - try getDocumentSymbolsInternal(allocator, tree, node, &context); + for (tree.rootDecls()) |idx| { + try getDocumentSymbolsInternal(allocator, tree, idx, &context); } return symbols.items; } pub const Declaration = union(enum) { - ast_node: *ast.Node, - param_decl: *ast.Node.FnProto.ParamDecl, + ast_node: ast.Node, + param_decl: ast.full.FnProto.Param, pointer_payload: struct { - node: *ast.Node.PointerPayload, + node: ast.full.PtrType, condition: *ast.Node, }, array_payload: struct { identifier: *ast.Node, - array_expr: *ast.Node, + array_expr: ast.full.ArrayType, }, switch_payload: struct { - node: *ast.Node.PointerPayload, + node: ast.full.PtrType, switch_expr: *ast.Node, items: []const *ast.Node, }, @@ -1723,18 +1757,18 @@ fn 
iterateSymbolsContainerInternal( try callback(context, decl); } - for (container_scope.uses) |use| { - if (handle != orig_handle and use.visib_token == null) continue; - if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - try use_trail.append(use); + // for (container_scope.uses) |use| { + // if (handle != orig_handle and use.visib_token == null) continue; + // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; + // try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - const use_expr_node = switch (use_expr.type.data) { - .other => |n| n, - else => continue, - }; - try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, orig_handle, callback, context, false, use_trail); - } + // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; + // const use_expr_node = switch (use_expr.type.data) { + // .other => |n| n, + // else => continue, + // }; + // try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, orig_handle, callback, context, false, use_trail); + // } } } @@ -1790,17 +1824,17 @@ fn iterateSymbolsGlobalInternal( try callback(context, DeclWithHandle{ .decl = &entry.value, .handle = handle }); } - for (scope.uses) |use| { - if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - try use_trail.append(use); + // for (scope.uses) |use| { + // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; + // try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - const use_expr_node = switch (use_expr.type.data) { - .other => |n| n, - else => continue, - }; - try iterateSymbolsContainerInternal(store, arena, .{ .node = 
use_expr_node, .handle = use_expr.handle }, handle, callback, context, false, use_trail); - } + // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; + // const use_expr_node = switch (use_expr.type.data) { + // .other => |n| n, + // else => continue, + // }; + // try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, handle, callback, context, false, use_trail); + // } } if (scope.range.start >= source_index) return; @@ -1838,27 +1872,27 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty fn resolveUse( store: *DocumentStore, arena: *std.heap.ArenaAllocator, - uses: []const *ast.Node.Use, + // uses: []const *ast.Node.Use, symbol: []const u8, handle: *DocumentStore.Handle, use_trail: *std.ArrayList(*ast.Node.Use), ) error{OutOfMemory}!?DeclWithHandle { - for (uses) |use| { - if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - try use_trail.append(use); + // for (uses) |use| { + // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; + // try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - const use_expr_node = switch (use_expr.type.data) { - .other => |n| n, - else => continue, - }; - if (try lookupSymbolContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, symbol, false, use_trail)) |candidate| { - if (candidate.handle != handle and !candidate.isPublic()) { - continue; - } - return candidate; - } - } + // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; + // const use_expr_node = switch (use_expr.type.data) { + // .other => |n| n, + // else => continue, + // }; + // if (try lookupSymbolContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, 
symbol, false, use_trail)) |candidate| { + // if (candidate.handle != handle and !candidate.isPublic()) { + // continue; + // } + // return candidate; + // } + // } return null; } @@ -1909,7 +1943,7 @@ fn lookupSymbolGlobalInternal( }; } - if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + // if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; } if (scope.range.start > source_index) return null; @@ -1962,7 +1996,7 @@ fn lookupSymbolContainerInternal( return DeclWithHandle{ .decl = &candidate.value, .handle = handle }; } - if (try resolveUse(store, arena, container_scope.uses, symbol, handle, use_trail)) |result| return result; + // if (try resolveUse(store, arena, container_scope.uses, symbol, handle, use_trail)) |result| return result; return null; } @@ -1998,7 +2032,8 @@ pub const DocumentScope = struct { scope.data, scope.range.start, scope.range.end, - scope.uses.len, + {}, + // scope.uses.len, }); var decl_it = scope.decls.iterator(); @@ -2014,7 +2049,7 @@ pub const DocumentScope = struct { pub fn deinit(self: DocumentScope, allocator: *std.mem.Allocator) void { for (self.scopes) |*scope| { scope.decls.deinit(); - allocator.free(scope.uses); + // allocator.free(scope.uses); allocator.free(scope.tests); } allocator.free(self.scopes); @@ -2036,12 +2071,12 @@ pub const Scope = struct { range: SourceRange, decls: std.StringHashMap(Declaration), tests: []const *ast.Node, - uses: []const *ast.Node.Use, + // uses: []const *ast.Node.Data, data: Data, }; -pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: *ast.Tree) !DocumentScope { +pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !DocumentScope { var scopes = std.ArrayListUnmanaged(Scope){}; var error_completions = std.ArrayListUnmanaged(types.CompletionItem){}; var enum_completions = std.ArrayListUnmanaged(types.CompletionItem){}; @@ -2054,7 +2089,7 @@ pub fn makeDocumentScope(allocator: 
*std.mem.Allocator, tree: *ast.Tree) !Docume enum_completions.deinit(allocator); } - try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree, &tree.root_node.base); + try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree); return DocumentScope{ .scopes = scopes.toOwnedSlice(allocator), .error_completions = error_completions.toOwnedSlice(allocator), @@ -2062,10 +2097,11 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: *ast.Tree) !Docume }; } -fn nodeSourceRange(tree: *ast.Tree, node: *ast.Node) SourceRange { +fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { + const loc = tree.tokenLocation(0, tree.firstToken(node)); return SourceRange{ - .start = tree.token_locs[node.firstToken()].start, - .end = tree.token_locs[node.lastToken()].end, + .start = loc.line_start, + .end = loc.line_end, }; } @@ -2076,39 +2112,42 @@ fn makeScopeInternal( scopes: *std.ArrayListUnmanaged(Scope), error_completions: *std.ArrayListUnmanaged(types.CompletionItem), enum_completions: *std.ArrayListUnmanaged(types.CompletionItem), - tree: *ast.Tree, - node: *ast.Node, + tree: ast.Tree, + node_idx: ast.Node.Index, ) error{OutOfMemory}!void { - if (node.tag == .Root or node.tag == .ContainerDecl or node.tag == .ErrorSetDecl) { - const ast_decls = switch (node.tag) { - .ContainerDecl => node.castTag(.ContainerDecl).?.fieldsAndDeclsConst(), - .Root => node.castTag(.Root).?.declsConst(), - .ErrorSetDecl => node.castTag(.ErrorSetDecl).?.declsConst(), + const nodes = tree.nodes.items(.tag); + const node = nodes[node_idx]; + if (node == .root or node == .container_decl or node == .error_set_decl) { + const ast_decls = switch (node) { + .container_decl => tree.containerDecl(node_idx).ast.members, + .root => tree.rootDecls(), + // @TODO: Fix error set declarations + // .error_set_decl => node.castTag(.ErrorSetDecl).?.declsConst(), else => unreachable, }; (try scopes.addOne(allocator)).* = .{ .range = 
nodeSourceRange(tree, node), .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .{ .container = node }, }; const scope_idx = scopes.items.len - 1; - var uses = std.ArrayList(*ast.Node.Use).init(allocator); + // var uses = std.ArrayList(*ast.Node.Use).init(allocator); var tests = std.ArrayList(*ast.Node).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); - uses.deinit(); + // uses.deinit(); tests.deinit(); } for (ast_decls) |decl| { - if (decl.castTag(.Use)) |use| { - try uses.append(use); - continue; - } + // if (decl.castTag(.Use)) |use| { + // try uses.append(use); + // continue; + // } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, decl); const name = getDeclName(tree, decl) orelse continue; @@ -2159,7 +2198,7 @@ fn makeScopeInternal( } scopes.items[scope_idx].tests = tests.toOwnedSlice(); - scopes.items[scope_idx].uses = uses.toOwnedSlice(); + // scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; } @@ -2170,7 +2209,7 @@ fn makeScopeInternal( (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node), .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .{ .function = node }, }; @@ -2204,7 +2243,7 @@ fn makeScopeInternal( .end = tree.token_locs[block.rbrace].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2217,24 +2256,24 @@ fn makeScopeInternal( (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node), .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .{ .block = node }, }; var scope_idx = scopes.items.len - 1; - var uses = 
std.ArrayList(*ast.Node.Use).init(allocator); + // var uses = std.ArrayList(*ast.Node.Use).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); - uses.deinit(); + // uses.deinit(); } var child_idx: usize = 0; while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - if (child_node.castTag(.Use)) |use| { - try uses.append(use); - continue; - } + // if (child_node.castTag(.Use)) |use| { + // try uses.append(use); + // continue; + // } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); if (child_node.castTag(.VarDecl)) |var_decl| { @@ -2245,7 +2284,7 @@ fn makeScopeInternal( } } - scopes.items[scope_idx].uses = uses.toOwnedSlice(); + // scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, .Block => { @@ -2254,24 +2293,24 @@ fn makeScopeInternal( (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node), .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .{ .block = node }, }; var scope_idx = scopes.items.len - 1; - var uses = std.ArrayList(*ast.Node.Use).init(allocator); + // var uses = std.ArrayList(*ast.Node.Use).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); - uses.deinit(); + // uses.deinit(); } var child_idx: usize = 0; while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - if (child_node.castTag(.Use)) |use| { - try uses.append(use); - continue; - } + // if (child_node.castTag(.Use)) |use| { + // try uses.append(use); + // continue; + // } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); if (child_node.castTag(.VarDecl)) |var_decl| { @@ -2282,7 +2321,7 @@ fn makeScopeInternal( } } - scopes.items[scope_idx].uses = uses.toOwnedSlice(); + // scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, .Comptime => { @@ -2300,7 +2339,7 @@ fn makeScopeInternal( .end = 
tree.token_locs[if_node.body.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2328,7 +2367,7 @@ fn makeScopeInternal( .end = tree.token_locs[else_node.body.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2353,7 +2392,7 @@ fn makeScopeInternal( .end = tree.token_locs[while_node.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2373,7 +2412,7 @@ fn makeScopeInternal( .end = tree.token_locs[while_node.body.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2401,7 +2440,7 @@ fn makeScopeInternal( .end = tree.token_locs[else_node.body.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2426,7 +2465,7 @@ fn makeScopeInternal( .end = tree.token_locs[for_node.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2448,7 +2487,7 @@ fn makeScopeInternal( .end = tree.token_locs[for_node.body.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; @@ -2489,7 +2528,7 @@ fn makeScopeInternal( .end = tree.token_locs[case_node.expr.lastToken()].end, }, .decls = std.StringHashMap(Declaration).init(allocator), - .uses = 
&[0]*ast.Node.Use{}, + // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, .data = .other, }; diff --git a/src/document_store.zig b/src/document_store.zig index fccdbed..de817d6 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -22,7 +22,7 @@ pub const Handle = struct { document: types.TextDocument, count: usize, import_uris: std.ArrayList([]const u8), - tree: *std.zig.ast.Tree, + tree: std.zig.ast.Tree, document_scope: analysis.DocumentScope, associated_build_file: ?*BuildFile, @@ -143,8 +143,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand var handle = try self.allocator.create(Handle); errdefer self.allocator.destroy(handle); - const tree = try std.zig.parse(self.allocator, text); - errdefer tree.deinit(); + var tree = try std.zig.parse(self.allocator, text); + errdefer tree.deinit(self.allocator); const document_scope = try analysis.makeDocumentScope(self.allocator, tree); errdefer document_scope.deinit(self.allocator); @@ -326,7 +326,7 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void { self.decrementBuildFileRefs(build_file); } - entry.value.tree.deinit(); + entry.value.tree.deinit(self.allocator); self.allocator.free(entry.value.document.mem); for (entry.value.import_uris.items) |import_uri| { @@ -354,7 +354,7 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle { // Check if the document text is now sane, move it to sane_text if so. 
fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void { log.debug("New text for document {s}", .{handle.uri()}); - handle.tree.deinit(); + handle.tree.deinit(self.allocator); handle.tree = try std.zig.parse(self.allocator, handle.document.text); handle.document_scope.deinit(self.allocator); @@ -613,7 +613,7 @@ pub fn deinit(self: *DocumentStore) void { var entry_iterator = self.handles.iterator(); while (entry_iterator.next()) |entry| { entry.value.document_scope.deinit(self.allocator); - entry.value.tree.deinit(); + entry.value.tree.deinit(self.allocator); self.allocator.free(entry.value.document.mem); for (entry.value.import_uris.items) |uri| { diff --git a/src/main.zig b/src/main.zig index fed8140..350f472 100644 --- a/src/main.zig +++ b/src/main.zig @@ -199,8 +199,8 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han var diagnostics = std.ArrayList(types.Diagnostic).init(&arena.allocator); - for (tree.errors) |*err| { - const loc = tree.tokenLocation(0, err.loc()); + for (tree.errors) |err| { + const loc = tree.tokenLocation(0, err.token); var mem_buffer: [256]u8 = undefined; var fbs = std.io.fixedBufferStream(&mem_buffer); @@ -209,7 +209,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han try diagnostics.append(.{ .range = astLocationToRange(loc), .severity = .Error, - .code = @tagName(err.*), + .code = @tagName(err.tag), .source = "zls", .message = try std.mem.dupe(&arena.allocator, u8, fbs.getWritten()), // .relatedInformation = undefined @@ -217,16 +217,17 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han } if (tree.errors.len == 0) { - for (tree.root_node.decls()) |decl| { - switch (decl.tag) { - .FnProto => blk: { - const func = decl.cast(std.zig.ast.Node.FnProto).?; - const is_extern = func.getExternExportInlineToken() != null; + for (tree.rootDecls()) |decl_idx| { + const decl = tree.nodes.items(.tag)[decl_idx]; + 
switch (decl) { + .fn_proto => blk: { + const func = tree.fnProto(decl_idx); + const is_extern = func.extern_export_token != null; if (is_extern) break :blk; if (config.warn_style) { - if (func.getNameToken()) |name_token| { + if (func.name_token) |name_token| { const loc = tree.tokenLocation(0, name_token); const is_type_function = analysis.isTypeFunction(tree, func); @@ -397,12 +398,10 @@ fn nodeToCompletion( }, else => break :param_check false, } - } else - false; + } else false; break :blk try analysis.getFunctionSnippet(&arena.allocator, handle.tree, func, skip_self_param); - } else - null; + } else null; const is_type_function = analysis.isTypeFunction(handle.tree, func); diff --git a/src/offsets.zig b/src/offsets.zig index 51fd6ac..fac44ee 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -70,15 +70,15 @@ pub const TokenLocation = struct { } }; -pub fn tokenRelativeLocation(tree: *std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation { - const token_loc = tree.token_locs[token]; +pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation { + const token_loc = tree.tokenLocation(@truncate(u32, start_index), token); var loc = TokenLocation{ .line = 0, .column = 0, .offset = 0, }; - const token_start = token_loc.start; + const token_start = token_loc.line_start; const source = tree.source[start_index..]; var i: usize = 0; while (i + start_index < token_start) { @@ -108,10 +108,10 @@ pub fn tokenRelativeLocation(tree: *std.zig.ast.Tree, start_index: usize, token: } /// Asserts the token is comprised of valid utf8 -pub fn tokenLength(tree: *std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize { - const token_loc = tree.token_locs[token]; +pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize { + const token_loc = tree.tokenLocation(0, token); if (encoding == .utf8) 
- return token_loc.end - token_loc.start; + return token_loc.line_end - token_loc.line_start; var i: usize = token_loc.start; var utf16_len: usize = 0; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 85f6bef..2479c8e 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -65,11 +65,11 @@ const Builder = struct { fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { const start_idx = if (self.current_token) |current_token| - self.handle.tree.token_locs[current_token].start + self.handle.tree.tokenLocation[current_token].line_start else 0; - if (start_idx > self.handle.tree.token_locs[token].start) + if (start_idx > self.handle.tree.tokenLocation[token].line_start) return; const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return; From 72605c75495c4c1b6b02875177961f05ba42338f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sat, 27 Feb 2021 16:38:06 +0100 Subject: [PATCH 02/36] A lot of fixes --- src/analysis.zig | 268 ++++++---- src/semantic_tokens.zig | 1082 ++++++++++++++++++++------------------- 2 files changed, 724 insertions(+), 626 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 60e9c3d..d93538e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -5,17 +5,33 @@ const types = @import("types.zig"); const offsets = @import("offsets.zig"); const log = std.log.scoped(.analysis); -/// Get a declaration's doc comment node -pub fn getDocCommentNode(tree: ast.Tree, node: *ast.Node) ?*ast.Node.DocComment { - if (node.castTag(.FnProto)) |func| { - return func.getDocComments(); - } else if (node.castTag(.VarDecl)) |var_decl| { - return var_decl.getDocComments(); - } else if (node.castTag(.ContainerField)) |field| { - return field.doc_comments; - } else if (node.castTag(.ErrorTag)) |tag| { - return tag.doc_comments; +/// Get a declaration's doc comment token index +pub fn getDocCommentTokenIndex(tree: 
ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { + const tags = tree.nodes.items(.tag); + const tokens = tree.tokens.items(.tag); + const current = tree.nodes.items(.main_token)[node]; + + switch (tags[node]) { + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi => { + var idx = current - 1; + idx -= @boolToInt(tokens[idx] == .keyword_extern); + idx -= @boolToInt(tokens[idx] == .keyword_pub); + return if (tokens[idx] == .doc_comment) idx else null; + }, + .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { + return if (tokens[current - 1] == .doc_comment) current - 1 else null; + }, + .container_field, .container_field_init, .container_field_align => { + var idx = current - 2; // skip '.' + return if (tokens[idx] == .doc_comment) idx else null; + }, + else => return null, } + + // @TODO: Implement doc comments for tags + // } else if (node.castTag(.ErrorTag)) |tag| { + // return tag.doc_comments; + // } return null; } @@ -28,11 +44,11 @@ pub fn getDocCommentNode(tree: ast.Tree, node: *ast.Node) ?*ast.Node.DocComment pub fn getDocComments( allocator: *std.mem.Allocator, tree: ast.Tree, - node: *ast.Node, + node: ast.Node.Index, format: types.MarkupContent.Kind, ) !?[]const u8 { - if (getDocCommentNode(tree, node)) |doc_comment_node| { - return try collectDocComments(allocator, tree, doc_comment_node, format); + if (getDocCommentTokenIndex(tree, node)) |doc_comment_index| { + return try collectDocComments(allocator, tree, doc_comment_index, format); } return null; } @@ -40,17 +56,19 @@ pub fn getDocComments( pub fn collectDocComments( allocator: *std.mem.Allocator, tree: ast.Tree, - doc_comments: *ast.Node.DocComment, + doc_comments: ast.TokenIndex, format: types.MarkupContent.Kind, ) ![]const u8 { var lines = std.ArrayList([]const u8).init(allocator); defer lines.deinit(); - var curr_line_tok = doc_comments.first_line; + const token_tags = tree.tokens.items(.tag); + const loc = tree.tokenLocation(0, doc_comments); + + var 
curr_line_tok = doc_comments; while (true) : (curr_line_tok += 1) { - switch (tree.token_ids[curr_line_tok]) { - .LineComment => continue, - .DocComment, .ContainerDocComment => { + switch (token_tags[curr_line_tok]) { + .doc_comment, .container_doc_comment => { try lines.append(std.mem.trim(u8, tree.tokenSlice(curr_line_tok)[3..], &std.ascii.spaces)); }, else => break, @@ -61,20 +79,15 @@ pub fn collectDocComments( } /// Gets a function signature (keywords, name, return value) -pub fn getFunctionSignature(tree: ast.Tree, func: *ast.Node.FnProto) []const u8 { - const start = tree.token_locs[func.firstToken()].start; - const end = tree.token_locs[ - switch (func.return_type) { - .Explicit, .InferErrorSet => |node| node.lastToken(), - .Invalid => |r_paren| r_paren, - } - ].end; +pub fn getFunctionSignature(tree: ast.Tree, func: *ast.full.FnProto) []const u8 { + const start = tree.tokenLocation(func.ast.fn_token).line_start; + const end = tree.tokenLocation(func.ast.return_type).line_end; return tree.source[start..end]; } /// Gets a function snippet insert text -pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.Node.FnProto, skip_self_param: bool) ![]const u8 { - const name_tok = func.getNameToken() orelse unreachable; +pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.full.FnProto, skip_self_param: bool) ![]const u8 { + const name_index = func.name_token orelse unreachable; var buffer = std.ArrayList(u8).init(allocator); try buffer.ensureCapacity(128); @@ -84,18 +97,20 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: * var buf_stream = buffer.writer(); - for (func.paramsConst()) |param, param_num| { - if (skip_self_param and param_num == 0) continue; - if (param_num != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); + const token_tags = tree.tokens.items(.tag); - try buf_stream.print("{}:", .{param_num + 1}); + var it = 
func.iterate(tree); + while (it.next()) |param| { + if (skip_self_param and it.param_i == 0) continue; + if (it.param_i != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); - if (param.comptime_token) |_| { - try buffer.appendSlice("comptime "); - } + try buf_stream.print("{d}", .{it.param_i + 1}); - if (param.noalias_token) |_| { - try buffer.appendSlice("noalias "); + if (param.comptime_noalias) |token_index| { + if (token_tags[token_index] == .keyword_comptime) + try buffer.appendSlice("comptime ") + else + try buffer.appendSlice("noalias "); } if (param.name_token) |name_token| { @@ -103,23 +118,23 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: * try buffer.appendSlice(": "); } - switch (param.param_type) { - .any_type => try buffer.appendSlice("anytype"), - .type_expr => |type_expr| { - var curr_tok = type_expr.firstToken(); - var end_tok = type_expr.lastToken(); - while (curr_tok <= end_tok) : (curr_tok += 1) { - const id = tree.token_ids[curr_tok]; - const is_comma = id == .Comma; + if (param.anytype_ellipsis3) |token_index| { + if (token_tags[token_index] == .keyword_anytype) + try buffer.appendSlice("anytype") + else + try buffer.appendSlice("..."); + } else { + var curr_token = param.type_expr; + var end_token = tree.lastToken(func.ast.params[it.param_i]); + while (curr_token <= end_token) : (curr_token += 1) { + const tag = token_tags[curr_token]; + const is_comma = tag == .comma; - if (curr_tok == end_tok and is_comma) continue; - - try buffer.appendSlice(tree.tokenSlice(curr_tok)); - if (is_comma or id == .Keyword_const) try buffer.append(' '); - } - }, + if (curr_token == end_token and is_comma) continue; + try buffer.appendSlice(tree.tokenSlice(curr_token)); + if (is_comma or tag == .Keyword_const) try buffer.append(' '); + } } - try buffer.append('}'); } try buffer.append(')'); @@ -128,16 +143,16 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, 
func: * } /// Gets a function signature (keywords, name, return value) -pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 { - const start = tree.token_locs[var_decl.firstToken()].start; - const end = tree.token_locs[var_decl.semicolon_token].start; +pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.full.VarDecl) []const u8 { + const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start; + const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end; return tree.source[start..end]; } // analysis.getContainerFieldSignature(handle.tree, field) -pub fn getContainerFieldSignature(tree: ast.Tree, field: *ast.Node.ContainerField) []const u8 { - const start = tree.token_locs[field.firstToken()].start; - const end = tree.token_locs[field.lastToken()].end; +pub fn getContainerFieldSignature(tree: ast.Tree, field: *ast.full.ContainerField) []const u8 { + const start = tree.tokenLocation(0, field.ast.name_token).line_start; + const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(field.ast.value_expr)).line_start; return tree.source[start..end]; } @@ -150,7 +165,7 @@ fn typeIsType(tree: ast.Tree, node: ast.Node.Index) bool { } pub fn isTypeFunction(tree: ast.Tree, func: ast.full.FnProto) bool { - return typeIsType(func.ast.return_type); + return typeIsType(tree, func.ast.return_type); } pub fn isGenericFunction(tree: ast.Tree, func: *ast.full.FnProto) bool { @@ -1570,7 +1585,9 @@ pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: ast.Tree, encodin } pub const Declaration = union(enum) { - ast_node: ast.Node, + /// Index of the ast node + ast_node: ast.Node.Index, + /// Function parameter param_decl: ast.full.FnProto.Param, pointer_payload: struct { node: ast.full.PtrType, @@ -2062,15 +2079,15 @@ pub const DocumentScope = struct { pub const Scope = struct { pub const Data = union(enum) { - container: *ast.Node, // .id is ContainerDecl or Root or ErrorSetDecl 
- function: *ast.Node, // .id is FnProto - block: *ast.Node, // .id is Block + container: ast.Node.Index, // .tag is ContainerDecl or Root or ErrorSetDecl + function: ast.Node.Index, // .tag is FnProto + block: ast.Node.Index, // .tag is Block other, }; range: SourceRange, decls: std.StringHashMap(Declaration), - tests: []const *ast.Node, + tests: []const ast.Node.Index, // uses: []const *ast.Node.Data, data: Data, @@ -2088,8 +2105,8 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen for (enum_completions.items) |item| if (item.documentation) |doc| allocator.free(doc.value); enum_completions.deinit(allocator); } - - try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree); + // pass root node index ('0') + try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree, 0); return DocumentScope{ .scopes = scopes.toOwnedSlice(allocator), .error_completions = error_completions.toOwnedSlice(allocator), @@ -2105,6 +2122,51 @@ fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { }; } +fn isContainer(tag: ast.Node.Tag) bool { + return switch (tag) { + .container_decl, + .container_decl_trailing, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_two, + .container_decl_two_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .root, + .error_set_decl, + => true, + else => false, + }; +} + +/// Returns the member indices of a given declaration container. 
+/// Asserts given `tag` is a container node +fn declMembers(tree: ast.Tree, tag: ast.Node.Tag) []ast.Node.index { + std.debug.assert(isContainer(tag)); + return switch (tag) { + .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members, + .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members, + .container_decl_two, .container_decl_two_trailing => blk: { + var buffer: [2]ast.Node.Index = undefined; + break :blk tree.containerDeclTwo(&buffer, node_idx).ast.members; + }, + .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members, + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members, + .tagged_union_two, .tagged_union_two_trailing => blk: { + var buffer: [2]ast.Node.Index = undefined; + break :blk tree.taggedUnionTwo(&buffer, node_idx).ast.members; + }, + .root => tree.rootDecls(), + // @TODO: Fix error set declarations + .error_set_decl => &[_]ast.Node.Index{}, + else => unreachable, + }; +} + // TODO Possibly collect all imports to diff them on changes // as well fn makeScopeInternal( @@ -2115,27 +2177,23 @@ fn makeScopeInternal( tree: ast.Tree, node_idx: ast.Node.Index, ) error{OutOfMemory}!void { - const nodes = tree.nodes.items(.tag); - const node = nodes[node_idx]; - if (node == .root or node == .container_decl or node == .error_set_decl) { - const ast_decls = switch (node) { - .container_decl => tree.containerDecl(node_idx).ast.members, - .root => tree.rootDecls(), - // @TODO: Fix error set declarations - // .error_set_decl => node.castTag(.ErrorSetDecl).?.declsConst(), - else => unreachable, - }; + const tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const node = tags[node_idx]; + + if (isContainer(node)) { + const ast_decls = declMembers(tree, node); (try scopes.addOne(allocator)).* = .{ - .range = nodeSourceRange(tree, node), + .range = nodeSourceRange(tree, node_idx), .decls = 
std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, .tests = &[0]*ast.Node{}, - .data = .{ .container = node }, + .data = .{ .container = node_idx }, }; const scope_idx = scopes.items.len - 1; // var uses = std.ArrayList(*ast.Node.Use).init(allocator); - var tests = std.ArrayList(*ast.Node).init(allocator); + var tests = std.ArrayList(ast.Node.Index).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); @@ -2144,6 +2202,7 @@ fn makeScopeInternal( } for (ast_decls) |decl| { + // @TODO: Implement using namespace // if (decl.castTag(.Use)) |use| { // try uses.append(use); // continue; @@ -2151,12 +2210,13 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, decl); const name = getDeclName(tree, decl) orelse continue; - if (decl.tag == .TestDecl) { - try tests.append(decl); - continue; - } + // @TODO: implement tests + // if (decl.tag == .TestDecl) { + // try tests.append(decl); + // continue; + // } - if (node.tag == .ErrorSetDecl) { + if (tags[decl] == .error_set_decl) { (try error_completions.addOne(allocator)).* = .{ .label = name, .kind = .Constant, @@ -2167,15 +2227,39 @@ fn makeScopeInternal( }; } - if (decl.castTag(.ContainerField)) |field| { - const empty_field = field.type_expr == null and field.value_expr == null; - if (empty_field and node.tag == .Root) { + const container_field: ?ast.full.ContainerField = switch (decl) { + .container_field => tree.containerField(decl), + .container_field_align => tree.containerFieldAlign(decl), + .container_field_init => tree.containerFieldInit(decl), + else => null, + }; + + if (container_field) |field| { + const empty_field = field.type_expr == 0 and field.value_expr == 0; + if (empty_field and node == .root) { continue; } - if (node.castTag(.ContainerDecl)) |container| { - const kind = tree.token_ids[container.kind_token]; - if (empty_field and (kind == .Keyword_struct or (kind == .Keyword_union and container.init_arg_expr 
== .None))) { + // @TODO: We can probably just use node_idx directly instead of first transforming to container + const container_decl: ?ast.full.ContainerDecl = switch (node) { + .container_decl, .container_decl_trailing => tree.containerDecl(node_idx), + .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx), + .container_decl_two, .container_decl_two_trailing => blk: { + var buffer: [2]ast.Node.Index = undefined; + break :blk tree.containerDeclTwo(&buffer, node_idx); + }, + .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx), + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx), + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => blk: { + var buffer: [2]ast.Node.Index = undefined; + break :blk tree.taggedUnionTwo(&buffer, node_idx); + }, + else => null, + }; + + if (container_decl) |container| { + const kind = token_tags[container.ast.main_token]; + if (empty_field and (kind == .keyword_struct or (kind == .keyword_union and container.ast.arg == 0))) { continue; } @@ -2183,7 +2267,7 @@ fn makeScopeInternal( (try enum_completions.addOne(allocator)).* = .{ .label = name, .kind = .Constant, - .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| + .documentation = if (try getDocComments(allocator, tree, node_idx, .Markdown)) |docs| .{ .kind = .Markdown, .value = docs } else null, @@ -2202,7 +2286,7 @@ fn makeScopeInternal( return; } - switch (node.tag) { + switch (node) { .FnProto => { const func = node.castTag(.FnProto).?; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 2479c8e..68c16df 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -254,568 +254,582 @@ fn writeContainerField( } } +// @TODO: Fix semantic tokens // TODO This is very slow and does a lot of extra work, improve in the future. 
-fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?*ast.Node) error{OutOfMemory}!void { - if (maybe_node == null) return; - const node = maybe_node.?; - const handle = builder.handle; +// fn writeNodeTokens( +// builder: *Builder, +// arena: *std.heap.ArenaAllocator, +// store: *DocumentStore, +// maybe_node: ?ast.Node.Index, +// tree: ast.Tree, +// ) error{OutOfMemory}!void { +// if (maybe_node == null) return; - const FrameSize = @sizeOf(@Frame(writeNodeTokens)); - var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); - defer arena.child_allocator.free(child_frame); +// const node_tags = tree.nodes.items(.tag); +// const token_tags = tree.tokens.items(.tag); +// const nodes_data = tree.nodes.items(.data); +// const main_tokens = tree.nodes.items(.main_token); - switch (node.tag) { - .Root, .Block, .LabeledBlock => { - const first_tok = if (node.castTag(.LabeledBlock)) |block_node| block: { - try writeToken(builder, block_node.label, .label); - break :block block_node.lbrace + 1; - } else if (node.castTag(.Block)) |block_node| - block_node.lbrace + 1 - else - 0; +// const node = maybe_node.?; +// const handle = builder.handle; - var gap_highlighter = GapHighlighter.init(builder, first_tok); - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child| : (child_idx += 1) { - try gap_highlighter.next(child); - if (child.cast(ast.Node.ContainerField)) |container_field| { - try writeContainerField(builder, arena, store, container_field, .field, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } +// const FrameSize = @sizeOf(@Frame(writeNodeTokens)); +// var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); +// defer arena.child_allocator.free(child_frame); - if (node.tag == .Root) { - try gap_highlighter.end(handle.tree.token_ids.len - 1); - } 
else { - try gap_highlighter.end(node.lastToken()); - } - }, - .VarDecl => { - const var_decl = node.cast(ast.Node.VarDecl).?; - if (var_decl.getDocComments()) |doc| try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, var_decl.getVisibToken(), .keyword); - try writeToken(builder, var_decl.getExternExportToken(), .keyword); - try writeToken(builder, var_decl.getThreadLocalToken(), .keyword); - try writeToken(builder, var_decl.getComptimeToken(), .keyword); - try writeToken(builder, var_decl.mut_token, .keyword); - if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, var_decl.name_token, .{ .declaration = true }); - } else { - try writeTokenMod(builder, var_decl.name_token, .variable, .{ .declaration = true }); - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getTypeNode() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getAlignNode() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getSectionNode() }); - try writeToken(builder, var_decl.getEqToken(), .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getInitNode() }); - }, - .Use => { - const use = node.cast(ast.Node.Use).?; - if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); - try writeToken(builder, use.visib_token, .keyword); - try writeToken(builder, use.use_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, use.expr }); - }, - .ErrorSetDecl => { - const error_set = node.cast(ast.Node.ErrorSetDecl).?; - try writeToken(builder, error_set.error_token, .keyword); - for (error_set.declsConst()) |decl| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl }); - }, - .ContainerDecl => { 
- const container_decl = node.cast(ast.Node.ContainerDecl).?; - try writeToken(builder, container_decl.layout_token, .keyword); - try writeToken(builder, container_decl.kind_token, .keyword); - switch (container_decl.init_arg_expr) { - .None => {}, - .Enum => |enum_expr| if (enum_expr) |expr| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, expr }) - else - try writeToken(builder, container_decl.kind_token + 2, .keyword), - .Type => |type_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_node }), - } +// switch (node_tags[node]) { +// .root, .block, .block_semicolon => |tag| { +// const first_tok = if (tag != block_semicolon) block: { +// const lbrace = main_tokens[node]; +// if (token_tags[lbrace - 1] == .colon and token_tags[lbrace - 2] == .identifier) +// try writeToken(builder, lbrace - 2, .label); - var gap_highlighter = GapHighlighter.init(builder, container_decl.lbrace_token + 1); - const field_token_type = fieldTokenType(container_decl, handle); - for (container_decl.fieldsAndDeclsConst()) |child| { - try gap_highlighter.next(child); - if (child.cast(ast.Node.ContainerField)) |container_field| { - try writeContainerField(builder, arena, store, container_field, field_token_type, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - try gap_highlighter.end(node.lastToken()); - }, - .ErrorTag => { - const error_tag = node.cast(ast.Node.ErrorTag).?; - if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, error_tag.firstToken(), .errorTag); - }, - .Identifier => { - if (analysis.isTypeIdent(handle.tree, node.firstToken())) { - return try writeToken(builder, node.firstToken(), .type); - } +// break :block lbrace + 1; +// } else 0; - if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), 
handle.tree.token_locs[node.firstToken()].start)) |child| { - if (child.decl.* == .param_decl) { - return try writeToken(builder, node.firstToken(), .parameter); - } - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { - try colorIdentifierBasedOnType(builder, decl_type, node.firstToken(), .{}); - } else { - try writeTokenMod(builder, node.firstToken(), .variable, .{}); - } - } - }, - .FnProto => { - const fn_proto = node.cast(ast.Node.FnProto).?; - if (fn_proto.getDocComments()) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, fn_proto.getVisibToken(), .keyword); - try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); - try writeToken(builder, fn_proto.fn_token, .keyword); +// var gap_highlighter = GapHighlighter.init(builder, first_tok); +// var child_idx: usize = 0; +// while (node.iterate(child_idx)) |child| : (child_idx += 1) { +// try gap_highlighter.next(child); +// if (child.cast(ast.Node.ContainerField)) |container_field| { +// try writeContainerField(builder, arena, store, container_field, .field, child_frame); +// } else { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); +// } +// } - const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) - .type - else - .function; +// if (node.tag == .Root) { +// try gap_highlighter.end(handle.tree.token_ids.len - 1); +// } else { +// try gap_highlighter.end(node.lastToken()); +// } +// }, +// .VarDecl => { +// const var_decl = node.cast(ast.Node.VarDecl).?; +// if (var_decl.getDocComments()) |doc| try writeDocComments(builder, handle.tree, doc); +// try writeToken(builder, var_decl.getVisibToken(), .keyword); +// try writeToken(builder, var_decl.getExternExportToken(), .keyword); 
+// try writeToken(builder, var_decl.getThreadLocalToken(), .keyword); +// try writeToken(builder, var_decl.getComptimeToken(), .keyword); +// try writeToken(builder, var_decl.mut_token, .keyword); +// if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { +// try colorIdentifierBasedOnType(builder, decl_type, var_decl.name_token, .{ .declaration = true }); +// } else { +// try writeTokenMod(builder, var_decl.name_token, .variable, .{ .declaration = true }); +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getTypeNode() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getAlignNode() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getSectionNode() }); +// try writeToken(builder, var_decl.getEqToken(), .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getInitNode() }); +// }, +// .Use => { +// const use = node.cast(ast.Node.Use).?; +// if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); +// try writeToken(builder, use.visib_token, .keyword); +// try writeToken(builder, use.use_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, use.expr }); +// }, +// .ErrorSetDecl => { +// const error_set = node.cast(ast.Node.ErrorSetDecl).?; +// try writeToken(builder, error_set.error_token, .keyword); +// for (error_set.declsConst()) |decl| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl }); +// }, +// .ContainerDecl => { +// const container_decl = node.cast(ast.Node.ContainerDecl).?; +// try writeToken(builder, container_decl.layout_token, .keyword); +// try writeToken(builder, container_decl.kind_token, .keyword); +// switch (container_decl.init_arg_expr) { +// .None => {}, +// .Enum => |enum_expr| if 
(enum_expr) |expr| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, expr }) +// else +// try writeToken(builder, container_decl.kind_token + 2, .keyword), +// .Type => |type_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_node }), +// } - const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) - TokenModifiers{ .generic = true } - else - TokenModifiers{}; +// var gap_highlighter = GapHighlighter.init(builder, container_decl.lbrace_token + 1); +// const field_token_type = fieldTokenType(container_decl, handle); +// for (container_decl.fieldsAndDeclsConst()) |child| { +// try gap_highlighter.next(child); +// if (child.cast(ast.Node.ContainerField)) |container_field| { +// try writeContainerField(builder, arena, store, container_field, field_token_type, child_frame); +// } else { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); +// } +// } +// try gap_highlighter.end(node.lastToken()); +// }, +// .ErrorTag => { +// const error_tag = node.cast(ast.Node.ErrorTag).?; +// if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, error_tag.firstToken(), .errorTag); +// }, +// .Identifier => { +// if (analysis.isTypeIdent(handle.tree, node.firstToken())) { +// return try writeToken(builder, node.firstToken(), .type); +// } - try writeTokenMod(builder, fn_proto.getNameToken(), func_name_tok_type, tok_mod); +// if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { +// if (child.decl.* == .param_decl) { +// return try writeToken(builder, node.firstToken(), .parameter); +// } +// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); +// if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { +// try colorIdentifierBasedOnType(builder, decl_type, 
node.firstToken(), .{}); +// } else { +// try writeTokenMod(builder, node.firstToken(), .variable, .{}); +// } +// } +// }, +// .FnProto => { +// const fn_proto = node.cast(ast.Node.FnProto).?; +// if (fn_proto.getDocComments()) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, fn_proto.getVisibToken(), .keyword); +// try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); +// try writeToken(builder, fn_proto.fn_token, .keyword); - for (fn_proto.paramsConst()) |param_decl| { - if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, param_decl.noalias_token, .keyword); - try writeToken(builder, param_decl.comptime_token, .keyword); - try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); - switch (param_decl.param_type) { - .any_type => |var_node| try writeToken(builder, var_node.firstToken(), .type), - .type_expr => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), - } - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getAlignExpr() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getSectionExpr() }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getCallconvExpr() }); +// const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) +// .type +// else +// .function; - switch (fn_proto.return_type) { - .Explicit => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), - .InferErrorSet => |type_expr| { - try writeToken(builder, type_expr.firstToken() - 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, 
store, type_expr }); - }, - .Invalid => {}, - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getBodyNode() }); - }, - .AnyFrameType => { - const any_frame_type = node.cast(ast.Node.AnyFrameType).?; - try writeToken(builder, any_frame_type.anyframe_token, .type); - if (any_frame_type.result) |any_frame_result| { - try writeToken(builder, any_frame_result.arrow_token, .type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, any_frame_result.return_type }); - } - }, - .Defer => { - const defer_node = node.cast(ast.Node.Defer).?; - try writeToken(builder, defer_node.defer_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.expr }); - }, - .Comptime => { - const comptime_node = node.cast(ast.Node.Comptime).?; - if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); - try writeToken(builder, comptime_node.comptime_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, comptime_node.expr }); - }, - .Nosuspend => { - const nosuspend_node = node.cast(ast.Node.Nosuspend).?; - try writeToken(builder, nosuspend_node.nosuspend_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, nosuspend_node.expr }); - }, - .Payload => { - const payload = node.cast(ast.Node.Payload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.error_symbol.firstToken(), .variable); - try writeToken(builder, payload.rpipe, .operator); - }, - .PointerPayload => { - const payload = node.cast(ast.Node.PointerPayload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.ptr_token, .operator); - try writeToken(builder, payload.value_symbol.firstToken(), .variable); - 
try writeToken(builder, payload.rpipe, .operator); - }, - .PointerIndexPayload => { - const payload = node.cast(ast.Node.PointerIndexPayload).?; - try writeToken(builder, payload.lpipe, .operator); - try writeToken(builder, payload.ptr_token, .operator); - try writeToken(builder, payload.value_symbol.firstToken(), .variable); - if (payload.index_symbol) |index_symbol| try writeToken(builder, index_symbol.firstToken(), .variable); - try writeToken(builder, payload.rpipe, .operator); - }, - .Else => { - const else_node = node.cast(ast.Node.Else).?; - try writeToken(builder, else_node.else_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.body }); - }, - .Switch => { - const switch_node = node.cast(ast.Node.Switch).?; - try writeToken(builder, switch_node.switch_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_node.expr }); +// const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) +// TokenModifiers{ .generic = true } +// else +// TokenModifiers{}; - var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); - for (switch_node.casesConst()) |case_node| { - try gap_highlighter.next(case_node); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); - } - try gap_highlighter.end(node.lastToken()); - }, - .SwitchCase => { - const switch_case = node.cast(ast.Node.SwitchCase).?; - for (switch_case.itemsConst()) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); - try writeToken(builder, switch_case.arrow_token, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
switch_case.expr }); - }, - .SwitchElse => { - const switch_else = node.cast(ast.Node.SwitchElse).?; - try writeToken(builder, switch_else.token, .keyword); - }, - .While => { - const while_node = node.cast(ast.Node.While).?; - try writeToken(builder, while_node.label, .label); - try writeToken(builder, while_node.inline_token, .keyword); - try writeToken(builder, while_node.while_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.continue_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); - if (while_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .For => { - const for_node = node.cast(ast.Node.For).?; - try writeToken(builder, for_node.label, .label); - try writeToken(builder, for_node.inline_token, .keyword); - try writeToken(builder, for_node.for_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); - if (for_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .If => { - const if_node = node.cast(ast.Node.If).?; - try writeToken(builder, if_node.if_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); - try await 
@asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); - if (if_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .ArrayInitializer => { - const array_initializer = node.cast(ast.Node.ArrayInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .ArrayInitializerDot => { - const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .StructInitializer => { - const struct_initializer = node.cast(ast.Node.StructInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); - const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { - .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| - fieldTokenType(container_decl, handle) - else - null, - else => null, - } else null; +// try writeTokenMod(builder, fn_proto.getNameToken(), func_name_tok_type, tok_mod); - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; - if (field_token_type) |tok_type| { - try writeToken(builder, field_init.period_token, tok_type); - try writeToken(builder, field_init.name_token, tok_type); - } - try writeToken(builder, field_init.name_token + 
1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); - } - try gap_highlighter.end(struct_initializer.rtoken); - }, - .StructInitializerDot => { - const struct_initializer = node.castTag(.StructInitializerDot).?; +// for (fn_proto.paramsConst()) |param_decl| { +// if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, param_decl.noalias_token, .keyword); +// try writeToken(builder, param_decl.comptime_token, .keyword); +// try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); +// switch (param_decl.param_type) { +// .any_type => |var_node| try writeToken(builder, var_node.firstToken(), .type), +// .type_expr => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), +// } +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getAlignExpr() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getSectionExpr() }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getCallconvExpr() }); - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.castTag(.FieldInitializer).?; - try writeToken(builder, field_init.period_token, .field); - try writeToken(builder, field_init.name_token, .field); - try writeToken(builder, field_init.name_token + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); - } - try gap_highlighter.end(struct_initializer.rtoken); - }, - .Call => { - const call = node.cast(ast.Node.Call).?; - try writeToken(builder, 
call.async_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); - if (builder.current_token) |curr_tok| { - if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { - try writeToken(builder, call.lhs.lastToken(), .function); - } - } - for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .Slice => { - const slice = node.castTag(.Slice).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); - try writeToken(builder, slice.start.lastToken() + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); - }, - .ArrayAccess => { - const arr_acc = node.castTag(.ArrayAccess).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); - }, - .Deref, .UnwrapOptional => { - const suffix = node.cast(ast.Node.SimpleSuffixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); - try writeToken(builder, suffix.rtoken, .operator); - }, - .GroupedExpression => { - const grouped_expr = node.cast(ast.Node.GroupedExpression).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); - }, - .Return, .Break, .Continue => { - const cfe = node.cast(ast.Node.ControlFlowExpression).?; - try writeToken(builder, cfe.ltoken, .keyword); - switch (node.tag) { - .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), - .Continue => if (cfe.getLabel()) |n| try 
writeToken(builder, n, .label), - else => {}, - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); - }, - .Suspend => { - const suspend_node = node.cast(ast.Node.Suspend).?; - try writeToken(builder, suspend_node.suspend_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); - }, - .IntegerLiteral => { - try writeToken(builder, node.firstToken(), .number); - }, - .EnumLiteral => { - const enum_literal = node.cast(ast.Node.EnumLiteral).?; - try writeToken(builder, enum_literal.dot, .enumMember); - try writeToken(builder, enum_literal.name, .enumMember); - }, - .FloatLiteral => { - try writeToken(builder, node.firstToken(), .number); - }, - .BuiltinCall => { - const builtin_call = node.cast(ast.Node.BuiltinCall).?; - try writeToken(builder, builtin_call.builtin_token, .builtin); - for (builtin_call.paramsConst()) |param| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); - }, - .StringLiteral, .CharLiteral => { - try writeToken(builder, node.firstToken(), .string); - }, - .MultilineStringLiteral => { - const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; - for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); - }, - .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { - try writeToken(builder, node.firstToken(), .keywordLiteral); - }, - .ErrorType => { - try writeToken(builder, node.firstToken(), .keyword); - }, - .Asm => { - const asm_expr = node.cast(ast.Node.Asm).?; - try writeToken(builder, asm_expr.asm_token, .keyword); - try writeToken(builder, asm_expr.volatile_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); - // TODO Inputs, outputs. 
- }, - .AnyType => { - try writeToken(builder, node.firstToken(), .type); - }, - .TestDecl => { - const test_decl = node.cast(ast.Node.TestDecl).?; - if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, test_decl.test_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); - }, - .Catch => { - const catch_expr = node.cast(ast.Node.Catch).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); - try writeToken(builder, catch_expr.op_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); - }, - .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, .BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); - if (node.tag != .Period) { - const token_type: TokenType = switch (node.tag) { - .BoolAnd, .BoolOr, .OrElse => .keyword, - else => .operator, - }; +// switch (fn_proto.return_type) { +// .Explicit => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), +// .InferErrorSet => |type_expr| { +// try writeToken(builder, type_expr.firstToken() - 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
type_expr }); +// }, +// .Invalid => {}, +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getBodyNode() }); +// }, +// .AnyFrameType => { +// const any_frame_type = node.cast(ast.Node.AnyFrameType).?; +// try writeToken(builder, any_frame_type.anyframe_token, .type); +// if (any_frame_type.result) |any_frame_result| { +// try writeToken(builder, any_frame_result.arrow_token, .type); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, any_frame_result.return_type }); +// } +// }, +// .Defer => { +// const defer_node = node.cast(ast.Node.Defer).?; +// try writeToken(builder, defer_node.defer_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.expr }); +// }, +// .Comptime => { +// const comptime_node = node.cast(ast.Node.Comptime).?; +// if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); +// try writeToken(builder, comptime_node.comptime_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, comptime_node.expr }); +// }, +// .Nosuspend => { +// const nosuspend_node = node.cast(ast.Node.Nosuspend).?; +// try writeToken(builder, nosuspend_node.nosuspend_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, nosuspend_node.expr }); +// }, +// .Payload => { +// const payload = node.cast(ast.Node.Payload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.error_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .PointerPayload => { +// const payload = node.cast(ast.Node.PointerPayload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.ptr_token, .operator); 
+// try writeToken(builder, payload.value_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .PointerIndexPayload => { +// const payload = node.cast(ast.Node.PointerIndexPayload).?; +// try writeToken(builder, payload.lpipe, .operator); +// try writeToken(builder, payload.ptr_token, .operator); +// try writeToken(builder, payload.value_symbol.firstToken(), .variable); +// if (payload.index_symbol) |index_symbol| try writeToken(builder, index_symbol.firstToken(), .variable); +// try writeToken(builder, payload.rpipe, .operator); +// }, +// .Else => { +// const else_node = node.cast(ast.Node.Else).?; +// try writeToken(builder, else_node.else_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.body }); +// }, +// .Switch => { +// const switch_node = node.cast(ast.Node.Switch).?; +// try writeToken(builder, switch_node.switch_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_node.expr }); - try writeToken(builder, infix_op.op_token, token_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); - } - switch (node.tag) { - .Period => { - const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); +// var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); +// for (switch_node.casesConst()) |case_node| { +// try gap_highlighter.next(case_node); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); +// } +// try gap_highlighter.end(node.lastToken()); +// }, +// .SwitchCase => { +// const switch_case = node.cast(ast.Node.SwitchCase).?; +// for (switch_case.itemsConst()) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node 
}); +// try writeToken(builder, switch_case.arrow_token, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.expr }); +// }, +// .SwitchElse => { +// const switch_else = node.cast(ast.Node.SwitchElse).?; +// try writeToken(builder, switch_else.token, .keyword); +// }, +// .While => { +// const while_node = node.cast(ast.Node.While).?; +// try writeToken(builder, while_node.label, .label); +// try writeToken(builder, while_node.inline_token, .keyword); +// try writeToken(builder, while_node.while_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.continue_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); +// if (while_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .For => { +// const for_node = node.cast(ast.Node.For).?; +// try writeToken(builder, for_node.label, .label); +// try writeToken(builder, for_node.inline_token, .keyword); +// try writeToken(builder, for_node.for_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); +// if (for_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .If => { +// 
const if_node = node.cast(ast.Node.If).?; +// try writeToken(builder, if_node.if_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); +// if (if_node.@"else") |else_node| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); +// }, +// .ArrayInitializer => { +// const array_initializer = node.cast(ast.Node.ArrayInitializer).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); +// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); +// }, +// .ArrayInitializerDot => { +// const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; +// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); +// }, +// .StructInitializer => { +// const struct_initializer = node.cast(ast.Node.StructInitializer).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); +// const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { +// .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| +// fieldTokenType(container_decl, handle) +// else +// null, +// else => null, +// } else null; - // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added - // writeToken code. - // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - const lhs_type = try analysis.resolveFieldAccessLhsType( - store, - arena, - (try analysis.resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, - .handle = handle, - }, &bound_type_params)) orelse return, - &bound_type_params, - ); - const left_type_node = switch (lhs_type.type.data) { - .other => |n| n, - else => return, - }; - if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { - switch (decl_type.decl.*) { - .ast_node => |decl_node| { - if (decl_node.tag == .ContainerField) { - const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| - fieldTokenType(container_decl, lhs_type.handle) - else if (left_type_node.tag == .Root) - TokenType.field - else - null; +// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); +// for (struct_initializer.listConst()) |field_init_node| { +// try gap_highlighter.next(field_init_node); +// std.debug.assert(field_init_node.tag == .FieldInitializer); +// const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; +// if (field_token_type) |tok_type| { +// try writeToken(builder, field_init.period_token, tok_type); +// try writeToken(builder, field_init.name_token, tok_type); +// } +// try writeToken(builder, field_init.name_token + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); +// } +// try gap_highlighter.end(struct_initializer.rtoken); +// }, +// .StructInitializerDot => { +// const struct_initializer = node.castTag(.StructInitializerDot).?; - if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); - return; - } else if (decl_node.tag == .ErrorTag) { - try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); - } - 
}, - else => {}, - } +// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); +// for (struct_initializer.listConst()) |field_init_node| { +// try gap_highlighter.next(field_init_node); +// std.debug.assert(field_init_node.tag == .FieldInitializer); +// const field_init = field_init_node.castTag(.FieldInitializer).?; +// try writeToken(builder, field_init.period_token, .field); +// try writeToken(builder, field_init.name_token, .field); +// try writeToken(builder, field_init.name_token + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); +// } +// try gap_highlighter.end(struct_initializer.rtoken); +// }, +// .Call => { +// const call = node.cast(ast.Node.Call).?; +// try writeToken(builder, call.async_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); +// if (builder.current_token) |curr_tok| { +// if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { +// try writeToken(builder, call.lhs.lastToken(), .function); +// } +// } +// for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); +// }, +// .Slice => { +// const slice = node.castTag(.Slice).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); +// try writeToken(builder, slice.start.lastToken() + 1, .operator); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); +// }, +// .ArrayAccess => { +// const arr_acc = node.castTag(.ArrayAccess).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); +// 
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); +// }, +// .Deref, .UnwrapOptional => { +// const suffix = node.cast(ast.Node.SimpleSuffixOp).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); +// try writeToken(builder, suffix.rtoken, .operator); +// }, +// .GroupedExpression => { +// const grouped_expr = node.cast(ast.Node.GroupedExpression).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); +// }, +// .Return, .Break, .Continue => { +// const cfe = node.cast(ast.Node.ControlFlowExpression).?; +// try writeToken(builder, cfe.ltoken, .keyword); +// switch (node.tag) { +// .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), +// .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), +// else => {}, +// } +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); +// }, +// .Suspend => { +// const suspend_node = node.cast(ast.Node.Suspend).?; +// try writeToken(builder, suspend_node.suspend_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); +// }, +// .IntegerLiteral => { +// try writeToken(builder, node.firstToken(), .number); +// }, +// .EnumLiteral => { +// const enum_literal = node.cast(ast.Node.EnumLiteral).?; +// try writeToken(builder, enum_literal.dot, .enumMember); +// try writeToken(builder, enum_literal.name, .enumMember); +// }, +// .FloatLiteral => { +// try writeToken(builder, node.firstToken(), .number); +// }, +// .BuiltinCall => { +// const builtin_call = node.cast(ast.Node.BuiltinCall).?; +// try writeToken(builder, builtin_call.builtin_token, .builtin); +// for (builtin_call.paramsConst()) |param| +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); +// }, +// .StringLiteral, .CharLiteral => { 
+// try writeToken(builder, node.firstToken(), .string); +// }, +// .MultilineStringLiteral => { +// const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; +// for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); +// }, +// .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { +// try writeToken(builder, node.firstToken(), .keywordLiteral); +// }, +// .ErrorType => { +// try writeToken(builder, node.firstToken(), .keyword); +// }, +// .Asm => { +// const asm_expr = node.cast(ast.Node.Asm).?; +// try writeToken(builder, asm_expr.asm_token, .keyword); +// try writeToken(builder, asm_expr.volatile_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); +// // TODO Inputs, outputs. +// }, +// .AnyType => { +// try writeToken(builder, node.firstToken(), .type); +// }, +// .TestDecl => { +// const test_decl = node.cast(ast.Node.TestDecl).?; +// if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); +// try writeToken(builder, test_decl.test_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); +// }, +// .Catch => { +// const catch_expr = node.cast(ast.Node.Catch).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); +// try writeToken(builder, catch_expr.op_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); +// }, +// .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, 
.BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { +// const infix_op = node.cast(ast.Node.SimpleInfixOp).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); +// if (node.tag != .Period) { +// const token_type: TokenType = switch (node.tag) { +// .BoolAnd, .BoolOr, .OrElse => .keyword, +// else => .operator, +// }; - if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { - try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); - } - } - }, - else => {}, - } - }, - .SliceType => { - const slice_type = node.castTag(.SliceType).?; - const ptr_info = slice_type.ptr_info; - if (ptr_info.align_info) |align_info| { - try writeToken(builder, slice_type.op_token + 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); - }, - .PtrType => { - const pointer_type = node.castTag(.PtrType).?; - const tok_ids = builder.handle.tree.token_ids; +// try writeToken(builder, infix_op.op_token, token_type); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); +// } +// switch (node.tag) { +// .Period => { +// const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); - const ptr_info = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, - else => pointer_type.ptr_info, - }; - const rhs = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => 
pointer_type.rhs.castTag(.PtrType).?.rhs, - else => pointer_type.rhs, - }; +// // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added +// // writeToken code. +// // Maybe we can hook into it insead? Also applies to Identifier and VarDecl +// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); +// const lhs_type = try analysis.resolveFieldAccessLhsType( +// store, +// arena, +// (try analysis.resolveTypeOfNodeInternal(store, arena, .{ +// .node = infix_op.lhs, +// .handle = handle, +// }, &bound_type_params)) orelse return, +// &bound_type_params, +// ); +// const left_type_node = switch (lhs_type.type.data) { +// .other => |n| n, +// else => return, +// }; +// if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { +// switch (decl_type.decl.*) { +// .ast_node => |decl_node| { +// if (decl_node.tag == .ContainerField) { +// const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| +// fieldTokenType(container_decl, lhs_type.handle) +// else if (left_type_node.tag == .Root) +// TokenType.field +// else +// null; - const off = switch (tok_ids[pointer_type.op_token]) { - .Asterisk, .AsteriskAsterisk => blk: { - try writeToken(builder, pointer_type.op_token, .operator); - break :blk pointer_type.op_token + 1; - }, - .LBracket => blk: { - try writeToken(builder, pointer_type.op_token + 1, .operator); - const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; +// if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); +// return; +// } else if (decl_node.tag == .ErrorTag) { +// try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); +// } +// }, +// else => {}, +// } - if (is_c_ptr) { - try writeToken(builder, pointer_type.op_token + 2, .operator); - } +// if (try decl_type.resolveType(store, arena, &bound_type_params)) 
|resolved_type| { +// try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); +// } +// } +// }, +// else => {}, +// } +// }, +// .SliceType => { +// const slice_type = node.castTag(.SliceType).?; +// const ptr_info = slice_type.ptr_info; +// if (ptr_info.align_info) |align_info| { +// try writeToken(builder, slice_type.op_token + 2, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); +// } +// try writeToken(builder, ptr_info.const_token, .keyword); +// try writeToken(builder, ptr_info.volatile_token, .keyword); +// try writeToken(builder, ptr_info.allowzero_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); +// }, +// .PtrType => { +// const pointer_type = node.castTag(.PtrType).?; +// const tok_ids = builder.handle.tree.token_ids; - if (ptr_info.sentinel) |sentinel| { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); - break :blk sentinel.lastToken() + 2; - } +// const ptr_info = switch (tok_ids[pointer_type.op_token]) { +// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, +// else => pointer_type.ptr_info, +// }; +// const rhs = switch (tok_ids[pointer_type.op_token]) { +// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, +// else => pointer_type.rhs, +// }; - break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); - }, - else => 0, - }; +// const off = switch (tok_ids[pointer_type.op_token]) { +// .Asterisk, .AsteriskAsterisk => blk: { +// try writeToken(builder, pointer_type.op_token, .operator); +// break :blk pointer_type.op_token + 1; +// }, +// .LBracket => blk: { +// try writeToken(builder, pointer_type.op_token + 1, .operator); +// const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; - if (ptr_info.align_info) |align_info| { - try writeToken(builder, off, .keyword); - try await 
@asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); - }, - .ArrayType => { - const array_type = node.castTag(.ArrayType).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .ArrayTypeSentinel => { - const array_type = node.castTag(.ArrayTypeSentinel).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; - const tok_type: TokenType = switch (node.tag) { - .Try, .Await, .Resume => .keyword, - else => .operator, - }; - try writeToken(builder, prefix_op.op_token, tok_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); - }, - else => {}, - } -} +// if (is_c_ptr) { +// try writeToken(builder, pointer_type.op_token + 2, .operator); +// } + +// if (ptr_info.sentinel) |sentinel| { +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); +// break :blk sentinel.lastToken() + 2; +// } + +// break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); +// }, +// else => 0, +// }; + +// if (ptr_info.align_info) |align_info| { +// try writeToken(builder, off, .keyword); +// try await 
@asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); +// } +// try writeToken(builder, ptr_info.const_token, .keyword); +// try writeToken(builder, ptr_info.volatile_token, .keyword); +// try writeToken(builder, ptr_info.allowzero_token, .keyword); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); +// }, +// .ArrayType => { +// const array_type = node.castTag(.ArrayType).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); +// }, +// .ArrayTypeSentinel => { +// const array_type = node.castTag(.ArrayTypeSentinel).?; +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); +// }, +// .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { +// const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; +// const tok_type: TokenType = switch (node.tag) { +// .Try, .Await, .Resume => .keyword, +// else => .operator, +// }; +// try writeToken(builder, prefix_op.op_token, tok_type); +// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); +// }, +// else => {}, +// } +// } // TODO Range version, edit version. 
pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); - try writeNodeTokens(&builder, arena, store, &handle.tree.root_node.base); + // pass root node, which always has index '0' + // try writeNodeTokens(&builder, arena, store, 0, handle.tree); return builder.toOwnedSlice(); } From 5f5d6ce4ea406f6edd9db38b69448a019a388154 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sat, 27 Feb 2021 21:55:39 +0100 Subject: [PATCH 03/36] Progress in makeScopeInternal --- src/analysis.zig | 404 +++++++++++++++++++++++------------------------ 1 file changed, 202 insertions(+), 202 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index d93538e..d5ce43d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1542,6 +1542,8 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod .container_field, .container_field_align, .container_field_init, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, => .Field, else => .Variable, }, @@ -1558,7 +1560,8 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod }; var index: usize = 0; - try addOutlineNodes(allocator, tree, node, &child_context); + if (true) @panic("FIX: addOutlineNodes"); + // try addOutlineNodes(allocator, tree, node, &child_context); // while (node.iterate(index)) |child| : (index += 1) { // try addOutlineNodes(allocator, tree, child, &child_context); @@ -1590,8 +1593,8 @@ pub const Declaration = union(enum) { /// Function parameter param_decl: ast.full.FnProto.Param, pointer_payload: struct { - node: ast.full.PtrType, - condition: *ast.Node, + name: ast.TokenIndex, + condition: ast.Node.Index, }, array_payload: struct { identifier: *ast.Node, @@ -1602,7 +1605,7 @@ pub const Declaration = union(enum) { switch_expr: *ast.Node, items: []const *ast.Node, }, - label_decl: *ast.Node, // 
.id is While, For or Block (firstToken will be the label) + label_decl: ast.TokenIndex, // .id is While, For or Block (firstToken will be the label) }; pub const DeclWithHandle = struct { @@ -2145,7 +2148,7 @@ fn isContainer(tag: ast.Node.Tag) bool { /// Returns the member indices of a given declaration container. /// Asserts given `tag` is a container node -fn declMembers(tree: ast.Tree, tag: ast.Node.Tag) []ast.Node.index { +fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index) []const ast.Node.Index { std.debug.assert(isContainer(tag)); return switch (tag) { .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members, @@ -2167,6 +2170,18 @@ fn declMembers(tree: ast.Tree, tag: ast.Node.Tag) []ast.Node.index { }; } +/// Returns an `ast.full.VarDecl` for a given node index. +/// Returns null if the tag doesn't match +fn varDecl(tree: ast.Tree, node_idx: ast.Node.Index) ?ast.full.VarDecl { + return switch (tree.nodes.items(.tag)[node_idx]) { + .global_var_decl => tree.globalVarDecl(node_idx), + .local_var_decl => tree.localVarDecl(node_idx), + .aligned_var_decl => tree.alignedVarDecl(node_idx), + .simple_var_decl => tree.simpleVarDecl(node_idx), + else => null, + }; +} + // TODO Possibly collect all imports to diff them on changes // as well fn makeScopeInternal( @@ -2179,16 +2194,18 @@ fn makeScopeInternal( ) error{OutOfMemory}!void { const tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); + const data = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const node = tags[node_idx]; if (isContainer(node)) { - const ast_decls = declMembers(tree, node); + const ast_decls = declMembers(tree, node, node_idx); (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .{ .container = node_idx }, }; const 
scope_idx = scopes.items.len - 1; @@ -2227,7 +2244,7 @@ fn makeScopeInternal( }; } - const container_field: ?ast.full.ContainerField = switch (decl) { + const container_field: ?ast.full.ContainerField = switch (tags[decl]) { .container_field => tree.containerField(decl), .container_field_align => tree.containerFieldAlign(decl), .container_field_init => tree.containerFieldInit(decl), @@ -2235,7 +2252,7 @@ fn makeScopeInternal( }; if (container_field) |field| { - const empty_field = field.type_expr == 0 and field.value_expr == 0; + const empty_field = field.ast.type_expr == 0 and field.ast.value_expr == 0; if (empty_field and node == .root) { continue; } @@ -2250,7 +2267,7 @@ fn makeScopeInternal( }, .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx), .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx), - .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => blk: { + .tagged_union_two, .tagged_union_two_trailing => blk: { var buffer: [2]ast.Node.Index = undefined; break :blk tree.taggedUnionTwo(&buffer, node_idx); }, @@ -2287,83 +2304,98 @@ fn makeScopeInternal( } switch (node) { - .FnProto => { - const func = node.castTag(.FnProto).?; + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { + var buf: [1]ast.Node.Index = undefined; + const func: ast.full.FnProto = switch (node) { + .fn_proto => tree.fnProto(node_idx), + .fn_proto_one => tree.fnProtoOne(&buf, node_idx), + .fn_proto_simple => tree.fnProtoSimple(&buf, node_idx), + .fn_proto_multi => tree.fnProtoMulti(node_idx), + .fn_decl => tree.fnProto(data[node_idx].lhs), + else => unreachable, + }; (try scopes.addOne(allocator)).* = .{ - .range = nodeSourceRange(tree, node), + .range = nodeSourceRange(tree, node_idx), .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .{ .function = node }, }; var scope_idx = scopes.items.len - 1; errdefer 
scopes.items[scope_idx].decls.deinit(); - for (func.params()) |*param| { - if (param.name_token) |name_tok| { - if (try scopes.items[scope_idx].decls.fetchPut(tree.tokenSlice(name_tok), .{ .param_decl = param })) |existing| { - // TODO Record a redefinition error + var it = func.iterate(tree); + while (it.next()) |param| { + if (param.name_token) |name_token| { + if (try scopes.items[scope_idx].decls.fetchPut(tree.tokenSlice(name_token), .{ .param_decl = param })) |existing| { + // TODO record a redefinition error } } } - if (func.getBodyNode()) |body| { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, body); + if (node == .fn_decl) { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); } return; }, - .TestDecl => { - return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, node.castTag(.TestDecl).?.body_node); + .test_decl => { + return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); }, - .LabeledBlock => { - const block = node.castTag(.LabeledBlock).?; - std.debug.assert(tree.token_ids[block.label] == .Identifier); - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = tree.token_locs[block.lbrace].start, - .end = tree.token_locs[block.rbrace].end, + .block, .block_semicolon, .block_two, .block_two_semicolon => { + const first_token = tree.firstToken(node_idx); + const last_token = tree.lastToken(node_idx); + + // if labeled block + if (token_tags[first_token] == .identifier) { + const scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = tree.tokenLocation(main_tokens[node_idx]).line_start, + .end = tree.tokenLocation(@truncate(u32, start), last_token).line_start, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + // .uses = &[0]*ast.Node.Use{}, + .tests = &[0]*ast.Node{}, + .data = .other, + }; + errdefer 
scope.decls.deinit(); + try scopes.items[scope_idx].decls.putNoClobber(tree.tokenSlice(first_token), .{ .label_decl = first_token }); + } + + (try scopes.addOne(allocator)).* = .{ + .range = nodeSourceRange(tree, node_idx), + .decls = std.StringHashMap(Declaration).init(allocator), + // .uses = &[0]*ast.Node.Use{}, + .tests = &.{}, + .data = .{ .block = node_idx }, + }; + var scope_idx = scopes.items.len - 1; + // var uses = std.ArrayList(*ast.Node.Use).init(allocator); + + errdefer { + scopes.items[scope_idx].decls.deinit(); + // uses.deinit(); + } + + const statements: []const ast.Node.Index = switch (node) { + .block, .block_semicolon => tree.extra_data[data[node_idx].lhs..data[node_idx].rhs], + .block_two, .block_two_semicolon => blk: { + const statements = [2]ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs }; + const len: usize = if (data[node_idx].lhs == 0) 0 else if (data[node_idx].rhs == 0) 1 else 2; + break :blk statements[0..len]; }, - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, + else => unreachable, }; - errdefer scope.decls.deinit(); - try scope.decls.putNoClobber(tree.tokenSlice(block.label), .{ - .label_decl = node, - }); - - (try scopes.addOne(allocator)).* = .{ - .range = nodeSourceRange(tree, node), - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .{ .block = node }, - }; - var scope_idx = scopes.items.len - 1; - // var uses = std.ArrayList(*ast.Node.Use).init(allocator); - - errdefer { - scopes.items[scope_idx].decls.deinit(); - // uses.deinit(); - } - - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - // if (child_node.castTag(.Use)) |use| { - // try uses.append(use); - // continue; - // } - - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); - if 
(child_node.castTag(.VarDecl)) |var_decl| { - const name = tree.tokenSlice(var_decl.name_token); - if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = child_node })) |existing| { - // TODO Record a redefinition error. + for (statements[0..len]) |idx| { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, idx); + // if (tags[ + if (varDecl(idx)) |var_decl| { + const name = tree.tokenSlice(var_decl.ast.mut_token + 1); + if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { + // TODO record a redefinition error. } } } @@ -2371,171 +2403,139 @@ fn makeScopeInternal( // scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, - .Block => { - const block = node.castTag(.Block).?; + .@"comptime", .@"nosuspend" => { + return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].lhs); + }, + .@"if", .if_simple => { + const if_node: ast.full.If = if (node == .@"if") + tree.ifFull(node_idx) + else + tree.ifSimple(node_idx); - (try scopes.addOne(allocator)).* = .{ - .range = nodeSourceRange(tree, node), - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .{ .block = node }, + if (if_node.payload_token) |payload| { + var scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = tree.tokenLocation(0, payload).line_start, + .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.then_expr)).line_end, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + // .uses = &[0]*ast.Node.Use{}, + .tests = &.{}, + .data = .other, + }; + errdefer scope.decls.deinit(); + + const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); + std.debug.assert(token_tags[name_token] == .identifier); + + const name = tree.tokenSlice(name_token); + try scope.decls.putNoClobber(name, .{ + .pointer_payload = .{ + .name = 
name_token, + .condition = if_node.ast.cond_expr, + }, + }); + } + + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, if_node.ast.then_expr); + + if (if_node.ast.else_expr != 0) { + if (if_node.error_token) |err_token| { + std.debug.assert(token_tags[err_token] == .identifier); + var scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = tree.tokenLocation(0, err_token).line_start, + .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.else_expr)).line_end, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + // .uses = &[0]*ast.Node.Use{}, + .tests = &.{}, + .data = .other, + }; + errdefer scope.decls.deinit(); + + const name = tree.tokenSlice(err_token); + try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr }); + } + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, if_node.ast.else_expr); + } + }, + .@"while", .while_simple, .while_cont, .@"for", .for_simple => { + const while_node: ast.full.While = switch (node) { + .@"while" => tree.whileFull(node_idx), + .while_simple => tree.whileSimple(node_idx), + .while_cont => tree.whileCont(node_idx), + .@"for" => tree.forFull(node_idx), + .for_simple => tree.forSimple(node_idx), + else => unreachable, }; - var scope_idx = scopes.items.len - 1; - // var uses = std.ArrayList(*ast.Node.Use).init(allocator); - - errdefer { - scopes.items[scope_idx].decls.deinit(); - // uses.deinit(); - } - - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - // if (child_node.castTag(.Use)) |use| { - // try uses.append(use); - // continue; - // } - - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); - if (child_node.castTag(.VarDecl)) |var_decl| { - const name = tree.tokenSlice(var_decl.name_token); - if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = child_node })) |existing| { - // 
TODO Record a redefinition error. - } - } - } - - // scopes.items[scope_idx].uses = uses.toOwnedSlice(); - return; - }, - .Comptime => { - return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, node.castTag(.Comptime).?.expr); - }, - .If => { - const if_node = node.castTag(.If).?; - - if (if_node.payload) |payload| { - std.debug.assert(payload.tag == .PointerPayload); + if (while_node.label_token) |label| { + std.debug.assert(tags[label] == .identifier); var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.token_locs[payload.firstToken()].start, - .end = tree.token_locs[if_node.body.lastToken()].end, + .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, + .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(while_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .other, }; errdefer scope.decls.deinit(); - const ptr_payload = payload.castTag(.PointerPayload).?; - std.debug.assert(ptr_payload.value_symbol.tag == .Identifier); - const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken()); + try scope.decls.putNoClobber(tree.tokenSlice(label), .{ .label_decl = label }); + } + + if (while_node.payload_token) |payload| { + var scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = tree.tokenLocation(0, payload).line_start, + .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(while_node.ast.then_exp)).line_end, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + // .uses = &[0]*ast.Node.Use{}, + .tests = &.{}, + .data = .other, + }; + errdefer scope.decls.deinit(); + + const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); + std.debug.assert(token_tags[name_token] == .identifier); + + const name = tree.tokenSlice(name_token); try scope.decls.putNoClobber(name, .{ .pointer_payload = 
.{ - .node = ptr_payload, - .condition = if_node.condition, + .name = name_token, + .condition = while_node.ast.cond_expr, }, }); } - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, if_node.body); + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.ast.then_expr); - if (if_node.@"else") |else_node| { - if (else_node.payload) |payload| { - std.debug.assert(payload.tag == .Payload); + if (while_node.ast.else_expr != 0) { + if (while_node.error_token) |err_token| { + std.debug.assert(token_tags[err_token] == .identifier); var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.token_locs[payload.firstToken()].start, - .end = tree.token_locs[else_node.body.lastToken()].end, + .start = tree.tokenLocation(0, err_token).line_start, + .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.else_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .other, }; errdefer scope.decls.deinit(); - const err_payload = payload.castTag(.Payload).?; - std.debug.assert(err_payload.error_symbol.tag == .Identifier); - const name = tree.tokenSlice(err_payload.error_symbol.firstToken()); - try scope.decls.putNoClobber(name, .{ .ast_node = payload }); + const name = tree.tokenSlice(err_token); + try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr }); } - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, else_node.body); - } - }, - .While => { - const while_node = node.castTag(.While).?; - if (while_node.label) |label| { - std.debug.assert(tree.token_ids[label] == .Identifier); - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = tree.token_locs[while_node.while_token].start, - .end = tree.token_locs[while_node.lastToken()].end, - }, - .decls = 
std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, - }; - errdefer scope.decls.deinit(); - - try scope.decls.putNoClobber(tree.tokenSlice(label), .{ - .label_decl = node, - }); - } - - if (while_node.payload) |payload| { - std.debug.assert(payload.tag == .PointerPayload); - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = tree.token_locs[payload.firstToken()].start, - .end = tree.token_locs[while_node.body.lastToken()].end, - }, - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, - }; - errdefer scope.decls.deinit(); - - const ptr_payload = payload.castTag(.PointerPayload).?; - std.debug.assert(ptr_payload.value_symbol.tag == .Identifier); - const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken()); - try scope.decls.putNoClobber(name, .{ - .pointer_payload = .{ - .node = ptr_payload, - .condition = while_node.condition, - }, - }); - } - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.body); - - if (while_node.@"else") |else_node| { - if (else_node.payload) |payload| { - std.debug.assert(payload.tag == .Payload); - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = tree.token_locs[payload.firstToken()].start, - .end = tree.token_locs[else_node.body.lastToken()].end, - }, - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, - }; - errdefer scope.decls.deinit(); - - const err_payload = payload.castTag(.Payload).?; - std.debug.assert(err_payload.error_symbol.tag == .Identifier); - const name = tree.tokenSlice(err_payload.error_symbol.firstToken()); - try scope.decls.putNoClobber(name, .{ .ast_node = payload }); - } - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, 
else_node.body); + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.ast.else_expr); } }, .For => { From 709c1a70fdc4ac3c19e62de1e4dfb0ef40f06076 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 28 Feb 2021 13:09:10 +0100 Subject: [PATCH 04/36] Implement makeScopeInternal logic --- src/analysis.zig | 166 ++++++++++++++++------------------------------- 1 file changed, 57 insertions(+), 109 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index d5ce43d..56b3447 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1601,9 +1601,9 @@ pub const Declaration = union(enum) { array_expr: ast.full.ArrayType, }, switch_payload: struct { - node: ast.full.PtrType, - switch_expr: *ast.Node, - items: []const *ast.Node, + node: ast.TokenIndex, + switch_expr: ast.Node.Index, + items: []const ast.Node.Index, }, label_decl: ast.TokenIndex, // .id is While, For or Block (firstToken will be the label) }; @@ -2320,7 +2320,7 @@ fn makeScopeInternal( .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, .tests = &.{}, - .data = .{ .function = node }, + .data = .{ .function = node_idx }, }; var scope_idx = scopes.items.len - 1; errdefer scopes.items[scope_idx].decls.deinit(); @@ -2352,16 +2352,16 @@ fn makeScopeInternal( const scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(main_tokens[node_idx]).line_start, - .end = tree.tokenLocation(@truncate(u32, start), last_token).line_start, + .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, + .end = tree.tokenLocation(0, last_token).line_start, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .other, }; errdefer scope.decls.deinit(); - try scopes.items[scope_idx].decls.putNoClobber(tree.tokenSlice(first_token), .{ .label_decl = first_token }); + try 
scope.decls.putNoClobber(tree.tokenSlice(first_token), .{ .label_decl = first_token }); } (try scopes.addOne(allocator)).* = .{ @@ -2382,17 +2382,22 @@ fn makeScopeInternal( const statements: []const ast.Node.Index = switch (node) { .block, .block_semicolon => tree.extra_data[data[node_idx].lhs..data[node_idx].rhs], .block_two, .block_two_semicolon => blk: { - const statements = [2]ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs }; - const len: usize = if (data[node_idx].lhs == 0) 0 else if (data[node_idx].rhs == 0) 1 else 2; + const statements = &[_]ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs }; + const len: usize = if (data[node_idx].lhs == 0) + @as(usize, 0) + else if (data[node_idx].rhs == 0) + @as(usize, 1) + else + @as(usize, 2); break :blk statements[0..len]; }, else => unreachable, }; - for (statements[0..len]) |idx| { + for (statements) |idx| { try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, idx); // if (tags[ - if (varDecl(idx)) |var_decl| { + if (varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { // TODO record a redefinition error. 
@@ -2417,7 +2422,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = tree.tokenLocation(0, payload).line_start, - .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.then_expr)).line_end, + .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ -2447,7 +2452,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = tree.tokenLocation(0, err_token).line_start, - .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.else_expr)).line_end, + .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.else_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ -2477,7 +2482,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, - .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(while_node.ast.then_expr)).line_end, + .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ -2494,7 +2499,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = tree.tokenLocation(0, payload).line_start, - .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(while_node.ast.then_exp)).line_end, + .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ -2523,7 +2528,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = tree.tokenLocation(0, err_token).line_start, - .end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(if_node.ast.else_expr)).line_end, + .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.else_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ 
-2533,120 +2538,63 @@ fn makeScopeInternal( errdefer scope.decls.deinit(); const name = tree.tokenSlice(err_token); - try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr }); + try scope.decls.putNoClobber(name, .{ .ast_node = while_node.ast.else_expr }); } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.ast.else_expr); } }, - .For => { - const for_node = node.castTag(.For).?; - if (for_node.label) |label| { - std.debug.assert(tree.token_ids[label] == .Identifier); + .switch_case, .switch_case_one => { + const switch_case: ast.full.SwitchCase = switch (node) { + .switch_case => tree.switchCase(node_idx), + .switch_case_one => tree.switchCaseOne(node_idx), + else => unreachable, + }; + + if (switch_case.payload_token) |payload| { var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.token_locs[for_node.for_token].start, - .end = tree.token_locs[for_node.lastToken()].end, + .start = tree.tokenLocation(0, payload).line_start, + .end = tree.tokenLocation(0, tree.lastToken(switch_case.ast.target_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, + .tests = &.{}, .data = .other, }; errdefer scope.decls.deinit(); - try scope.decls.putNoClobber(tree.tokenSlice(label), .{ - .label_decl = node, + // if payload is *name than get next token + const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); + const name = tree.tokenSlice(name_token); + + try scope.decls.putNoClobber(name, .{ + .switch_payload = .{ + .node = payload, + .switch_expr = switch_case.ast.target_expr, + .items = switch_case.ast.values, + }, }); } - std.debug.assert(for_node.payload.tag == .PointerIndexPayload); - const ptr_idx_payload = for_node.payload.castTag(.PointerIndexPayload).?; - std.debug.assert(ptr_idx_payload.value_symbol.tag == .Identifier); - - var scope = try scopes.addOne(allocator); - scope.* = .{ 
- .range = .{ - .start = tree.token_locs[ptr_idx_payload.firstToken()].start, - .end = tree.token_locs[for_node.body.lastToken()].end, - }, - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, - }; - errdefer scope.decls.deinit(); - - const value_name = tree.tokenSlice(ptr_idx_payload.value_symbol.firstToken()); - try scope.decls.putNoClobber(value_name, .{ - .array_payload = .{ - .identifier = ptr_idx_payload.value_symbol, - .array_expr = for_node.array_expr, - }, - }); - - if (ptr_idx_payload.index_symbol) |index_symbol| { - std.debug.assert(index_symbol.tag == .Identifier); - const index_name = tree.tokenSlice(index_symbol.firstToken()); - if (try scope.decls.fetchPut(index_name, .{ .ast_node = index_symbol })) |existing| { - // TODO Record a redefinition error - } - } - - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, for_node.body); - if (for_node.@"else") |else_node| { - std.debug.assert(else_node.payload == null); - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, else_node.body); - } + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, switch_case.ast.target_expr); }, - .Switch => { - const switch_node = node.castTag(.Switch).?; - for (switch_node.casesConst()) |case| { - if (case.*.castTag(.SwitchCase)) |case_node| { - if (case_node.payload) |payload| { - std.debug.assert(payload.tag == .PointerPayload); - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = tree.token_locs[payload.firstToken()].start, - .end = tree.token_locs[case_node.expr.lastToken()].end, - }, - .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, - .tests = &[0]*ast.Node{}, - .data = .other, - }; - errdefer scope.decls.deinit(); + .global_var_decl, .local_var_decl, .aligned_var_decl, .simple_var_decl => { + const var_decl = 
varDecl(tree, node_idx).?; + if (var_decl.ast.type_node != 0) { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, var_decl.ast.type_node); + } - const ptr_payload = payload.castTag(.PointerPayload).?; - std.debug.assert(ptr_payload.value_symbol.tag == .Identifier); - const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken()); - try scope.decls.putNoClobber(name, .{ - .switch_payload = .{ - .node = ptr_payload, - .switch_expr = switch_node.expr, - .items = case_node.itemsConst(), - }, - }); - } - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, case_node.expr); - } - } - }, - .VarDecl => { - const var_decl = node.castTag(.VarDecl).?; - if (var_decl.getTypeNode()) |type_node| { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, type_node); - } - if (var_decl.getInitNode()) |init_node| { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, init_node); + if (var_decl.ast.init_node != 0) { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, var_decl.ast.init_node); } }, else => { - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); - } + // @TODO: Could we just do node_idx + 1 here? 
+ // var child_idx: usize = 0; + // while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { + // try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); + // } }, } } From b175a01fcef51017a168cc1cb413c0185d9fa1ae Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 28 Feb 2021 17:42:34 +0100 Subject: [PATCH 05/36] Progress in resolveTypeOfNodeInternal --- src/analysis.zig | 327 ++++++++++++++++++++++++++++------------------- 1 file changed, 193 insertions(+), 134 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 56b3447..436f857 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -353,42 +353,15 @@ fn findReturnStatement(tree: ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.Co pub fn resolveReturnType( store: *DocumentStore, arena: *std.heap.ArenaAllocator, - fn_decl: *ast.Node.FnProto, + fn_decl: ast.full.FnProto, handle: *DocumentStore.Handle, bound_type_params: *BoundTypeParams, ) !?TypeWithHandle { - if (isTypeFunction(handle.tree, fn_decl) and fn_decl.getBodyNode() != null) { - // If this is a type function and it only contains a single return statement that returns - // a container declaration, we will return that declaration. 
- const ret = findReturnStatement(handle.tree, fn_decl) orelse return null; - if (ret.getRHS()) |rhs| { - return try resolveTypeOfNodeInternal(store, arena, .{ - .node = rhs, - .handle = handle, - }, bound_type_params); - } - - return null; - } - - return switch (fn_decl.return_type) { - .InferErrorSet => |return_type| block: { - const child_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = return_type, - .handle = handle, - }, bound_type_params)) orelse return null; - const child_type_node = switch (child_type.type.data) { - .other => |n| n, - else => return null, - }; - break :block TypeWithHandle{ .type = .{ .data = .{ .error_union = child_type_node }, .is_type_val = false }, .handle = child_type.handle }; - }, - .Explicit => |return_type| ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = return_type, - .handle = handle, - }, bound_type_params)) orelse return null).instanceTypeVal(), - .Invalid => null, - }; + // @TODO: Confirm this can handle inferred error sets etc + return resolveTypeOfNodeInternal(store, arena, .{ + .node = fn_decl.ast.return_type, + .handle = handle, + }, bound_type_params); } /// Resolves the child type of an optional type @@ -397,6 +370,7 @@ fn resolveUnwrapOptionalType( arena: *std.heap.ArenaAllocator, opt: TypeWithHandle, bound_type_params: *BoundTypeParams, + tree: ast.Tree, ) !?TypeWithHandle { const opt_node = switch (opt.type.data) { .other => |n| n, @@ -442,6 +416,17 @@ fn resolveUnwrapErrorType( return null; } +fn isPtrType(tree: ast.Tree, node: ast.Node.Index) bool { + return switch (tree.nodes.items(.tag)[node]) { + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + => true, + else => false, + }; +} + /// Resolves the child type of a deref type fn resolveDerefType( store: *DocumentStore, @@ -453,16 +438,19 @@ fn resolveDerefType( .other => |n| n, else => return null, }; + const tree = deref.handle.tree; + const main_token = tree.nodes.items(.main_token)[deref_node]; + const 
token_tag = tree.tokens.items(.tag)[main_token]; - if (deref_node.castTag(.PtrType)) |ptr_type| { - switch (deref.handle.tree.token_ids[ptr_type.op_token]) { - .Asterisk => { + if (isPtrType(deref_node)) { + switch (token_tag) { + .asterisk => { return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = ptr_type.rhs, + .node = tree.nodes.items(.data)[deref_node].rhs, .handle = deref.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); }, - .LBracket, .AsteriskAsterisk => return null, + .l_bracket, .asterisk_asterisk => return null, else => unreachable, } } @@ -524,7 +512,7 @@ pub fn resolveFieldAccessLhsType( return (try resolveDerefType(store, arena, lhs, bound_type_params)) orelse lhs; } -pub const BoundTypeParams = std.AutoHashMap(*const ast.Node.FnProto.ParamDecl, TypeWithHandle); +pub const BoundTypeParams = std.AutoHashMap(*const ast.full.FnProto.Param, TypeWithHandle); fn allDigits(str: []const u8) bool { for (str) |c| { @@ -566,37 +554,44 @@ pub fn resolveTypeOfNodeInternal( ) error{OutOfMemory}!?TypeWithHandle { const node = node_handle.node; const handle = node_handle.handle; + const tree = handle.tree; - switch (node.tag) { - .VarDecl => { - const vari = node.castTag(.VarDecl).?; - if (vari.getTypeNode()) |type_node| block: { + const main_tokens = tree.nodes.items(.main_token); + const node_tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + const token_tags = tree.tokens.items(.tag); + const starts = tree.tokens.items(.start); + + switch (node_tags[node]) { + .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + const var_decl = varDecl(tree, node).?; + if (var_decl.ast.type_node != 0) block: { return ((try resolveTypeOfNodeInternal( store, arena, - .{ .node = type_node, .handle = handle }, + .{ .node = var_decl.ast.type_node, .handle = handle }, bound_type_params, )) orelse break :block).instanceTypeVal(); } - const init_node = vari.getInitNode() orelse return null; - - return try 
resolveTypeOfNodeInternal(store, arena, .{ .node = init_node, .handle = handle }, bound_type_params); + return if (var_decl.ast.init_node != 0) + try resolveTypeOfNodeInternal(store, arena, .{ .node = var_decl.ast.init_node, .handle = handle }, bound_type_params) + else + null; }, - .Identifier => { - if (isTypeIdent(handle.tree, node.firstToken())) { + .identifier => { + if (isTypeIdent(handle.tree, tree.firstToken(node))) { return TypeWithHandle{ .type = .{ .data = .primitive, .is_type_val = true }, .handle = handle, }; } - if (try lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { + if (try lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), starts[tree.firstToken(node)])) |child| { switch (child.decl.*) { .ast_node => |n| { if (n == node) return null; - if (n.castTag(.VarDecl)) |var_decl| { - if (var_decl.getInitNode()) |init_node| - if (init_node == node) return null; + if (varDecl(tree, n)) |var_decl| { + if (var_decl.ast.init_node == node) return null; } }, else => {}, @@ -605,21 +600,42 @@ pub fn resolveTypeOfNodeInternal( } return null; }, - .ContainerField => { - const field = node.castTag(.ContainerField).?; + .container_field, .container_field_init, .container_field_align => |c| { + const field: ast.full.ContainerField = switch (c) { + .container_field => tree.containerField(node), + .container_field_align => tree.containerFieldAlign(node), + .container_field_init => tree.containerFieldInit(node), + else => unreachable, + }; + + if (field.ast.type_expr == 0) return null; return ((try resolveTypeOfNodeInternal( store, arena, - .{ .node = field.type_expr orelse return null, .handle = handle }, + .{ .node = field.ast.type_expr, .handle = handle }, bound_type_params, )) orelse return null).instanceTypeVal(); }, - .Call => { - const call = node.castTag(.Call).?; + .call, + .call_comma, + .async_call, + .async_call_comma, + .call_one, + .call_one_comma, + 
.async_call_one, + .async_call_one_comma, + => |c| { + var params: [1]ast.Node.Index = undefined; + const call: ast.full.Call = switch (c) { + .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node), + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(¶ms, node), + else => unreachable, + }; + const decl = (try resolveTypeOfNodeInternal( store, arena, - .{ .node = call.lhs, .handle = handle }, + .{ .node = call.ast.fn_expr, .handle = handle }, bound_type_params, )) orelse return null; @@ -628,28 +644,30 @@ pub fn resolveTypeOfNodeInternal( .other => |n| n, else => return null, }; - if (decl_node.castTag(.FnProto)) |fn_decl| { - var has_self_param: u8 = 0; - if (call.lhs.cast(ast.Node.SimpleInfixOp)) |lhs_infix_op| { - if (call.lhs.tag == .Period) { - has_self_param = 1; - } - } - // Bidn type params to the expressions passed in the calls. - const param_len = std.math.min(call.params_len + has_self_param, fn_decl.params_len); - for (fn_decl.paramsConst()) |*decl_param, param_idx| { - if (param_idx < has_self_param) continue; - if (param_idx >= param_len) break; + var buf: [1]ast.Node.Index = undefined; + const func_maybe: ?ast.full.FnProto = switch (node_tags[decl_node]) { + .fn_proto => tree.fnProto(decl_node), + .fn_proto_one => tree.fnProtoOne(&buf, decl_node), + .fn_proto_multi => tree.fnProtoMulti(decl_node), + .fn_proto_simple => tree.fnProtoSimple(&buf, decl_node), + else => null, + }; - const type_param = switch (decl_param.param_type) { - .type_expr => |type_node| typeIsType(decl.handle.tree, type_node), - else => false, - }; - if (!type_param) continue; + if (func_maybe) |fn_decl| { + // check for x.y(..). if '.' is found, it means first param should be skipped + const has_self_param = token_tags[call.ast.lparen - 2] == .period; + var it = fn_decl.iterate(); + + // Bind type params to the expressions passed in the calls. 
+ const param_len = std.math.min(call.ast.params.len + @boolToInt(has_self_param), fn_decl.ast.params.len); + while (it.next()) |decl_param| { + if (it.param_i == 0 and has_self_param) continue; + if (it.param_i >= param_len) break; + if (!typeIsType(decl_param.type_expr)) continue; const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = call.paramsConst()[param_idx - has_self_param], + .node = call.ast.params[it.param_i - @boolToInt(has_self_param)], .handle = handle, }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; @@ -870,10 +888,10 @@ pub fn resolveTypeOfNodeInternal( // TODO Make this better, nested levels of type vals pub const Type = struct { data: union(enum) { - pointer: *ast.Node, - slice: *ast.Node, - error_union: *ast.Node, - other: *ast.Node, + pointer: ast.Node.Index, + slice: ast.Node.Index, + error_union: ast.Node.Index, + other: ast.Node.Index, primitive, }, /// If true, the type `type`, the attached data is the value of the type value. 
@@ -1035,7 +1053,7 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !v } pub const NodeWithHandle = struct { - node: *ast.Node, + node: ast.Node.Index, handle: *DocumentStore.Handle, }; @@ -1061,17 +1079,17 @@ pub fn getFieldAccessType( while (true) { const tok = tokenizer.next(); - switch (tok.id) { - .Eof => return FieldAccessReturn{ + switch (tok.tag) { + .eof => return FieldAccessReturn{ .original = current_type, .unwrapped = try resolveDerefType(store, arena, current_type, &bound_type_params), }, - .Identifier => { + .identifier => { if (try lookupSymbolGlobal(store, arena, current_type.handle, tokenizer.buffer[tok.loc.start..tok.loc.end], source_index)) |child| { current_type = (try child.resolveType(store, arena, &bound_type_params)) orelse return null; } else return null; }, - .Period => { + .period => { const after_period = tokenizer.next(); switch (after_period.id) { .Eof => return FieldAccessReturn{ @@ -1111,10 +1129,10 @@ pub fn getFieldAccessType( }, } }, - .PeriodAsterisk => { + .period_asterisk => { current_type = (try resolveDerefType(store, arena, current_type, &bound_type_params)) orelse return null; }, - .LParen => { + .l_paren => { const current_type_node = switch (current_type.type.data) { .other => |n| n, else => return null, @@ -1139,7 +1157,7 @@ pub fn getFieldAccessType( } else return null; } else return null; }, - .LBracket => { + .l_bracket => { var brack_count: usize = 1; var next = tokenizer.next(); var is_range = false; @@ -1157,7 +1175,7 @@ pub fn getFieldAccessType( current_type = (try resolveBracketAccessType(store, arena, current_type, if (is_range) .Range else .Single, &bound_type_params)) orelse return null; }, else => { - log.debug("Unimplemented token: {}", .{tok.id}); + log.debug("Unimplemented token: {}", .{tok.tag}); return null; }, } @@ -1169,39 +1187,43 @@ pub fn getFieldAccessType( }; } -pub fn isNodePublic(tree: ast.Tree, node: *ast.Node) bool { - switch (node.tag) { - .VarDecl => { +pub 
fn isNodePublic(tree: ast.Tree, node: ast.Node.Index) bool { + switch (tree.nodes.items(.tag)[node]) { + .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { const var_decl = node.castTag(.VarDecl).?; - return var_decl.getVisibToken() != null; - }, - .FnProto => { - const func = node.castTag(.FnProto).?; - return func.getVisibToken() != null; + const var_decl = varDecl(tree, node).?; + return var_decl.visib_token != null; }, + .fn_proto => tree.fnProto(node).visib_token != null, + .fn_proto_one => tree.fnProtoOne(node).visib_token != null, + .fn_proto_simple => tree.fnProtoSimple(node).visib_token != null, + .fn_proto_multi => tree.fnProtoMulti(node).visib_token != null, else => return true, } } -pub fn nodeToString(tree: ast.Tree, node: *ast.Node) ?[]const u8 { - switch (node.tag) { - .ContainerField => { - const field = node.castTag(.ContainerField).?; - return tree.tokenSlice(field.name_token); +pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { + switch (tree.nodes.items(.tag)[node]) { + .container_field => return tree.tokenSlice(tree.containerField(node).ast.name_token), + .container_field_init => return tree.tokenSlice(tree.containerFieldInit(node).ast.name_token), + .container_field_align => return tree.tokenSlice(tree.containerFieldAlign(node).ast.name_token), + // @TODO: Error tag name + // .ErrorTag => { + // const tag = node.castTag(.ErrorTag).?; + // return tree.tokenSlice(tag.name_token); + // }, + .identifier => return tree.tokenSlice(node), + .fn_proto => if (tree.fnProto(node).name_token) |name| { + return tree.tokenSlice(name); }, - .ErrorTag => { - const tag = node.castTag(.ErrorTag).?; - return tree.tokenSlice(tag.name_token); + .fn_proto_one => if (tree.fnProtoOne(node).name_token) |name| { + return tree.tokenSlice(name); }, - .Identifier => { - const field = node.castTag(.Identifier).?; - return tree.tokenSlice(field.token); + .fn_proto_multi => if (tree.fnProtoMulti(node).name_token) |name| { + 
return tree.tokenSlice(name); }, - .FnProto => { - const func = node.castTag(.FnProto).?; - if (func.getNameToken()) |name_token| { - return tree.tokenSlice(name_token); - } + .fn_proto_simple => if (tree.fnProtoSimple(node).name_token) |name| { + return tree.tokenSlice(name); }, else => { log.debug("INVALID: {}", .{node.tag}); @@ -1211,35 +1233,72 @@ pub fn nodeToString(tree: ast.Tree, node: *ast.Node) ?[]const u8 { return null; } -fn nodeContainsSourceIndex(tree: ast.Tree, node: *ast.Node, source_index: usize) bool { - const first_token = tree.token_locs[node.firstToken()]; - const last_token = tree.token_locs[node.lastToken()]; - return source_index >= first_token.start and source_index <= last_token.end; +fn nodeContainsSourceIndex(tree: ast.Tree, node: ast.Node.Index, source_index: usize) bool { + const first_token = tree.tokenLocation(0, tree.firstToken(node)).line_start; + const last_token = tree.tokenLocation(@truncate(u32, first_token), tree.lastToken(node)).line_end; + return source_index >= first_token and source_index <= last_token; } -pub fn getImportStr(tree: ast.Tree, source_index: usize) ?[]const u8 { - var node = &tree.root_node.base; +fn isBuiltinCall(tree: ast.Tree, node: ast.Node.Index) bool { + return switch (tree.nodes.items(.tag)[node]) { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => true, + else => false, + }; +} - var child_idx: usize = 0; - while (node.iterate(child_idx)) |child| { - if (!nodeContainsSourceIndex(tree, child, source_index)) { - child_idx += 1; +fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Index { + std.debug.assert(isBuiltinCall(tree, node)); + const datas = tree.node.items(.data); + + return switch (tree.nodes.items(.tag)[node]) { + .builtin_call, .builtin_call_comma => tree.extra_data[datas[node].lhs..datas[node].rhs], + .builtin_call_two, .builtin_call_two_comma => if (datas[node].lhs == 0) + &.{} + else if (datas[node].rhs == 0) + 
&.{datas[node].lhs} + else + &.{ datas[node].lhs, datas[node].rhs }, + else => unreachable, + }; +} + +pub fn getImportStr(tree: ast.Tree, node: ast.Node.Index, source_index: usize) ?[]const u8 { + const node_tags = tree.nodes.items(.tag); + var buf: [2]ast.Node.Index = undefined; + const decls = switch (node_tags[node]) { + .root => tree.rootDecls(), + .container_decl => tree.containerDecl(node).ast.members, + .container_decl => tree.containerDeclArg(node).ast.members, + .container_decl => tree.containerDeclTwo(&buf, node).ast.members, + else => return null, + }; + + for (decls) |decl_idx| { + if (!nodeContainsSourceIndex(tree, decl_idx, source_index)) { continue; } - if (child.castTag(.BuiltinCall)) |builtin_call| blk: { - const call_name = tree.tokenSlice(builtin_call.builtin_token); - if (!std.mem.eql(u8, call_name, "@import")) break :blk; - if (builtin_call.params_len != 1) break :blk; + if (isBuiltinCall(tree, decl_idx)) { + const builtin_token = tree.nodes.items(.main_token)[decl_idx]; + const call_name = tree.tokenSlice(builtin_token); - const import_param = builtin_call.paramsConst()[0]; - const import_str_node = import_param.castTag(.StringLiteral) orelse break :blk; - const import_str = tree.tokenSlice(import_str_node.token); + if (!std.mem.eql(u8, call_name, "@import")) continue; + const params = builtinCallParams(tree, decl_idx); + if (params.len != 1) continue; + + const import_str = tree.tokenSlice(tree.firstToken(params[0])); return import_str[1 .. import_str.len - 1]; } - node = child; - child_idx = 0; + + if (getImportStr(tree, decl_idx, source_index)) |name| { + return name; + } } + return null; } @@ -1317,7 +1376,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. 
} return .other; }, - .line_comment, .doc_comment, .container_doc_comment => return .comment, + .doc_comment, .container_doc_comment => return .comment, .eof => break, else => {}, } From 96fcac89a46cf653526215bab1144be9d325c994 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 28 Feb 2021 20:37:21 +0100 Subject: [PATCH 06/36] Complete resolveTypeOfNodeInternal --- src/analysis.zig | 135 +++++++++++++++++++++++++---------------------- 1 file changed, 72 insertions(+), 63 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 436f857..8f824fd 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -442,7 +442,7 @@ fn resolveDerefType( const main_token = tree.nodes.items(.main_token)[deref_node]; const token_tag = tree.tokens.items(.tag)[main_token]; - if (isPtrType(deref_node)) { + if (isPtrType(tree, deref_node)) { switch (token_tag) { .asterisk => { return ((try resolveTypeOfNodeInternal(store, arena, .{ @@ -679,63 +679,57 @@ pub fn resolveTypeOfNodeInternal( } return null; }, - .Comptime => { - const ct = node.castTag(.Comptime).?; - return try resolveTypeOfNodeInternal(store, arena, .{ .node = ct.expr, .handle = handle }, bound_type_params); + .@"comptime", .@"nosuspend" => { + return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); }, - .GroupedExpression => { - const grouped = node.castTag(.GroupedExpression).?; - return try resolveTypeOfNodeInternal(store, arena, .{ .node = grouped.expr, .handle = handle }, bound_type_params); + .grouped_expression => { + return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); }, - .StructInitializer => { + .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => { const struct_init = node.castTag(.StructInitializer).?; return ((try resolveTypeOfNodeInternal( store, arena, - .{ .node = struct_init.lhs, .handle = handle }, + .{ .node = datas[node].lhs, .handle = 
handle }, bound_type_params, )) orelse return null).instanceTypeVal(); }, - .ErrorSetDecl => { + .error_set_decl => { return TypeWithHandle.typeVal(node_handle); }, - .Slice => { - const slice = node.castTag(.Slice).?; + .slice, .slice_sentinel, .slice_open => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = slice.lhs, + .node = dates[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveBracketAccessType(store, arena, left_type, .Range, bound_type_params); }, - .Deref, .UnwrapOptional => { - const suffix = node.cast(ast.Node.SimpleSuffixOp).?; + .deref, .unwrap_optional => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = suffix.lhs, + .node = dates[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; - return switch (node.tag) { - .UnwrapOptional => try resolveUnwrapOptionalType(store, arena, left_type, bound_type_params), - .Deref => try resolveDerefType(store, arena, left_type, bound_type_params), + return switch (node_tags[node]) { + .unwrap_optional => try resolveUnwrapOptionalType(store, arena, left_type, bound_type_params), + .deref => try resolveDerefType(store, arena, left_type, bound_type_params), else => unreachable, }; }, - .ArrayAccess => { - const arr_acc = node.castTag(.ArrayAccess).?; + .array_access => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = arr_acc.lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveBracketAccessType(store, arena, left_type, .Single, bound_type_params); }, - .Period => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; - const rhs_str = nodeToString(handle.tree, infix_op.rhs) orelse return null; + .field_access => { + const rhs_str = nodeToString(handle.tree, datas[node].rhs) orelse return null; // If we are accessing a pointer type, remove one pointerness level :) const left_type = try resolveFieldAccessLhsType( 
store, arena, (try resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null, bound_type_params, @@ -756,40 +750,39 @@ pub fn resolveTypeOfNodeInternal( return try child.resolveType(store, arena, bound_type_params); } else return null; }, - .OrElse => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; + .@"orelse" => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveUnwrapOptionalType(store, arena, left_type, bound_type_params); }, - .Catch => { - const infix_op = node.cast(ast.Node.Catch).?; + .@"catch" => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveUnwrapErrorType(store, arena, left_type, bound_type_params); }, - .ErrorUnion => return TypeWithHandle.typeVal(node_handle), - .SliceType, - .ArrayType, - .OptionalType, - .PtrType, + .error_union => return TypeWithHandle.typeVal(node_handle), + .array_type, + .array_type_sentinel, + .optional_type, + .ptr_type_aligned, + .ptr_type.aligned, + .ptr_type, + .ptr_type_bit_range, => return TypeWithHandle.typeVal(node_handle), - .Try => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; + .@"try" => { const rhs_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = prefix_op.rhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveUnwrapErrorType(store, arena, rhs_type, bound_type_params); }, - .AddressOf => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; + .address_of => { const rhs_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = prefix_op.rhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return 
null; @@ -803,12 +796,13 @@ pub fn resolveTypeOfNodeInternal( .handle = rhs_type.handle, }; }, - .BuiltinCall => { - const builtin_call = node.castTag(.BuiltinCall).?; - const call_name = handle.tree.tokenSlice(builtin_call.builtin_token); + .builtin_call, .builtin_call_comma, .builtin_call_two, .builtin_call_two_comma => { + const params = builtinCallParams(tree, node); + + const call_name = tree.tokenSlice(main_tokens[node]); if (std.mem.eql(u8, call_name, "@This")) { - if (builtin_call.params_len != 0) return null; - return innermostContainer(handle, handle.tree.token_locs[builtin_call.firstToken()].start); + if (params.len != 0) return null; + return innermostContainer(handle, starts[tree.firstToken(node)]); } const cast_map = std.ComptimeStringMap(void, .{ @@ -825,9 +819,9 @@ pub fn resolveTypeOfNodeInternal( .{"@ptrCast"}, }); if (cast_map.has(call_name)) { - if (builtin_call.params_len < 1) return null; + if (params.len < 1) return null; return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = builtin_call.paramsConst()[0], + .node = params[0], .handle = handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } @@ -835,9 +829,9 @@ pub fn resolveTypeOfNodeInternal( // Almost the same as the above, return a type value though. // TODO Do peer type resolution, we just keep the first for now. 
if (std.mem.eql(u8, call_name, "@TypeOf")) { - if (builtin_call.params_len < 1) return null; + if (params.len < 1) return null; var resolved_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = builtin_call.paramsConst()[0], + .node = params[0], .handle = handle, }, bound_type_params)) orelse return null; @@ -847,35 +841,50 @@ pub fn resolveTypeOfNodeInternal( } if (!std.mem.eql(u8, call_name, "@import")) return null; - if (builtin_call.params_len < 1) return null; + if (params.len < 1) return null; - const import_param = builtin_call.paramsConst()[0]; - if (import_param.tag != .StringLiteral) return null; + const import_param = params[0]; + if (node_tags[import_param] != .string_literal) return null; - const import_str = handle.tree.tokenSlice(import_param.castTag(.StringLiteral).?.token); + const import_str = tree.tokenSlice(main_tokens[import_param]); const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| { log.debug("Error {} while processing import {s}", .{ err, import_str }); return null; }) orelse return null; - return TypeWithHandle.typeVal(.{ .node = &new_handle.tree.root_node.base, .handle = new_handle }); + // reference to node '0' which is root + return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle }); }, - .ContainerDecl => { - const container = node.castTag(.ContainerDecl).?; - const kind = handle.tree.token_ids[container.kind_token]; + .container_decl, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_trailing, + .container_decl_two, + .container_decl_two_trailing, + => { return TypeWithHandle.typeVal(node_handle); }, - .FnProto => { + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple => { + var buf: [1]ast.Node.Index = undefined; + const fn_proto: ast.full.FnProto = switch (node_tags[node]) { + .fn_proto => tree.fnProto(node), + .fn_proto_multi => tree.fnProtoMulti(node), + .fn_proto_one => tree.fnProtoOne(&buf, node), + .fn_proto_simple => 
tree.fnProtoSimple(&buf, node), + else => unreachable, + }; + // This is a function type - if (node.castTag(.FnProto).?.getNameToken() == null) { + if (fn_proto.name_token == null) { return TypeWithHandle.typeVal(node_handle); } + return TypeWithHandle{ .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }; }, - .MultilineStringLiteral, .StringLiteral => return TypeWithHandle{ + .multiline_string_literal, .string_literal => return TypeWithHandle{ .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, From b651a79380ad557f834365f441b4c67ed2fd96d6 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 28 Feb 2021 21:57:15 +0100 Subject: [PATCH 07/36] Type resolving fixes --- src/analysis.zig | 236 ++++++++++++++++++++++++----------------------- 1 file changed, 121 insertions(+), 115 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 8f824fd..5fa25ef 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -132,7 +132,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: * if (curr_token == end_token and is_comma) continue; try buffer.appendSlice(tree.tokenSlice(curr_token)); - if (is_comma or tag == .Keyword_const) try buffer.append(' '); + if (is_comma or tag == .keyword_const) try buffer.append(' '); } } try buffer.append('}'); @@ -240,7 +240,7 @@ fn getDeclName(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { fn isContainerDecl(decl_handle: DeclWithHandle) bool { return switch (decl_handle.decl.*) { - .ast_node => |inner_node| inner_node.tag == .ContainerDecl or inner_node.tag == .Root, + .ast_node => |inner_node| isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]) or inner_node == 0, else => false, }; } @@ -370,20 +370,17 @@ fn resolveUnwrapOptionalType( arena: *std.heap.ArenaAllocator, opt: TypeWithHandle, bound_type_params: *BoundTypeParams, - tree: ast.Tree, ) !?TypeWithHandle { const opt_node = switch (opt.type.data) { .other => |n| n, 
else => return null, }; - if (opt_node.cast(ast.Node.SimplePrefixOp)) |prefix_op| { - if (opt_node.tag == .OptionalType) { - return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = prefix_op.rhs, - .handle = opt.handle, - }, bound_type_params)) orelse return null).instanceTypeVal(); - } + if (opt.handle.tree.nodes.items(.tag)[opt_node] == .optional_type) { + return ((try resolveTypeOfNodeInternal(store, arena, .{ + .node = opt.handle.tree.nodes.items(.data)[opt_node].lhs, + .handle = opt.handle, + }, bound_type_params)) orelse return null).instanceTypeVal(); } return null; @@ -403,14 +400,11 @@ fn resolveUnwrapErrorType( }, .primitive, .slice, .pointer => return null, }; - - if (rhs_node.cast(ast.Node.SimpleInfixOp)) |infix_op| { - if (rhs_node.tag == .ErrorUnion) { - return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.rhs, - .handle = rhs.handle, - }, bound_type_params)) orelse return null).instanceTypeVal(); - } + if (rhs.handle.tree.nodes.items(.tag)[rhs_node] == .error_union) { + return ((try resolveTypeOfNodeInternal(store, arena, .{ + .node = rhs.handle.tree.nodes.items(.data)[rhs_node].rhs, + .handle = rhs.handle, + }, bound_type_params)) orelse return null).instanceTypeVal(); } return null; @@ -470,32 +464,27 @@ fn resolveBracketAccessType( else => return null, }; - if (lhs_node.castTag(.SliceType)) |slice_type| { + const tags = lhs.handle.tree.nodes.items(.tag); + const tag = tags[lhs_node]; + const data = lhs.handle.tree.nodes.items(.data)[lhs_node]; + if (tag == .array_type or tag == .array_type_sentinel) { if (rhs == .Single) return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = slice_type.rhs, - .handle = lhs.handle, - }, bound_type_params)) orelse return null).instanceTypeVal(); - return lhs; - } else if (lhs_node.castTag(.ArrayType)) |array_type| { - if (rhs == .Single) - return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = array_type.rhs, + .node = data.rhs, .handle = lhs.handle, }, 
bound_type_params)) orelse return null).instanceTypeVal(); return TypeWithHandle{ - .type = .{ .data = .{ .slice = array_type.rhs }, .is_type_val = false }, + .type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false }, .handle = lhs.handle, }; - } else if (lhs_node.castTag(.PtrType)) |ptr_type| { - if (ptr_type.rhs.castTag(.ArrayType)) |child_arr| { + } else if (isPtrType(tree, lhs_node)) { + if (tags[data.rhs] == .array_type or tags[data.rhs] == .array_type_sentinel) { if (rhs == .Single) { return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = child_arr.rhs, + .node = lhs.handle.tree.nodes.items(.data)[data.rhs].rhs, .handle = lhs.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } - return lhs; } } @@ -931,28 +920,26 @@ pub const TypeWithHandle = struct { fn isRoot(self: TypeWithHandle) bool { switch (self.type.data) { - .other => |n| return n.tag == .Root, + // root is always index 0 + .other => |n| return n == 0, else => return false, } } - fn isContainer(self: TypeWithHandle, container_kind_tok: std.zig.Token.Id) bool { + fn isContainer(self: TypeWithHandle, container_kind_tok: std.zig.Token.Tag, tree: ast.Tree) bool { + const main_tokens = tree.nodes.items(.main_token); + const tags = tree.tokens.items(.tag); switch (self.type.data) { - .other => |n| { - if (n.castTag(.ContainerDecl)) |cont| { - return self.handle.tree.token_ids[cont.kind_token] == container_kind_tok; - } - return false; - }, + .other => |n| return tags[main_tokens[n]] == container_kind_tok, else => return false, } } - pub fn isStructType(self: TypeWithHandle) bool { - return self.isContainer(.Keyword_struct) or self.isRoot(); + pub fn isStructType(self: TypeWithHandle, tree: ast.Tree) bool { + return self.isContainer(.keyword_struct, tree) or self.isRoot(); } - pub fn isNamespace(self: TypeWithHandle) bool { + pub fn isNamespace(self: TypeWithHandle, tree: ast.Tree) bool { if (!self.isStructType()) return false; var idx: usize = 0; while 
(self.type.data.other.iterate(idx)) |child| : (idx += 1) { @@ -962,46 +949,56 @@ pub const TypeWithHandle = struct { return true; } - pub fn isEnumType(self: TypeWithHandle) bool { - return self.isContainer(.Keyword_enum); + pub fn isEnumType(self: TypeWithHandle, tree: ast.Tree) bool { + return self.isContainer(.keyword_enum, tree); } - pub fn isUnionType(self: TypeWithHandle) bool { - return self.isContainer(.Keyword_union); + pub fn isUnionType(self: TypeWithHandle, tree: ast.Tree) bool { + return self.isContainer(.keyword_union, tree); } - pub fn isOpaqueType(self: TypeWithHandle) bool { - return self.isContainer(.Keyword_opaque); + pub fn isOpaqueType(self: TypeWithHandle, tree: ast.Tree) bool { + return self.isContainer(.keyword_opaque, tree); } - pub fn isTypeFunc(self: TypeWithHandle) bool { + pub fn isTypeFunc(self: TypeWithHandle, tree: ast.Tree) bool { + var buf: [1]ast.Node.Index = undefined; switch (self.type.data) { - .other => |n| { - if (n.castTag(.FnProto)) |fn_proto| { - return isTypeFunction(self.handle.tree, fn_proto); - } - return false; + .other => |n| return switch (tree.nodes.items(.tag)[n]) { + .fn_proto => isTypeFunction(tree, tree.fnProto(n)), + .fn_proto_multi => isTypeFunction(tree, tree.fnProtoMulti(n)), + .fn_proto_one => isTypeFunction(tree, tree.fnProtoOne(&buf, n)), + .fn_proto_simple => isTypeFunction(tree, tree.fnProtoSimple(&buf, n)), + else => false, }, else => return false, } } - pub fn isGenericFunc(self: TypeWithHandle) bool { + pub fn isGenericFunc(self: TypeWithHandle, tree: ast.Tree) bool { + var buf: [1]ast.Node.Index = undefined; switch (self.type.data) { - .other => |n| { - if (n.castTag(.FnProto)) |fn_proto| { - return isGenericFunction(self.handle.tree, fn_proto); - } - return false; + .other => |n| return switch (tree.nodes.items(.tag)[n]) { + .fn_proto => isGenericFunction(tree, tree.fnProto(n)), + .fn_proto_multi => isGenericFunction(tree, tree.fnProtoMulti(n)), + .fn_proto_one => isGenericFunction(tree, 
tree.fnProtoOne(&buf, n)), + .fn_proto_simple => isGenericFunction(tree, tree.fnProtoSimple(&buf, n)), + else => false, }, else => return false, } } - pub fn isFunc(self: TypeWithHandle) bool { + pub fn isFunc(self: TypeWithHandle, tree: ast.Tree) bool { + const tags = tree.nodes.items(.tag); switch (self.type.data) { - .other => |n| { - return n.tag == .FnProto; + .other => |n| return switch (tags[n]) { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + => true, + else => false, }, else => return false, } @@ -1100,7 +1097,7 @@ pub fn getFieldAccessType( }, .period => { const after_period = tokenizer.next(); - switch (after_period.id) { + switch (after_period.tag) { .Eof => return FieldAccessReturn{ .original = current_type, .unwrapped = try resolveDerefType(store, arena, current_type, &bound_type_params), @@ -1133,7 +1130,7 @@ pub fn getFieldAccessType( current_type = (try resolveUnwrapOptionalType(store, arena, current_type, &bound_type_params)) orelse return null; }, else => { - log.debug("Unrecognized token {} after period.", .{after_period.id}); + log.debug("Unrecognized token {} after period.", .{after_period.tag}); return null; }, } @@ -1155,11 +1152,11 @@ pub fn getFieldAccessType( // Skip to the right paren var paren_count: usize = 1; var next = tokenizer.next(); - while (next.id != .Eof) : (next = tokenizer.next()) { - if (next.id == .RParen) { + while (next.tag != .Eof) : (next = tokenizer.next()) { + if (next.tag == .RParen) { paren_count -= 1; if (paren_count == 0) break; - } else if (next.id == .LParen) { + } else if (next.tag == .LParen) { paren_count += 1; } } else return null; @@ -1170,13 +1167,13 @@ pub fn getFieldAccessType( var brack_count: usize = 1; var next = tokenizer.next(); var is_range = false; - while (next.id != .Eof) : (next = tokenizer.next()) { - if (next.id == .RBracket) { + while (next.tag != .Eof) : (next = tokenizer.next()) { + if (next.tag == .RBracket) { brack_count -= 1; if (brack_count == 0) break; - } 
else if (next.id == .LBracket) { + } else if (next.tag == .LBracket) { brack_count += 1; - } else if (next.id == .Ellipsis2 and brack_count == 1) { + } else if (next.tag == .Ellipsis2 and brack_count == 1) { is_range = true; } } else return null; @@ -1664,10 +1661,10 @@ pub const Declaration = union(enum) { name: ast.TokenIndex, condition: ast.Node.Index, }, - array_payload: struct { - identifier: *ast.Node, - array_expr: ast.full.ArrayType, - }, + // array_payload: struct { + // identifier: *ast.Node, + // array_expr: ast.full.ArrayType, + // }, switch_payload: struct { node: ast.TokenIndex, switch_expr: ast.Node.Index, @@ -1686,7 +1683,7 @@ pub const DeclWithHandle = struct { .ast_node => |n| getDeclNameToken(tree, n).?, .param_decl => |p| p.name_token.?, .pointer_payload => |pp| pp.node.value_symbol.firstToken(), - .array_payload => |ap| ap.identifier.firstToken(), + // .array_payload => |ap| ap.identifier.firstToken(), .switch_payload => |sp| sp.node.value_symbol.firstToken(), .label_decl => |ld| ld.firstToken(), }; @@ -1705,30 +1702,30 @@ pub const DeclWithHandle = struct { } pub fn resolveType(self: DeclWithHandle, store: *DocumentStore, arena: *std.heap.ArenaAllocator, bound_type_params: *BoundTypeParams) !?TypeWithHandle { + const tree = self.handle.tree; + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); return switch (self.decl.*) { .ast_node => |node| try resolveTypeOfNodeInternal(store, arena, .{ .node = node, .handle = self.handle }, bound_type_params), - .param_decl => |param_decl| switch (param_decl.param_type) { - .type_expr => |type_node| { - if (typeIsType(self.handle.tree, type_node)) { - var bound_param_it = bound_type_params.iterator(); - while (bound_param_it.next()) |entry| { - if (entry.key == param_decl) return entry.value; - } - return null; - } else if (type_node.castTag(.Identifier)) |type_ident| { - if (param_decl.name_token) |name_tok| { - if (std.mem.eql(u8, 
self.handle.tree.tokenSlice(type_ident.firstToken()), self.handle.tree.tokenSlice(name_tok))) - return null; - } + .param_decl => |*param_decl| { + if (typeIsType(self.handle.tree, param_decl.type_expr)) { + var bound_param_it = bound_type_params.iterator(); + while (bound_param_it.next()) |entry| { + if (entry.key == param_decl) return entry.value; } - return ((try resolveTypeOfNodeInternal( - store, - arena, - .{ .node = type_node, .handle = self.handle }, - bound_type_params, - )) orelse return null).instanceTypeVal(); - }, - else => null, + return null; + } else if (node_tags[param_decl.type_expr] == .identifier) { + if (param_decl.name_token) |name_tok| { + if (std.mem.eql(u8, tree.tokenSlice(tree.firstToken(param_decl.type_expr)), tree.tokenSlice(name_tok))) + return null; + } + } + return ((try resolveTypeOfNodeInternal( + store, + arena, + .{ .node = param_decl.type_expr, .handle = self.handle }, + bound_type_params, + )) orelse return null).instanceTypeVal(); }, .pointer_payload => |pay| try resolveUnwrapOptionalType( store, @@ -1739,16 +1736,16 @@ pub const DeclWithHandle = struct { }, bound_type_params)) orelse return null, bound_type_params, ), - .array_payload => |pay| try resolveBracketAccessType( - store, - arena, - (try resolveTypeOfNodeInternal(store, arena, .{ - .node = pay.array_expr, - .handle = self.handle, - }, bound_type_params)) orelse return null, - .Single, - bound_type_params, - ), + // .array_payload => |pay| try resolveBracketAccessType( + // store, + // arena, + // (try resolveTypeOfNodeInternal(store, arena, .{ + // .node = pay.array_expr, + // .handle = self.handle, + // }, bound_type_params)) orelse return null, + // .Single, + // bound_type_params, + // ), .label_decl => return null, .switch_payload => |pay| { if (pay.items.len == 0) return null; @@ -1757,20 +1754,20 @@ pub const DeclWithHandle = struct { .node = pay.switch_expr, .handle = self.handle, }, bound_type_params)) orelse return null; - if 
(!switch_expr_type.isUnionType()) + if (!switch_expr_type.isUnionType(tree)) return null; - if (pay.items[0].castTag(.EnumLiteral)) |enum_lit| { + if (node_tags[pay.items[0]] == .enum_literal) { const scope = findContainerScope(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null; - if (scope.decls.getEntry(self.handle.tree.tokenSlice(enum_lit.name))) |candidate| { + if (scope.decls.getEntry(self.handle.tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { switch (candidate.value) { .ast_node => |node| { - if (node.castTag(.ContainerField)) |container_field| { - if (container_field.type_expr) |type_expr| { + if (containerField(tree, node)) |container_field| { + if (container_field.ast.type_expr != 0) { return ((try resolveTypeOfNodeInternal( store, arena, - .{ .node = type_expr, .handle = switch_expr_type.handle }, + .{ .node = container_field.ast.type_expr, .handle = switch_expr_type.handle }, bound_type_params, )) orelse return null).instanceTypeVal(); } @@ -1787,6 +1784,15 @@ pub const DeclWithHandle = struct { } }; +fn containerField(tree: ast.Tree, node: ast.Node.Index) ?ast.full.ContainerField { + return switch (tree.nodes.items(.tag)[node]) { + .container_field => tree.containerField(node), + .container_field_init => tree.containerFieldInit(node), + .container_field_align => tree.containerFieldAlign(node), + else => null, + }; +} + fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { const container = container_handle.node; const handle = container_handle.handle; From 1c9da7053c6dbe87b60397b792ebcb51ba0258fd Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 1 Mar 2021 14:32:19 +0100 Subject: [PATCH 08/36] More type resolving --- src/analysis.zig | 192 ++++++++++++++++++++++++--------------------- src/main.zig | 48 ++++++++---- src/references.zig | 45 +++++++---- 3 files changed, 163 insertions(+), 122 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 5fa25ef..c442dfc 
100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -252,20 +252,35 @@ fn resolveVarDeclAliasInternal( root: bool, ) error{OutOfMemory}!?DeclWithHandle { const handle = node_handle.handle; - if (node_handle.node.castTag(.Identifier)) |ident| { - return try lookupSymbolGlobal(store, arena, handle, handle.tree.tokenSlice(ident.token), handle.tree.token_locs[ident.token].start); + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + const datas = tree.nodes.items(.data); + + if (node_tags[node_handle.node] == .identifier) { + const token = main_tokens[node_handle.node]; + return try lookupSymbolGlobal( + store, + arena, + handle, + tree.tokenSlice(token), + tree.tokenLocation(0, token).line_start, + ); } - if (node_handle.node.cast(ast.Node.SimpleInfixOp)) |infix_op| { - if (node_handle.node.tag != .Period) return null; + if (node_tags[node_handle.node] == .field_access) { + const lhs = datas[node_handle.node].lhs; - const container_node = if (infix_op.lhs.castTag(.BuiltinCall)) |builtin_call| block: { - if (!std.mem.eql(u8, handle.tree.tokenSlice(builtin_call.builtin_token), "@import")) + const container_node = if (isBuiltinCall(tree, lhs)) block: { + const builtin = builtinCallParams(tree, lhs); + if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import")) return null; - const inner_node = (try resolveTypeOfNode(store, arena, .{ .node = infix_op.lhs, .handle = handle })) orelse return null; - std.debug.assert(inner_node.type.data.other.tag == .Root); + + const inner_node = (try resolveTypeOfNode(store, arena, .{ .node = lhs, .handle = handle })) orelse return null; + // assert root node + std.debug.assert(inner_node.type.data.other == 0); break :block NodeWithHandle{ .node = inner_node.type.data.other, .handle = inner_node.handle }; - } else if (try resolveVarDeclAliasInternal(store, arena, .{ .node = infix_op.lhs, .handle = handle }, false)) |decl_handle| block: { + } else if (try 
resolveVarDeclAliasInternal(store, arena, .{ .node = lhs, .handle = handle }, false)) |decl_handle| block: { if (decl_handle.decl.* != .ast_node) return null; const resolved = (try resolveTypeOfNode(store, arena, .{ .node = decl_handle.decl.ast_node, .handle = decl_handle.handle })) orelse return null; const resolved_node = switch (resolved.type.data) { @@ -273,11 +288,11 @@ fn resolveVarDeclAliasInternal( else => return null, }; - if (resolved_node.tag != .ContainerDecl and resolved_node.tag != .Root) return null; + if (!isContainer(node_tags[resolved_node])) return null; break :block NodeWithHandle{ .node = resolved_node, .handle = resolved.handle }; } else return null; - if (try lookupSymbolContainer(store, arena, container_node, handle.tree.tokenSlice(infix_op.rhs.firstToken()), false)) |inner_decl| { + if (try lookupSymbolContainer(store, arena, container_node, tree.tokenSlice(tree.firstToken(datas[lhs].rhs)), false)) |inner_decl| { if (root) return inner_decl; return inner_decl; } @@ -294,18 +309,22 @@ fn resolveVarDeclAliasInternal( pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocator, decl_handle: NodeWithHandle) !?DeclWithHandle { const decl = decl_handle.node; const handle = decl_handle.handle; + const tree = handle.tree; + const token_tags = tree.tokens.items(.tag); + const main_tokes = tree.nodes.items(.main_token); + const node_tags = tree.nodes.items(.tag); - if (decl.castTag(.VarDecl)) |var_decl| { - const base_expr = var_decl.getInitNode() orelse return null; - if (handle.tree.token_ids[var_decl.mut_token] != .Keyword_const) return null; + if (varDecl(handle.tree, decl)) |var_decl| { + if (var_decl.ast.init_node == 0) return null; + const base_exp = var_decl.ast.init_node; + if (token_tags[main_tokes[base_exp]] != .keyword_const) return null; - if (base_expr.cast(ast.Node.SimpleInfixOp)) |infix_op| { - if (base_expr.tag != .Period) return null; - const name = handle.tree.tokenSlice(infix_op.rhs.firstToken()); - if 
(!std.mem.eql(u8, handle.tree.tokenSlice(var_decl.name_token), name)) + if (node_tags[base_exp] == .field_access) { + const name = tree.tokenSlice(tree.firstToken(tree.nodes.items(.data)[base_exp].rhs)); + if (!std.mem.eql(u8, tree.tokenSlice(var_decl.ast.mut_token + 1), name)) return null; - return try resolveVarDeclAliasInternal(store, arena, .{ .node = base_expr, .handle = handle }, true); + return try resolveVarDeclAliasInternal(store, arena, .{ .node = base_exp, .handle = handle }, true); } } @@ -320,6 +339,7 @@ fn findReturnStatementInternal( ) ?*ast.Node.ControlFlowExpression { var result: ?*ast.Node.ControlFlowExpression = null; var child_idx: usize = 0; + while (base_node.iterate(child_idx)) |child_node| : (child_idx += 1) { if (child_node.castTag(.Return)) |cfe| { // If we are calling ourselves recursively, ignore this return. @@ -464,9 +484,10 @@ fn resolveBracketAccessType( else => return null, }; - const tags = lhs.handle.tree.nodes.items(.tag); + const tree = lhs.handle.tree; + const tags = tree.nodes.items(.tag); const tag = tags[lhs_node]; - const data = lhs.handle.tree.nodes.items(.data)[lhs_node]; + const data = tree.nodes.items(.data)[lhs_node]; if (tag == .array_type or tag == .array_type_sentinel) { if (rhs == .Single) return ((try resolveTypeOfNodeInternal(store, arena, .{ @@ -481,7 +502,7 @@ fn resolveBracketAccessType( if (tags[data.rhs] == .array_type or tags[data.rhs] == .array_type_sentinel) { if (rhs == .Single) { return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = lhs.handle.tree.nodes.items(.data)[data.rhs].rhs, + .node = tree.nodes.items(.data)[data.rhs].rhs, .handle = lhs.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } @@ -635,25 +656,19 @@ pub fn resolveTypeOfNodeInternal( }; var buf: [1]ast.Node.Index = undefined; - const func_maybe: ?ast.full.FnProto = switch (node_tags[decl_node]) { - .fn_proto => tree.fnProto(decl_node), - .fn_proto_one => tree.fnProtoOne(&buf, decl_node), - .fn_proto_multi 
=> tree.fnProtoMulti(decl_node), - .fn_proto_simple => tree.fnProtoSimple(&buf, decl_node), - else => null, - }; + const func_maybe = fnProto(tree, decl_node, &buf); if (func_maybe) |fn_decl| { // check for x.y(..). if '.' is found, it means first param should be skipped const has_self_param = token_tags[call.ast.lparen - 2] == .period; - var it = fn_decl.iterate(); + var it = fn_decl.iterate(tree); // Bind type params to the expressions passed in the calls. const param_len = std.math.min(call.ast.params.len + @boolToInt(has_self_param), fn_decl.ast.params.len); while (it.next()) |decl_param| { if (it.param_i == 0 and has_self_param) continue; if (it.param_i >= param_len) break; - if (!typeIsType(decl_param.type_expr)) continue; + if (!typeIsType(tree, decl_param.type_expr)) continue; const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ .node = call.ast.params[it.param_i - @boolToInt(has_self_param)], @@ -661,7 +676,7 @@ pub fn resolveTypeOfNodeInternal( }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; - _ = try bound_type_params.put(decl_param, call_param_type); + _ = try bound_type_params.put(&decl_param, call_param_type); } return try resolveReturnType(store, arena, fn_decl, decl.handle, bound_type_params); @@ -675,7 +690,6 @@ pub fn resolveTypeOfNodeInternal( return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); }, .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => { - const struct_init = node.castTag(.StructInitializer).?; return ((try resolveTypeOfNodeInternal( store, arena, @@ -688,14 +702,14 @@ pub fn resolveTypeOfNodeInternal( }, .slice, .slice_sentinel, .slice_open => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = dates[node].lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveBracketAccessType(store, arena, left_type, 
.Range, bound_type_params); }, .deref, .unwrap_optional => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = dates[node].lhs, + .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return switch (node_tags[node]) { @@ -758,7 +772,6 @@ pub fn resolveTypeOfNodeInternal( .array_type_sentinel, .optional_type, .ptr_type_aligned, - .ptr_type.aligned, .ptr_type, .ptr_type_bit_range, => return TypeWithHandle.typeVal(node_handle), @@ -855,13 +868,7 @@ pub fn resolveTypeOfNodeInternal( }, .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple => { var buf: [1]ast.Node.Index = undefined; - const fn_proto: ast.full.FnProto = switch (node_tags[node]) { - .fn_proto => tree.fnProto(node), - .fn_proto_multi => tree.fnProtoMulti(node), - .fn_proto_one => tree.fnProtoOne(&buf, node), - .fn_proto_simple => tree.fnProtoSimple(&buf, node), - else => unreachable, - }; + const fn_proto = fnProto(tree, node, &buf).?; // This is a function type if (fn_proto.name_token == null) { @@ -965,10 +972,11 @@ pub const TypeWithHandle = struct { var buf: [1]ast.Node.Index = undefined; switch (self.type.data) { .other => |n| return switch (tree.nodes.items(.tag)[n]) { - .fn_proto => isTypeFunction(tree, tree.fnProto(n)), - .fn_proto_multi => isTypeFunction(tree, tree.fnProtoMulti(n)), - .fn_proto_one => isTypeFunction(tree, tree.fnProtoOne(&buf, n)), - .fn_proto_simple => isTypeFunction(tree, tree.fnProtoSimple(&buf, n)), + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + => isTypeFunction(fnProto(tree, n, &buf).?), else => false, }, else => return false, @@ -979,10 +987,11 @@ pub const TypeWithHandle = struct { var buf: [1]ast.Node.Index = undefined; switch (self.type.data) { .other => |n| return switch (tree.nodes.items(.tag)[n]) { - .fn_proto => isGenericFunction(tree, tree.fnProto(n)), - .fn_proto_multi => isGenericFunction(tree, tree.fnProtoMulti(n)), - .fn_proto_one => isGenericFunction(tree, 
tree.fnProtoOne(&buf, n)), - .fn_proto_simple => isGenericFunction(tree, tree.fnProtoSimple(&buf, n)), + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + => isGenericFunction(fnProto(tree, n, &buf).?), else => false, }, else => return false, @@ -1082,6 +1091,7 @@ pub fn getFieldAccessType( // TODO Actually bind params here when calling functions instead of just skipping args. var bound_type_params = BoundTypeParams.init(&arena.allocator); + const tree = handle.tree; while (true) { const tok = tokenizer.next(); @@ -1098,11 +1108,11 @@ pub fn getFieldAccessType( .period => { const after_period = tokenizer.next(); switch (after_period.tag) { - .Eof => return FieldAccessReturn{ + .eof => return FieldAccessReturn{ .original = current_type, .unwrapped = try resolveDerefType(store, arena, current_type, &bound_type_params), }, - .Identifier => { + .identifier => { if (after_period.loc.end == tokenizer.buffer.len) { return FieldAccessReturn{ .original = current_type, @@ -1126,7 +1136,7 @@ pub fn getFieldAccessType( current_type = (try child.resolveType(store, arena, &bound_type_params)) orelse return null; } else return null; }, - .QuestionMark => { + .question_mark => { current_type = (try resolveUnwrapOptionalType(store, arena, current_type, &bound_type_params)) orelse return null; }, else => { @@ -1146,17 +1156,18 @@ pub fn getFieldAccessType( // Can't call a function type, we need a function type instance. 
if (current_type.type.is_type_val) return null; - if (current_type_node.castTag(.FnProto)) |func| { + var buf: [1]ast.Node.Index = undefined; + if (fnProto(tree, current_type_node, &buf)) |func| { if (try resolveReturnType(store, arena, func, current_type.handle, &bound_type_params)) |ret| { current_type = ret; // Skip to the right paren var paren_count: usize = 1; var next = tokenizer.next(); - while (next.tag != .Eof) : (next = tokenizer.next()) { - if (next.tag == .RParen) { + while (next.tag != .eof) : (next = tokenizer.next()) { + if (next.tag == .r_paren) { paren_count -= 1; if (paren_count == 0) break; - } else if (next.tag == .LParen) { + } else if (next.tag == .l_paren) { paren_count += 1; } } else return null; @@ -1167,13 +1178,13 @@ pub fn getFieldAccessType( var brack_count: usize = 1; var next = tokenizer.next(); var is_range = false; - while (next.tag != .Eof) : (next = tokenizer.next()) { - if (next.tag == .RBracket) { + while (next.tag != .eof) : (next = tokenizer.next()) { + if (next.tag == .r_bracket) { brack_count -= 1; if (brack_count == 0) break; - } else if (next.tag == .LBracket) { + } else if (next.tag == .l_bracket) { brack_count += 1; - } else if (next.tag == .Ellipsis2 and brack_count == 1) { + } else if (next.tag == .ellipsis2 and brack_count == 1) { is_range = true; } } else return null; @@ -1272,14 +1283,25 @@ fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Inde }; } +pub fn fnProto(tree: ast.Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?ast.full.FnProto { + return switch (tree.nodes.items(.tag)[node]) { + .fn_proto => tree.fnProto(node), + .fn_proto_multi => tree.fnProtoMulti(node), + .fn_proto_one => tree.fnProtoOne(buf, node), + .fn_proto_simple => tree.fnProtoSimple(buf, node), + .fn_decl => tree.fnProto(tree.nodes.items(.data)[node].lhs), + else => null, + }; +} + pub fn getImportStr(tree: ast.Tree, node: ast.Node.Index, source_index: usize) ?[]const u8 { const node_tags = 
tree.nodes.items(.tag); var buf: [2]ast.Node.Index = undefined; const decls = switch (node_tags[node]) { .root => tree.rootDecls(), - .container_decl => tree.containerDecl(node).ast.members, - .container_decl => tree.containerDeclArg(node).ast.members, - .container_decl => tree.containerDeclTwo(&buf, node).ast.members, + .container_decl, .container_decl_trailing => tree.containerDecl(node).ast.members, + .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node).ast.members, + .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node).ast.members, else => return null, }; @@ -1679,13 +1701,14 @@ pub const DeclWithHandle = struct { pub fn nameToken(self: DeclWithHandle) ast.TokenIndex { const tree = self.handle.tree; + const token_tags = tree.tokens.items(.tag); return switch (self.decl.*) { .ast_node => |n| getDeclNameToken(tree, n).?, .param_decl => |p| p.name_token.?, - .pointer_payload => |pp| pp.node.value_symbol.firstToken(), + .pointer_payload => |pp| pp.name, // .array_payload => |ap| ap.identifier.firstToken(), - .switch_payload => |sp| sp.node.value_symbol.firstToken(), - .label_decl => |ld| ld.firstToken(), + .switch_payload => |sp| sp.node + @boolToInt(token_tags[sp.node] == .asterisk), + .label_decl => |ld| ld, }; } @@ -1797,22 +1820,18 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { const container = container_handle.node; const handle = container_handle.handle; - if (container.tag != .ContainerDecl and container.tag != .Root and container.tag != .ErrorSetDecl) { - return null; - } + if (!isContainer(handle.tree.nodes.items(.tag)[container])) return null; // Find the container scope. 
var container_scope: ?*Scope = null; - for (handle.document_scope.scopes) |*scope| { + return for (handle.document_scope.scopes) |*scope| { switch (scope.*.data) { .container => |node| if (node == container) { - container_scope = scope; - break; + break container_scope; }, else => {}, } - } - return container_scope; + } else null; } fn iterateSymbolsContainerInternal( @@ -1907,7 +1926,7 @@ fn iterateSymbolsGlobalInternal( source_index: usize, comptime callback: anytype, context: anytype, - use_trail: *std.ArrayList(*ast.Node.Use), + use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!void { for (handle.document_scope.scopes) |scope| { if (source_index >= scope.range.start and source_index < scope.range.end) { @@ -1943,7 +1962,7 @@ pub fn iterateSymbolsGlobal( comptime callback: anytype, context: anytype, ) error{OutOfMemory}!void { - var use_trail = std.ArrayList(*ast.Node.Use).init(&arena.allocator); + var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); return try iterateSymbolsGlobalInternal(store, arena, handle, source_index, callback, context, &use_trail); } @@ -2019,7 +2038,7 @@ fn lookupSymbolGlobalInternal( handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize, - use_trail: *std.ArrayList(*ast.Node.Use), + use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { if (source_index >= scope.range.start and source_index < scope.range.end) { @@ -2053,7 +2072,7 @@ pub fn lookupSymbolGlobal( symbol: []const u8, source_index: usize, ) error{OutOfMemory}!?DeclWithHandle { - var use_trail = std.ArrayList(*ast.Node.Use).init(&arena.allocator); + var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); return try lookupSymbolGlobalInternal(store, arena, handle, symbol, source_index, &use_trail); } @@ -2065,7 +2084,7 @@ fn lookupSymbolContainerInternal( /// If true, we are looking up the symbol like we are accessing through a field access /// 
of an instance of the type, otherwise as a field access of the type value itself. instance_access: bool, - use_trail: *std.ArrayList(*ast.Node.Use), + use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { const container = container_handle.node; const handle = container_handle.handle; @@ -2106,7 +2125,7 @@ pub fn lookupSymbolContainer( /// of an instance of the type, otherwise as a field access of the type value itself. instance_access: bool, ) error{OutOfMemory}!?DeclWithHandle { - var use_trail = std.ArrayList(*ast.Node.Use).init(&arena.allocator); + var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); return try lookupSymbolContainerInternal(store, arena, container_handle, symbol, instance_access, &use_trail); } @@ -2380,14 +2399,7 @@ fn makeScopeInternal( switch (node) { .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { var buf: [1]ast.Node.Index = undefined; - const func: ast.full.FnProto = switch (node) { - .fn_proto => tree.fnProto(node_idx), - .fn_proto_one => tree.fnProtoOne(&buf, node_idx), - .fn_proto_simple => tree.fnProtoSimple(&buf, node_idx), - .fn_proto_multi => tree.fnProtoMulti(node_idx), - .fn_decl => tree.fnProto(data[node_idx].lhs), - else => unreachable, - }; + const func = fnProto(tree, node_idx, &buf).?; (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), diff --git a/src/main.zig b/src/main.zig index 350f472..c38b06c 100644 --- a/src/main.zig +++ b/src/main.zig @@ -616,10 +616,10 @@ fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}) else try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}), - .array_payload => |payload| if (hover_kind == .Markdown) - try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", 
.{handle.tree.tokenSlice(payload.identifier.firstToken())}) - else - try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier.firstToken())}), + // .array_payload => |payload| if (hover_kind == .Markdown) + // try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.identifier.firstToken())}) + // else + // try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier.firstToken())}), .switch_payload => |payload| if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}) else @@ -751,7 +751,7 @@ fn hoverDefinitionFieldAccess( fn gotoDefinitionString(arena: *std.heap.ArenaAllocator, id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void { const tree = handle.tree; - const import_str = analysis.getImportStr(tree, pos_index) orelse return try respondGeneric(id, null_result_response); + const import_str = analysis.getImportStr(tree, 0, pos_index) orelse return try respondGeneric(id, null_result_response); const uri = (try document_store.uriFromImportStr( &arena.allocator, handle.*, @@ -864,6 +864,15 @@ const DeclToCompletionContext = struct { orig_handle: *DocumentStore.Handle, }; +fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool { + const token_starts = tree.tokens.items(.start); + + const start = token_starts[start_token]; + const end = token_starts[end_token]; + + return std.mem.indexOf(u8, tree.source[start..end], "//") != null; +} + fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) !void { const tree = decl_handle.handle.tree; @@ -871,7 +880,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .ast_node => |node| try nodeToCompletion(context.arena, context.completions, .{ .node = node, .handle = decl_handle.handle }, 
null, context.orig_handle, false, context.config.*), .param_decl => |param| { const doc_kind: types.MarkupContent.Kind = if (client_capabilities.completion_doc_supports_md) .Markdown else .PlainText; - const doc = if (param.doc_comments) |doc_comments| + const doc = if (param.first_doc_comment) |doc_comments| types.MarkupContent{ .kind = doc_kind, .value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments, doc_kind), @@ -879,34 +888,41 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl else null; + const first_token = param.first_doc_comment orelse + param.comptime_noalias orelse + param.name_token orelse + param.anytype_ellipsis3 orelse + tree.firstToken(param.type_expr); + const last_token = tree.lastToken(param.type_expr); + try context.completions.append(.{ .label = tree.tokenSlice(param.name_token.?), .kind = .Constant, .documentation = doc, - .detail = tree.source[tree.token_locs[param.firstToken()].start..tree.token_locs[param.lastToken()].end], + .detail = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end], }); }, .pointer_payload => |payload| { try context.completions.append(.{ - .label = tree.tokenSlice(payload.node.value_symbol.firstToken()), - .kind = .Variable, - }); - }, - .array_payload => |payload| { - try context.completions.append(.{ - .label = tree.tokenSlice(payload.identifier.firstToken()), + .label = tree.tokenSlice(payload.name), .kind = .Variable, }); }, + // .array_payload => |payload| { + // try context.completions.append(.{ + // .label = tree.tokenSlice(payload.identifier.firstToken()), + // .kind = .Variable, + // }); + // }, .switch_payload => |payload| { try context.completions.append(.{ - .label = tree.tokenSlice(payload.node.value_symbol.firstToken()), + .label = tree.tokenSlice(tree.firstToken(payload.node)), .kind = .Variable, }); }, .label_decl => |label_decl| { try context.completions.append(.{ - .label = 
tree.tokenSlice(label_decl.firstToken()), + .label = tree.tokenSlice(label_decl), .kind = .Variable, }); }, diff --git a/src/references.zig b/src/references.zig index c2dd82c..6bef4f5 100644 --- a/src/references.zig +++ b/src/references.zig @@ -40,11 +40,13 @@ pub fn labelReferences( ) !void { std.debug.assert(decl.decl.* == .label_decl); const handle = decl.handle; + const tree = handle.tree; + const token_tags = tree.tokens.items(.tag); // Find while / for / block from label -> iterate over children nodes, find break and continues, change their labels if they match. // This case can be implemented just by scanning tokens. - const first_tok = decl.decl.label_decl.firstToken(); - const last_tok = decl.decl.label_decl.lastToken(); + const first_tok = tree.firstToken(decl.decl.label_decl); + const last_tok = tree.firstToken(decl.decl.label_decl); if (include_decl) { // The first token is always going to be the label @@ -53,11 +55,11 @@ pub fn labelReferences( var curr_tok = first_tok + 1; while (curr_tok < last_tok - 2) : (curr_tok += 1) { - const curr_id = handle.tree.token_ids[curr_tok]; - if ((curr_id == .Keyword_break or curr_id == .Keyword_continue) and handle.tree.token_ids[curr_tok + 1] == .Colon and - handle.tree.token_ids[curr_tok + 2] == .Identifier) + const curr_id = token_tags[curr_tok]; + if ((curr_id == .keyword_break or curr_id == .keyword_continue) and token_tags[curr_tok + 1] == .colon and + token_tags[curr_tok + 2] == .identifier) { - if (std.mem.eql(u8, handle.tree.tokenSlice(curr_tok + 2), handle.tree.tokenSlice(first_tok))) { + if (std.mem.eql(u8, tree.tokenSlice(curr_tok + 2), tree.tokenSlice(first_tok))) { try tokenReference(handle, first_tok, encoding, context, handler); } } @@ -388,7 +390,7 @@ pub fn symbolReferences( try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); } - try symbolReferencesInternal(arena, store, .{ .node = &handle.tree.root_node.base, .handle = handle }, decl_handle, encoding, context, 
handler); + try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); } }, .param_decl => |param| { @@ -396,13 +398,27 @@ pub fn symbolReferences( if (include_decl) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); } - const fn_node = loop: for (curr_handle.document_scope.scopes) |scope| { + const fn_node: ast.full.FnProto = loop: for (curr_handle.document_scope.scopes) |scope| { switch (scope.data) { .function => |proto| { - const fn_proto = proto.cast(std.zig.ast.Node.FnProto).?; - for (fn_proto.paramsConst()) |*candidate| { - if (candidate == param) + var buf: [1]ast.Node.Index = undefined; + const fn_proto = analysis.fnProto(curr_handle.tree, proto, &buf).?; + var it = fn_proto.iterate(curr_handle.tree); + while (it.next()) |candidate| { + if (std.meta.eql(candidate, param)) { + if (curr_handle.tree.nodes.items(.tag)[proto] == .fn_decl) { + try symbolReferencesInternal( + arena, + store, + .{ .node = curr_handle.tree.nodes.items(.data)[proto].rhs, .handle = curr_handle }, + decl_handle, + encoding, + context, + handler, + ); + } break :loop fn_proto; + } } }, else => {}, @@ -411,15 +427,12 @@ pub fn symbolReferences( log.warn("Could not find param decl's function", .{}); return; }; - if (fn_node.getBodyNode()) |body| { - try symbolReferencesInternal(arena, store, .{ .node = body, .handle = curr_handle }, decl_handle, encoding, context, handler); - } }, - .pointer_payload, .array_payload, .switch_payload => { + .pointer_payload, .switch_payload => { if (include_decl) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); } - try symbolReferencesInternal(arena, store, .{ .node = &curr_handle.tree.root_node.base, .handle = curr_handle }, decl_handle, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = curr_handle }, decl_handle, encoding, context, handler); }, .label_decl => unreachable, } 
From 46456384344af82c9eeb72d4cfe054967433d15a Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 1 Mar 2021 16:02:24 +0100 Subject: [PATCH 09/36] Fixes and completions --- src/analysis.zig | 26 ++++--- src/main.zig | 176 ++++++++++++++++++++++----------------------- src/offsets.zig | 4 +- src/references.zig | 2 +- 4 files changed, 108 insertions(+), 100 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index c442dfc..ea5f0e5 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -79,14 +79,14 @@ pub fn collectDocComments( } /// Gets a function signature (keywords, name, return value) -pub fn getFunctionSignature(tree: ast.Tree, func: *ast.full.FnProto) []const u8 { +pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { const start = tree.tokenLocation(func.ast.fn_token).line_start; const end = tree.tokenLocation(func.ast.return_type).line_end; return tree.source[start..end]; } /// Gets a function snippet insert text -pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: *ast.full.FnProto, skip_self_param: bool) ![]const u8 { +pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: ast.full.FnProto, skip_self_param: bool) ![]const u8 { const name_index = func.name_token orelse unreachable; var buffer = std.ArrayList(u8).init(allocator); @@ -143,14 +143,14 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: * } /// Gets a function signature (keywords, name, return value) -pub fn getVariableSignature(tree: ast.Tree, var_decl: *ast.full.VarDecl) []const u8 { +pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 { const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start; const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end; return tree.source[start..end]; } // analysis.getContainerFieldSignature(handle.tree, field) -pub fn getContainerFieldSignature(tree: 
ast.Tree, field: *ast.full.ContainerField) []const u8 { +pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 { const start = tree.tokenLocation(0, field.ast.name_token).line_start; const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(field.ast.value_expr)).line_start; return tree.source[start..end]; @@ -430,7 +430,7 @@ fn resolveUnwrapErrorType( return null; } -fn isPtrType(tree: ast.Tree, node: ast.Node.Index) bool { +pub fn isPtrType(tree: ast.Tree, node: ast.Node.Index) bool { return switch (tree.nodes.items(.tag)[node]) { .ptr_type, .ptr_type_aligned, @@ -1807,7 +1807,7 @@ pub const DeclWithHandle = struct { } }; -fn containerField(tree: ast.Tree, node: ast.Node.Index) ?ast.full.ContainerField { +pub fn containerField(tree: ast.Tree, node: ast.Node.Index) ?ast.full.ContainerField { return switch (tree.nodes.items(.tag)[node]) { .container_field => tree.containerField(node), .container_field_init => tree.containerFieldInit(node), @@ -1816,6 +1816,16 @@ fn containerField(tree: ast.Tree, node: ast.Node.Index) ?ast.full.ContainerField }; } +pub fn ptrType(tree: ast.Tree, node: ast.Node.Index) ?ast.full.PtrType { + return switch (tree.nodes.items(.tag)[node]) { + .ptr_type => tree.ptrType(node), + .ptr_type_aligned => tree.ptrTypeAligned(node), + .ptr_type_bit_range => tree.ptrTypeBitRange(node), + .ptr_type_sentinel => tree.ptrTypeSentinel(node), + else => null, + }; +} + fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { const container = container_handle.node; const handle = container_handle.handle; @@ -2218,7 +2228,7 @@ fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { }; } -fn isContainer(tag: ast.Node.Tag) bool { +pub fn isContainer(tag: ast.Node.Tag) bool { return switch (tag) { .container_decl, .container_decl_trailing, @@ -2265,7 +2275,7 @@ fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index) []co /// Returns an `ast.full.VarDecl` for a given 
node index. /// Returns null if the tag doesn't match -fn varDecl(tree: ast.Tree, node_idx: ast.Node.Index) ?ast.full.VarDecl { +pub fn varDecl(tree: ast.Tree, node_idx: ast.Node.Index) ?ast.full.VarDecl { return switch (tree.nodes.items(.tag)[node_idx]) { .global_var_decl => tree.globalVarDecl(node_idx), .local_var_decl => tree.localVarDecl(node_idx), diff --git a/src/main.zig b/src/main.zig index c38b06c..e9cce95 100644 --- a/src/main.zig +++ b/src/main.zig @@ -332,6 +332,10 @@ fn nodeToCompletion( ) error{OutOfMemory}!void { const node = node_handle.node; const handle = node_handle.handle; + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + const token_tags = tree.tokens.items(.tag); const doc_kind: types.MarkupContent.Kind = if (client_capabilities.completion_doc_supports_md) .Markdown @@ -351,7 +355,7 @@ fn nodeToCompletion( else null; - if (node.tag == .ErrorSetDecl or node.tag == .Root or node.tag == .ContainerDecl) { + if (analysis.isContainer(node_tags[node])) { const context = DeclToCompletionContext{ .completions = list, .config = &config, @@ -363,44 +367,43 @@ fn nodeToCompletion( if (is_type_val) return; - switch (node.tag) { - .FnProto => { - const func = node.cast(std.zig.ast.Node.FnProto).?; - if (func.getNameToken()) |name_token| { + switch (node_tags[node]) { + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_multi, .fn_decl => { + var buf: [1]std.zig.ast.Node.Index = undefined; + const func = analysis.fnProto(tree, node, &buf).?; + if (func.name_token) |name_token| { const use_snippets = config.enable_snippets and client_capabilities.supports_snippets; const insert_text = if (use_snippets) blk: { // TODO Also check if we are dot accessing from a type val and dont skip in that case. 
- const skip_self_param = if (func.params_len > 0) param_check: { - const in_container = analysis.innermostContainer(handle, handle.tree.token_locs[func.firstToken()].start); + const skip_self_param = if (func.ast.params.len > 0) param_check: { + const in_container = analysis.innermostContainer(handle, tree.tokenLocation(0, func.ast.fn_token).line_start); - switch (func.paramsConst()[0].param_type) { - .type_expr => |type_node| { - if (try analysis.resolveTypeOfNode(&document_store, arena, .{ - .node = type_node, - .handle = handle, - })) |resolved_type| { - if (std.meta.eql(in_container, resolved_type)) - break :param_check true; - } + var it = func.iterate(tree); + const param = it.next().?; - if (type_node.castTag(.PtrType)) |ptr_type| { - if (try analysis.resolveTypeOfNode(&document_store, arena, .{ - .node = ptr_type.rhs, - .handle = handle, - })) |resolved_prefix_op| { - if (std.meta.eql(in_container, resolved_prefix_op)) - break :param_check true; - } - } - - break :param_check false; - }, - else => break :param_check false, + if (try analysis.resolveTypeOfNode(&document_store, arena, .{ + .node = param.type_expr, + .handle = handle, + })) |resolved_type| { + if (std.meta.eql(in_container, resolved_type)) + break :param_check true; } + + if (analysis.isPtrType(tree, param.type_expr)) { + if (try analysis.resolveTypeOfNode(&document_store, arena, .{ + .node = datas[param.type_expr].rhs, + .handle = handle, + })) |resolved_prefix_op| { + if (std.meta.eql(in_container, resolved_prefix_op)) + break :param_check true; + } + } + + break :param_check false; } else false; - break :blk try analysis.getFunctionSnippet(&arena.allocator, handle.tree, func, skip_self_param); + break :blk try analysis.getFunctionSnippet(&arena.allocator, tree, func, skip_self_param); } else null; const is_type_function = analysis.isTypeFunction(handle.tree, func); @@ -415,9 +418,9 @@ fn nodeToCompletion( }); } }, - .VarDecl => { - const var_decl = node.cast(std.zig.ast.Node.VarDecl).?; - 
const is_const = handle.tree.token_ids[var_decl.mut_token] == .Keyword_const; + .global_var_decl, .local_var_decl, .aligned_var_decl, .simple_var_decl => { + const var_decl = analysis.varDecl(tree, node).?; + const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const; if (try analysis.resolveVarDeclAlias(&document_store, arena, node_handle)) |result| { const context = DeclToCompletionContext{ @@ -430,57 +433,46 @@ fn nodeToCompletion( } try list.append(.{ - .label = handle.tree.tokenSlice(var_decl.name_token), + .label = handle.tree.tokenSlice(var_decl.ast.mut_token + 1), .kind = if (is_const) .Constant else .Variable, .documentation = doc, - .detail = analysis.getVariableSignature(handle.tree, var_decl), + .detail = analysis.getVariableSignature(tree, var_decl), }); }, - .ContainerField => { - const field = node.cast(std.zig.ast.Node.ContainerField).?; + .container_field, .container_field_align, .container_field_init => { + const field = analysis.containerField(tree, node).?; try list.append(.{ - .label = handle.tree.tokenSlice(field.name_token), + .label = handle.tree.tokenSlice(field.ast.name_token), .kind = .Field, .documentation = doc, .detail = analysis.getContainerFieldSignature(handle.tree, field), }); }, - .SliceType => { - try list.append(.{ - .label = "len", - .kind = .Field, - }); - try list.append(.{ - .label = "ptr", - .kind = .Field, - }); - }, - .ArrayType => { + .array_type, .array_type_sentinel => { try list.append(.{ .label = "len", .kind = .Field, }); }, - .PtrType => { - if (config.operator_completions) { - try list.append(.{ - .label = "*", - .kind = .Operator, - }); + .ptr_type, .ptr_type_aligned, .ptr_type_bit_range, .ptr_type_sentinel => { + const ptr_type = analysis.ptrType(tree, node).?; + + switch (ptr_type.size) { + .One, .C => if (config.operator_completions) { + try list.append(.{ + .label = "*", + .kind = .Operator, + }); + }, + .Many, .Slice => return list.append(.{ .label = "len", .kind = .Field }), } - const ptr_type = 
node.castTag(.PtrType).?; - if (ptr_type.rhs.castTag(.ArrayType) != null) { - try list.append(.{ - .label = "len", - .kind = .Field, - }); - } else if (unwrapped) |actual_type| { + if (unwrapped) |actual_type| { try typeToCompletion(arena, list, .{ .original = actual_type }, orig_handle, config); } return; }, - .OptionalType => { + .optional_type => { if (config.operator_completions) { try list.append(.{ .label = "?", @@ -489,7 +481,7 @@ fn nodeToCompletion( } return; }, - .StringLiteral => { + .string_literal => { try list.append(.{ .label = "len", .kind = .Field, @@ -500,7 +492,7 @@ fn nodeToCompletion( .label = string, .kind = .Field, .documentation = doc, - .detail = handle.tree.getNodeSource(node), + .detail = tree.getNodeSource(node), }); }, } @@ -564,8 +556,13 @@ fn gotoDefinitionSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, de }); } -fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle: analysis.DeclWithHandle) (std.os.WriteError || error{OutOfMemory})!void { +fn hoverSymbol( + id: types.RequestId, + arena: *std.heap.ArenaAllocator, + decl_handle: analysis.DeclWithHandle, +) (std.os.WriteError || error{OutOfMemory})!void { const handle = decl_handle.handle; + const tree = handle.tree; const hover_kind: types.MarkupContent.Kind = if (client_capabilities.hover_supports_md) .Markdown else .PlainText; const md_string = switch (decl_handle.decl.*) { @@ -574,26 +571,20 @@ fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle return try hoverSymbol(id, arena, result); } - const doc_str = if (try analysis.getDocComments(&arena.allocator, handle.tree, node, hover_kind)) |str| + const doc_str = if (try analysis.getDocComments(&arena.allocator, tree, node, hover_kind)) |str| str else ""; - const signature_str = switch (node.tag) { - .VarDecl => blk: { - const var_decl = node.cast(std.zig.ast.Node.VarDecl).?; - break :blk analysis.getVariableSignature(handle.tree, var_decl); - }, - .FnProto => blk: { 
- const fn_decl = node.cast(std.zig.ast.Node.FnProto).?; - break :blk analysis.getFunctionSignature(handle.tree, fn_decl); - }, - .ContainerField => blk: { - const field = node.cast(std.zig.ast.Node.ContainerField).?; - break :blk analysis.getContainerFieldSignature(handle.tree, field); - }, - else => analysis.nodeToString(handle.tree, node) orelse return try respondGeneric(id, null_result_response), - }; + var buf: [1]std.zig.ast.Node.Index = undefined; + const signature_str = if (analysis.varDecl(tree, node)) |var_decl| blk: { + break :blk analysis.getVariableSignature(tree, var_decl); + } else if (analysis.fnProto(tree, node, &buf)) |fn_proto| blk: { + break :blk analysis.getFunctionSignature(tree, fn_proto); + } else if (analysis.containerField(tree, node)) |field| blk: { + break :blk analysis.getContainerFieldSignature(tree, field); + } else analysis.nodeToString(tree, node) orelse + return try respondGeneric(id, null_result_response); break :ast_node if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```\n{s}", .{ signature_str, doc_str }) @@ -601,31 +592,38 @@ fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle try std.fmt.allocPrint(&arena.allocator, "{s}\n{s}", .{ signature_str, doc_str }); }, .param_decl => |param| param_decl: { - const doc_str = if (param.doc_comments) |doc_comments| + const doc_str = if (param.first_doc_comment) |doc_comments| try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments, hover_kind) else ""; - const signature_str = handle.tree.source[handle.tree.token_locs[param.firstToken()].start..handle.tree.token_locs[param.lastToken()].end]; + const first_token = param.first_doc_comment orelse + param.comptime_noalias orelse + param.name_token orelse + param.anytype_ellipsis3 orelse + tree.firstToken(param.type_expr); + const last_token = tree.lastToken(param.type_expr); + + const signature_str = tree.source[tree.tokenLocation(0, 
first_token).line_start..tree.tokenLocation(0, last_token).line_end]; break :param_decl if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```\n{s}", .{ signature_str, doc_str }) else try std.fmt.allocPrint(&arena.allocator, "{s}\n{s}", .{ signature_str, doc_str }); }, .pointer_payload => |payload| if (hover_kind == .Markdown) - try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}) + try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{tree.tokenSlice(payload.name)}) else - try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}), + try std.fmt.allocPrint(&arena.allocator, "{s}", .{tree.tokenSlice(payload.name)}), // .array_payload => |payload| if (hover_kind == .Markdown) // try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.identifier.firstToken())}) // else // try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier.firstToken())}), .switch_payload => |payload| if (hover_kind == .Markdown) - try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}) + try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{tree.tokenSlice(payload.node)}) else - try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())}), + try std.fmt.allocPrint(&arena.allocator, "{s}", .{tree.tokenSlice(payload.node)}), .label_decl => |label_decl| block: { - const source = handle.tree.source[handle.tree.token_locs[label_decl.firstToken()].start..handle.tree.token_locs[label_decl.lastToken()].end]; + const source = tree.tokenSlice(label_decl); break :block if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{source}) else diff --git a/src/offsets.zig b/src/offsets.zig index 
fac44ee..8201924 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -113,9 +113,9 @@ pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encodi if (encoding == .utf8) return token_loc.line_end - token_loc.line_start; - var i: usize = token_loc.start; + var i: usize = token_loc.line_start; var utf16_len: usize = 0; - while (i < token_loc.end) { + while (i < token_loc.line_end) { const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable; const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable; if (codepoint < 0x10000) { diff --git a/src/references.zig b/src/references.zig index 6bef4f5..73b2095 100644 --- a/src/references.zig +++ b/src/references.zig @@ -78,7 +78,7 @@ fn symbolReferencesInternal( const node = node_handle.node; const handle = node_handle.handle; - switch (node.tag) { + switch (handle.tree.nodes.items(.tag)[node]) { .ContainerDecl, .Root, .Block => { var idx: usize = 0; while (node.iterate(idx)) |child| : (idx += 1) { From e82ea30e6fb21ec6f22eb5c23ca572c6a7148679 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 1 Mar 2021 16:30:43 +0100 Subject: [PATCH 10/36] Last fixes in src/analysis.zig --- src/analysis.zig | 62 ++++++++++++++++++++++++++++-------------------- src/main.zig | 2 +- 2 files changed, 37 insertions(+), 27 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index ea5f0e5..a3be758 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -80,8 +80,8 @@ pub fn collectDocComments( /// Gets a function signature (keywords, name, return value) pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { - const start = tree.tokenLocation(func.ast.fn_token).line_start; - const end = tree.tokenLocation(func.ast.return_type).line_end; + const start = tree.tokenLocation(0, func.ast.fn_token).line_start; + const end = tree.tokenLocation(0, func.ast.return_type).line_end; return tree.source[start..end]; } @@ -92,7 +92,7 @@ pub fn 
getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a var buffer = std.ArrayList(u8).init(allocator); try buffer.ensureCapacity(128); - try buffer.appendSlice(tree.tokenSlice(name_tok)); + try buffer.appendSlice(tree.tokenSlice(name_index)); try buffer.append('('); var buf_stream = buffer.writer(); @@ -1205,21 +1205,22 @@ pub fn getFieldAccessType( } pub fn isNodePublic(tree: ast.Tree, node: ast.Node.Index) bool { - switch (tree.nodes.items(.tag)[node]) { + var buf: [1]ast.Node.Index = undefined; + return switch (tree.nodes.items(.tag)[node]) { .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { - const var_decl = node.castTag(.VarDecl).?; const var_decl = varDecl(tree, node).?; return var_decl.visib_token != null; }, .fn_proto => tree.fnProto(node).visib_token != null, - .fn_proto_one => tree.fnProtoOne(node).visib_token != null, - .fn_proto_simple => tree.fnProtoSimple(node).visib_token != null, + .fn_proto_one => tree.fnProtoOne(&buf, node).visib_token != null, + .fn_proto_simple => tree.fnProtoSimple(&buf, node).visib_token != null, .fn_proto_multi => tree.fnProtoMulti(node).visib_token != null, - else => return true, - } + else => true, + }; } pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { + var buf: [1]ast.Node.Index = undefined; switch (tree.nodes.items(.tag)[node]) { .container_field => return tree.tokenSlice(tree.containerField(node).ast.name_token), .container_field_init => return tree.tokenSlice(tree.containerFieldInit(node).ast.name_token), @@ -1233,17 +1234,17 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { .fn_proto => if (tree.fnProto(node).name_token) |name| { return tree.tokenSlice(name); }, - .fn_proto_one => if (tree.fnProtoOne(node).name_token) |name| { + .fn_proto_one => if (tree.fnProtoOne(&buf, node).name_token) |name| { return tree.tokenSlice(name); }, .fn_proto_multi => if (tree.fnProtoMulti(node).name_token) |name| { return 
tree.tokenSlice(name); }, - .fn_proto_simple => if (tree.fnProtoSimple(node).name_token) |name| { + .fn_proto_simple => if (tree.fnProtoSimple(&buf, node).name_token) |name| { return tree.tokenSlice(name); }, else => { - log.debug("INVALID: {}", .{node.tag}); + log.debug("INVALID: {}", .{tree.nodes.items(.tag)[node]}); }, } @@ -1269,16 +1270,16 @@ fn isBuiltinCall(tree: ast.Tree, node: ast.Node.Index) bool { fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Index { std.debug.assert(isBuiltinCall(tree, node)); - const datas = tree.node.items(.data); + const datas = tree.nodes.items(.data); return switch (tree.nodes.items(.tag)[node]) { .builtin_call, .builtin_call_comma => tree.extra_data[datas[node].lhs..datas[node].rhs], .builtin_call_two, .builtin_call_two_comma => if (datas[node].lhs == 0) - &.{} + &[_]ast.Node.Index{} else if (datas[node].rhs == 0) - &.{datas[node].lhs} + &[_]ast.Node.Index{datas[node].lhs} else - &.{ datas[node].lhs, datas[node].rhs }, + &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }, else => unreachable, }; } @@ -1852,13 +1853,18 @@ fn iterateSymbolsContainerInternal( comptime callback: anytype, context: anytype, instance_access: bool, - use_trail: *std.ArrayList(*ast.Node.Use), + use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!void { const container = container_handle.node; const handle = container_handle.handle; - const is_enum = if (container.castTag(.ContainerDecl)) |cont_decl| - handle.tree.token_ids[cont_decl.kind_token] == .Keyword_enum + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const main_token = tree.nodes.items(.main_token)[container]; + + const is_enum = if (isContainer(node_tags[container])) + token_tags[main_token] == .keyword_enum else false; @@ -1867,7 +1873,7 @@ fn iterateSymbolsContainerInternal( while (decl_it.next()) |entry| { switch (entry.value) { .ast_node => |node| { - if (node.tag == 
.ContainerField) { + if (node_tags[node].isContainerField()) { if (!instance_access and !is_enum) continue; if (instance_access and is_enum) continue; } @@ -1904,7 +1910,7 @@ pub fn iterateSymbolsContainer( context: anytype, instance_access: bool, ) error{OutOfMemory}!void { - var use_trail = std.ArrayList(*ast.Node.Use).init(&arena.allocator); + var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail); } @@ -1942,7 +1948,7 @@ fn iterateSymbolsGlobalInternal( if (source_index >= scope.range.start and source_index < scope.range.end) { var decl_it = scope.decls.iterator(); while (decl_it.next()) |entry| { - if (entry.value == .ast_node and entry.value.ast_node.tag == .ContainerField) continue; + if (entry.value == .ast_node and handle.tree.nodes.items(.tag)[entry.value.ast_node].isContainerField()) continue; if (entry.value == .label_decl) continue; try callback(context, DeclWithHandle{ .decl = &entry.value, .handle = handle }); } @@ -2055,7 +2061,7 @@ fn lookupSymbolGlobalInternal( if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) { .ast_node => |node| { - if (node.tag == .ContainerField) continue; + if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; }, .label_decl => continue, else => {}, @@ -2098,9 +2104,13 @@ fn lookupSymbolContainerInternal( ) error{OutOfMemory}!?DeclWithHandle { const container = container_handle.node; const handle = container_handle.handle; + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (container.castTag(.ContainerDecl)) |cont_decl| - handle.tree.token_ids[cont_decl.kind_token] == .Keyword_enum + const is_enum = if (isContainer(node_tags[container])) + token_tags[main_token] == .keyword_enum else false; 
@@ -2108,7 +2118,7 @@ fn lookupSymbolContainerInternal( if (container_scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) { .ast_node => |node| { - if (node.tag == .ContainerField) { + if (node_tags[node].isContainerField()) { if (!instance_access and !is_enum) return null; if (instance_access and is_enum) return null; } diff --git a/src/main.zig b/src/main.zig index e9cce95..46c63d7 100644 --- a/src/main.zig +++ b/src/main.zig @@ -368,7 +368,7 @@ fn nodeToCompletion( if (is_type_val) return; switch (node_tags[node]) { - .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_multi, .fn_decl => { + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_decl => { var buf: [1]std.zig.ast.Node.Index = undefined; const func = analysis.fnProto(tree, node, &buf).?; if (func.name_token) |name_token| { From ada0d13ba929e5aaf1c283e1e852df9e206d755e Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 1 Mar 2021 19:34:28 +0100 Subject: [PATCH 11/36] Fix symbolReferencesInternal --- src/analysis.zig | 13 +- src/references.zig | 557 ++++++++++++++++++++++++++------------------- 2 files changed, 333 insertions(+), 237 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index a3be758..92dc92d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2261,21 +2261,15 @@ pub fn isContainer(tag: ast.Node.Tag) bool { /// Returns the member indices of a given declaration container. 
/// Asserts given `tag` is a container node -fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index) []const ast.Node.Index { +pub fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index, buffer: *[2]ast.Node.Index) []const ast.Node.Index { std.debug.assert(isContainer(tag)); return switch (tag) { .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members, .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members, - .container_decl_two, .container_decl_two_trailing => blk: { - var buffer: [2]ast.Node.Index = undefined; - break :blk tree.containerDeclTwo(&buffer, node_idx).ast.members; - }, + .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buffer, node_idx).ast.members, .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members, - .tagged_union_two, .tagged_union_two_trailing => blk: { - var buffer: [2]ast.Node.Index = undefined; - break :blk tree.taggedUnionTwo(&buffer, node_idx).ast.members; - }, + .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buffer, node_idx).ast.members, .root => tree.rootDecls(), // @TODO: Fix error set declarations .error_set_decl => &[_]ast.Node.Index{}, @@ -2312,6 +2306,7 @@ fn makeScopeInternal( const node = tags[node_idx]; if (isContainer(node)) { + var buf: [2]ast.Node.Index = undefined; const ast_decls = declMembers(tree, node, node_idx); (try scopes.addOne(allocator)).* = .{ diff --git a/src/references.zig b/src/references.zig index 73b2095..2361f7b 100644 --- a/src/references.zig +++ b/src/references.zig @@ -77,263 +77,326 @@ fn symbolReferencesInternal( ) error{OutOfMemory}!void { const node = node_handle.node; const handle = node_handle.handle; + const tree = handle.tree; + const node_tags = tree.nodes.items(.tag); + const datas = 
tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const starts = tree.tokens.items(.start); - switch (handle.tree.nodes.items(.tag)[node]) { - .ContainerDecl, .Root, .Block => { - var idx: usize = 0; - while (node.iterate(idx)) |child| : (idx += 1) { - try symbolReferencesInternal(arena, store, .{ .node = child, .handle = handle }, decl, encoding, context, handler); - } - }, - .VarDecl => { - const var_decl = node.cast(ast.Node.VarDecl).?; - if (var_decl.getTypeNode()) |type_node| { - try symbolReferencesInternal(arena, store, .{ .node = type_node, .handle = handle }, decl, encoding, context, handler); - } - if (var_decl.getInitNode()) |init_node| { - try symbolReferencesInternal(arena, store, .{ .node = init_node, .handle = handle }, decl, encoding, context, handler); - } - }, - .Use => { - const use = node.cast(ast.Node.Use).?; - try symbolReferencesInternal(arena, store, .{ .node = use.expr, .handle = handle }, decl, encoding, context, handler); - }, - .ContainerField => { - const field = node.cast(ast.Node.ContainerField).?; - if (field.type_expr) |type_node| { - try symbolReferencesInternal(arena, store, .{ .node = type_node, .handle = handle }, decl, encoding, context, handler); - } - if (field.value_expr) |init_node| { - try symbolReferencesInternal(arena, store, .{ .node = init_node, .handle = handle }, decl, encoding, context, handler); - } - }, - .Identifier => { - if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { - if (std.meta.eql(decl, child)) { - try tokenReference(handle, node.firstToken(), encoding, context, handler); - } - } - }, - .FnProto => { - const fn_proto = node.cast(ast.Node.FnProto).?; - for (fn_proto.paramsConst()) |param| { - switch (param.param_type) { - .type_expr => |type_node| { - try symbolReferencesInternal(arena, store, .{ .node = type_node, .handle = handle }, decl, encoding, context, handler); - }, - 
else => {}, - } - } - switch (fn_proto.return_type) { - .Explicit, .InferErrorSet => |type_node| { - try symbolReferencesInternal(arena, store, .{ .node = type_node, .handle = handle }, decl, encoding, context, handler); + switch (node_tags[node]) { + .block, .block_semicolon, .block_two, .block_two_semicolon => { + const statements: []const ast.Node.Index = switch (node_tags[node]) { + .block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs], + .block_two, .block_two_semicolon => blk: { + const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + const len: usize = if (datas[node].lhs == 0) + @as(usize, 0) + else if (datas[node].rhs == 0) + @as(usize, 1) + else + @as(usize, 2); + break :blk statements[0..len]; }, - else => {}, + else => unreachable, + }; + for (statements) |stmt| + try symbolReferencesInternal(arena, store, .{ .node = stmt, .handle = handle }, decl, encoding, context, handler); + }, + .container_decl, + .container_decl_trailing, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_two, + .container_decl_two_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .root, + .error_set_decl, + => { + var buf: [2]ast.Node.Index = undefined; + for (analysis.declMembers(tree, node_tags[node], node, &buf)) |member| + try symbolReferencesInternal(arena, store, .{ .node = member, .handle = handle }, decl, encoding, context, handler); + }, + .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + const var_decl = analysis.varDecl(tree, node).?; + if (var_decl.ast.type_node != 0) { + try symbolReferencesInternal(arena, store, .{ .node = var_decl.ast.type_node, .handle = handle }, decl, encoding, context, handler); } - if (fn_proto.getAlignExpr()) |align_expr| { - try symbolReferencesInternal(arena, store, .{ .node = align_expr, .handle = handle }, decl, encoding, 
context, handler); - } - if (fn_proto.getSectionExpr()) |section_expr| { - try symbolReferencesInternal(arena, store, .{ .node = section_expr, .handle = handle }, decl, encoding, context, handler); - } - if (fn_proto.getCallconvExpr()) |callconv_expr| { - try symbolReferencesInternal(arena, store, .{ .node = callconv_expr, .handle = handle }, decl, encoding, context, handler); - } - if (fn_proto.getBodyNode()) |body| { - try symbolReferencesInternal(arena, store, .{ .node = body, .handle = handle }, decl, encoding, context, handler); + if (var_decl.ast.init_node != 0) { + try symbolReferencesInternal(arena, store, .{ .node = var_decl.ast.init_node, .handle = handle }, decl, encoding, context, handler); } }, - .AnyFrameType => { - const anyframe_type = node.cast(ast.Node.AnyFrameType).?; - if (anyframe_type.result) |result| { - try symbolReferencesInternal(arena, store, .{ .node = result.return_type, .handle = handle }, decl, encoding, context, handler); + // @TODO: Usingnamespace + // .Use => { + // const use = node.cast(ast.Node.Use).?; + // try symbolReferencesInternal(arena, store, .{ .node = use.expr, .handle = handle }, decl, encoding, context, handler); + // }, + .container_field, .container_field_align, .container_field_init => { + const field = analysis.containerField(node).?; + if (field.ast.type_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = field.ast.type_expr, .handle = handle }, decl, encoding, context, handler); + } + if (field.ast.value_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = field.ast.value_expr, .handle = handle }, decl, encoding, context, handler); } }, - .Defer => { - const defer_node = node.cast(ast.Node.Defer).?; - try symbolReferencesInternal(arena, store, .{ .node = defer_node.expr, .handle = handle }, decl, encoding, context, handler); - }, - .Comptime => { - const comptime_node = node.cast(ast.Node.Comptime).?; - try symbolReferencesInternal(arena, store, .{ .node = comptime_node.expr, 
.handle = handle }, decl, encoding, context, handler); - }, - .Nosuspend => { - const nosuspend_node = node.cast(ast.Node.Nosuspend).?; - try symbolReferencesInternal(arena, store, .{ .node = nosuspend_node.expr, .handle = handle }, decl, encoding, context, handler); - }, - .Switch => { - // TODO When renaming a union(enum) field, also rename switch items that refer to it. - const switch_node = node.cast(ast.Node.Switch).?; - try symbolReferencesInternal(arena, store, .{ .node = switch_node.expr, .handle = handle }, decl, encoding, context, handler); - for (switch_node.casesConst()) |case| { - if (case.*.cast(ast.Node.SwitchCase)) |case_node| { - try symbolReferencesInternal(arena, store, .{ .node = case_node.expr, .handle = handle }, decl, encoding, context, handler); + .identifier => { + if (try analysis.lookupSymbolGlobal(store, arena, handle, tree.getNodeSource(node), starts[main_tokens[nodes]])) |child| { + if (std.meta.eql(decl, child)) { + try tokenReference(handle, main_tokens[node], encoding, context, handler); } } }, - .While => { - const while_node = node.cast(ast.Node.While).?; - try symbolReferencesInternal(arena, store, .{ .node = while_node.condition, .handle = handle }, decl, encoding, context, handler); - if (while_node.continue_expr) |cont_expr| { - try symbolReferencesInternal(arena, store, .{ .node = cont_expr, .handle = handle }, decl, encoding, context, handler); + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => { + var buf: [1]ast.Node.Index = undefined; + const fn_proto = analysis.fnProto(tree, node, &buf).?; + var it = fn_proto.iterate(tree); + while (it.next()) |param| { + if (param.type_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = param.type_expr, .handle = handle }, decl, encoding, context, handler); } - try symbolReferencesInternal(arena, store, .{ .node = while_node.body, .handle = handle }, decl, encoding, context, handler); - if (while_node.@"else") |else_node| { - try 
symbolReferencesInternal(arena, store, .{ .node = else_node.body, .handle = handle }, decl, encoding, context, handler); + + if (fn_proto.ast.return_type != 0) { + try symbolReferencesInternal(arena, store, .{ .node = fn_proto.ast.return_type, .handle = handle }, decl, encoding, context, handler); + } + if (fn_proto.ast.align_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = fn_proto.ast.align_expr, .handle = handle }, decl, encoding, context, handler); + } + if (fn_proto.ast.section_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = fn_proto.ast.section_expr, .handle = handle }, decl, encoding, context, handler); + } + if (fn_proto.ast.callconv_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = fn_proto.ast.callconv_expr, .handle = handle }, decl, encoding, context, handler); + } + if (node_tags[node] == .fn_decl) { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); } }, - .For => { - const for_node = node.cast(ast.Node.For).?; - try symbolReferencesInternal(arena, store, .{ .node = for_node.array_expr, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = for_node.body, .handle = handle }, decl, encoding, context, handler); - if (for_node.@"else") |else_node| { - try symbolReferencesInternal(arena, store, .{ .node = else_node.body, .handle = handle }, decl, encoding, context, handler); + .anyframe_type => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); + }, + .@"defer" => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); + }, + .@"comptime" => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .@"nosuspend" => { + try 
symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .@"switch", .switch_comma => { + // TODO When renaming a union(enum) field, also rename switch items that refer to it. + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); + const cases = tree.extra_data[extra.start..extra.end]; + for (cases) |case| { + try symbolReferencesInternal(arena, store, .{ .node = case, .handle = handle }, decl, encoding, context, handler); } }, - .If => { - const if_node = node.cast(ast.Node.If).?; - try symbolReferencesInternal(arena, store, .{ .node = if_node.condition, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = if_node.body, .handle = handle }, decl, encoding, context, handler); - if (if_node.@"else") |else_node| { - try symbolReferencesInternal(arena, store, .{ .node = else_node.body, .handle = handle }, decl, encoding, context, handler); + .switch_case_one => { + const case_one = tree.switchCaseOne(node); + for (case_one.ast.values) |val| + try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); + }, + .switch_case => { + const case = tree.switchCase(node); + for (case_one.ast.values) |val| + try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); + }, + .@"while", .while_simple, .while_con, .for_simple, .@"for" => { + const loop: ast.full.While = switch (node_tags[node]) { + .@"while" => tree.whileFull(node), + .while_simple => tree.whileSimple(node), + .while_con => tree.whileCont(node), + .for_simple => tree.forSimple(node), + .@"for" => tree.forFull(node), + else => unreachable, + }; + try symbolReferencesInternal(arena, store, .{ .node = loop.ast.cond_expr, .handle = handle }, 
decl, encoding, context, handler); + if (loop.ast.cont_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = loop.ast.cont_expr, .handle = handle }, decl, encoding, context, handler); + } + try symbolReferencesInternal(arena, store, .{ .node = loop.ast.then_expr, .handle = handle }, decl, encoding, context, handler); + if (loop.ast.else_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = loop.ast.else_expr, .handle = handle }, decl, encoding, context, handler); } }, - .ArrayType => { - const info = node.castTag(.ArrayType).?; - try symbolReferencesInternal(arena, store, .{ .node = info.len_expr, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = info.rhs, .handle = handle }, decl, encoding, context, handler); + .@"if", .if_simple => { + const if_node: ast.full.If = if (node_tags[node] == .@"if") tree.ifFull(node) else tree.ifSimple(node); + + try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.then_expr, .handle = handle }, decl, encoding, context, handler); + if (if_node.ast.else_expr != 0) { + try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.else_expr, .handle = handle }, decl, encoding, context, handler); + } }, - .ArrayTypeSentinel => { - const info = node.castTag(.ArrayTypeSentinel).?; - try symbolReferencesInternal(arena, store, .{ .node = info.len_expr, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = info.sentinel, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = info.rhs, .handle = handle }, decl, encoding, context, handler); + .array_type, .array_type_sentinel => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, 
handler); + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); }, - .PtrType, .SliceType => { - const info = switch (node.tag) { - .PtrType => node.castTag(.PtrType).?.ptr_info, - .SliceType => node.castTag(.SliceType).?.ptr_info, + .ptr_type, .ptr_type_aligned, .ptr_type_bit_range, .ptr_type_sentinel => { + const ptr_type = analysis.ptrType(tree, node).?; + + if (ptr_type.ast.align_node != 0) { + try symbolReferencesInternal(arena, store, .{ .node = ptr_type.ast.align_node, .handle = handle }, decl, encoding, context, handler); + if (node_tags[node] == .ptr_type_bit_range) { + try symbolReferencesInternal(arena, store, .{ + .node = ptr_type.ast.bit_range_start, + .handle = handle, + }, decl, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ + .node = ptr_type.ast.bit_range_end, + .handle = handle, + }, decl, encoding, context, handler); + } + } + if (ptr_type.ast.sentinel != 0) { + try symbolReferencesInternal(arena, store, .{ .node = ptr_type.ast.sentinel, .handle = handle }, decl, encoding, context, handler); + } + + try symbolReferencesInternal(arena, store, .{ .node = ptr_type.ast.child_type, .handle = handle }, decl, encoding, context, handler); + }, + .address_of, .@"await", .bit_not, .bool_not, .optional_type, .negation, .negation_wrap, .@"resume", .@"try" => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .array_init, + .array_init_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init_one, + .array_init_one_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + => |n| { + var buf: [2]ast.Node.Index = undefined; + const array_init = switch (n) { + .array_init, .array_init_comma => tree.arrayInit(node), + .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), + .array_init_one, .array_init_one_comma => tree.arrayInitOne(&buf[0..1], node), + 
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), + else => unreachable, + }; + if (array_init.ast.type_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = array_init.ast.type_expr, .handle = handle }, decl, encoding, context, handler); + for (array_init.ast.elements) |e| + try symbolReferencesInternal(arena, store, .{ .node = e, .handle = handle }, decl, encoding, context, handler); + }, + .struct_init, + .struct_init_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_one, + .struct_init_one_comma, + => |n| { + var buf: [2]ast.Node.Index = undefined; + const struct_init: ast.full.StructInit = switch (n) { + .struct_init, .struct_init_comma => tree.structInit(node), + .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), + .struct_init_one, .struct_init_one_comma => tree.structInitOne(&buf[0..1], node), + .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), + else => unreachable, + }; + if (struct_init.ast.type_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = struct_init.ast.type_expr, .handle = handle }, decl, encoding, context, handler); + for (struct_init.ast.fields) |field| + try symbolReferencesInternal(arena, store, .{ .node = field, .handle = handle }, decl, encoding, context, handler); + }, + .call, + .call_comma, + .call_one, + .call_one_comma, + .async_call, + .async_call_comma, + .async_call_one, + .async_call_one_comma, + => |c| { + var buf: [1]ast.Node.Index = undefined; + const call: ast.full.Call = switch (c) { + .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node), + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&buf, node), + else => unreachable, + }; + if (call.ast.fn_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = call.ast.fn_expr, .handle = handle }, decl, encoding, context, handler); 
+ + for (call.ast.params) |param| { + try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); + } + }, + .slice, .slice_sentinel, .slice_open => |s| { + const slice: ast.full.Slice = switch (s) { + .slice => tree.slice(node), + .slice_open => tree.sliceOpen(node), + .slice_sentinel => tree.sliceSentinel(node), else => unreachable, }; - if (info.align_info) |align_info| { - try symbolReferencesInternal(arena, store, .{ .node = align_info.node, .handle = handle }, decl, encoding, context, handler); - if (align_info.bit_range) |range| { - try symbolReferencesInternal(arena, store, .{ .node = range.start, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = range.end, .handle = handle }, decl, encoding, context, handler); - } - } - if (info.sentinel) |sentinel| { - try symbolReferencesInternal(arena, store, .{ .node = sentinel, .handle = handle }, decl, encoding, context, handler); - } - switch (node.tag) { - .PtrType => try symbolReferencesInternal(arena, store, .{ .node = node.castTag(.PtrType).?.rhs, .handle = handle }, decl, encoding, context, handler), - .SliceType => try symbolReferencesInternal(arena, store, .{ .node = node.castTag(.SliceType).?.rhs, .handle = handle }, decl, encoding, context, handler), - else => unreachable, + try symbolReferencesInternal(arena, store, .{ .node = slice.ast.sliced, .handle = handle }, decl, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = slice.ast.start, .handle = handle }, decl, encoding, context, handler); + if (slice.ast.end != 0) + try symbolReferencesInternal(arena, store, .{ .node = slice.ast.end, .handle = handle }, decl, encoding, context, handler); + if (slice.ast.sentinel != 0) + try symbolReferencesInternal(arena, store, .{ .node = slice.ast.sentinel, .handle = handle }, decl, encoding, context, handler); + }, + .array_access => { + try 
symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); + }, + .deref, .unwrap_optional => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .grouped_expression => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .@"return", .@"break", .@"continue" => { + if (datas[node].lhs != 0) { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); } }, - .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; - try symbolReferencesInternal(arena, store, .{ .node = prefix_op.rhs, .handle = handle }, decl, encoding, context, handler); - }, - .FieldInitializer => { - // TODO Rename field initializer names when needed - const field_init = node.cast(ast.Node.FieldInitializer).?; - try symbolReferencesInternal(arena, store, .{ .node = field_init.expr, .handle = handle }, decl, encoding, context, handler); - }, - .ArrayInitializer => { - const array_init = node.cast(ast.Node.ArrayInitializer).?; - try symbolReferencesInternal(arena, store, .{ .node = array_init.lhs, .handle = handle }, decl, encoding, context, handler); - for (array_init.listConst()) |child| { - try symbolReferencesInternal(arena, store, .{ .node = child, .handle = handle }, decl, encoding, context, handler); + .@"suspend" => { + if (datas[node].lhs != 0) { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); } }, - .ArrayInitializerDot => { - const array_init = node.cast(ast.Node.ArrayInitializerDot).?; - for 
(array_init.listConst()) |child| { - try symbolReferencesInternal(arena, store, .{ .node = child, .handle = handle }, decl, encoding, context, handler); - } - }, - .StructInitializer => { - // TODO Rename field initializer names when needed - const struct_init = node.cast(ast.Node.StructInitializer).?; - try symbolReferencesInternal(arena, store, .{ .node = struct_init.lhs, .handle = handle }, decl, encoding, context, handler); - for (struct_init.listConst()) |child| { - try symbolReferencesInternal(arena, store, .{ .node = child, .handle = handle }, decl, encoding, context, handler); - } - }, - .StructInitializerDot => { - const struct_init = node.cast(ast.Node.StructInitializerDot).?; - for (struct_init.listConst()) |child| { - try symbolReferencesInternal(arena, store, .{ .node = child, .handle = handle }, decl, encoding, context, handler); - } - }, - .Call => { - const call = node.cast(ast.Node.Call).?; - try symbolReferencesInternal(arena, store, .{ .node = call.lhs, .handle = handle }, decl, encoding, context, handler); - for (call.paramsConst()) |param| { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { + const builtin_call = analysis.builtinCallParams(); + for (analysis.builtinCallParams()) |param| try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); - } }, - .Slice => { - const slice = node.castTag(.Slice).?; - try symbolReferencesInternal(arena, store, .{ .node = slice.lhs, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = slice.start, .handle = handle }, decl, encoding, context, handler); - if (slice.end) |end| { - try symbolReferencesInternal(arena, store, .{ .node = end, .handle = handle }, decl, encoding, context, handler); - } - if (slice.sentinel) |sentinel| { - try symbolReferencesInternal(arena, store, .{ .node = sentinel, .handle = handle }, decl, encoding, context, handler); - } 
- }, - .ArrayAccess => { - const arr_acc = node.castTag(.ArrayAccess).?; - try symbolReferencesInternal(arena, store, .{ .node = arr_acc.lhs, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = arr_acc.index_expr, .handle = handle }, decl, encoding, context, handler); - }, - .Deref, .UnwrapOptional => { - const suffix = node.cast(ast.Node.SimpleSuffixOp).?; - try symbolReferencesInternal(arena, store, .{ .node = suffix.lhs, .handle = handle }, decl, encoding, context, handler); - }, - .GroupedExpression => { - const grouped = node.cast(ast.Node.GroupedExpression).?; - try symbolReferencesInternal(arena, store, .{ .node = grouped.expr, .handle = handle }, decl, encoding, context, handler); - }, - .Return, .Break, .Continue => { - const cfe = node.cast(ast.Node.ControlFlowExpression).?; - if (cfe.getRHS()) |rhs| { - try symbolReferencesInternal(arena, store, .{ .node = rhs, .handle = handle }, decl, encoding, context, handler); - } - }, - .Suspend => { - const suspend_node = node.cast(ast.Node.Suspend).?; - if (suspend_node.body) |body| { - try symbolReferencesInternal(arena, store, .{ .node = body, .handle = handle }, decl, encoding, context, handler); - } - }, - .BuiltinCall => { - const builtin_call = node.cast(ast.Node.BuiltinCall).?; - for (builtin_call.paramsConst()) |param| { - try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); - } - }, - // TODO Inline asm expr - .TestDecl => { - const test_decl = node.cast(ast.Node.TestDecl).?; - try symbolReferencesInternal(arena, store, .{ .node = test_decl.body_node, .handle = handle }, decl, encoding, context, handler); - }, - .Period => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; + .@"asm", .asm_simple => |a| { + const _asm: ast.full.Asm = if (a == .@"asm") tree.asmFull(node) else tree.asmSimple(node); + if (_asm.ast.items.len == 0) + try symbolReferencesInternal(arena, store, .{ 
.node = _asm.ast.template, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = infix_op.lhs, .handle = handle }, decl, encoding, context, handler); + for (_asm.inputs) |input| + try symbolReferencesInternal(arena, store, .{ .node = input, .handle = handle }, decl, encoding, context, handler); - const rhs_str = analysis.nodeToString(handle.tree, infix_op.rhs) orelse return; + for (_asm.outputs) |output| + try symbolReferencesInternal(arena, store, .{ .node = output, .handle = handle }, decl, encoding, context, handler); + }, + .test_decl => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); + }, + .field_access => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + + const rhs_str = analysis.nodeToString(handle.tree, datas[node].rhs) orelse return; var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); const left_type = try analysis.resolveFieldAccessLhsType( store, arena, (try analysis.resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, + .node = datas[node].lhs, .handle = handle, }, &bound_type_params)) orelse return, &bound_type_params, @@ -352,15 +415,53 @@ fn symbolReferencesInternal( !left_type.type.is_type_val, )) |child| { if (std.meta.eql(child, decl)) { - try tokenReference(handle, infix_op.rhs.firstToken(), encoding, context, handler); + try tokenReference(handle, tree.firstToken(datas[node].rhs), encoding, context, handler); } } }, - .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, 
.GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Range, .Sub, .SubWrap, .OrElse => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; - - try symbolReferencesInternal(arena, store, .{ .node = infix_op.lhs, .handle = handle }, decl, encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = infix_op.rhs, .handle = handle }, decl, encoding, context, handler); + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .range, + .sub, + .sub_wrap, + .@"orelse", + => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); }, else => {}, } From c8a2467facd17a1f780a356299343b2a1319762f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 1 Mar 2021 22:18:38 +0100 Subject: [PATCH 12/36] Compiles without errors. 
Still needs improvement --- src/analysis.zig | 292 ++++++++++++++++++++++++++------------------- src/main.zig | 2 +- src/offsets.zig | 4 +- src/references.zig | 19 ++- 4 files changed, 181 insertions(+), 136 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 92dc92d..1f3980b 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1055,13 +1055,12 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !v const init_node_tag = tags[init_node]; switch (init_node_tag) { .builtin_call => try maybeCollectImport(tree, init_node, import_arr), - // @TODO: FIX ME what is the syntax to support for imports using dot notation? - // .Period => { - // const infix_op = init_node.cast(ast.Node.SimpleInfixOp).?; - - // if (infix_op.lhs.tag != .BuiltinCall) continue; - // try maybeCollectImport(tree, infix_op.lhs.castTag(.BuiltinCall).?, import_arr); - // }, + .field_access => { + const lhs = tree.nodes.items(.data)[init_node].lhs; + if (isBuiltinCall(tree, lhs)) { + try maybeCollectImport(tree, lhs, import_arr); + } + }, else => {}, } } @@ -1268,7 +1267,7 @@ fn isBuiltinCall(tree: ast.Tree, node: ast.Node.Index) bool { }; } -fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Index { +pub fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Index { std.debug.assert(isBuiltinCall(tree, node)); const datas = tree.nodes.items(.data); @@ -1290,7 +1289,7 @@ pub fn fnProto(tree: ast.Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?a .fn_proto_multi => tree.fnProtoMulti(node), .fn_proto_one => tree.fnProtoOne(buf, node), .fn_proto_simple => tree.fnProtoSimple(buf, node), - .fn_decl => tree.fnProto(tree.nodes.items(.data)[node].lhs), + // .fn_decl => tree.fnProto(tree.nodes.items(.data)[node].lhs), else => null, }; } @@ -1479,107 +1478,147 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types. 
}; } -fn addOutlineNodes(allocator: *std.mem.Allocator, tree: ast.Tree, parent: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void { - switch (tree.nodes.items(.tag)[parent]) { - .StringLiteral, - .IntegerLiteral, - .BuiltinCall, - .Call, - .Identifier, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Period, - .Range, - .Sub, - .SubWrap, - .OrElse, - .AddressOf, - .Await, - .BitNot, - .BoolNot, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .Try, - .ArrayType, - .ArrayTypeSentinel, - .PtrType, - .SliceType, - .Slice, - .Deref, - .UnwrapOptional, - .ArrayAccess, - .Return, - .Break, - .Continue, - .ArrayInitializerDot, - .SwitchElse, - .SwitchCase, - .For, - .EnumLiteral, - .PointerIndexPayload, - .StructInitializerDot, - .PointerPayload, - .While, - .Switch, - .Else, - .BoolLiteral, - .NullLiteral, - .Defer, - .StructInitializer, - .FieldInitializer, - .If, - .MultilineStringLiteral, - .UndefinedLiteral, - .AnyType, - .Block, - .ErrorSetDecl, +fn addOutlineNodes(allocator: *std.mem.Allocator, tree: ast.Tree, child: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void { + switch (tree.nodes.items(.tag)[child]) { + .string_literal, + .integer_literal, + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + .call, + .call_comma, + .call_one, + .call_one_comma, + .async_call, + .async_call_comma, + .async_call_one, + .async_call_one_comma, + .identifier, + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + 
.assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .field_access, + .switch_range, + .sub, + .sub_wrap, + .@"orelse", + .address_of, + .@"await", + .bit_not, + .bool_not, + .optional_type, + .negation, + .negation_wrap, + .@"resume", + .@"try", + .array_type, + .array_type_sentinel, + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + .slice_open, + .slice_sentinel, + .deref, + .unwrap_optional, + .array_access, + .@"return", + .@"break", + .@"continue", + .array_init, + .array_init_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .array_init_one, + .array_init_one_comma, + .@"switch", + .switch_comma, + .switch_case, + .switch_case_one, + .@"for", + .for_simple, + .enum_literal, + .struct_init, + .struct_init_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_one, + .struct_init_one_comma, + .@"while", + .while_simple, + .while_cont, + .true_literal, + .false_literal, + .null_literal, + .@"defer", + .@"if", + .if_simple, + .multiline_string_literal, + .undefined_literal, + .@"anytype", + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + .error_set_decl, => return, - - .ContainerDecl => { - const decl = child.castTag(.ContainerDecl).?; - - for (decl.fieldsAndDecls()) |cchild| - try addOutlineNodes(allocator, tree, cchild, context); + .container_decl, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_two, + 
.container_decl_two_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + => { + var buf: [2]ast.Node.Index = undefined; + for (declMembers(tree, tree.nodes.items(.tag)[child], child, &buf)) |member| + try addOutlineNodes(allocator, tree, member, context); return; }, - else => {}, + else => |t| {}, } try getDocumentSymbolsInternal(allocator, tree, child, context); } @@ -1614,6 +1653,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod }; const tags = tree.nodes.items(.tag); + log.debug("{s} - {s}", .{ name, tags[node] }); (try context.symbols.addOne()).* = .{ .name = name, .kind = switch (tags[node]) { @@ -1621,6 +1661,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod .fn_proto_simple, .fn_proto_multi, .fn_proto_one, + .fn_decl, => .Function, .local_var_decl, .global_var_decl, @@ -1632,6 +1673,10 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod .container_field_init, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, => .Field, else => .Variable, }, @@ -1647,14 +1692,16 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod .encoding = context.encoding, }; - var index: usize = 0; - if (true) @panic("FIX: addOutlineNodes"); - // try addOutlineNodes(allocator, tree, node, &child_context); - - // while (node.iterate(index)) |child| : (index += 1) { - // try addOutlineNodes(allocator, tree, child, &child_context); - // } + if (isContainer(tags[node])) { + var buf: [2]ast.Node.Index = undefined; + for (declMembers(tree, tags[node], node, &buf)) |child| + try addOutlineNodes(allocator, tree, child, &child_context); + } + if (varDecl(tree, node)) |var_decl| { + if (var_decl.ast.init_node != 0) + try addOutlineNodes(allocator, tree, 
var_decl.ast.init_node, &child_context); + } break :ch children.items; }, }; @@ -2056,7 +2103,7 @@ fn lookupSymbolGlobalInternal( source_index: usize, use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { - for (handle.document_scope.scopes) |scope| { + for (handle.document_scope.scopes) |scope, i| { if (source_index >= scope.range.start and source_index < scope.range.end) { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) { @@ -2266,10 +2313,10 @@ pub fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index, return switch (tag) { .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members, .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members, - .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buffer, node_idx).ast.members, + .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx).ast.members, .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members, - .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buffer, node_idx).ast.members, + .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members, .root => tree.rootDecls(), // @TODO: Fix error set declarations .error_set_decl => &[_]ast.Node.Index{}, @@ -2307,7 +2354,7 @@ fn makeScopeInternal( if (isContainer(node)) { var buf: [2]ast.Node.Index = undefined; - const ast_decls = declMembers(tree, node, node_idx); + const ast_decls = declMembers(tree, node, node_idx, &buf); (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), @@ -2412,7 +2459,10 @@ fn makeScopeInternal( } switch (node) { - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { + .fn_decl => { + try 
makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); + }, + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi => { var buf: [1]ast.Node.Index = undefined; const func = fnProto(tree, node_idx, &buf).?; @@ -2435,10 +2485,6 @@ fn makeScopeInternal( } } - if (node == .fn_decl) { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); - } - return; }, .test_decl => { diff --git a/src/main.zig b/src/main.zig index 46c63d7..31cbc39 100644 --- a/src/main.zig +++ b/src/main.zig @@ -650,6 +650,7 @@ fn getLabelGlobal(pos_index: usize, handle: *DocumentStore.Handle) !?analysis.De fn getSymbolGlobal(arena: *std.heap.ArenaAllocator, pos_index: usize, handle: *DocumentStore.Handle) !?analysis.DeclWithHandle { const name = identifierFromPosition(pos_index, handle.*); + logger.debug("Name: {s}", .{name}); if (name.len == 0) return null; return try analysis.lookupSymbolGlobal(&document_store, arena, handle, name, pos_index); @@ -1314,7 +1315,6 @@ fn hoverHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: reque if (req.params.position.character >= 0) { const doc_position = try offsets.documentPosition(handle.document, req.params.position, offset_encoding); const pos_context = try analysis.documentPositionContext(arena, handle.document, doc_position); - switch (pos_context) { .builtin => try hoverDefinitionBuiltin(arena, id, doc_position.absolute_index, handle), .var_access => try hoverDefinitionGlobal(arena, id, doc_position.absolute_index, handle, config), diff --git a/src/offsets.zig b/src/offsets.zig index 8201924..40d40e0 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -71,14 +71,14 @@ pub const TokenLocation = struct { }; pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation { - const token_loc = tree.tokenLocation(@truncate(u32, start_index), token); + const start = 
tree.tokens.items(.start)[token]; var loc = TokenLocation{ .line = 0, .column = 0, .offset = 0, }; - const token_start = token_loc.line_start; + const token_start = start; const source = tree.source[start_index..]; var i: usize = 0; while (i + start_index < token_start) { diff --git a/src/references.zig b/src/references.zig index 2361f7b..aba30c7 100644 --- a/src/references.zig +++ b/src/references.zig @@ -136,7 +136,7 @@ fn symbolReferencesInternal( // try symbolReferencesInternal(arena, store, .{ .node = use.expr, .handle = handle }, decl, encoding, context, handler); // }, .container_field, .container_field_align, .container_field_init => { - const field = analysis.containerField(node).?; + const field = analysis.containerField(tree, node).?; if (field.ast.type_expr != 0) { try symbolReferencesInternal(arena, store, .{ .node = field.ast.type_expr, .handle = handle }, decl, encoding, context, handler); } @@ -145,7 +145,7 @@ fn symbolReferencesInternal( } }, .identifier => { - if (try analysis.lookupSymbolGlobal(store, arena, handle, tree.getNodeSource(node), starts[main_tokens[nodes]])) |child| { + if (try analysis.lookupSymbolGlobal(store, arena, handle, tree.getNodeSource(node), starts[main_tokens[node]])) |child| { if (std.meta.eql(decl, child)) { try tokenReference(handle, main_tokens[node], encoding, context, handler); } @@ -204,14 +204,14 @@ fn symbolReferencesInternal( }, .switch_case => { const case = tree.switchCase(node); - for (case_one.ast.values) |val| + for (case.ast.values) |val| try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); }, - .@"while", .while_simple, .while_con, .for_simple, .@"for" => { + .@"while", .while_simple, .while_cont, .for_simple, .@"for" => { const loop: ast.full.While = switch (node_tags[node]) { .@"while" => tree.whileFull(node), .while_simple => tree.whileSimple(node), - .while_con => tree.whileCont(node), + .while_cont => tree.whileCont(node), .for_simple => 
tree.forSimple(node), .@"for" => tree.forFull(node), else => unreachable, @@ -276,7 +276,7 @@ fn symbolReferencesInternal( const array_init = switch (n) { .array_init, .array_init_comma => tree.arrayInit(node), .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node), - .array_init_one, .array_init_one_comma => tree.arrayInitOne(&buf[0..1], node), + .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node), .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), else => unreachable, }; @@ -298,7 +298,7 @@ fn symbolReferencesInternal( const struct_init: ast.full.StructInit = switch (n) { .struct_init, .struct_init_comma => tree.structInit(node), .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), - .struct_init_one, .struct_init_one_comma => tree.structInitOne(&buf[0..1], node), + .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), else => unreachable, }; @@ -369,8 +369,7 @@ fn symbolReferencesInternal( .builtin_call_two, .builtin_call_two_comma, => { - const builtin_call = analysis.builtinCallParams(); - for (analysis.builtinCallParams()) |param| + for (analysis.builtinCallParams(tree, node)) |param| try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); }, .@"asm", .asm_simple => |a| { @@ -455,7 +454,7 @@ fn symbolReferencesInternal( .mod, .mul, .mul_wrap, - .range, + .switch_range, .sub, .sub_wrap, .@"orelse", From 1dd39914ec5f6d6b3cec530b4788fbe201d0cbdb Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Tue, 2 Mar 2021 15:32:38 +0100 Subject: [PATCH 13/36] Hovering fixes --- src/analysis.zig | 182 ++++++++++++++++++++++++++++----------------- src/main.zig | 1 - src/references.zig | 16 +++- 3 files changed, 127 insertions(+), 72 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 1f3980b..be5542c 
100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -11,21 +11,34 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI const tokens = tree.tokens.items(.tag); const current = tree.nodes.items(.main_token)[node]; + var idx = current; switch (tags[node]) { - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi => { - var idx = current - 1; + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { + idx -= 1; idx -= @boolToInt(tokens[idx] == .keyword_extern); idx -= @boolToInt(tokens[idx] == .keyword_pub); - return if (tokens[idx] == .doc_comment) idx else null; }, .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { - return if (tokens[current - 1] == .doc_comment) current - 1 else null; + idx -= 1; + idx -= @boolToInt(tokens[idx] == .keyword_pub); }, .container_field, .container_field_init, .container_field_align => { - var idx = current - 2; // skip '.' - return if (tokens[idx] == .doc_comment) idx else null; + idx -= 2; }, - else => return null, + else => { + if (isContainer(tags[node])) { + idx -= 2; // go to '=' + idx -= 1; // mutability + idx -= 1; // possible 'pub' + idx -= @boolToInt(tokens[idx] == .keyword_pub); // doc comment + } + }, + } + + // Find first doc comment token + if (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) { + while (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) : (idx -= 1) {} + return idx + 1; } // @TODO: Implement doc comments for tags @@ -63,7 +76,6 @@ pub fn collectDocComments( defer lines.deinit(); const token_tags = tree.tokens.items(.tag); - const loc = tree.tokenLocation(0, doc_comments); var curr_line_tok = doc_comments; while (true) : (curr_line_tok += 1) { @@ -81,8 +93,8 @@ pub fn collectDocComments( /// Gets a function signature (keywords, name, return value) pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { const start = tree.tokenLocation(0, 
func.ast.fn_token).line_start; - const end = tree.tokenLocation(0, func.ast.return_type).line_end; - return tree.source[start..end]; + const end = tree.tokenLocation(0, tree.nodes.items(.main_token)[func.ast.return_type]).line_end; + return tree.source[start .. end - 1]; } /// Gets a function snippet insert text @@ -200,16 +212,15 @@ pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { .aligned_var_decl => return tree.alignedVarDecl(node).ast.mut_token + 1, // function declaration names - .fn_proto => return tree.fnProto(node).name_token, - .fn_proto_simple => { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => { var params: [1]ast.Node.Index = undefined; - return tree.fnProtoSimple(¶ms, node).name_token; + return fnProto(tree, node, ¶ms).?.name_token; }, - .fn_proto_one => { - var params: [1]ast.Node.Index = undefined; - return tree.fnProtoOne(¶ms, node).name_token; - }, - .fn_proto_multi => return tree.fnProtoMulti(node).name_token, // containers .container_field => return tree.containerField(node).ast.name_token, @@ -272,7 +283,17 @@ fn resolveVarDeclAliasInternal( const lhs = datas[node_handle.node].lhs; const container_node = if (isBuiltinCall(tree, lhs)) block: { - const builtin = builtinCallParams(tree, lhs); + const data = datas[lhs]; + const builtin = switch (node_tags[lhs]) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import")) return null; @@ -726,13 +747,16 @@ pub fn resolveTypeOfNodeInternal( return try resolveBracketAccessType(store, arena, left_type, .Single, bound_type_params); }, .field_access => { - const rhs_str = nodeToString(handle.tree, datas[node].rhs) orelse return 
null; + const field_access = datas[node]; + log.debug("Rhs: {s}", .{node_tags[field_access.rhs]}); + const rhs_str = nodeToString(handle.tree, field_access.rhs) orelse return null; + log.debug("Acces string: {s}", .{rhs_str}); // If we are accessing a pointer type, remove one pointerness level :) const left_type = try resolveFieldAccessLhsType( store, arena, (try resolveTypeOfNodeInternal(store, arena, .{ - .node = datas[node].lhs, + .node = field_access.lhs, .handle = handle, }, bound_type_params)) orelse return null, bound_type_params, @@ -799,7 +823,17 @@ pub fn resolveTypeOfNodeInternal( }; }, .builtin_call, .builtin_call_comma, .builtin_call_two, .builtin_call_two_comma => { - const params = builtinCallParams(tree, node); + const data = datas[node]; + const params = switch (node_tags[node]) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; const call_name = tree.tokenSlice(main_tokens[node]); if (std.mem.eql(u8, call_name, "@This")) { @@ -843,7 +877,7 @@ pub fn resolveTypeOfNodeInternal( } if (!std.mem.eql(u8, call_name, "@import")) return null; - if (params.len < 1) return null; + if (params.len == 0) return null; const import_param = params[0]; if (node_tags[import_param] != .string_literal) return null; @@ -866,7 +900,7 @@ pub fn resolveTypeOfNodeInternal( => { return TypeWithHandle.typeVal(node_handle); }, - .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple => { + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => { var buf: [1]ast.Node.Index = undefined; const fn_proto = fnProto(tree, node, &buf).?; @@ -976,6 +1010,7 @@ pub const TypeWithHandle = struct { .fn_proto_multi, .fn_proto_one, .fn_proto_simple, + .fn_decl, => isTypeFunction(fnProto(tree, n, &buf).?), else => 
false, }, @@ -991,6 +1026,7 @@ pub const TypeWithHandle = struct { .fn_proto_multi, .fn_proto_one, .fn_proto_simple, + .fn_decl, => isGenericFunction(fnProto(tree, n, &buf).?), else => false, }, @@ -1006,6 +1042,7 @@ pub const TypeWithHandle = struct { .fn_proto_multi, .fn_proto_one, .fn_proto_simple, + .fn_decl, => true, else => false, }, @@ -1024,16 +1061,26 @@ fn maybeCollectImport(tree: ast.Tree, builtin_call: ast.Node.Index, arr: *std.Ar const datas = tree.nodes.items(.data); const builtin_tag = tags[builtin_call]; - const builtin_data = datas[builtin_call]; + const data = datas[builtin_call]; - std.debug.assert(builtin_tag == .builtin_call); + std.debug.assert(isBuiltinCall(tree, builtin_call)); if (!std.mem.eql(u8, tree.tokenSlice(builtin_call), "@import")) return; - const params = tree.extra_data[builtin_data.lhs..builtin_data.rhs]; + + const params = switch (builtin_tag) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; if (params.len > 1) return; if (tags[params[0]] != .string_literal) return; - const import_str = tree.tokenSlice(params[0]); + const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); try arr.append(import_str[1 .. 
import_str.len - 1]); } @@ -1219,6 +1266,8 @@ pub fn isNodePublic(tree: ast.Tree, node: ast.Node.Index) bool { } pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { + const data = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); var buf: [1]ast.Node.Index = undefined; switch (tree.nodes.items(.tag)[node]) { .container_field => return tree.tokenSlice(tree.containerField(node).ast.name_token), @@ -1267,29 +1316,13 @@ fn isBuiltinCall(tree: ast.Tree, node: ast.Node.Index) bool { }; } -pub fn builtinCallParams(tree: ast.Tree, node: ast.Node.Index) []const ast.Node.Index { - std.debug.assert(isBuiltinCall(tree, node)); - const datas = tree.nodes.items(.data); - - return switch (tree.nodes.items(.tag)[node]) { - .builtin_call, .builtin_call_comma => tree.extra_data[datas[node].lhs..datas[node].rhs], - .builtin_call_two, .builtin_call_two_comma => if (datas[node].lhs == 0) - &[_]ast.Node.Index{} - else if (datas[node].rhs == 0) - &[_]ast.Node.Index{datas[node].lhs} - else - &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }, - else => unreachable, - }; -} - pub fn fnProto(tree: ast.Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?ast.full.FnProto { return switch (tree.nodes.items(.tag)[node]) { .fn_proto => tree.fnProto(node), .fn_proto_multi => tree.fnProtoMulti(node), .fn_proto_one => tree.fnProtoOne(buf, node), .fn_proto_simple => tree.fnProtoSimple(buf, node), - // .fn_decl => tree.fnProto(tree.nodes.items(.data)[node].lhs), + .fn_decl => fnProto(tree, tree.nodes.items(.data)[node].lhs, buf), else => null, }; } @@ -1315,10 +1348,21 @@ pub fn getImportStr(tree: ast.Tree, node: ast.Node.Index, source_index: usize) ? 
const call_name = tree.tokenSlice(builtin_token); if (!std.mem.eql(u8, call_name, "@import")) continue; - const params = builtinCallParams(tree, decl_idx); + const data = tree.nodes.items(.data)[decl_idx]; + const params = switch (node_tags[decl_idx]) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; + if (params.len != 1) continue; - const import_str = tree.tokenSlice(tree.firstToken(params[0])); + const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); return import_str[1 .. import_str.len - 1]; } @@ -1653,7 +1697,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod }; const tags = tree.nodes.items(.tag); - log.debug("{s} - {s}", .{ name, tags[node] }); + // log.debug("{s} - {s}", .{ name, tags[node] }); (try context.symbols.addOne()).* = .{ .name = name, .kind = switch (tags[node]) { @@ -2104,24 +2148,24 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope, i| { - if (source_index >= scope.range.start and source_index < scope.range.end) { - if (scope.decls.getEntry(symbol)) |candidate| { - switch (candidate.value) { - .ast_node => |node| { - if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; - }, - .label_decl => continue, - else => {}, - } - return DeclWithHandle{ - .decl = &candidate.value, - .handle = handle, - }; + // if (source_index >= scope.range.start and source_index < scope.range.end) { + if (scope.decls.getEntry(symbol)) |candidate| { + switch (candidate.value) { + .ast_node => |node| { + if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; + }, + .label_decl => continue, + else => {}, } - - // if (try 
resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + return DeclWithHandle{ + .decl = &candidate.value, + .handle = handle, + }; } + // if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + // } + if (scope.range.start > source_index) return null; } @@ -2459,10 +2503,7 @@ fn makeScopeInternal( } switch (node) { - .fn_decl => { - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); - }, - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi => { + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => |fn_tag| { var buf: [1]ast.Node.Index = undefined; const func = fnProto(tree, node_idx, &buf).?; @@ -2485,6 +2526,10 @@ fn makeScopeInternal( } } + if (fn_tag == .fn_decl) { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); + } + return; }, .test_decl => { @@ -2543,7 +2588,6 @@ fn makeScopeInternal( for (statements) |idx| { try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, idx); - // if (tags[ if (varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { @@ -2624,7 +2668,7 @@ fn makeScopeInternal( else => unreachable, }; if (while_node.label_token) |label| { - std.debug.assert(tags[label] == .identifier); + std.debug.assert(token_tags[label] == .identifier); var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ diff --git a/src/main.zig b/src/main.zig index 31cbc39..ac2e313 100644 --- a/src/main.zig +++ b/src/main.zig @@ -650,7 +650,6 @@ fn getLabelGlobal(pos_index: usize, handle: *DocumentStore.Handle) !?analysis.De fn getSymbolGlobal(arena: *std.heap.ArenaAllocator, pos_index: usize, handle: *DocumentStore.Handle) !?analysis.DeclWithHandle { const name = identifierFromPosition(pos_index, 
handle.*); - logger.debug("Name: {s}", .{name}); if (name.len == 0) return null; return try analysis.lookupSymbolGlobal(&document_store, arena, handle, name, pos_index); diff --git a/src/references.zig b/src/references.zig index aba30c7..d9e8a4a 100644 --- a/src/references.zig +++ b/src/references.zig @@ -368,8 +368,20 @@ fn symbolReferencesInternal( .builtin_call_comma, .builtin_call_two, .builtin_call_two_comma, - => { - for (analysis.builtinCallParams(tree, node)) |param| + => |builtin_tag| { + const data = datas[node]; + const params = switch (builtin_tag) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; + + for (params) |param| try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); }, .@"asm", .asm_simple => |a| { From 92adeb88267598e9f62266c0ccf0b1a370d1e70f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Tue, 2 Mar 2021 22:01:13 +0100 Subject: [PATCH 14/36] Completion fixes --- src/analysis.zig | 91 +++++++++++++++++++++++++++++------------------- src/main.zig | 13 +++---- 2 files changed, 61 insertions(+), 43 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index be5542c..f9b12ca 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -12,32 +12,37 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI const current = tree.nodes.items(.main_token)[node]; var idx = current; + if (idx == 0) return null; switch (tags[node]) { .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { idx -= 1; - idx -= @boolToInt(tokens[idx] == .keyword_extern); - idx -= @boolToInt(tokens[idx] == .keyword_pub); + if (tokens[idx] == .keyword_extern and idx > 0) + idx -= 1; + if (tokens[idx] == .keyword_pub 
and idx < 0) + idx -= 1; }, .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { idx -= 1; - idx -= @boolToInt(tokens[idx] == .keyword_pub); + if (tokens[idx] == .keyword_pub and idx > 0) + idx -= 1; }, .container_field, .container_field_init, .container_field_align => { - idx -= 2; + idx -= 2; // skip '.' token }, else => { if (isContainer(tags[node])) { - idx -= 2; // go to '=' + idx -= 1; // go to '=' idx -= 1; // mutability idx -= 1; // possible 'pub' - idx -= @boolToInt(tokens[idx] == .keyword_pub); // doc comment + if (tokens[idx] == .keyword_pub and idx > 0) + idx -= 1; } }, } // Find first doc comment token if (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) { - while (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) : (idx -= 1) {} + while ((tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) and idx > 0) : (idx -= 1) {} return idx + 1; } @@ -164,14 +169,15 @@ pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const // analysis.getContainerFieldSignature(handle.tree, field) pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 { const start = tree.tokenLocation(0, field.ast.name_token).line_start; - const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(field.ast.value_expr)).line_start; - return tree.source[start..end]; + const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr; + const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(end_node)).line_end; + return tree.source[start .. 
end - 1]; } /// The type node is "type" fn typeIsType(tree: ast.Tree, node: ast.Node.Index) bool { if (tree.nodes.items(.tag)[node] == .identifier) { - return std.mem.eql(u8, tree.tokenSlice(node), "type"); + return std.mem.eql(u8, tree.tokenSlice(tree.nodes.items(.main_token)[node]), "type"); } return false; } @@ -182,7 +188,6 @@ pub fn isTypeFunction(tree: ast.Tree, func: ast.full.FnProto) bool { pub fn isGenericFunction(tree: ast.Tree, func: *ast.full.FnProto) bool { var it = func.iterate(); - var slice = tree.nodes.items(.tag); while (it.next()) |param| { if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { return true; @@ -204,12 +209,13 @@ pub fn isPascalCase(name: []const u8) bool { pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { const tags = tree.nodes.items(.tag); - switch (tags[node]) { + const main_token = tree.nodes.items(.main_token)[node]; + return switch (tags[node]) { // regular declaration names. + 1 to mut token because name comes after 'const'/'var' - .local_var_decl => return tree.localVarDecl(node).ast.mut_token + 1, - .global_var_decl => return tree.globalVarDecl(node).ast.mut_token + 1, - .simple_var_decl => return tree.simpleVarDecl(node).ast.mut_token + 1, - .aligned_var_decl => return tree.alignedVarDecl(node).ast.mut_token + 1, + .local_var_decl => tree.localVarDecl(node).ast.mut_token + 1, + .global_var_decl => tree.globalVarDecl(node).ast.mut_token + 1, + .simple_var_decl => tree.simpleVarDecl(node).ast.mut_token + 1, + .aligned_var_decl => tree.alignedVarDecl(node).ast.mut_token + 1, // function declaration names .fn_proto, @@ -217,15 +223,17 @@ pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { .fn_proto_one, .fn_proto_simple, .fn_decl, - => { + => blk: { var params: [1]ast.Node.Index = undefined; - return fnProto(tree, node, ¶ms).?.name_token; + break :blk fnProto(tree, node, ¶ms).?.name_token; }, // containers - .container_field => return 
tree.containerField(node).ast.name_token, - .container_field_init => return tree.containerFieldInit(node).ast.name_token, - .container_field_align => return tree.containerFieldAlign(node).ast.name_token, + .container_field => tree.containerField(node).ast.name_token, + .container_field_init => tree.containerFieldInit(node).ast.name_token, + .container_field_align => tree.containerFieldAlign(node).ast.name_token, + + .identifier => main_token, // @TODO: Errors // .error_=> { @@ -234,11 +242,12 @@ pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { // }, // lhs of main token is name token, so use `node` - 1 - .test_decl => return getDeclNameToken(tree, node - 1), - else => {}, - } - - return null; + .test_decl => if (tree.tokens.items(.tag)[main_token + 1] == .string_literal) + return main_token + 1 + else + null, + else => null, + }; } fn getDeclName(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { @@ -251,7 +260,7 @@ fn getDeclName(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { fn isContainerDecl(decl_handle: DeclWithHandle) bool { return switch (decl_handle.decl.*) { - .ast_node => |inner_node| isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]) or inner_node == 0, + .ast_node => |inner_node| isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]), else => false, }; } @@ -748,9 +757,8 @@ pub fn resolveTypeOfNodeInternal( }, .field_access => { const field_access = datas[node]; - log.debug("Rhs: {s}", .{node_tags[field_access.rhs]}); - const rhs_str = nodeToString(handle.tree, field_access.rhs) orelse return null; - log.debug("Acces string: {s}", .{rhs_str}); + const rhs_str = nodeToString(handle.tree, node) orelse return null; + // If we are accessing a pointer type, remove one pointerness level :) const left_type = try resolveFieldAccessLhsType( store, @@ -918,7 +926,9 @@ pub fn resolveTypeOfNodeInternal( .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, - else => {}, + 
else => { + log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]}); + }, } return null; } @@ -1267,7 +1277,7 @@ pub fn isNodePublic(tree: ast.Tree, node: ast.Node.Index) bool { pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { const data = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); + const main_token = tree.nodes.items(.main_token)[node]; var buf: [1]ast.Node.Index = undefined; switch (tree.nodes.items(.tag)[node]) { .container_field => return tree.tokenSlice(tree.containerField(node).ast.name_token), @@ -1291,6 +1301,16 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { .fn_proto_simple => if (tree.fnProtoSimple(&buf, node).name_token) |name| { return tree.tokenSlice(name); }, + // .call, + // .call_comma, + // .call_one, + // .call_one_comma, + // .async_call, + // .async_call_comma, + // .async_call_one, + // .async_call_one_comma, + // => return tree.tokenSlice(main_token - 1), + .field_access => return tree.tokenSlice(data[node].rhs), else => { log.debug("INVALID: {}", .{tree.nodes.items(.tag)[node]}); }, @@ -1820,6 +1840,7 @@ pub const DeclWithHandle = struct { const tree = self.handle.tree; const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); + return switch (self.decl.*) { .ast_node => |node| try resolveTypeOfNodeInternal(store, arena, .{ .node = node, .handle = self.handle }, bound_type_params), .param_decl => |*param_decl| { @@ -1925,11 +1946,10 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { if (!isContainer(handle.tree.nodes.items(.tag)[container])) return null; // Find the container scope. 
- var container_scope: ?*Scope = null; return for (handle.document_scope.scopes) |*scope| { - switch (scope.*.data) { + switch (scope.data) { .container => |node| if (node == container) { - break container_scope; + break scope; }, else => {}, } @@ -2148,6 +2168,7 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope, i| { + // @TODO: Fix scope positions // if (source_index >= scope.range.start and source_index < scope.range.end) { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) { diff --git a/src/main.zig b/src/main.zig index ac2e313..8ec29db 100644 --- a/src/main.zig +++ b/src/main.zig @@ -220,11 +220,10 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han for (tree.rootDecls()) |decl_idx| { const decl = tree.nodes.items(.tag)[decl_idx]; switch (decl) { - .fn_proto => blk: { - const func = tree.fnProto(decl_idx); - const is_extern = func.extern_export_token != null; - if (is_extern) - break :blk; + .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => blk: { + var buf: [1]std.zig.ast.Node.Index = undefined; + const func = analysis.fnProto(tree, decl_idx, &buf).?; + if (func.extern_export_token != null) break :blk; if (config.warn_style) { if (func.name_token) |name_token| { @@ -362,7 +361,7 @@ fn nodeToCompletion( .arena = arena, .orig_handle = orig_handle, }; - try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, !is_type_val); + try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, is_type_val); } if (is_type_val) return; @@ -873,7 +872,6 @@ fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenI fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) !void { const tree = decl_handle.handle.tree; - switch 
(decl_handle.decl.*) { .ast_node => |node| try nodeToCompletion(context.arena, context.completions, .{ .node = node, .handle = decl_handle.handle }, null, context.orig_handle, false, context.config.*), .param_decl => |param| { @@ -1236,7 +1234,6 @@ fn completionHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: if (req.params.position.character >= 0) { const doc_position = try offsets.documentPosition(handle.document, req.params.position, offset_encoding); const pos_context = try analysis.documentPositionContext(arena, handle.document, doc_position); - const use_snippets = config.enable_snippets and client_capabilities.supports_snippets; switch (pos_context) { .builtin => try completeBuiltin(arena, id, config), From a699dab2f74ae3d4f31dc69cc36d11eaa9b6fc33 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 3 Mar 2021 16:34:24 +0100 Subject: [PATCH 15/36] Referencing fixes and more --- src/analysis.zig | 233 ++++++++++++++++++++++----------------------- src/main.zig | 2 +- src/references.zig | 8 +- 3 files changed, 119 insertions(+), 124 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index f9b12ca..123af3c 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -30,13 +30,13 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI idx -= 2; // skip '.' 
token }, else => { - if (isContainer(tags[node])) { - idx -= 1; // go to '=' - idx -= 1; // mutability - idx -= 1; // possible 'pub' - if (tokens[idx] == .keyword_pub and idx > 0) - idx -= 1; - } + // if (isContainer(tags[node])) { + // idx -= 1; // go to '=' + // idx -= 1; // mutability + // idx -= 1; // possible 'pub' + // if (tokens[idx] == .keyword_pub and idx > 0) + // idx -= 1; + // } }, } @@ -284,7 +284,7 @@ fn resolveVarDeclAliasInternal( arena, handle, tree.tokenSlice(token), - tree.tokenLocation(0, token).line_start, + tree.tokens.items(.start)[token], ); } @@ -322,8 +322,7 @@ fn resolveVarDeclAliasInternal( break :block NodeWithHandle{ .node = resolved_node, .handle = resolved.handle }; } else return null; - if (try lookupSymbolContainer(store, arena, container_node, tree.tokenSlice(tree.firstToken(datas[lhs].rhs)), false)) |inner_decl| { - if (root) return inner_decl; + if (try lookupSymbolContainer(store, arena, container_node, tree.tokenSlice(datas[node_handle.node].rhs), false)) |inner_decl| { return inner_decl; } } @@ -347,10 +346,10 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato if (varDecl(handle.tree, decl)) |var_decl| { if (var_decl.ast.init_node == 0) return null; const base_exp = var_decl.ast.init_node; - if (token_tags[main_tokes[base_exp]] != .keyword_const) return null; + if (token_tags[var_decl.ast.mut_token] != .keyword_const) return null; if (node_tags[base_exp] == .field_access) { - const name = tree.tokenSlice(tree.firstToken(tree.nodes.items(.data)[base_exp].rhs)); + const name = tree.tokenSlice(tree.nodes.items(.data)[base_exp].rhs); if (!std.mem.eql(u8, tree.tokenSlice(var_decl.ast.mut_token + 1), name)) return null; @@ -406,8 +405,15 @@ pub fn resolveReturnType( fn_decl: ast.full.FnProto, handle: *DocumentStore.Handle, bound_type_params: *BoundTypeParams, + fn_body: ?ast.Node.Index, ) !?TypeWithHandle { - // @TODO: Confirm this can handle inferred error sets etc + const tree = handle.tree; + if 
(isTypeFunction(tree, fn_decl) and fn_body != null) { + // @TODO: find return statement inside fn body of `type` (generic) functions + } + + if (fn_decl.ast.return_type == 0) return null; + log.debug("Ret type: '{s}'", .{tree.nodes.items(.tag)[fn_decl.ast.return_type]}); return resolveTypeOfNodeInternal(store, arena, .{ .node = fn_decl.ast.return_type, .handle = handle, @@ -487,10 +493,11 @@ fn resolveDerefType( const token_tag = tree.tokens.items(.tag)[main_token]; if (isPtrType(tree, deref_node)) { + const ptr_type = ptrType(tree, deref_node).?; switch (token_tag) { .asterisk => { return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = tree.nodes.items(.data)[deref_node].rhs, + .node = ptr_type.ast.child_type, .handle = deref.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); }, @@ -536,6 +543,7 @@ fn resolveBracketAccessType( .handle = lhs.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } + return lhs; } } @@ -552,7 +560,7 @@ pub fn resolveFieldAccessLhsType( return (try resolveDerefType(store, arena, lhs, bound_type_params)) orelse lhs; } -pub const BoundTypeParams = std.AutoHashMap(*const ast.full.FnProto.Param, TypeWithHandle); +pub const BoundTypeParams = std.AutoHashMap(ast.full.FnProto.Param, TypeWithHandle); fn allDigits(str: []const u8) bool { for (str) |c| { @@ -619,14 +627,14 @@ pub fn resolveTypeOfNodeInternal( null; }, .identifier => { - if (isTypeIdent(handle.tree, tree.firstToken(node))) { + if (isTypeIdent(handle.tree, main_tokens[node])) { return TypeWithHandle{ .type = .{ .data = .primitive, .is_type_val = true }, .handle = handle, }; } - if (try lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), starts[tree.firstToken(node)])) |child| { + if (try lookupSymbolGlobal(store, arena, handle, tree.getNodeSource(node), starts[main_tokens[node]])) |child| { switch (child.decl.*) { .ast_node => |n| { if (n == node) return null; @@ -636,6 +644,7 @@ pub fn resolveTypeOfNodeInternal( 
}, else => {}, } + log.debug("Resolving child: {s}", .{tree.tokenSlice(child.nameToken())}); return try child.resolveType(store, arena, bound_type_params); } return null; @@ -672,6 +681,7 @@ pub fn resolveTypeOfNodeInternal( else => unreachable, }; + log.debug("Call fn expr: {s}", .{node_tags[call.ast.fn_expr]}); const decl = (try resolveTypeOfNodeInternal( store, arena, @@ -684,7 +694,7 @@ pub fn resolveTypeOfNodeInternal( .other => |n| n, else => return null, }; - + log.debug("CALL: {d} - {d}", .{ call.ast.fn_expr, decl_node }); var buf: [1]ast.Node.Index = undefined; const func_maybe = fnProto(tree, decl_node, &buf); @@ -706,10 +716,12 @@ pub fn resolveTypeOfNodeInternal( }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; - _ = try bound_type_params.put(&decl_param, call_param_type); + _ = try bound_type_params.put(decl_param, call_param_type); } - return try resolveReturnType(store, arena, fn_decl, decl.handle, bound_type_params); + const has_body = node_tags[decl_node] == .fn_decl; + const body = datas[decl_node].rhs; + return try resolveReturnType(store, arena, fn_decl, decl.handle, bound_type_params, if (has_body) body else null); } return null; }, @@ -774,14 +786,15 @@ pub fn resolveTypeOfNodeInternal( .other => |n| n, else => return null, }; - + log.debug("Left_type_node: '{s}' for rhs: '{s}'", .{ node_tags[left_type_node], rhs_str }); if (try lookupSymbolContainer( store, arena, .{ .node = left_type_node, .handle = left_type.handle }, rhs_str, - !left_type.type.is_type_val, + left_type.type.is_type_val, )) |child| { + log.debug("Found child: '{s}'", .{tree.tokenSlice(child.nameToken())}); return try child.resolveType(store, arena, bound_type_params); } else return null; }, @@ -993,6 +1006,7 @@ pub const TypeWithHandle = struct { pub fn isNamespace(self: TypeWithHandle, tree: ast.Tree) bool { if (!self.isStructType()) return false; var idx: usize = 0; + // @TODO: FIX ME while (self.type.data.other.iterate(idx)) |child| 
: (idx += 1) { if (child.tag == .ContainerField) return false; @@ -1014,40 +1028,28 @@ pub const TypeWithHandle = struct { pub fn isTypeFunc(self: TypeWithHandle, tree: ast.Tree) bool { var buf: [1]ast.Node.Index = undefined; - switch (self.type.data) { - .other => |n| return switch (tree.nodes.items(.tag)[n]) { - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - .fn_decl, - => isTypeFunction(fnProto(tree, n, &buf).?), - else => false, - }, - else => return false, - } + return switch (self.type.data) { + .other => |n| if (fnProto(tree, n, &buf)) |fn_proto| blk: { + break :blk isTypeFunction(tree, fn_proto); + } else false, + else => false, + }; } pub fn isGenericFunc(self: TypeWithHandle, tree: ast.Tree) bool { var buf: [1]ast.Node.Index = undefined; - switch (self.type.data) { - .other => |n| return switch (tree.nodes.items(.tag)[n]) { - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - .fn_decl, - => isGenericFunction(fnProto(tree, n, &buf).?), - else => false, - }, - else => return false, - } + return switch (self.type.data) { + .other => |n| if (fnProto(tree, n, &buf)) |fn_proto| blk: { + break :blk isGenericFunction(tree, fn_proto); + } else false, + else => false, + }; } pub fn isFunc(self: TypeWithHandle, tree: ast.Tree) bool { const tags = tree.nodes.items(.tag); - switch (self.type.data) { - .other => |n| return switch (tags[n]) { + return switch (self.type.data) { + .other => |n| switch (tags[n]) { .fn_proto, .fn_proto_multi, .fn_proto_one, @@ -1056,8 +1058,8 @@ pub const TypeWithHandle = struct { => true, else => false, }, - else => return false, - } + else => false, + }; } }; @@ -1107,11 +1109,16 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !v else => null, }; const var_decl = var_decl_maybe orelse continue; + if (var_decl.ast.init_node == 0) continue; const init_node = var_decl.ast.init_node; const init_node_tag = tags[init_node]; switch (init_node_tag) { - .builtin_call => try 
maybeCollectImport(tree, init_node, import_arr), + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => try maybeCollectImport(tree, init_node, import_arr), .field_access => { const lhs = tree.nodes.items(.data)[init_node].lhs; if (isBuiltinCall(tree, lhs)) { @@ -1214,7 +1221,9 @@ pub fn getFieldAccessType( if (current_type.type.is_type_val) return null; var buf: [1]ast.Node.Index = undefined; if (fnProto(tree, current_type_node, &buf)) |func| { - if (try resolveReturnType(store, arena, func, current_type.handle, &bound_type_params)) |ret| { + const has_body = tree.nodes.items(.tag)[current_type_node] == .fn_decl; + const body = tree.nodes.items(.data)[current_type_node].rhs; + if (try resolveReturnType(store, arena, func, current_type.handle, &bound_type_params, if (has_body) body else null)) |ret| { current_type = ret; // Skip to the right paren var paren_count: usize = 1; @@ -1263,14 +1272,17 @@ pub fn getFieldAccessType( pub fn isNodePublic(tree: ast.Tree, node: ast.Node.Index) bool { var buf: [1]ast.Node.Index = undefined; return switch (tree.nodes.items(.tag)[node]) { - .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { - const var_decl = varDecl(tree, node).?; - return var_decl.visib_token != null; - }, - .fn_proto => tree.fnProto(node).visib_token != null, - .fn_proto_one => tree.fnProtoOne(&buf, node).visib_token != null, - .fn_proto_simple => tree.fnProtoSimple(&buf, node).visib_token != null, - .fn_proto_multi => tree.fnProtoMulti(node).visib_token != null, + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => varDecl(tree, node).?.visib_token != null, + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => fnProto(tree, node, &buf).?.visib_token != null, else => true, }; } @@ -1288,32 +1300,16 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { // const tag = node.castTag(.ErrorTag).?; // return 
tree.tokenSlice(tag.name_token); // }, - .identifier => return tree.tokenSlice(node), - .fn_proto => if (tree.fnProto(node).name_token) |name| { - return tree.tokenSlice(name); - }, - .fn_proto_one => if (tree.fnProtoOne(&buf, node).name_token) |name| { - return tree.tokenSlice(name); - }, - .fn_proto_multi => if (tree.fnProtoMulti(node).name_token) |name| { - return tree.tokenSlice(name); - }, - .fn_proto_simple => if (tree.fnProtoSimple(&buf, node).name_token) |name| { - return tree.tokenSlice(name); - }, - // .call, - // .call_comma, - // .call_one, - // .call_one_comma, - // .async_call, - // .async_call_comma, - // .async_call_one, - // .async_call_one_comma, - // => return tree.tokenSlice(main_token - 1), + .identifier => return tree.tokenSlice(main_token), + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => if (fnProto(tree, node, &buf).?.name_token) |name| + return tree.tokenSlice(name), .field_access => return tree.tokenSlice(data[node].rhs), - else => { - log.debug("INVALID: {}", .{tree.nodes.items(.tag)[node]}); - }, + else => |tag| log.debug("INVALID: {}", .{tag}), } return null; @@ -1350,13 +1346,7 @@ pub fn fnProto(tree: ast.Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?a pub fn getImportStr(tree: ast.Tree, node: ast.Node.Index, source_index: usize) ?[]const u8 { const node_tags = tree.nodes.items(.tag); var buf: [2]ast.Node.Index = undefined; - const decls = switch (node_tags[node]) { - .root => tree.rootDecls(), - .container_decl, .container_decl_trailing => tree.containerDecl(node).ast.members, - .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node).ast.members, - .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node).ast.members, - else => return null, - }; + const decls = declMembers(tree, node_tags[node], node, &buf); for (decls) |decl_idx| { if (!nodeContainsSourceIndex(tree, decl_idx, source_index)) { @@ -1717,7 +1707,6 @@ fn 
getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod }; const tags = tree.nodes.items(.tag); - // log.debug("{s} - {s}", .{ name, tags[node] }); (try context.symbols.addOne()).* = .{ .name = name, .kind = switch (tags[node]) { @@ -1843,16 +1832,16 @@ pub const DeclWithHandle = struct { return switch (self.decl.*) { .ast_node => |node| try resolveTypeOfNodeInternal(store, arena, .{ .node = node, .handle = self.handle }, bound_type_params), - .param_decl => |*param_decl| { + .param_decl => |param_decl| { if (typeIsType(self.handle.tree, param_decl.type_expr)) { var bound_param_it = bound_type_params.iterator(); while (bound_param_it.next()) |entry| { - if (entry.key == param_decl) return entry.value; + if (std.meta.eql(entry.key, param_decl)) return entry.value; } return null; } else if (node_tags[param_decl.type_expr] == .identifier) { if (param_decl.name_token) |name_tok| { - if (std.mem.eql(u8, tree.tokenSlice(tree.firstToken(param_decl.type_expr)), tree.tokenSlice(name_tok))) + if (std.mem.eql(u8, tree.tokenSlice(main_tokens[param_decl.type_expr]), tree.tokenSlice(name_tok))) return null; } } @@ -1895,7 +1884,7 @@ pub const DeclWithHandle = struct { if (node_tags[pay.items[0]] == .enum_literal) { const scope = findContainerScope(.{ .node = switch_expr_type.type.data.other, .handle = switch_expr_type.handle }) orelse return null; - if (scope.decls.getEntry(self.handle.tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { + if (scope.decls.getEntry(tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { switch (candidate.value) { .ast_node => |node| { if (containerField(tree, node)) |container_field| { @@ -1974,7 +1963,7 @@ fn iterateSymbolsContainerInternal( const token_tags = tree.tokens.items(.tag); const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (isContainer(node_tags[container])) + const is_enum = if (isContainer(node_tags[container]) and node_tags[container] != .root) token_tags[main_token] == 
.keyword_enum else false; @@ -2167,25 +2156,25 @@ fn lookupSymbolGlobalInternal( source_index: usize, use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { - for (handle.document_scope.scopes) |scope, i| { + for (handle.document_scope.scopes) |scope| { // @TODO: Fix scope positions - // if (source_index >= scope.range.start and source_index < scope.range.end) { - if (scope.decls.getEntry(symbol)) |candidate| { - switch (candidate.value) { - .ast_node => |node| { - if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; - }, - .label_decl => continue, - else => {}, + if (source_index >= scope.range.start and source_index < scope.range.end) { + if (scope.decls.getEntry(symbol)) |candidate| { + switch (candidate.value) { + .ast_node => |node| { + if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; + }, + .label_decl => continue, + else => {}, + } + return DeclWithHandle{ + .decl = &candidate.value, + .handle = handle, + }; } - return DeclWithHandle{ - .decl = &candidate.value, - .handle = handle, - }; - } - // if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; - // } + // if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + } if (scope.range.start > source_index) return null; } @@ -2221,7 +2210,7 @@ fn lookupSymbolContainerInternal( const token_tags = tree.tokens.items(.tag); const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (isContainer(node_tags[container])) + const is_enum = if (isContainer(node_tags[container]) and node_tags[container] != .root) token_tags[main_token] == .keyword_enum else false; @@ -2343,10 +2332,11 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen } fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { - const loc = tree.tokenLocation(0, tree.firstToken(node)); + const loc_start = tree.tokenLocation(0, 
tree.firstToken(node)); + const loc_end = tree.tokenLocation(@truncate(u32, loc_start.line_start), tree.lastToken(node)); return SourceRange{ - .start = loc.line_start, - .end = loc.line_end, + .start = loc_start.line_start, + .end = loc_end.line_end, }; } @@ -2504,7 +2494,7 @@ fn makeScopeInternal( (try enum_completions.addOne(allocator)).* = .{ .label = name, .kind = .Constant, - .documentation = if (try getDocComments(allocator, tree, node_idx, .Markdown)) |docs| + .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| .{ .kind = .Markdown, .value = docs } else null, @@ -2693,8 +2683,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, - .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end, + .start = tree.tokenLocation(0, while_node.ast.while_token).line_start, + .end = tree.tokenLocation(0, tree.lastToken(node_idx)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), // .uses = &[0]*ast.Node.Use{}, @@ -2802,6 +2792,7 @@ fn makeScopeInternal( } }, else => { + // log.debug("Implement makeScopeInternal for node type: '{s}'", .{node}); // @TODO: Could we just do node_idx + 1 here? 
// var child_idx: usize = 0; // while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { diff --git a/src/main.zig b/src/main.zig index 8ec29db..a81fd5e 100644 --- a/src/main.zig +++ b/src/main.zig @@ -361,7 +361,7 @@ fn nodeToCompletion( .arena = arena, .orig_handle = orig_handle, }; - try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, is_type_val); + try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, !is_type_val); } if (is_type_val) return; diff --git a/src/references.zig b/src/references.zig index d9e8a4a..975c7ce 100644 --- a/src/references.zig +++ b/src/references.zig @@ -199,11 +199,15 @@ fn symbolReferencesInternal( }, .switch_case_one => { const case_one = tree.switchCaseOne(node); + if (case_one.ast.target_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = case_one.ast.target_expr, .handle = handle }, decl, encoding, context, handler); for (case_one.ast.values) |val| try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); }, .switch_case => { const case = tree.switchCase(node); + if (case.ast.target_expr != 0) + try symbolReferencesInternal(arena, store, .{ .node = case.ast.target_expr, .handle = handle }, decl, encoding, context, handler); for (case.ast.values) |val| try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); }, @@ -401,7 +405,7 @@ fn symbolReferencesInternal( .field_access => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); - const rhs_str = analysis.nodeToString(handle.tree, datas[node].rhs) orelse return; + const rhs_str = analysis.nodeToString(handle.tree, node) orelse return; var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); const left_type = try 
analysis.resolveFieldAccessLhsType( store, @@ -426,7 +430,7 @@ fn symbolReferencesInternal( !left_type.type.is_type_val, )) |child| { if (std.meta.eql(child, decl)) { - try tokenReference(handle, tree.firstToken(datas[node].rhs), encoding, context, handler); + try tokenReference(handle, datas[node].rhs, encoding, context, handler); } } }, From d7ccf6a3c214d2acb5bf592184447c38ce25f2e2 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 3 Mar 2021 17:45:42 +0100 Subject: [PATCH 16/36] References working for current file --- src/analysis.zig | 41 ++++++++++++++++++++++------------------- src/references.zig | 7 +++++-- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 123af3c..cc26195 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -413,7 +413,6 @@ pub fn resolveReturnType( } if (fn_decl.ast.return_type == 0) return null; - log.debug("Ret type: '{s}'", .{tree.nodes.items(.tag)[fn_decl.ast.return_type]}); return resolveTypeOfNodeInternal(store, arena, .{ .node = fn_decl.ast.return_type, .handle = handle, @@ -639,12 +638,11 @@ pub fn resolveTypeOfNodeInternal( .ast_node => |n| { if (n == node) return null; if (varDecl(tree, n)) |var_decl| { - if (var_decl.ast.init_node == node) return null; + if (var_decl.ast.init_node != 0 and var_decl.ast.init_node == node) return null; } }, else => {}, } - log.debug("Resolving child: {s}", .{tree.tokenSlice(child.nameToken())}); return try child.resolveType(store, arena, bound_type_params); } return null; @@ -681,7 +679,6 @@ pub fn resolveTypeOfNodeInternal( else => unreachable, }; - log.debug("Call fn expr: {s}", .{node_tags[call.ast.fn_expr]}); const decl = (try resolveTypeOfNodeInternal( store, arena, @@ -694,41 +691,37 @@ pub fn resolveTypeOfNodeInternal( .other => |n| n, else => return null, }; - log.debug("CALL: {d} - {d}", .{ call.ast.fn_expr, decl_node }); var buf: [1]ast.Node.Index = undefined; - const func_maybe = fnProto(tree, decl_node, &buf); + const 
func_maybe = fnProto(decl.handle.tree, decl_node, &buf); if (func_maybe) |fn_decl| { // check for x.y(..). if '.' is found, it means first param should be skipped const has_self_param = token_tags[call.ast.lparen - 2] == .period; - var it = fn_decl.iterate(tree); + var it = fn_decl.iterate(decl.handle.tree); // Bind type params to the expressions passed in the calls. const param_len = std.math.min(call.ast.params.len + @boolToInt(has_self_param), fn_decl.ast.params.len); while (it.next()) |decl_param| { if (it.param_i == 0 and has_self_param) continue; if (it.param_i >= param_len) break; - if (!typeIsType(tree, decl_param.type_expr)) continue; + if (!typeIsType(decl.handle.tree, decl_param.type_expr)) continue; const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ .node = call.ast.params[it.param_i - @boolToInt(has_self_param)], - .handle = handle, + .handle = decl.handle, }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; _ = try bound_type_params.put(decl_param, call_param_type); } - const has_body = node_tags[decl_node] == .fn_decl; - const body = datas[decl_node].rhs; + const has_body = decl.handle.tree.nodes.items(.tag)[decl_node] == .fn_decl; + const body = decl.handle.tree.nodes.items(.data)[decl_node].rhs; return try resolveReturnType(store, arena, fn_decl, decl.handle, bound_type_params, if (has_body) body else null); } return null; }, - .@"comptime", .@"nosuspend" => { - return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); - }, - .grouped_expression => { + .@"comptime", .@"nosuspend", .grouped_expression => { return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); }, .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => { @@ -769,8 +762,9 @@ pub fn resolveTypeOfNodeInternal( }, .field_access => { const field_access = datas[node]; - const rhs_str = 
nodeToString(handle.tree, node) orelse return null; + if (datas[node].rhs == 0) return null; + const rhs_str = tree.tokenSlice(datas[node].rhs); // If we are accessing a pointer type, remove one pointerness level :) const left_type = try resolveFieldAccessLhsType( store, @@ -786,7 +780,6 @@ pub fn resolveTypeOfNodeInternal( .other => |n| n, else => return null, }; - log.debug("Left_type_node: '{s}' for rhs: '{s}'", .{ node_tags[left_type_node], rhs_str }); if (try lookupSymbolContainer( store, arena, @@ -794,7 +787,6 @@ pub fn resolveTypeOfNodeInternal( rhs_str, left_type.type.is_type_val, )) |child| { - log.debug("Found child: '{s}'", .{tree.tokenSlice(child.nameToken())}); return try child.resolveType(store, arena, bound_type_params); } else return null; }, @@ -939,8 +931,9 @@ pub fn resolveTypeOfNodeInternal( .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, + .root => return TypeWithHandle.typeVal(node_handle), else => { - log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]}); + // log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]}); }, } return null; @@ -1309,6 +1302,16 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { => if (fnProto(tree, node, &buf).?.name_token) |name| return tree.tokenSlice(name), .field_access => return tree.tokenSlice(data[node].rhs), + .call, + .call_comma, + .async_call, + .async_call_comma, + => return tree.tokenSlice(tree.callFull(node).ast.lparen - 1), + .call_one, + .call_one_comma, + .async_call_one, + .async_call_one_comma, + => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), else => |tag| log.debug("INVALID: {}", .{tag}), } diff --git a/src/references.zig b/src/references.zig index 975c7ce..951476b 100644 --- a/src/references.zig +++ b/src/references.zig @@ -78,6 +78,7 @@ fn symbolReferencesInternal( const node = node_handle.node; const handle = node_handle.handle; const tree = handle.tree; + if 
(node > tree.nodes.len) return; const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); @@ -405,7 +406,7 @@ fn symbolReferencesInternal( .field_access => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); - const rhs_str = analysis.nodeToString(handle.tree, node) orelse return; + const rhs_str = tree.tokenSlice(datas[node].rhs); var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); const left_type = try analysis.resolveFieldAccessLhsType( store, @@ -504,9 +505,11 @@ pub fn symbolReferences( for (handles.items) |handle| { if (include_decl and handle == curr_handle) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); } - try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); + // @TODO: make references across files working + // try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); } }, .param_decl => |param| { From 7495aab28b5ae719cc2d1497e2dea8a3432a4c82 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 3 Mar 2021 21:02:31 +0100 Subject: [PATCH 17/36] Start implementing usingnamespace --- src/analysis.zig | 54 ++++++++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index cc26195..4f5d159 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2056,7 +2056,7 @@ fn iterateSymbolsGlobalInternal( try callback(context, DeclWithHandle{ .decl = &entry.value, .handle = handle }); } - // for (scope.uses) |use| { + // for (Index) |use| { // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; // try 
use_trail.append(use); @@ -2270,7 +2270,7 @@ pub const DocumentScope = struct { scope.range.start, scope.range.end, {}, - // scope.uses.len, + scope.uses.len, }); var decl_it = scope.decls.iterator(); @@ -2286,7 +2286,7 @@ pub const DocumentScope = struct { pub fn deinit(self: DocumentScope, allocator: *std.mem.Allocator) void { for (self.scopes) |*scope| { scope.decls.deinit(); - // allocator.free(scope.uses); + allocator.free(scope.uses); allocator.free(scope.tests); } allocator.free(self.scopes); @@ -2308,7 +2308,7 @@ pub const Scope = struct { range: SourceRange, decls: std.StringHashMap(Declaration), tests: []const ast.Node.Index, - // uses: []const *ast.Node.Data, + uses: []const ast.Node.Index, data: Data, }; @@ -2417,26 +2417,25 @@ fn makeScopeInternal( (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .{ .container = node_idx }, }; const scope_idx = scopes.items.len - 1; - // var uses = std.ArrayList(*ast.Node.Use).init(allocator); + var uses = std.ArrayList(ast.Node.Index).init(allocator); var tests = std.ArrayList(ast.Node.Index).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); - // uses.deinit(); + uses.deinit(); tests.deinit(); } for (ast_decls) |decl| { - // @TODO: Implement using namespace - // if (decl.castTag(.Use)) |use| { - // try uses.append(use); - // continue; - // } + if (tags[decl] == .@"usingnamespace") { + try uses.append(decl); + continue; + } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, decl); const name = getDeclName(tree, decl) orelse continue; @@ -2512,7 +2511,7 @@ fn makeScopeInternal( } scopes.items[scope_idx].tests = tests.toOwnedSlice(); - // scopes.items[scope_idx].uses = uses.toOwnedSlice(); + scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; } @@ -2524,7 +2523,7 @@ fn makeScopeInternal( (try 
scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .{ .function = node_idx }, }; @@ -2562,7 +2561,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, last_token).line_start, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2573,16 +2572,16 @@ fn makeScopeInternal( (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .{ .block = node_idx }, }; var scope_idx = scopes.items.len - 1; - // var uses = std.ArrayList(*ast.Node.Use).init(allocator); + var uses = std.ArrayList(ast.Node.Index).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); - // uses.deinit(); + uses.deinit(); } const statements: []const ast.Node.Index = switch (node) { @@ -2601,6 +2600,11 @@ fn makeScopeInternal( }; for (statements) |idx| { + if (tags[idx] == .@"usingnamespace") { + try uses.append(idx); + continue; + } + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, idx); if (varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); @@ -2610,7 +2614,7 @@ fn makeScopeInternal( } } - // scopes.items[scope_idx].uses = uses.toOwnedSlice(); + scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, .@"comptime", .@"nosuspend" => { @@ -2630,7 +2634,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2660,7 +2664,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.else_expr)).line_end, 
}, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2690,7 +2694,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(node_idx)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2707,7 +2711,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2736,7 +2740,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.else_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; @@ -2763,7 +2767,7 @@ fn makeScopeInternal( .end = tree.tokenLocation(0, tree.lastToken(switch_case.ast.target_expr)).line_end, }, .decls = std.StringHashMap(Declaration).init(allocator), - // .uses = &[0]*ast.Node.Use{}, + .uses = &.{}, .tests = &.{}, .data = .other, }; From a80e9b262e5bd82602d2a516ce534190486dd601 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 4 Mar 2021 13:26:11 +0100 Subject: [PATCH 18/36] Implement usingnamespace and fix function snippets --- src/analysis.zig | 100 +++++++++++++++++++++++------------------------ 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 4f5d159..8a16e24 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -18,7 +18,7 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI idx -= 1; if (tokens[idx] == .keyword_extern and idx > 0) idx -= 1; - if (tokens[idx] == .keyword_pub and idx < 0) + if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, .local_var_decl, .global_var_decl, .aligned_var_decl, 
.simple_var_decl => { @@ -118,10 +118,10 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a var it = func.iterate(tree); while (it.next()) |param| { - if (skip_self_param and it.param_i == 0) continue; - if (it.param_i != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); + if (skip_self_param and it.param_i -1 == 0) continue; + if (it.param_i -1 != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); - try buf_stream.print("{d}", .{it.param_i + 1}); + try buf_stream.print("{d}:", .{it.param_i}); if (param.comptime_noalias) |token_index| { if (token_tags[token_index] == .keyword_comptime) @@ -141,8 +141,8 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a else try buffer.appendSlice("..."); } else { - var curr_token = param.type_expr; - var end_token = tree.lastToken(func.ast.params[it.param_i]); + var curr_token = tree.firstToken(param.type_expr); + var end_token = tree.lastToken(param.type_expr); while (curr_token <= end_token) : (curr_token += 1) { const tag = token_tags[curr_token]; const is_comma = tag == .comma; @@ -1989,18 +1989,20 @@ fn iterateSymbolsContainerInternal( try callback(context, decl); } - // for (container_scope.uses) |use| { - // if (handle != orig_handle and use.visib_token == null) continue; - // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - // try use_trail.append(use); + for (container_scope.uses) |use| { + const use_token = tree.nodes.items(.main_token)[use]; + const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub; + if (handle != orig_handle and !is_pub) continue; + if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + try use_trail.append(use); - // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - // const use_expr_node = switch 
(use_expr.type.data) { - // .other => |n| n, - // else => continue, - // }; - // try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, orig_handle, callback, context, false, use_trail); - // } + const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = tree.nodes.items(.data)[use].rhs, .handle = handle })) orelse continue; + const use_expr_node = switch (use_expr.type.data) { + .other => |n| n, + else => continue, + }; + try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, orig_handle, callback, context, false, use_trail); + } } } @@ -2056,17 +2058,17 @@ fn iterateSymbolsGlobalInternal( try callback(context, DeclWithHandle{ .decl = &entry.value, .handle = handle }); } - // for (Index) |use| { - // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - // try use_trail.append(use); + for (scope.uses) |use| { + if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + try use_trail.append(use); - // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - // const use_expr_node = switch (use_expr.type.data) { - // .other => |n| n, - // else => continue, - // }; - // try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, handle, callback, context, false, use_trail); - // } + const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle })) orelse continue; + const use_expr_node = switch (use_expr.type.data) { + .other => |n| n, + else => continue, + }; + try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, handle, callback, context, false, use_trail); + } } if (scope.range.start >= source_index) return; @@ -2104,27 +2106,27 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, 
source_index: usize) Ty fn resolveUse( store: *DocumentStore, arena: *std.heap.ArenaAllocator, - // uses: []const *ast.Node.Use, + uses: []const ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle, - use_trail: *std.ArrayList(*ast.Node.Use), + use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { - // for (uses) |use| { - // if (std.mem.indexOfScalar(*ast.Node.Use, use_trail.items, use) != null) continue; - // try use_trail.append(use); + for (uses) |use| { + if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + try use_trail.append(use); - // const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = use.expr, .handle = handle })) orelse continue; - // const use_expr_node = switch (use_expr.type.data) { - // .other => |n| n, - // else => continue, - // }; - // if (try lookupSymbolContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, symbol, false, use_trail)) |candidate| { - // if (candidate.handle != handle and !candidate.isPublic()) { - // continue; - // } - // return candidate; - // } - // } + const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle })) orelse continue; + const use_expr_node = switch (use_expr.type.data) { + .other => |n| n, + else => continue, + }; + if (try lookupSymbolContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, symbol, false, use_trail)) |candidate| { + if (candidate.handle != handle and !candidate.isPublic()) { + continue; + } + return candidate; + } + } return null; } @@ -2160,7 +2162,6 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { - // @TODO: Fix scope positions if (source_index >= scope.range.start and source_index < scope.range.end) { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) 
{ @@ -2176,7 +2177,7 @@ fn lookupSymbolGlobalInternal( }; } - // if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; } if (scope.range.start > source_index) return null; @@ -2233,7 +2234,7 @@ fn lookupSymbolContainerInternal( return DeclWithHandle{ .decl = &candidate.value, .handle = handle }; } - // if (try resolveUse(store, arena, container_scope.uses, symbol, handle, use_trail)) |result| return result; + if (try resolveUse(store, arena, container_scope.uses, symbol, handle, use_trail)) |result| return result; return null; } @@ -2262,14 +2263,13 @@ pub const DocumentScope = struct { for (self.scopes) |scope| { log.debug( \\-------------------------- - \\Scope {}, range: [{}, {}) - \\ {} usingnamespaces + \\Scope {}, range: [{d}, {d}) + \\ {d} usingnamespaces \\Decls: , .{ scope.data, scope.range.start, scope.range.end, - {}, scope.uses.len, }); From 08075a1261b4b8bd1499eb8fd2d3a360c381829e Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 4 Mar 2021 16:14:30 +0100 Subject: [PATCH 19/36] Start fixing semantic tokens --- src/analysis.zig | 14 +- src/semantic_tokens.zig | 1155 ++++++++++++++++++++------------------- 2 files changed, 587 insertions(+), 582 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 8a16e24..519d1fa 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -118,8 +118,8 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a var it = func.iterate(tree); while (it.next()) |param| { - if (skip_self_param and it.param_i -1 == 0) continue; - if (it.param_i -1 != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); + if (skip_self_param and it.param_i - 1 == 0) continue; + if (it.param_i - 1 != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); try buf_stream.print("{d}:", 
.{it.param_i}); @@ -186,7 +186,7 @@ pub fn isTypeFunction(tree: ast.Tree, func: ast.full.FnProto) bool { return typeIsType(tree, func.ast.return_type); } -pub fn isGenericFunction(tree: ast.Tree, func: *ast.full.FnProto) bool { +pub fn isGenericFunction(tree: ast.Tree, func: ast.full.FnProto) bool { var it = func.iterate(); while (it.next()) |param| { if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { @@ -699,15 +699,15 @@ pub fn resolveTypeOfNodeInternal( const has_self_param = token_tags[call.ast.lparen - 2] == .period; var it = fn_decl.iterate(decl.handle.tree); - // Bind type params to the expressions passed in the calls. + // Bind type params to the expressions passed in txhe calls. const param_len = std.math.min(call.ast.params.len + @boolToInt(has_self_param), fn_decl.ast.params.len); while (it.next()) |decl_param| { - if (it.param_i == 0 and has_self_param) continue; - if (it.param_i >= param_len) break; + if (it.param_i - 1 == 0 and has_self_param) continue; + if (it.param_i - 1 >= param_len) break; if (!typeIsType(decl.handle.tree, decl_param.type_expr)) continue; const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = call.ast.params[it.param_i - @boolToInt(has_self_param)], + .node = call.ast.params[it.param_i - 1 - @boolToInt(has_self_param)], .handle = decl.handle, }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 68c16df..e01aae6 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -107,25 +107,24 @@ fn writeTokenMod( } } -fn writeDocComments(builder: *Builder, tree: *ast.Tree, doc: *ast.Node.DocComment) !void { - var tok_idx = doc.first_line; - while (tree.token_ids[tok_idx] == .DocComment or - tree.token_ids[tok_idx] == .ContainerDocComment or - tree.token_ids[tok_idx] == .LineComment) : (tok_idx += 1) +fn writeDocComments(builder: *Builder, tree: *ast.Tree, doc: 
*ast.TokenIndex) !void { + const token_tags = tree.tokens.items(.tag); + var tok_idx = doc; + while (token_tags[tok_idx] == .doc_comment or + token_tags[tok_idx] == .container_doc_comment) : (tok_idx += 1) { var tok_mod = TokenModifiers{}; - if (tree.token_ids[tok_idx] == .DocComment or tree.token_ids[tok_idx] == .ContainerDocComment) - tok_mod.set("documentation"); + tok_mod.set("documentation"); try builder.add(tok_idx, .comment, tok_mod); } } -fn fieldTokenType(container_decl: *ast.Node.ContainerDecl, handle: *DocumentStore.Handle) ?TokenType { - if (container_decl.kind_token > handle.tree.token_ids.len) return null; - return @as(?TokenType, switch (handle.tree.token_ids[container_decl.kind_token]) { - .Keyword_struct => .field, - .Keyword_union, .Keyword_enum => .enumMember, +fn fieldTokenType(container_decl: *ast.full.ContainerDecl, handle: *DocumentStore.Handle) ?TokenType { + if (container_decl.ast.main_token > handle.tree.tokens.len) return null; + return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[container_decl.ast.main_token]) { + .keyword_Struct => .field, + .keyword_union, .keyword_enum => .enumMember, else => null, }); } @@ -139,35 +138,33 @@ const GapHighlighter = struct { // TODO More highlighting here fn handleTok(self: *GapHighlighter, tok: ast.TokenIndex) !void { - const tok_id = self.builder.handle.tree.token_ids[tok]; - if (tok_id == .LineComment) { - try writeToken(self.builder, tok, .comment); - } else if (tok_id == .ContainerDocComment or tok_id == .DocComment) { + const tok_id = self.builder.handle.tree.tokens.items(.tag)[tok]; + if (tok_id == .container_doc_comment or tok_id == .doc_comment) { try writeTokenMod(self.builder, tok, .comment, .{ .documentation = true }); - } else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Id.Keyword_align) and - @enumToInt(tok_id) <= @enumToInt(std.zig.Token.Id.Keyword_while)) + } else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Tag.keyword_align) and + @enumToInt(tok_id) <= 
@enumToInt(std.zig.Token.Tag.keyword_while)) { const tok_type: TokenType = switch (tok_id) { - .Keyword_true, - .Keyword_false, - .Keyword_null, - .Keyword_undefined, - .Keyword_unreachable, + .keyword_true, + .keyword_false, + .keyword_null, + .keyword_undefined, + .keyword_unreachable, => .keywordLiteral, else => .keyword, }; try writeToken(self.builder, tok, tok_type); - } else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Id.Bang) and - @enumToInt(tok_id) <= @enumToInt(std.zig.Token.Id.Tilde) and - tok_id != .Period and tok_id != .Comma and tok_id != .RParen and - tok_id != .LParen and tok_id != .RBrace and tok_id != .LBrace and - tok_id != .Semicolon and tok_id != .Colon) + } else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Tag.bang) and + @enumToInt(tok_id) <= @enumToInt(std.zig.Token.Tag.tilde) and + tok_id != .period and tok_id != .comma and tok_id != .r_paren and + tok_id != .l_paren and tok_id != .r_brace and tok_id != .l_brace and + tok_id != .semicolon and tok_id != .colon) { try writeToken(self.builder, tok, .operator); - } else if (tok_id == .IntegerLiteral or tok_id == .FloatLiteral) { + } else if (tok_id == .integer_literal or tok_id == .float_literal) { try writeToken(self.builder, tok, .number); - } else if (tok_id == .StringLiteral or tok_id == .MultilineStringLiteralLine or tok_id == .CharLiteral) { + } else if (tok_id == .string_literal or tok_id == .multiline_string_literal_line or tok_id == .char_literal) { try writeToken(self.builder, tok, .string); } } @@ -177,7 +174,7 @@ const GapHighlighter = struct { } fn next(self: *GapHighlighter, node: *ast.Node) !void { - if (self.current_idx > 0 and self.builder.handle.tree.token_ids[self.current_idx - 1] == .ContainerDocComment) { + if (self.current_idx > 0 and self.builder.handle.tree.token_ids[self.current_idx - 1] == .container_doc_comment) { try self.handleTok(self.current_idx - 1); } @@ -254,582 +251,590 @@ fn writeContainerField( } } -// @TODO: Fix semantic tokens // TODO This is 
very slow and does a lot of extra work, improve in the future. -// fn writeNodeTokens( -// builder: *Builder, -// arena: *std.heap.ArenaAllocator, -// store: *DocumentStore, -// maybe_node: ?ast.Node.Index, -// tree: ast.Tree, -// ) error{OutOfMemory}!void { -// if (maybe_node == null) return; +fn writeNodeTokens( + builder: *Builder, + arena: *std.heap.ArenaAllocator, + store: *DocumentStore, + maybe_node: ?ast.Node.Index, + tree: ast.Tree, +) error{OutOfMemory}!void { + if (maybe_node == null) return; -// const node_tags = tree.nodes.items(.tag); -// const token_tags = tree.tokens.items(.tag); -// const nodes_data = tree.nodes.items(.data); -// const main_tokens = tree.nodes.items(.main_token); + const node = maybe_node.?; + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const tag = node_tags[node]; + const main_token = main_tokens[node]; + const handle = builder.handle; -// const node = maybe_node.?; -// const handle = builder.handle; + const FrameSize = @sizeOf(@Frame(writeNodeTokens)); + var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); + defer arena.child_allocator.free(child_frame); -// const FrameSize = @sizeOf(@Frame(writeNodeTokens)); -// var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); -// defer arena.child_allocator.free(child_frame); + switch (tag) { + .root => { + var gap_highlighter = GapHighlighter.init(builder, 0); + var buf: [2]ast.Node.Index = undefined; + for (analysis.declMembers(tree, .root, 0, &buf)) |child| { + try gap_highlighter.next(child); + if (node_tags[child].isContainerField()) { + try writeContainerField(builder, arena, store, analysis.containerField(tree, child), .field, child_frame); + } else { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); + } + } + try 
gap_highlighter.end(handle.tree.tokens.len - 1); + }, + .block, .block_semicolon, .block_two, .block_two_semicolon => { + const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: { + try writeToken(builder, main_token - 2, .label); + break :block main_token + 1; + } else 0; -// switch (node_tags[node]) { -// .root, .block, .block_semicolon => |tag| { -// const first_tok = if (tag != block_semicolon) block: { -// const lbrace = main_tokens[node]; -// if (token_tags[lbrace - 1] == .colon and token_tags[lbrace - 2] == .identifier) -// try writeToken(builder, lbrace - 2, .label); + var gap_highlighter = GapHighlighter.init(builder, first_tok); + const statements: []const ast.Node.Index = switch (tag) { + .block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs], + .block_two, .block_two_semicolon => blk: { + const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + const len: usize = if (datas[node].lhs == 0) + @as(usize, 0) + else if (datas[node].rhs == 0) + @as(usize, 1) + else + @as(usize, 2); + break :blk statements[0..len]; + }, + else => unreachable, + }; -// break :block lbrace + 1; -// } else 0; + for (statements) |child| { + try gap_highlighter.next(child); + if (node_tags[child].isContainerField()) { + try writeContainerField(builder, arena, store, analysis.containerField(tree, child), .field, child_frame); + } else { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); + } + } -// var gap_highlighter = GapHighlighter.init(builder, first_tok); -// var child_idx: usize = 0; -// while (node.iterate(child_idx)) |child| : (child_idx += 1) { -// try gap_highlighter.next(child); -// if (child.cast(ast.Node.ContainerField)) |container_field| { -// try writeContainerField(builder, arena, store, container_field, .field, child_frame); -// } else { -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child 
}); -// } -// } + try gap_highlighter.end(tree.lastToken(node)); + }, + .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + const var_decl = analysis.varDecl(tree, node).?; + if (analysis.getDocCommentTokenIndex(tree, node)) |comment_idx| + try writeDocComments(builder, handle.tree, comment_idx); -// if (node.tag == .Root) { -// try gap_highlighter.end(handle.tree.token_ids.len - 1); -// } else { -// try gap_highlighter.end(node.lastToken()); -// } -// }, -// .VarDecl => { -// const var_decl = node.cast(ast.Node.VarDecl).?; -// if (var_decl.getDocComments()) |doc| try writeDocComments(builder, handle.tree, doc); -// try writeToken(builder, var_decl.getVisibToken(), .keyword); -// try writeToken(builder, var_decl.getExternExportToken(), .keyword); -// try writeToken(builder, var_decl.getThreadLocalToken(), .keyword); -// try writeToken(builder, var_decl.getComptimeToken(), .keyword); -// try writeToken(builder, var_decl.mut_token, .keyword); -// if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { -// try colorIdentifierBasedOnType(builder, decl_type, var_decl.name_token, .{ .declaration = true }); -// } else { -// try writeTokenMod(builder, var_decl.name_token, .variable, .{ .declaration = true }); -// } -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getTypeNode() }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getAlignNode() }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getSectionNode() }); -// try writeToken(builder, var_decl.getEqToken(), .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.getInitNode() }); -// }, -// .Use => { -// const use = node.cast(ast.Node.Use).?; -// if (use.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); -// try writeToken(builder, 
use.visib_token, .keyword); -// try writeToken(builder, use.use_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, use.expr }); -// }, -// .ErrorSetDecl => { -// const error_set = node.cast(ast.Node.ErrorSetDecl).?; -// try writeToken(builder, error_set.error_token, .keyword); -// for (error_set.declsConst()) |decl| -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl }); -// }, -// .ContainerDecl => { -// const container_decl = node.cast(ast.Node.ContainerDecl).?; -// try writeToken(builder, container_decl.layout_token, .keyword); -// try writeToken(builder, container_decl.kind_token, .keyword); -// switch (container_decl.init_arg_expr) { -// .None => {}, -// .Enum => |enum_expr| if (enum_expr) |expr| -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, expr }) -// else -// try writeToken(builder, container_decl.kind_token + 2, .keyword), -// .Type => |type_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_node }), -// } + try writeToken(builder, var_decl.visib_token, .keyword); + try writeToken(builder, var_decl.extern_export_token, .keyword); + try writeToken(builder, var_decl.threadlocal_token, .keyword); + try writeToken(builder, var_decl.comptime_token, .keyword); + try writeToken(builder, var_decl.ast.mut_token, .keyword); + if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { + try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true }); + } else { + try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true }); + } + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node }); + try await @asyncCall(child_frame, {}, 
writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node }); + try writeToken(builder, var_decl.ast.mut_token + 2, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node }); + }, + .@"usingnamespace" => { + const first_tok = tree.firstToken(node); + if (first_tok > 0 and token_tags[first_tok - 1] == .doc_comment) + try writeDocComments(builder, builder.handle.tree, first_tok - 1); + try writeToken(builder, if (token_tags[first_tok] == .keyword_pub) first_tok else null, .keyword); + try writeToken(builder, main_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + }, + .error_set_decl => { + // @TODO: Semantic highlighting for error set decl + }, + .container_decl, + .container_decl_trailing, + .container_decl_two, + .container_decl_two_trailing, + .container_decl_arg, + .container_decl_arg_trailing, + => { + var buf: [2]ast.Node.Index = undefined; + const decl: ast.full.ContainerDecl = switch (tag) { + .container_decl, .container_decl_trailing => tree.containerDecl(node), + .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node), + .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node), + else => unreachable, + }; -// var gap_highlighter = GapHighlighter.init(builder, container_decl.lbrace_token + 1); -// const field_token_type = fieldTokenType(container_decl, handle); -// for (container_decl.fieldsAndDeclsConst()) |child| { -// try gap_highlighter.next(child); -// if (child.cast(ast.Node.ContainerField)) |container_field| { -// try writeContainerField(builder, arena, store, container_field, field_token_type, child_frame); -// } else { -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); -// } -// } -// try gap_highlighter.end(node.lastToken()); -// }, -// .ErrorTag => { -// const error_tag = node.cast(ast.Node.ErrorTag).?; 
-// if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); -// try writeToken(builder, error_tag.firstToken(), .errorTag); -// }, -// .Identifier => { -// if (analysis.isTypeIdent(handle.tree, node.firstToken())) { -// return try writeToken(builder, node.firstToken(), .type); -// } + try writeToken(builder, decl.layout_token, .keyword); + try writeToken(builder, decl.ast.main_token, .keyword); + if (decl.enum_token) |enum_token| { + if (decl.ast.arg != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }) + else + try writeToken(builder, enum_token, .keyword); + } else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }); -// if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { -// if (child.decl.* == .param_decl) { -// return try writeToken(builder, node.firstToken(), .parameter); -// } -// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); -// if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { -// try colorIdentifierBasedOnType(builder, decl_type, node.firstToken(), .{}); -// } else { -// try writeTokenMod(builder, node.firstToken(), .variable, .{}); -// } -// } -// }, -// .FnProto => { -// const fn_proto = node.cast(ast.Node.FnProto).?; -// if (fn_proto.getDocComments()) |docs| try writeDocComments(builder, handle.tree, docs); -// try writeToken(builder, fn_proto.getVisibToken(), .keyword); -// try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); -// try writeToken(builder, fn_proto.fn_token, .keyword); + var gap_highlighter = GapHighlighter.init(builder, main_token + 1); + const field_token_type = fieldTokenType(decl, handle); + for 
(decl.ast.members) |child| { + try gap_highlighter.next(child); + if (node_tags[child].isContainerField()) { + try writeContainerField(builder, arena, store, analysis.containerField(tree, child), field_token_type, child_frame); + } else { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); + } + } + try gap_highlighter.end(tree.lastToken(node)); + }, + .error_value => { + // if (error_tag.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); + try writeToken(builder, datas[node].rhs, .errorTag); + }, + .identifier => { + if (analysis.isTypeIdent(handle.tree, main_token)) { + return try writeToken(builder, main_token, .type); + } -// if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| { -// if (child.decl.* == .param_decl) { -// return try writeToken(builder, node.firstToken(), .parameter); -// } -// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); -// if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { -// try colorIdentifierBasedOnType(builder, decl_type, node.firstToken(), .{}); -// } else { -// try writeTokenMod(builder, node.firstToken(), .variable, .{}); -// } -// } -// }, -// .FnProto => { -// const fn_proto = node.cast(ast.Node.FnProto).?; -// if (fn_proto.getDocComments()) |docs| try writeDocComments(builder, handle.tree, docs); -// try writeToken(builder, fn_proto.getVisibToken(), .keyword); -// try writeToken(builder, fn_proto.getExternExportInlineToken(), .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getLibName() }); -// try writeToken(builder, fn_proto.fn_token, .keyword); + if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.tokens.items(.start)[main_token])) |child| { + if (child.decl.* == .param_decl) { + return try writeToken(builder, main_token, .parameter); + } + var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); + if (try child.resolveType(store, arena, &bound_type_params)) |decl_type| { + try colorIdentifierBasedOnType(builder, decl_type, main_token, .{}); + } else { + try writeTokenMod(builder, main_token, .variable, .{}); + } + } + }, + .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multiple, .fn_decl => { + var buf: [1]ast.Node.Index = undefined; + const fn_proto: ast.full.FnProto = analysis.fnProto(tree, node, &buf).?; + if (analysis.getDocCommentTokenIndex(tree, node)) |docs| + try writeDocComments(builder, handle.tree, docs); -// const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) -// .type -// else -// .function; + try writeToken(builder, fn_proto.visib_token, .keyword); + try writeToken(builder, fn_proto.extern_export_token, .keyword); + try writeToken(builder, fn_proto.lib_name, 
.string); + try writeToken(builder, fn_proto.ast.fn_token, .keyword); -// try writeTokenMod(builder, fn_proto.getNameToken(), func_name_tok_type, tok_mod); + const func_name_tok_type: TokenType = if (analysis.isTypeFunction(handle.tree, fn_proto)) + .type + else + .function; -// for (fn_proto.paramsConst()) |param_decl| { -// if (param_decl.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); -// try writeToken(builder, param_decl.noalias_token, .keyword); -// try writeToken(builder, param_decl.comptime_token, .keyword); -// try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); -// switch (param_decl.param_type) { -// .any_type => |var_node| try writeToken(builder, var_node.firstToken(), .type), -// .type_expr => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), -// } -// } -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getAlignExpr() }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getSectionExpr() }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getCallconvExpr() }); + const tok_mod = if (analysis.isGenericFunction(handle.tree, fn_proto)) + TokenModifiers{ .generic = true } + else + TokenModifiers{}; -// switch (fn_proto.return_type) { -// .Explicit => |type_expr| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }), -// .InferErrorSet => |type_expr| { -// try writeToken(builder, type_expr.firstToken() - 1, .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, type_expr }); -// }, -// .Invalid => {}, -// } -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.getBodyNode() }); -// }, -// .AnyFrameType => { -// const any_frame_type = node.cast(ast.Node.AnyFrameType).?; -// try 
writeToken(builder, any_frame_type.anyframe_token, .type); -// if (any_frame_type.result) |any_frame_result| { -// try writeToken(builder, any_frame_result.arrow_token, .type); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, any_frame_result.return_type }); -// } -// }, -// .Defer => { -// const defer_node = node.cast(ast.Node.Defer).?; -// try writeToken(builder, defer_node.defer_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, defer_node.expr }); -// }, -// .Comptime => { -// const comptime_node = node.cast(ast.Node.Comptime).?; -// if (comptime_node.doc_comments) |docs| try writeDocComments(builder, handle.tree, docs); -// try writeToken(builder, comptime_node.comptime_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, comptime_node.expr }); -// }, -// .Nosuspend => { -// const nosuspend_node = node.cast(ast.Node.Nosuspend).?; -// try writeToken(builder, nosuspend_node.nosuspend_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, nosuspend_node.expr }); -// }, -// .Payload => { -// const payload = node.cast(ast.Node.Payload).?; -// try writeToken(builder, payload.lpipe, .operator); -// try writeToken(builder, payload.error_symbol.firstToken(), .variable); -// try writeToken(builder, payload.rpipe, .operator); -// }, -// .PointerPayload => { -// const payload = node.cast(ast.Node.PointerPayload).?; -// try writeToken(builder, payload.lpipe, .operator); -// try writeToken(builder, payload.ptr_token, .operator); -// try writeToken(builder, payload.value_symbol.firstToken(), .variable); -// try writeToken(builder, payload.rpipe, .operator); -// }, -// .PointerIndexPayload => { -// const payload = node.cast(ast.Node.PointerIndexPayload).?; -// try writeToken(builder, 
payload.lpipe, .operator); -// try writeToken(builder, payload.ptr_token, .operator); -// try writeToken(builder, payload.value_symbol.firstToken(), .variable); -// if (payload.index_symbol) |index_symbol| try writeToken(builder, index_symbol.firstToken(), .variable); -// try writeToken(builder, payload.rpipe, .operator); -// }, -// .Else => { -// const else_node = node.cast(ast.Node.Else).?; -// try writeToken(builder, else_node.else_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, else_node.body }); -// }, -// .Switch => { -// const switch_node = node.cast(ast.Node.Switch).?; -// try writeToken(builder, switch_node.switch_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_node.expr }); + try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod); -// var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); -// for (switch_node.casesConst()) |case_node| { -// try gap_highlighter.next(case_node); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); -// } -// try gap_highlighter.end(node.lastToken()); -// }, -// .SwitchCase => { -// const switch_case = node.cast(ast.Node.SwitchCase).?; -// for (switch_case.itemsConst()) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); -// try writeToken(builder, switch_case.arrow_token, .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.expr }); -// }, -// .SwitchElse => { -// const switch_else = node.cast(ast.Node.SwitchElse).?; -// try writeToken(builder, switch_else.token, .keyword); -// }, -// .While 
=> { -// const while_node = node.cast(ast.Node.While).?; -// try writeToken(builder, while_node.label, .label); -// try writeToken(builder, while_node.inline_token, .keyword); -// try writeToken(builder, while_node.while_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.continue_expr }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); -// if (while_node.@"else") |else_node| -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); -// }, -// .For => { -// const for_node = node.cast(ast.Node.For).?; -// try writeToken(builder, for_node.label, .label); -// try writeToken(builder, for_node.inline_token, .keyword); -// try writeToken(builder, for_node.for_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); -// if (for_node.@"else") |else_node| -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); -// }, -// .If => { -// const if_node = node.cast(ast.Node.If).?; -// try writeToken(builder, if_node.if_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); -// if (if_node.@"else") |else_node| 
-// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); -// }, -// .ArrayInitializer => { -// const array_initializer = node.cast(ast.Node.ArrayInitializer).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); -// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); -// }, -// .ArrayInitializerDot => { -// const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; -// for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); -// }, -// .StructInitializer => { -// const struct_initializer = node.cast(ast.Node.StructInitializer).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); -// const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { -// .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| -// fieldTokenType(container_decl, handle) -// else -// null, -// else => null, -// } else null; + var it = fn_proto.iterate(tree); + while (it.next()) |param_decl| { + if (param_decl.first_doc_comment) |docs| try writeDocComments(builder, handle.tree, docs); -// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); -// for (struct_initializer.listConst()) |field_init_node| { -// try gap_highlighter.next(field_init_node); -// std.debug.assert(field_init_node.tag == .FieldInitializer); -// const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; -// if (field_token_type) |tok_type| { -// try writeToken(builder, field_init.period_token, tok_type); -// try writeToken(builder, field_init.name_token, tok_type); -// } -// try writeToken(builder, 
field_init.name_token + 1, .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); -// } -// try gap_highlighter.end(struct_initializer.rtoken); -// }, -// .StructInitializerDot => { -// const struct_initializer = node.castTag(.StructInitializerDot).?; + try writeToken(builder, param_decl.comptime_noalias, .keyword); + try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); + if (param_decl.anytype_ellipsis3) |any_token| { + try writeToken(builder, any_token, .type); + } else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr }); + } -// var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); -// for (struct_initializer.listConst()) |field_init_node| { -// try gap_highlighter.next(field_init_node); -// std.debug.assert(field_init_node.tag == .FieldInitializer); -// const field_init = field_init_node.castTag(.FieldInitializer).?; -// try writeToken(builder, field_init.period_token, .field); -// try writeToken(builder, field_init.name_token, .field); -// try writeToken(builder, field_init.name_token + 1, .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); -// } -// try gap_highlighter.end(struct_initializer.rtoken); -// }, -// .Call => { -// const call = node.cast(ast.Node.Call).?; -// try writeToken(builder, call.async_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); -// if (builder.current_token) |curr_tok| { -// if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { -// try writeToken(builder, call.lhs.lastToken(), .function); -// } -// } -// for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); -// }, -// 
.Slice => { -// const slice = node.castTag(.Slice).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); -// try writeToken(builder, slice.start.lastToken() + 1, .operator); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); -// }, -// .ArrayAccess => { -// const arr_acc = node.castTag(.ArrayAccess).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); -// }, -// .Deref, .UnwrapOptional => { -// const suffix = node.cast(ast.Node.SimpleSuffixOp).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); -// try writeToken(builder, suffix.rtoken, .operator); -// }, -// .GroupedExpression => { -// const grouped_expr = node.cast(ast.Node.GroupedExpression).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); -// }, -// .Return, .Break, .Continue => { -// const cfe = node.cast(ast.Node.ControlFlowExpression).?; -// try writeToken(builder, cfe.ltoken, .keyword); -// switch (node.tag) { -// .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), -// .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), -// else => {}, -// } -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); -// }, -// .Suspend => { -// const suspend_node = node.cast(ast.Node.Suspend).?; -// try writeToken(builder, suspend_node.suspend_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); -// }, -// 
.IntegerLiteral => { -// try writeToken(builder, node.firstToken(), .number); -// }, -// .EnumLiteral => { -// const enum_literal = node.cast(ast.Node.EnumLiteral).?; -// try writeToken(builder, enum_literal.dot, .enumMember); -// try writeToken(builder, enum_literal.name, .enumMember); -// }, -// .FloatLiteral => { -// try writeToken(builder, node.firstToken(), .number); -// }, -// .BuiltinCall => { -// const builtin_call = node.cast(ast.Node.BuiltinCall).?; -// try writeToken(builder, builtin_call.builtin_token, .builtin); -// for (builtin_call.paramsConst()) |param| -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); -// }, -// .StringLiteral, .CharLiteral => { -// try writeToken(builder, node.firstToken(), .string); -// }, -// .MultilineStringLiteral => { -// const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; -// for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); -// }, -// .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { -// try writeToken(builder, node.firstToken(), .keywordLiteral); -// }, -// .ErrorType => { -// try writeToken(builder, node.firstToken(), .keyword); -// }, -// .Asm => { -// const asm_expr = node.cast(ast.Node.Asm).?; -// try writeToken(builder, asm_expr.asm_token, .keyword); -// try writeToken(builder, asm_expr.volatile_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); -// // TODO Inputs, outputs. 
-// }, -// .AnyType => { -// try writeToken(builder, node.firstToken(), .type); -// }, -// .TestDecl => { -// const test_decl = node.cast(ast.Node.TestDecl).?; -// if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); -// try writeToken(builder, test_decl.test_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); -// }, -// .Catch => { -// const catch_expr = node.cast(ast.Node.Catch).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); -// try writeToken(builder, catch_expr.op_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); -// }, -// .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, .BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { -// const infix_op = node.cast(ast.Node.SimpleInfixOp).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); -// if (node.tag != .Period) { -// const token_type: TokenType = switch (node.tag) { -// .BoolAnd, .BoolOr, .OrElse => .keyword, -// else => .operator, -// }; + if (fn_proto.ast.align_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr }); + if (fn_proto.ast.section_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr 
}); + if (fn_proto.ast.callconv_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr }); -// try writeToken(builder, infix_op.op_token, token_type); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); -// } -// switch (node.tag) { -// .Period => { -// const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); + if (fn_proto.ast.return_type != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type }); -// // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added -// // writeToken code. -// // Maybe we can hook into it insead? Also applies to Identifier and VarDecl -// var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); -// const lhs_type = try analysis.resolveFieldAccessLhsType( -// store, -// arena, -// (try analysis.resolveTypeOfNodeInternal(store, arena, .{ -// .node = infix_op.lhs, -// .handle = handle, -// }, &bound_type_params)) orelse return, -// &bound_type_params, -// ); -// const left_type_node = switch (lhs_type.type.data) { -// .other => |n| n, -// else => return, -// }; -// if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { -// switch (decl_type.decl.*) { -// .ast_node => |decl_node| { -// if (decl_node.tag == .ContainerField) { -// const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| -// fieldTokenType(container_decl, lhs_type.handle) -// else if (left_type_node.tag == .Root) -// TokenType.field -// else -// null; + if (tag == .fn_decl) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); + }, + .anyframe_type => { + try writeToken(builder, main_token, .type); + if (datas[node].rhs != 0) { + try 
writeToken(builder, datas[node].lhs, .type); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); + } + }, + .@"defer" => { + try writeToken(builder, main_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); + }, + .@"comptime", @"nosuspend" => { + if (analysis.getDocCommentTokenIndex(tree, node)) |doc| + try writeDocComments(builder, handle.tree, doc); + try writeToken(builder, main_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + }, + .@"switch", .switch_comma => { + try writeToken(builder, main_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); + const cases = tree.extra_data[extra.start..extra.end]; -// if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); -// return; -// } else if (decl_node.tag == .ErrorTag) { -// try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); -// } -// }, -// else => {}, -// } + var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); + for (cases) |case_node| { + try gap_highlighter.next(case_node); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); + } + try gap_highlighter.end(node.lastToken()); + }, + .switch_case_one, .switch_case => { + const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node); + for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, item_node }); + // check it it's 'else' + if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword); + try writeToken(builder, switch_case.ast.arrow_token, .operator); + if (switch_case.payload_token) 
|payload_token| { + const p_token = @boolToInt(token_tags[payload_token] == .asterisk); + try writeToken(builder, p_token, .variable); + } + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr }); + }, + .@"while", .while_simple, .while_cont, .for_simple, .@"for" => { + const while_node: ast.full.While = switch (node) { + .@"while" => tree.whileFull(node_idx), + .while_simple => tree.whileSimple(node_idx), + .while_cont => tree.whileCont(node_idx), + .@"for" => tree.forFull(node_idx), + .for_simple => tree.forSimple(node_idx), + else => unreachable, + }; -// if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { -// try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); -// } -// } -// }, -// else => {}, -// } -// }, -// .SliceType => { -// const slice_type = node.castTag(.SliceType).?; -// const ptr_info = slice_type.ptr_info; -// if (ptr_info.align_info) |align_info| { -// try writeToken(builder, slice_type.op_token + 2, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); -// } -// try writeToken(builder, ptr_info.const_token, .keyword); -// try writeToken(builder, ptr_info.volatile_token, .keyword); -// try writeToken(builder, ptr_info.allowzero_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); -// }, -// .PtrType => { -// const pointer_type = node.castTag(.PtrType).?; -// const tok_ids = builder.handle.tree.token_ids; + try writeToken(builder, while_node.label_token, .label); + try writeToken(builder, while_node.inline_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); + try writeToken(builder, while_node.payload_token, .variable); + if (while_node.ast.cont_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, 
arena, store, while_node.ast.cont_expr}); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); + if (while_node.@"else") |else_node| + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); + }, + .For => { + const for_node = node.cast(ast.Node.For).?; + try writeToken(builder, for_node.label, .label); + try writeToken(builder, for_node.inline_token, .keyword); + try writeToken(builder, for_node.for_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.array_expr }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); + if (for_node.@"else") |else_node| + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); + }, + .If => { + const if_node = node.cast(ast.Node.If).?; + try writeToken(builder, if_node.if_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); + if (if_node.@"else") |else_node| + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); + }, + .ArrayInitializer => { + const array_initializer = node.cast(ast.Node.ArrayInitializer).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); + for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); + }, + .ArrayInitializerDot => { + const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; + for 
(array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); + }, + .StructInitializer => { + const struct_initializer = node.cast(ast.Node.StructInitializer).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); + const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { + .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) |container_decl| + fieldTokenType(container_decl, handle) + else + null, + else => null, + } else null; -// const ptr_info = switch (tok_ids[pointer_type.op_token]) { -// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, -// else => pointer_type.ptr_info, -// }; -// const rhs = switch (tok_ids[pointer_type.op_token]) { -// .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, -// else => pointer_type.rhs, -// }; + var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); + for (struct_initializer.listConst()) |field_init_node| { + try gap_highlighter.next(field_init_node); + std.debug.assert(field_init_node.tag == .FieldInitializer); + const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; + if (field_token_type) |tok_type| { + try writeToken(builder, field_init.period_token, tok_type); + try writeToken(builder, field_init.name_token, tok_type); + } + try writeToken(builder, field_init.name_token + 1, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); + } + try gap_highlighter.end(struct_initializer.rtoken); + }, + .StructInitializerDot => { + const struct_initializer = node.castTag(.StructInitializerDot).?; -// const off = switch (tok_ids[pointer_type.op_token]) { -// .Asterisk, .AsteriskAsterisk => blk: { -// try writeToken(builder, 
pointer_type.op_token, .operator); -// break :blk pointer_type.op_token + 1; -// }, -// .LBracket => blk: { -// try writeToken(builder, pointer_type.op_token + 1, .operator); -// const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; + var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); + for (struct_initializer.listConst()) |field_init_node| { + try gap_highlighter.next(field_init_node); + std.debug.assert(field_init_node.tag == .FieldInitializer); + const field_init = field_init_node.castTag(.FieldInitializer).?; + try writeToken(builder, field_init.period_token, .field); + try writeToken(builder, field_init.name_token, .field); + try writeToken(builder, field_init.name_token + 1, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); + } + try gap_highlighter.end(struct_initializer.rtoken); + }, + .Call => { + const call = node.cast(ast.Node.Call).?; + try writeToken(builder, call.async_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); + if (builder.current_token) |curr_tok| { + if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { + try writeToken(builder, call.lhs.lastToken(), .function); + } + } + for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); + }, + .Slice => { + const slice = node.castTag(.Slice).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); + try writeToken(builder, slice.start.lastToken() + 1, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); + }, + .ArrayAccess 
=> { + const arr_acc = node.castTag(.ArrayAccess).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); + }, + .Deref, .UnwrapOptional => { + const suffix = node.cast(ast.Node.SimpleSuffixOp).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); + try writeToken(builder, suffix.rtoken, .operator); + }, + .GroupedExpression => { + const grouped_expr = node.cast(ast.Node.GroupedExpression).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); + }, + .Return, .Break, .Continue => { + const cfe = node.cast(ast.Node.ControlFlowExpression).?; + try writeToken(builder, cfe.ltoken, .keyword); + switch (node.tag) { + .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), + .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), + else => {}, + } + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); + }, + .Suspend => { + const suspend_node = node.cast(ast.Node.Suspend).?; + try writeToken(builder, suspend_node.suspend_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); + }, + .IntegerLiteral => { + try writeToken(builder, node.firstToken(), .number); + }, + .EnumLiteral => { + const enum_literal = node.cast(ast.Node.EnumLiteral).?; + try writeToken(builder, enum_literal.dot, .enumMember); + try writeToken(builder, enum_literal.name, .enumMember); + }, + .FloatLiteral => { + try writeToken(builder, node.firstToken(), .number); + }, + .BuiltinCall => { + const builtin_call = node.cast(ast.Node.BuiltinCall).?; + try writeToken(builder, builtin_call.builtin_token, .builtin); + for (builtin_call.paramsConst()) |param| + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ 
builder, arena, store, param }); + }, + .StringLiteral, .CharLiteral => { + try writeToken(builder, node.firstToken(), .string); + }, + .MultilineStringLiteral => { + const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; + for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); + }, + .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { + try writeToken(builder, node.firstToken(), .keywordLiteral); + }, + .ErrorType => { + try writeToken(builder, node.firstToken(), .keyword); + }, + .Asm => { + const asm_expr = node.cast(ast.Node.Asm).?; + try writeToken(builder, asm_expr.asm_token, .keyword); + try writeToken(builder, asm_expr.volatile_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); + // TODO Inputs, outputs. + }, + .AnyType => { + try writeToken(builder, node.firstToken(), .type); + }, + .TestDecl => { + const test_decl = node.cast(ast.Node.TestDecl).?; + if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); + try writeToken(builder, test_decl.test_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); + }, + .Catch => { + const catch_expr = node.cast(ast.Node.Catch).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); + try writeToken(builder, catch_expr.op_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); + }, + .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, 
.BoolOr, .Div, .EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { + const infix_op = node.cast(ast.Node.SimpleInfixOp).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); + if (node.tag != .Period) { + const token_type: TokenType = switch (node.tag) { + .BoolAnd, .BoolOr, .OrElse => .keyword, + else => .operator, + }; -// if (is_c_ptr) { -// try writeToken(builder, pointer_type.op_token + 2, .operator); -// } + try writeToken(builder, infix_op.op_token, token_type); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); + } + switch (node.tag) { + .Period => { + const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); -// if (ptr_info.sentinel) |sentinel| { -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); -// break :blk sentinel.lastToken() + 2; -// } + // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added + // writeToken code. + // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl + var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); + const lhs_type = try analysis.resolveFieldAccessLhsType( + store, + arena, + (try analysis.resolveTypeOfNodeInternal(store, arena, .{ + .node = infix_op.lhs, + .handle = handle, + }, &bound_type_params)) orelse return, + &bound_type_params, + ); + const left_type_node = switch (lhs_type.type.data) { + .other => |n| n, + else => return, + }; + if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { + switch (decl_type.decl.*) { + .ast_node => |decl_node| { + if (decl_node.tag == .ContainerField) { + const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| + fieldTokenType(container_decl, lhs_type.handle) + else if (left_type_node.tag == .Root) + TokenType.field + else + null; -// break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); -// }, -// else => 0, -// }; + if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); + return; + } else if (decl_node.tag == .ErrorTag) { + try writeToken(builder, infix_op.rhs.firstToken(), .errorTag); + } + }, + else => {}, + } -// if (ptr_info.align_info) |align_info| { -// try writeToken(builder, off, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); -// } -// try writeToken(builder, ptr_info.const_token, .keyword); -// try writeToken(builder, ptr_info.volatile_token, .keyword); -// try writeToken(builder, ptr_info.allowzero_token, .keyword); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); -// }, -// .ArrayType => { -// const array_type = node.castTag(.ArrayType).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, 
arena, store, array_type.rhs }); -// }, -// .ArrayTypeSentinel => { -// const array_type = node.castTag(.ArrayTypeSentinel).?; -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); -// }, -// .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { -// const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; -// const tok_type: TokenType = switch (node.tag) { -// .Try, .Await, .Resume => .keyword, -// else => .operator, -// }; -// try writeToken(builder, prefix_op.op_token, tok_type); -// try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); -// }, -// else => {}, -// } -// } + if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { + try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); + } + } + }, + else => {}, + } + }, + .SliceType => { + const slice_type = node.castTag(.SliceType).?; + const ptr_info = slice_type.ptr_info; + if (ptr_info.align_info) |align_info| { + try writeToken(builder, slice_type.op_token + 2, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); + } + try writeToken(builder, ptr_info.const_token, .keyword); + try writeToken(builder, ptr_info.volatile_token, .keyword); + try writeToken(builder, ptr_info.allowzero_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); + }, + .PtrType => { + const pointer_type = node.castTag(.PtrType).?; + const tok_ids = builder.handle.tree.token_ids; + + const ptr_info = switch (tok_ids[pointer_type.op_token]) { + .AsteriskAsterisk => 
pointer_type.rhs.castTag(.PtrType).?.ptr_info, + else => pointer_type.ptr_info, + }; + const rhs = switch (tok_ids[pointer_type.op_token]) { + .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, + else => pointer_type.rhs, + }; + + const off = switch (tok_ids[pointer_type.op_token]) { + .Asterisk, .AsteriskAsterisk => blk: { + try writeToken(builder, pointer_type.op_token, .operator); + break :blk pointer_type.op_token + 1; + }, + .LBracket => blk: { + try writeToken(builder, pointer_type.op_token + 1, .operator); + const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; + + if (is_c_ptr) { + try writeToken(builder, pointer_type.op_token + 2, .operator); + } + + if (ptr_info.sentinel) |sentinel| { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); + break :blk sentinel.lastToken() + 2; + } + + break :blk pointer_type.op_token + 3 + @boolToInt(is_c_ptr); + }, + else => 0, + }; + + if (ptr_info.align_info) |align_info| { + try writeToken(builder, off, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); + } + try writeToken(builder, ptr_info.const_token, .keyword); + try writeToken(builder, ptr_info.volatile_token, .keyword); + try writeToken(builder, ptr_info.allowzero_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); + }, + .ArrayType => { + const array_type = node.castTag(.ArrayType).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); + }, + .ArrayTypeSentinel => { + const array_type = node.castTag(.ArrayTypeSentinel).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
array_type.sentinel }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); + }, + .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { + const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; + const tok_type: TokenType = switch (node.tag) { + .Try, .Await, .Resume => .keyword, + else => .operator, + }; + try writeToken(builder, prefix_op.op_token, tok_type); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); + }, + else => {}, + } +} // TODO Range version, edit version. pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); // pass root node, which always has index '0' - // try writeNodeTokens(&builder, arena, store, 0, handle.tree); + try writeNodeTokens(&builder, arena, store, 0, handle.tree); return builder.toOwnedSlice(); } From 9224bbd4dc77e9bc415270d581f92725a966a42e Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 4 Mar 2021 22:30:25 +0100 Subject: [PATCH 20/36] Implement all of the semantic tokens --- src/analysis.zig | 41 +- src/semantic_tokens.zig | 876 ++++++++++++++++++++++++---------------- 2 files changed, 544 insertions(+), 373 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 519d1fa..c0814f2 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -30,13 +30,13 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI idx -= 2; // skip '.' 
token }, else => { - // if (isContainer(tags[node])) { - // idx -= 1; // go to '=' - // idx -= 1; // mutability - // idx -= 1; // possible 'pub' - // if (tokens[idx] == .keyword_pub and idx > 0) - // idx -= 1; - // } + if (isContainer(tags[node])) { + idx -= 1; // go to '=' + idx -= 1; // mutability + idx -= 1; // possible 'pub' + if (tokens[idx] == .keyword_pub and idx > 0) + idx -= 1; + } else log.debug("Doc comment check for tag: {s}", .{tags[node]}); }, } @@ -187,7 +187,7 @@ pub fn isTypeFunction(tree: ast.Tree, func: ast.full.FnProto) bool { } pub fn isGenericFunction(tree: ast.Tree, func: ast.full.FnProto) bool { - var it = func.iterate(); + var it = func.iterate(tree); while (it.next()) |param| { if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { return true; @@ -983,7 +983,7 @@ pub const TypeWithHandle = struct { } } - fn isContainer(self: TypeWithHandle, container_kind_tok: std.zig.Token.Tag, tree: ast.Tree) bool { + fn isContainerKind(self: TypeWithHandle, container_kind_tok: std.zig.Token.Tag, tree: ast.Tree) bool { const main_tokens = tree.nodes.items(.main_token); const tags = tree.tokens.items(.tag); switch (self.type.data) { @@ -993,30 +993,33 @@ pub const TypeWithHandle = struct { } pub fn isStructType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainer(.keyword_struct, tree) or self.isRoot(); + return self.isContainerKind(.keyword_struct, tree) or self.isRoot(); } pub fn isNamespace(self: TypeWithHandle, tree: ast.Tree) bool { - if (!self.isStructType()) return false; - var idx: usize = 0; - // @TODO: FIX ME - while (self.type.data.other.iterate(idx)) |child| : (idx += 1) { - if (child.tag == .ContainerField) - return false; + if (!self.isStructType(tree)) return false; + + const node = self.type.data.other; + const tags = tree.nodes.items(.tag); + if (isContainer(tags[node])) { + var buf: [2]ast.Node.Index = undefined; + for (declMembers(tree, tags[node], node, &buf)) |child| { + if 
(tags[child].isContainerField()) return false; + } } return true; } pub fn isEnumType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainer(.keyword_enum, tree); + return self.isContainerKind(.keyword_enum, tree); } pub fn isUnionType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainer(.keyword_union, tree); + return self.isContainerKind(.keyword_union, tree); } pub fn isOpaqueType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainer(.keyword_opaque, tree); + return self.isContainerKind(.keyword_opaque, tree); } pub fn isTypeFunc(self: TypeWithHandle, tree: ast.Tree) bool { diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index e01aae6..344e223 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -65,11 +65,11 @@ const Builder = struct { fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { const start_idx = if (self.current_token) |current_token| - self.handle.tree.tokenLocation[current_token].line_start + self.handle.tree.tokenLocation(0, current_token).line_start else 0; - if (start_idx > self.handle.tree.tokenLocation[token].line_start) + if (start_idx > self.handle.tree.tokenLocation(0, token).line_start) return; const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return; @@ -107,7 +107,7 @@ fn writeTokenMod( } } -fn writeDocComments(builder: *Builder, tree: *ast.Tree, doc: *ast.TokenIndex) !void { +fn writeDocComments(builder: *Builder, tree: ast.Tree, doc: ast.TokenIndex) !void { const token_tags = tree.tokens.items(.tag); var tok_idx = doc; while (token_tags[tok_idx] == .doc_comment or @@ -120,10 +120,11 @@ fn writeDocComments(builder: *Builder, tree: *ast.Tree, doc: *ast.TokenIndex) !v } } -fn fieldTokenType(container_decl: *ast.full.ContainerDecl, handle: *DocumentStore.Handle) ?TokenType { - if (container_decl.ast.main_token > handle.tree.tokens.len) return null; 
- return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[container_decl.ast.main_token]) { - .keyword_Struct => .field, +fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType { + const main_token = handle.tree.nodes.items(.main_token)[container_decl]; + if (main_token > handle.tree.tokens.len) return null; + return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) { + .keyword_struct => .field, .keyword_union, .keyword_enum => .enumMember, else => null, }); @@ -158,7 +159,7 @@ const GapHighlighter = struct { } else if (@enumToInt(tok_id) >= @enumToInt(std.zig.Token.Tag.bang) and @enumToInt(tok_id) <= @enumToInt(std.zig.Token.Tag.tilde) and tok_id != .period and tok_id != .comma and tok_id != .r_paren and - tok_id != .l_paren and tok_id != .r_bracce and tok_id != .l_brace and + tok_id != .l_paren and tok_id != .r_brace and tok_id != .l_brace and tok_id != .semicolon and tok_id != .colon) { try writeToken(self.builder, tok, .operator); @@ -173,16 +174,17 @@ const GapHighlighter = struct { return .{ .builder = builder, .current_idx = start }; } - fn next(self: *GapHighlighter, node: *ast.Node) !void { - if (self.current_idx > 0 and self.builder.handle.tree.token_ids[self.current_idx - 1] == .container_doc_comment) { + fn next(self: *GapHighlighter, node: ast.Node.Index) !void { + const tree = self.builder.handle.tree; + if (self.current_idx > 0 and tree.tokens.items(.tag)[self.current_idx - 1] == .container_doc_comment) { try self.handleTok(self.current_idx - 1); } var i = self.current_idx; - while (i < node.firstToken()) : (i += 1) { + while (i < tree.firstToken(node)) : (i += 1) { try self.handleTok(i); } - self.current_idx = node.lastToken() + 1; + self.current_idx = tree.lastToken(node) + 1; } fn end(self: *GapHighlighter, last: ast.TokenIndex) !void { @@ -194,25 +196,26 @@ const GapHighlighter = struct { }; fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, 
target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void { + const tree = builder.handle.tree; if (type_node.type.is_type_val) { var new_tok_mod = tok_mod; - if (type_node.isNamespace()) + if (type_node.isNamespace(tree)) new_tok_mod.set("namespace") - else if (type_node.isStructType()) + else if (type_node.isStructType(tree)) new_tok_mod.set("struct") - else if (type_node.isEnumType()) + else if (type_node.isEnumType(tree)) new_tok_mod.set("enum") - else if (type_node.isUnionType()) + else if (type_node.isUnionType(tree)) new_tok_mod.set("union") - else if (type_node.isOpaqueType()) + else if (type_node.isOpaqueType(tree)) new_tok_mod.set("opaque"); try writeTokenMod(builder, target_tok, .type, new_tok_mod); - } else if (type_node.isTypeFunc()) { + } else if (type_node.isTypeFunc(tree)) { try writeTokenMod(builder, target_tok, .type, tok_mod); - } else if (type_node.isFunc()) { + } else if (type_node.isFunc(tree)) { var new_tok_mod = tok_mod; - if (type_node.isGenericFunc()) { + if (type_node.isGenericFunc(tree)) { new_tok_mod.set("generic"); } try writeTokenMod(builder, target_tok, .function, new_tok_mod); @@ -225,29 +228,35 @@ fn writeContainerField( builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, - container_field: *ast.Node.ContainerField, + node: ast.Node.Index, field_token_type: ?TokenType, child_frame: anytype, ) !void { - if (container_field.doc_comments) |docs| try writeDocComments(builder, builder.handle.tree, docs); + const container_field = analysis.containerField(builder.handle.tree, node).?; + if (analysis.getDocCommentTokenIndex(builder.handle.tree, node)) |docs| + try writeDocComments(builder, builder.handle.tree, docs); + try writeToken(builder, container_field.comptime_token, .keyword); - if (field_token_type) |tok_type| try writeToken(builder, container_field.name_token, tok_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.type_expr }); - if 
(container_field.align_expr) |n| { - try writeToken(builder, n.firstToken() - 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, n }); + if (field_token_type) |tok_type| try writeToken(builder, container_field.ast.name_token, tok_type); + + if (container_field.ast.type_expr != 0) { + if (container_field.ast.align_expr != 0) { + try writeToken(builder, builder.handle.tree.firstToken(container_field.ast.align_expr) - 2, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.align_expr }); + } + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.type_expr }); } - if (container_field.value_expr) |value_expr| block: { - const eq_tok: ast.TokenIndex = if (container_field.type_expr) |type_expr| - type_expr.lastToken() + 1 - else if (container_field.align_expr) |align_expr| - align_expr.lastToken() + 1 + if (container_field.ast.value_expr != 0) block: { + const eq_tok: ast.TokenIndex = if (container_field.ast.type_expr != 0) + builder.handle.tree.lastToken(container_field.ast.type_expr) + 1 + else if (container_field.ast.align_expr != 0) + builder.handle.tree.lastToken(container_field.ast.align_expr) + 1 else break :block; // Check this, I believe it is correct. 
try writeToken(builder, eq_tok, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, value_expr }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, container_field.ast.value_expr }); } } @@ -257,18 +266,19 @@ fn writeNodeTokens( arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?ast.Node.Index, - tree: ast.Tree, ) error{OutOfMemory}!void { if (maybe_node == null) return; - const node = maybe_node.?; + const handle = builder.handle; + const tree = handle.tree; const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + + const node = maybe_node.?; const tag = node_tags[node]; const main_token = main_tokens[node]; - const handle = builder.handle; const FrameSize = @sizeOf(@Frame(writeNodeTokens)); var child_frame = try arena.child_allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); @@ -281,15 +291,19 @@ fn writeNodeTokens( for (analysis.declMembers(tree, .root, 0, &buf)) |child| { try gap_highlighter.next(child); if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, analysis.containerField(tree, child), .field, child_frame); + try writeContainerField(builder, arena, store, child, .field, child_frame); } else { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); } } - try gap_highlighter.end(handle.tree.tokens.len - 1); + try gap_highlighter.end(@truncate(u32, tree.tokens.len) - 1); }, - .block, .block_semicolon, .block_two, .block_two_semicolon => { - const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == identifier) block: { + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + => { + const first_tok = if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) block: { try 
writeToken(builder, main_token - 2, .label); break :block main_token + 1; } else 0; @@ -313,7 +327,7 @@ fn writeNodeTokens( for (statements) |child| { try gap_highlighter.next(child); if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, analysis.containerField(tree, child), .field, child_frame); + try writeContainerField(builder, arena, store, child, .field, child_frame); } else { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); } @@ -321,7 +335,11 @@ fn writeNodeTokens( try gap_highlighter.end(tree.lastToken(node)); }, - .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => { const var_decl = analysis.varDecl(tree, node).?; if (analysis.getDocCommentTokenIndex(tree, node)) |comment_idx| try writeDocComments(builder, handle.tree, comment_idx); @@ -331,14 +349,20 @@ fn writeNodeTokens( try writeToken(builder, var_decl.threadlocal_token, .keyword); try writeToken(builder, var_decl.comptime_token, .keyword); try writeToken(builder, var_decl.ast.mut_token, .keyword); + if (try analysis.resolveTypeOfNode(store, arena, .{ .node = node, .handle = handle })) |decl_type| { try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, .{ .declaration = true }); } else { try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true }); } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node }); + + if (var_decl.ast.type_node != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.type_node }); + if (var_decl.ast.align_node != 0) + 
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.align_node }); + if (var_decl.ast.section_node != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.section_node }); + try writeToken(builder, var_decl.ast.mut_token + 2, .operator); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, var_decl.ast.init_node }); }, @@ -350,9 +374,6 @@ fn writeNodeTokens( try writeToken(builder, main_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - .error_set_decl => { - // @TODO: Semantic highlighting for error set decl - }, .container_decl, .container_decl_trailing, .container_decl_two, @@ -370,7 +391,7 @@ fn writeNodeTokens( try writeToken(builder, decl.layout_token, .keyword); try writeToken(builder, decl.ast.main_token, .keyword); - if (decl.enum_token) |enum_token| { + if (decl.ast.enum_token) |enum_token| { if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }) else @@ -378,11 +399,11 @@ fn writeNodeTokens( } else if (decl.ast.arg != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, decl.ast.arg }); var gap_highlighter = GapHighlighter.init(builder, main_token + 1); - const field_token_type = fieldTokenType(decl, handle); + const field_token_type = fieldTokenType(node, handle); for (decl.ast.members) |child| { try gap_highlighter.next(child); - if (node_tags[node].isContainerField()) { - try writeContainerField(builder, arena, store, analysis.containerField(tree, node), field_token_type, child_frame); + if (node_tags[child].isContainerField()) { + try writeContainerField(builder, arena, store, child, field_token_type, child_frame); } else { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); } @@ -398,7 +419,13 @@ fn writeNodeTokens( return try 
writeToken(builder, main_token, .type); } - if (try analysis.lookupSymbolGlobal(store, arena, handle, handle.tree.getNodeSource(node), handle.tree.items(.start)[main_token])) |child| { + if (try analysis.lookupSymbolGlobal( + store, + arena, + handle, + handle.tree.getNodeSource(node), + handle.tree.tokens.items(.start)[main_token], + )) |child| { if (child.decl.* == .param_decl) { return try writeToken(builder, main_token, .parameter); } @@ -410,10 +437,15 @@ fn writeNodeTokens( } } }, - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multiple, .fn_decl => { + .fn_proto, + .fn_proto_one, + .fn_proto_simple, + .fn_proto_multi, + .fn_decl, + => { var buf: [1]ast.Node.Index = undefined; const fn_proto: ast.full.FnProto = analysis.fnProto(tree, node, &buf).?; - if (analysis.getDocCommentTokenIndex(tree, node)) |cocs| + if (analysis.getDocCommentTokenIndex(tree, node)) |docs| try writeDocComments(builder, handle.tree, docs); try writeToken(builder, fn_proto.visib_token, .keyword); @@ -440,16 +472,16 @@ fn writeNodeTokens( try writeToken(builder, param_decl.comptime_noalias, .keyword); try writeTokenMod(builder, param_decl.name_token, .parameter, .{ .declaration = true }); if (param_decl.anytype_ellipsis3) |any_token| { - try writeToken(builder, var_node.firstToken(), .type); + try writeToken(builder, any_token, .type); } else if (param_decl.type_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param_decl.type_expr }); } if (fn_proto.ast.align_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr != 0 }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.align_expr }); if (fn_proto.ast.section_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.section_expr != 0 }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
fn_proto.ast.section_expr }); if (fn_proto.ast.callconv_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr != 0 }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.callconv_expr }); if (fn_proto.ast.return_type != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, fn_proto.ast.return_type }); @@ -468,26 +500,32 @@ fn writeNodeTokens( try writeToken(builder, main_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .@"comptime", @"nosuspend" => { + .@"comptime", + .@"nosuspend", + => { if (analysis.getDocCommentTokenIndex(tree, node)) |doc| try writeDocComments(builder, handle.tree, doc); try writeToken(builder, main_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - .@"switch", .switch_comma => { + .@"switch", + .switch_comma, + => { try writeToken(builder, main_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); const cases = tree.extra_data[extra.start..extra.end]; - var gap_highlighter = GapHighlighter.init(builder, switch_node.expr.lastToken() + 3); + var gap_highlighter = GapHighlighter.init(builder, tree.lastToken(datas[node].lhs) + 1); for (cases) |case_node| { try gap_highlighter.next(case_node); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, case_node }); } - try gap_highlighter.end(node.lastToken()); + try gap_highlighter.end(tree.lastToken(node)); }, - .switch_case_one, .switch_case => { + .switch_case_one, + .switch_case, + => { const switch_case = if (tag == .switch_case) tree.switchCase(node) else tree.switchCaseOne(node); for (switch_case.ast.values) |item_node| try await @asyncCall(child_frame, 
{}, writeNodeTokens, .{ builder, arena, store, item_node }); // check it it's 'else' @@ -499,342 +537,472 @@ fn writeNodeTokens( } try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, switch_case.ast.target_expr }); }, - .@"while", .while_simple, .while_cont, .for_simple, .@"for" => { - const while_node: ast.full.While = switch (node) { - .@"while" => tree.whileFull(node_idx), - .while_simple => tree.whileSimple(node_idx), - .while_cont => tree.whileCont(node_idx), - .@"for" => tree.forFull(node_idx), - .for_simple => tree.forSimple(node_idx), + .@"while", + .while_simple, + .while_cont, + .for_simple, + .@"for", + => { + const while_node: ast.full.While = switch (tag) { + .@"while" => tree.whileFull(node), + .while_simple => tree.whileSimple(node), + .while_cont => tree.whileCont(node), + .@"for" => tree.forFull(node), + .for_simple => tree.forSimple(node), else => unreachable, }; try writeToken(builder, while_node.label_token, .label); try writeToken(builder, while_node.inline_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.condition }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr }); try writeToken(builder, while_node.payload_token, .variable); if (while_node.ast.cont_expr != 0) - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr}); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.body }); - if (while_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .For => { - const for_node = node.cast(ast.Node.For).?; - try writeToken(builder, for_node.label, .label); - try writeToken(builder, for_node.inline_token, .keyword); - try writeToken(builder, for_node.for_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, 
.{ builder, arena, store, for_node.array_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, for_node.body }); - if (for_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .If => { - const if_node = node.cast(ast.Node.If).?; - try writeToken(builder, if_node.if_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.condition }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.payload }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.body }); - if (if_node.@"else") |else_node| - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, &else_node.base }); - }, - .ArrayInitializer => { - const array_initializer = node.cast(ast.Node.ArrayInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_initializer.lhs }); - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .ArrayInitializerDot => { - const array_initializer = node.cast(ast.Node.ArrayInitializerDot).?; - for (array_initializer.listConst()) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); - }, - .StructInitializer => { - const struct_initializer = node.cast(ast.Node.StructInitializer).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_initializer.lhs }); - const field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ .node = struct_initializer.lhs, .handle = handle })) |struct_type| switch (struct_type.type.data) { - .other => |type_node| if (type_node.cast(ast.Node.ContainerDecl)) 
|container_decl| - fieldTokenType(container_decl, handle) - else - null, - else => null, - } else null; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr }); - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.lhs.lastToken() + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.cast(ast.Node.FieldInitializer).?; + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr }); + + try writeToken(builder, while_node.error_token, .variable); + + if (while_node.ast.else_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr }); + }, + .@"if", + .if_simple, + => { + const if_node: ast.full.If = if (tag == .@"if") tree.ifFull(node) else tree.ifSimple(node); + + try writeToken(builder, if_node.ast.if_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr }); + + try writeToken(builder, if_node.payload_token, .variable); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr }); + + try writeToken(builder, if_node.error_token, .variable); + if (if_node.ast.else_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr }); + }, + .array_init, + .array_init_comma, + .array_init_one, + .array_init_one_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + => { + var buf: [2]ast.Node.Index = undefined; + const array_init: ast.full.ArrayInit = switch (tag) { + .array_init, .array_init_comma => tree.arrayInit(node), + .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node), + .array_init_dot, 
.array_init_dot_comma => tree.arrayInitDot(node), + .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node), + else => unreachable, + }; + + if (array_init.ast.type_expr != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_init.ast.type_expr }); + for (array_init.ast.elements) |elem| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, elem }); + }, + .struct_init, + .struct_init_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + => { + var buf: [2]ast.Node.Index = undefined; + const struct_init: ast.full.StructInit = switch (tag) { + .struct_init, .struct_init_comma => tree.structInit(node), + .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node), + .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node), + .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node), + else => unreachable, + }; + + var field_token_type: ?TokenType = null; + + if (struct_init.ast.type_expr != 0) { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, struct_init.ast.type_expr }); + + field_token_type = if (try analysis.resolveTypeOfNode(store, arena, .{ + .node = struct_init.ast.type_expr, + .handle = handle, + })) |struct_type| switch (struct_type.type.data) { + .other => |type_node| if (analysis.isContainer(struct_type.handle.tree.nodes.items(.tag)[type_node])) + fieldTokenType(type_node, handle) + else + null, + else => null, + } else null; + } + + var gap_highlighter = GapHighlighter.init(builder, struct_init.ast.lbrace); + for (struct_init.ast.fields) |field_init| { + try gap_highlighter.next(field_init); + + const init_token = tree.firstToken(field_init); if (field_token_type) |tok_type| { - try writeToken(builder, field_init.period_token, tok_type); - try writeToken(builder, 
field_init.name_token, tok_type); + try writeToken(builder, init_token - 3, tok_type); + try writeToken(builder, init_token - 2, tok_type); } - try writeToken(builder, field_init.name_token + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); + try writeToken(builder, init_token - 1, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init }); } - try gap_highlighter.end(struct_initializer.rtoken); + try gap_highlighter.end(tree.lastToken(node)); }, - .StructInitializerDot => { - const struct_initializer = node.castTag(.StructInitializerDot).?; + .call, + .call_comma, + .async_call, + .async_call_comma, + .call_one, + .call_one_comma, + .async_call_one, + .async_call_one_comma, + => { + var params: [1]ast.Node.Index = undefined; + const call: ast.full.Call = switch (tag) { + .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node), + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(¶ms, node), + else => unreachable, + }; - var gap_highlighter = GapHighlighter.init(builder, struct_initializer.dot + 1); - for (struct_initializer.listConst()) |field_init_node| { - try gap_highlighter.next(field_init_node); - std.debug.assert(field_init_node.tag == .FieldInitializer); - const field_init = field_init_node.castTag(.FieldInitializer).?; - try writeToken(builder, field_init.period_token, .field); - try writeToken(builder, field_init.name_token, .field); - try writeToken(builder, field_init.name_token + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init.expr }); - } - try gap_highlighter.end(struct_initializer.rtoken); - }, - .Call => { - const call = node.cast(ast.Node.Call).?; try writeToken(builder, call.async_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.lhs }); + try await 
@asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr }); + if (builder.current_token) |curr_tok| { - if (curr_tok != call.lhs.lastToken() and handle.tree.token_ids[call.lhs.lastToken()] == .Identifier) { - try writeToken(builder, call.lhs.lastToken(), .function); + if (curr_tok != tree.lastToken(call.ast.fn_expr) and token_tags[tree.lastToken(call.ast.fn_expr)] == .identifier) { + try writeToken(builder, tree.lastToken(call.ast.fn_expr), .function); } } - for (call.paramsConst()) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); + for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); }, - .Slice => { - const slice = node.castTag(.Slice).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.start }); - try writeToken(builder, slice.start.lastToken() + 1, .operator); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.end }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.sentinel }); + .slice, + .slice_open, + .slice_sentinel, + => { + const slice: ast.full.Slice = switch (tag) { + .slice => tree.slice(node), + .slice_open => tree.sliceOpen(node), + .slice_sentinel => tree.sliceSentinel(node), + else => unreachable, + }; + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start }); + try writeToken(builder, tree.lastToken(slice.ast.start) + 1, .operator); + + if (slice.ast.end != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end }); + if (slice.ast.sentinel != 0) + try await @asyncCall(child_frame, {}, 
writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel }); }, - .ArrayAccess => { - const arr_acc = node.castTag(.ArrayAccess).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.lhs }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, arr_acc.index_expr }); + .array_access => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .Deref, .UnwrapOptional => { - const suffix = node.cast(ast.Node.SimpleSuffixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suffix.lhs }); - try writeToken(builder, suffix.rtoken, .operator); + .deref, + .unwrap_optional, + => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + try writeToken(builder, main_token, .operator); }, - .GroupedExpression => { - const grouped_expr = node.cast(ast.Node.GroupedExpression).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, grouped_expr.expr }); + .grouped_expression => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - .Return, .Break, .Continue => { - const cfe = node.cast(ast.Node.ControlFlowExpression).?; - try writeToken(builder, cfe.ltoken, .keyword); - switch (node.tag) { - .Break => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), - .Continue => if (cfe.getLabel()) |n| try writeToken(builder, n, .label), - else => {}, - } - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, cfe.getRHS() }); + .@"return", + .@"break", + .@"continue", + => { + try writeToken(builder, main_token, .keyword); + if (datas[node].lhs != 0) + try writeToken(builder, datas[node].lhs, .label); + if (datas[node].rhs != 0) + try await 
@asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .Suspend => { - const suspend_node = node.cast(ast.Node.Suspend).?; - try writeToken(builder, suspend_node.suspend_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, suspend_node.body }); + .@"suspend" => { + try writeToken(builder, main_token, .keyword); + if (datas[node].lhs != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - .IntegerLiteral => { - try writeToken(builder, node.firstToken(), .number); + .integer_literal, + .float_literal, + => { + try writeToken(builder, main_token, .number); }, - .EnumLiteral => { - const enum_literal = node.cast(ast.Node.EnumLiteral).?; - try writeToken(builder, enum_literal.dot, .enumMember); - try writeToken(builder, enum_literal.name, .enumMember); + .enum_literal => { + try writeToken(builder, main_token - 1, .enumMember); + try writeToken(builder, main_token, .enumMember); }, - .FloatLiteral => { - try writeToken(builder, node.firstToken(), .number); - }, - .BuiltinCall => { - const builtin_call = node.cast(ast.Node.BuiltinCall).?; - try writeToken(builder, builtin_call.builtin_token, .builtin); - for (builtin_call.paramsConst()) |param| + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { + const data = datas[node]; + const params = switch (tag) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; + + try writeToken(builder, main_token, .builtin); + for (params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); }, - .StringLiteral, .CharLiteral => { - try writeToken(builder, 
node.firstToken(), .string); + .string_literal, + .char_literal, + => { + try writeToken(builder, main_token, .string); }, - .MultilineStringLiteral => { - const multi_line = node.cast(ast.Node.MultilineStringLiteral).?; - for (multi_line.linesConst()) |line| try writeToken(builder, line, .string); + .multiline_string_literal => { + var cur_tok = main_token; + const last_tok = datas[node].rhs; + + while (cur_tok <= last_tok) : (cur_tok += 1) try writeToken(builder, cur_tok, .string); }, - .BoolLiteral, .NullLiteral, .UndefinedLiteral, .Unreachable => { - try writeToken(builder, node.firstToken(), .keywordLiteral); + .true_literal, + .false_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + => { + try writeToken(builder, main_token, .keywordLiteral); }, - .ErrorType => { - try writeToken(builder, node.firstToken(), .keyword); + .error_set_decl => { + try writeToken(builder, main_token, .keyword); }, - .Asm => { - const asm_expr = node.cast(ast.Node.Asm).?; - try writeToken(builder, asm_expr.asm_token, .keyword); - try writeToken(builder, asm_expr.volatile_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_expr.template }); + .@"asm", + .asm_output, + .asm_input, + .asm_simple, + => { + const asm_node: ast.full.Asm = switch (tag) { + .@"asm" => tree.asmFull(node), + .asm_simple => tree.asmSimple(node), + else => return, // TODO Inputs, outputs + }; + + try writeToken(builder, main_token, .keyword); + try writeToken(builder, asm_node.volatile_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, asm_node.ast.template }); // TODO Inputs, outputs. 
}, - .AnyType => { - try writeToken(builder, node.firstToken(), .type); + .@"anytype" => { + try writeToken(builder, main_token, .type); }, - .TestDecl => { - const test_decl = node.cast(ast.Node.TestDecl).?; - if (test_decl.doc_comments) |doc| try writeDocComments(builder, handle.tree, doc); - try writeToken(builder, test_decl.test_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.name }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, test_decl.body_node }); + .test_decl => { + if (analysis.getDocCommentTokenIndex(handle.tree, node)) |doc| + try writeDocComments(builder, handle.tree, doc); + + try writeToken(builder, main_token, .keyword); + if (token_tags[main_token + 1] == .string_literal) + try writeToken(builder, main_token + 1, .string); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .Catch => { - const catch_expr = node.cast(ast.Node.Catch).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.lhs }); - try writeToken(builder, catch_expr.op_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, catch_expr.rhs }); + .@"catch" => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + try writeToken(builder, main_token, .keyword); + if (token_tags[main_token + 1] == .pipe) + try writeToken(builder, main_token + 1, .variable); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .Add, .AddWrap, .ArrayCat, .ArrayMult, .Assign, .AssignBitAnd, .AssignBitOr, .AssignBitShiftLeft, .AssignBitShiftRight, .AssignBitXor, .AssignDiv, .AssignSub, .AssignSubWrap, .AssignMod, .AssignAdd, .AssignAddWrap, .AssignMul, .AssignMulWrap, .BangEqual, .BitAnd, .BitOr, .BitShiftLeft, .BitShiftRight, .BitXor, .BoolAnd, .BoolOr, .Div, 
.EqualEqual, .ErrorUnion, .GreaterOrEqual, .GreaterThan, .LessOrEqual, .LessThan, .MergeErrorSets, .Mod, .Mul, .MulWrap, .Period, .Range, .Sub, .SubWrap, .OrElse => { - const infix_op = node.cast(ast.Node.SimpleInfixOp).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.lhs }); - if (node.tag != .Period) { - const token_type: TokenType = switch (node.tag) { - .BoolAnd, .BoolOr, .OrElse => .keyword, - else => .operator, - }; - - try writeToken(builder, infix_op.op_token, token_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, infix_op.rhs }); - } - switch (node.tag) { - .Period => { - const rhs_str = handle.tree.tokenSlice(infix_op.rhs.firstToken()); - - // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added - // writeToken code. - // Maybe we can hook into it insead? Also applies to Identifier and VarDecl - var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); - const lhs_type = try analysis.resolveFieldAccessLhsType( - store, - arena, - (try analysis.resolveTypeOfNodeInternal(store, arena, .{ - .node = infix_op.lhs, - .handle = handle, - }, &bound_type_params)) orelse return, - &bound_type_params, - ); - const left_type_node = switch (lhs_type.type.data) { - .other => |n| n, - else => return, - }; - if (try analysis.lookupSymbolContainer(store, arena, .{ .node = left_type_node, .handle = lhs_type.handle }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { - switch (decl_type.decl.*) { - .ast_node => |decl_node| { - if (decl_node.tag == .ContainerField) { - const tok_type: ?TokenType = if (left_type_node.cast(ast.Node.ContainerDecl)) |container_decl| - fieldTokenType(container_decl, lhs_type.handle) - else if (left_type_node.tag == .Root) - TokenType.field - else - null; - - if (tok_type) |tt| try writeToken(builder, infix_op.rhs.firstToken(), tt); - return; - } else if (decl_node.tag == .ErrorTag) { - try 
writeToken(builder, infix_op.rhs.firstToken(), .errorTag); - } - }, - else => {}, - } - - if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { - try colorIdentifierBasedOnType(builder, resolved_type, infix_op.rhs.firstToken(), .{}); - } - } - }, - else => {}, - } - }, - .SliceType => { - const slice_type = node.castTag(.SliceType).?; - const ptr_info = slice_type.ptr_info; - if (ptr_info.align_info) |align_info| { - try writeToken(builder, slice_type.op_token + 2, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice_type.rhs }); - }, - .PtrType => { - const pointer_type = node.castTag(.PtrType).?; - const tok_ids = builder.handle.tree.token_ids; - - const ptr_info = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.ptr_info, - else => pointer_type.ptr_info, - }; - const rhs = switch (tok_ids[pointer_type.op_token]) { - .AsteriskAsterisk => pointer_type.rhs.castTag(.PtrType).?.rhs, - else => pointer_type.rhs, - }; - - const off = switch (tok_ids[pointer_type.op_token]) { - .Asterisk, .AsteriskAsterisk => blk: { - try writeToken(builder, pointer_type.op_token, .operator); - break :blk pointer_type.op_token + 1; - }, - .LBracket => blk: { - try writeToken(builder, pointer_type.op_token + 1, .operator); - const is_c_ptr = tok_ids[pointer_type.op_token + 2] == .Identifier; - - if (is_c_ptr) { - try writeToken(builder, pointer_type.op_token + 2, .operator); - } - - if (ptr_info.sentinel) |sentinel| { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); - break :blk sentinel.lastToken() + 2; - } - - break :blk 
pointer_type.op_token + 3 + @boolToInt(is_c_ptr); - }, - else => 0, - }; - - if (ptr_info.align_info) |align_info| { - try writeToken(builder, off, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, align_info.node }); - } - try writeToken(builder, ptr_info.const_token, .keyword); - try writeToken(builder, ptr_info.volatile_token, .keyword); - try writeToken(builder, ptr_info.allowzero_token, .keyword); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, rhs }); - }, - .ArrayType => { - const array_type = node.castTag(.ArrayType).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .ArrayTypeSentinel => { - const array_type = node.castTag(.ArrayTypeSentinel).?; - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.len_expr }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.sentinel }); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.rhs }); - }, - .AddressOf, .Await, .BitNot, .BoolNot, .OptionalType, .Negation, .NegationWrap, .Resume, .Try => { - const prefix_op = node.cast(ast.Node.SimplePrefixOp).?; - const tok_type: TokenType = switch (node.tag) { - .Try, .Await, .Resume => .keyword, + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + 
.less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .switch_range, + .sub, + .sub_wrap, + .@"orelse", + => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + const token_type: TokenType = switch (tag) { + .bool_and, .bool_or => .keyword, else => .operator, }; - try writeToken(builder, prefix_op.op_token, tok_type); - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, prefix_op.rhs }); + + try writeToken(builder, main_token, token_type); + if (datas[node].rhs != 0) + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - else => {}, + .field_access => { + const data = datas[node]; + if (data.rhs == 0) return; + const rhs_str = tree.tokenSlice(data.rhs); + + // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added + // writeToken code. + // Maybe we can hook into it insead? Also applies to Identifier and VarDecl + var bound_type_params = analysis.BoundTypeParams.init(&arena.allocator); + const lhs_type = try analysis.resolveFieldAccessLhsType( + store, + arena, + (try analysis.resolveTypeOfNodeInternal(store, arena, .{ + .node = data.lhs, + .handle = handle, + }, &bound_type_params)) orelse return, + &bound_type_params, + ); + const left_type_node = switch (lhs_type.type.data) { + .other => |n| n, + else => return, + }; + if (try analysis.lookupSymbolContainer(store, arena, .{ + .node = left_type_node, + .handle = lhs_type.handle, + }, rhs_str, !lhs_type.type.is_type_val)) |decl_type| { + switch (decl_type.decl.*) { + .ast_node => |decl_node| { + if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) { + const tok_type: ?TokenType = if (analysis.isContainer(lhs_type.handle.tree.nodes.items(.tag)[left_type_node])) + fieldTokenType(decl_node, lhs_type.handle) + else if (left_type_node == 0) + TokenType.field + else + null; + + if (tok_type) |tt| try 
writeToken(builder, data.rhs, tt); + return; + } else if (decl_type.handle.tree.nodes.items(.tag)[decl_node] == .error_value) { + try writeToken(builder, data.rhs, .errorTag); + } + }, + else => {}, + } + + if (try decl_type.resolveType(store, arena, &bound_type_params)) |resolved_type| { + try colorIdentifierBasedOnType(builder, resolved_type, data.rhs, .{}); + } + } + }, + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + => { + const ptr_type = analysis.ptrType(tree, node).?; + + if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and + main_token == main_tokens[ptr_type.ast.child_type]) + { + return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ + builder, + arena, + store, + ptr_type.ast.child_type, + }); + } + + try writeToken(builder, main_token, .operator); + if (ptr_type.ast.sentinel != 0) { + return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ + builder, + arena, + store, + ptr_type.ast.sentinel, + }); + } + + try writeToken(builder, ptr_type.allowzero_token, .keyword); + + if (ptr_type.ast.align_node != 0) { + const first_tok = tree.firstToken(ptr_type.ast.align_node); + try writeToken(builder, first_tok - 2, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.align_node }); + + if (ptr_type.ast.bit_range_start != 0) { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_start }); + try writeToken(builder, tree.firstToken(ptr_type.ast.bit_range_end - 1), .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.bit_range_end }); + } + } + + try writeToken(builder, ptr_type.const_token, .keyword); + try writeToken(builder, ptr_type.volatile_token, .keyword); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, ptr_type.ast.child_type }); + }, + .array_type, + .array_type_sentinel, + => { + 
const array_type: ast.full.ArrayType = if (tag == .array_type) + tree.arrayType(node) + else + tree.arrayTypeSentinel(node); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_count }); + if (array_type.ast.sentinel) |sentinel| + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, sentinel }); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, array_type.ast.elem_type }); + }, + .address_of, + .bit_not, + .bool_not, + .optional_type, + .negation, + .negation_wrap, + => { + try writeToken(builder, main_token, .operator); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + }, + .@"try", + .@"resume", + .@"await", + => { + try writeToken(builder, main_token, .keyword); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + }, + else => std.log.scoped(.semantic_tokens).debug("TODO: {s}", .{tag}), } } // TODO Range version, edit version. 
pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); + // pass root node, which always has index '0' - try writeNodeTokens(&builder, arena, store, 0, handle.tree); + try writeNodeTokens(&builder, arena, store, 0); return builder.toOwnedSlice(); } From acc45b4efe44b502c40fbd773268e899732d0553 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 4 Mar 2021 22:53:54 +0100 Subject: [PATCH 21/36] Use the correct tree to display semantic tokens --- src/analysis.zig | 46 ++++++++++++++++++----------------------- src/semantic_tokens.zig | 17 ++++++++------- 2 files changed, 29 insertions(+), 34 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index c0814f2..8c2e2be 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -983,7 +983,8 @@ pub const TypeWithHandle = struct { } } - fn isContainerKind(self: TypeWithHandle, container_kind_tok: std.zig.Token.Tag, tree: ast.Tree) bool { + fn isContainerKind(self: TypeWithHandle, container_kind_tok: std.zig.Token.Tag) bool { + const tree = self.handle.tree; const main_tokens = tree.nodes.items(.main_token); const tags = tree.tokens.items(.tag); switch (self.type.data) { @@ -992,13 +993,13 @@ pub const TypeWithHandle = struct { } } - pub fn isStructType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainerKind(.keyword_struct, tree) or self.isRoot(); + pub fn isStructType(self: TypeWithHandle) bool { + return self.isContainerKind(.keyword_struct) or self.isRoot(); } - pub fn isNamespace(self: TypeWithHandle, tree: ast.Tree) bool { - if (!self.isStructType(tree)) return false; - + pub fn isNamespace(self: TypeWithHandle) bool { + if (!self.isStructType()) return false; + const tree = self.handle.tree; const node = self.type.data.other; const tags = tree.nodes.items(.tag); if (isContainer(tags[node])) { @@ -1010,20 +1011,21 @@ pub const 
TypeWithHandle = struct { return true; } - pub fn isEnumType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainerKind(.keyword_enum, tree); + pub fn isEnumType(self: TypeWithHandle) bool { + return self.isContainerKind(.keyword_enum); } - pub fn isUnionType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainerKind(.keyword_union, tree); + pub fn isUnionType(self: TypeWithHandle) bool { + return self.isContainerKind(.keyword_union); } - pub fn isOpaqueType(self: TypeWithHandle, tree: ast.Tree) bool { - return self.isContainerKind(.keyword_opaque, tree); + pub fn isOpaqueType(self: TypeWithHandle) bool { + return self.isContainerKind(.keyword_opaque); } - pub fn isTypeFunc(self: TypeWithHandle, tree: ast.Tree) bool { + pub fn isTypeFunc(self: TypeWithHandle) bool { var buf: [1]ast.Node.Index = undefined; + const tree = self.handle.tree; return switch (self.type.data) { .other => |n| if (fnProto(tree, n, &buf)) |fn_proto| blk: { break :blk isTypeFunction(tree, fn_proto); @@ -1032,8 +1034,9 @@ pub const TypeWithHandle = struct { }; } - pub fn isGenericFunc(self: TypeWithHandle, tree: ast.Tree) bool { + pub fn isGenericFunc(self: TypeWithHandle) bool { var buf: [1]ast.Node.Index = undefined; + const tree = self.handle.tree; return switch (self.type.data) { .other => |n| if (fnProto(tree, n, &buf)) |fn_proto| blk: { break :blk isGenericFunction(tree, fn_proto); @@ -1042,7 +1045,8 @@ pub const TypeWithHandle = struct { }; } - pub fn isFunc(self: TypeWithHandle, tree: ast.Tree) bool { + pub fn isFunc(self: TypeWithHandle) bool { + const tree = self.handle.tree; const tags = tree.nodes.items(.tag); return switch (self.type.data) { .other => |n| switch (tags[n]) { @@ -1867,16 +1871,6 @@ pub const DeclWithHandle = struct { }, bound_type_params)) orelse return null, bound_type_params, ), - // .array_payload => |pay| try resolveBracketAccessType( - // store, - // arena, - // (try resolveTypeOfNodeInternal(store, arena, .{ - // .node = 
pay.array_expr, - // .handle = self.handle, - // }, bound_type_params)) orelse return null, - // .Single, - // bound_type_params, - // ), .label_decl => return null, .switch_payload => |pay| { if (pay.items.len == 0) return null; @@ -1885,7 +1879,7 @@ pub const DeclWithHandle = struct { .node = pay.switch_expr, .handle = self.handle, }, bound_type_params)) orelse return null; - if (!switch_expr_type.isUnionType(tree)) + if (!switch_expr_type.isUnionType()) return null; if (node_tags[pay.items[0]] == .enum_literal) { diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 344e223..61594ed 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -199,23 +199,23 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan const tree = builder.handle.tree; if (type_node.type.is_type_val) { var new_tok_mod = tok_mod; - if (type_node.isNamespace(tree)) + if (type_node.isNamespace()) new_tok_mod.set("namespace") - else if (type_node.isStructType(tree)) + else if (type_node.isStructType()) new_tok_mod.set("struct") - else if (type_node.isEnumType(tree)) + else if (type_node.isEnumType()) new_tok_mod.set("enum") - else if (type_node.isUnionType(tree)) + else if (type_node.isUnionType()) new_tok_mod.set("union") - else if (type_node.isOpaqueType(tree)) + else if (type_node.isOpaqueType()) new_tok_mod.set("opaque"); try writeTokenMod(builder, target_tok, .type, new_tok_mod); - } else if (type_node.isTypeFunc(tree)) { + } else if (type_node.isTypeFunc()) { try writeTokenMod(builder, target_tok, .type, tok_mod); - } else if (type_node.isFunc(tree)) { + } else if (type_node.isFunc()) { var new_tok_mod = tok_mod; - if (type_node.isGenericFunc(tree)) { + if (type_node.isGenericFunc()) { new_tok_mod.set("generic"); } try writeTokenMod(builder, target_tok, .function, new_tok_mod); @@ -277,6 +277,7 @@ fn writeNodeTokens( const main_tokens = tree.nodes.items(.main_token); const node = maybe_node.?; + if (node > node_tags.len) return; 
const tag = node_tags[node]; const main_token = main_tokens[node]; From 3d8a9732fcb43f6fdb02b6a485d4ae4a3fd99211 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Fri, 5 Mar 2021 22:38:42 +0100 Subject: [PATCH 22/36] Calculate correct token locations and ensure all semantic highlighting matches --- src/analysis.zig | 136 +++++++++++++++++++++++----------------- src/main.zig | 22 +++++-- src/offsets.zig | 35 +++++++++-- src/semantic_tokens.zig | 71 +++++++++++++-------- 4 files changed, 170 insertions(+), 94 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 8c2e2be..f2e6fbb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -14,42 +14,48 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI var idx = current; if (idx == 0) return null; switch (tags[node]) { - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => { + .fn_proto, + .fn_proto_one, + .fn_proto_simple, + .fn_proto_multi, + .fn_decl, + => { idx -= 1; if (tokens[idx] == .keyword_extern and idx > 0) idx -= 1; if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, - .local_var_decl, .global_var_decl, .aligned_var_decl, .simple_var_decl => { + .local_var_decl, + .global_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { idx -= 1; if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, - .container_field, .container_field_init, .container_field_align => { - idx -= 2; // skip '.' 
token - }, + .error_value => idx -= 1, + .container_field, + .container_field_init, + .container_field_align, + => idx -= 1, + .test_decl => idx -= 1, else => { - if (isContainer(tags[node])) { - idx -= 1; // go to '=' - idx -= 1; // mutability - idx -= 1; // possible 'pub' - if (tokens[idx] == .keyword_pub and idx > 0) - idx -= 1; - } else log.debug("Doc comment check for tag: {s}", .{tags[node]}); + log.debug("Doc comment check for tag: {s}", .{tags[node]}); }, } // Find first doc comment token if (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) { - while ((tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment) and idx > 0) : (idx -= 1) {} - return idx + 1; + while (idx > 0 and + (tokens[idx] == .doc_comment or tokens[idx] == .container_doc_comment)) + { + idx -= 1; + } + return idx + @boolToInt(tokens[idx] != .doc_comment and tokens[idx] != .container_doc_comment); } - // @TODO: Implement doc comments for tags - // } else if (node.castTag(.ErrorTag)) |tag| { - // return tag.doc_comments; - // } return null; } @@ -97,9 +103,9 @@ pub fn collectDocComments( /// Gets a function signature (keywords, name, return value) pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { - const start = tree.tokenLocation(0, func.ast.fn_token).line_start; - const end = tree.tokenLocation(0, tree.nodes.items(.main_token)[func.ast.return_type]).line_end; - return tree.source[start .. 
end - 1]; + const start = offsets.tokenLocation(tree, func.ast.fn_token).start; + const end = offsets.tokenLocation(tree, tree.nodes.items(.main_token)[func.ast.return_type]).end; + return tree.source[start..end]; } /// Gets a function snippet insert text @@ -161,17 +167,17 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a /// Gets a function signature (keywords, name, return value) pub fn getVariableSignature(tree: ast.Tree, var_decl: ast.full.VarDecl) []const u8 { - const start = tree.tokenLocation(0, var_decl.ast.mut_token).line_start; - const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(var_decl.ast.init_node)).line_end; + const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start; + const end = offsets.tokenLocation(tree, tree.lastToken(var_decl.ast.init_node)).end; return tree.source[start..end]; } // analysis.getContainerFieldSignature(handle.tree, field) pub fn getContainerFieldSignature(tree: ast.Tree, field: ast.full.ContainerField) []const u8 { - const start = tree.tokenLocation(0, field.ast.name_token).line_start; + const start = offsets.tokenLocation(tree, field.ast.name_token).start; const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr; - const end = tree.tokenLocation(@truncate(u32, start), tree.lastToken(end_node)).line_end; - return tree.source[start .. 
end - 1]; + const end = offsets.tokenLocation(tree, tree.lastToken(end_node)).end; + return tree.source[start..end]; } /// The type node is "type" @@ -610,7 +616,11 @@ pub fn resolveTypeOfNodeInternal( const starts = tree.tokens.items(.start); switch (node_tags[node]) { - .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => { const var_decl = varDecl(tree, node).?; if (var_decl.ast.type_node != 0) block: { return ((try resolveTypeOfNodeInternal( @@ -647,7 +657,10 @@ pub fn resolveTypeOfNodeInternal( } return null; }, - .container_field, .container_field_init, .container_field_align => |c| { + .container_field, + .container_field_init, + .container_field_align, + => |c| { const field: ast.full.ContainerField = switch (c) { .container_field => tree.containerField(node), .container_field_align => tree.containerFieldAlign(node), @@ -721,10 +734,17 @@ pub fn resolveTypeOfNodeInternal( } return null; }, - .@"comptime", .@"nosuspend", .grouped_expression => { + .@"comptime", + .@"nosuspend", + .grouped_expression, + => { return try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle }, bound_type_params); }, - .struct_init, .struct_init_comma, .struct_init_one, .struct_init_one_comma => { + .struct_init, + .struct_init_comma, + .struct_init_one, + .struct_init_one_comma, + => { return ((try resolveTypeOfNodeInternal( store, arena, @@ -735,14 +755,19 @@ pub fn resolveTypeOfNodeInternal( .error_set_decl => { return TypeWithHandle.typeVal(node_handle); }, - .slice, .slice_sentinel, .slice_open => { + .slice, + .slice_sentinel, + .slice_open, + => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle, }, bound_type_params)) orelse return null; return try resolveBracketAccessType(store, arena, left_type, .Range, bound_type_params); }, - .deref, .unwrap_optional => { + .deref, + 
.unwrap_optional, + => { const left_type = (try resolveTypeOfNodeInternal(store, arena, .{ .node = datas[node].lhs, .handle = handle, @@ -931,10 +956,7 @@ pub fn resolveTypeOfNodeInternal( .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, - .root => return TypeWithHandle.typeVal(node_handle), - else => { - // log.debug("TODO: implement type resolving for ast tag: {s}", .{node_tags[node]}); - }, + else => {}, } return null; } @@ -1319,6 +1341,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { .async_call_one, .async_call_one_comma, => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), + .test_decl => if (data[node].lhs != 0) + return tree.tokenSlice(data[node].lhs), else => |tag| log.debug("INVALID: {}", .{tag}), } @@ -1326,8 +1350,8 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { } fn nodeContainsSourceIndex(tree: ast.Tree, node: ast.Node.Index, source_index: usize) bool { - const first_token = tree.tokenLocation(0, tree.firstToken(node)).line_start; - const last_token = tree.tokenLocation(@truncate(u32, first_token), tree.lastToken(node)).line_end; + const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start; + const last_token = offsets.tokenLocation(tree, tree.lastToken(node)).end; return source_index >= first_token and source_index <= last_token; } @@ -2332,11 +2356,11 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen } fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange { - const loc_start = tree.tokenLocation(0, tree.firstToken(node)); - const loc_end = tree.tokenLocation(@truncate(u32, loc_start.line_start), tree.lastToken(node)); + const loc_start = offsets.tokenLocation(tree, tree.firstToken(node)); + const loc_end = offsets.tokenLocation(tree, tree.lastToken(node)); return SourceRange{ - .start = loc_start.line_start, - .end = loc_end.line_end, + .start = loc_start.start, + .end = 
loc_end.end, }; } @@ -2554,8 +2578,8 @@ fn makeScopeInternal( const scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, main_tokens[node_idx]).line_start, - .end = tree.tokenLocation(0, last_token).line_start, + .start = offsets.tokenLocation(tree, main_tokens[node_idx]).start, + .end = offsets.tokenLocation(tree, last_token).start, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2627,8 +2651,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, payload).line_start, - .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.then_expr)).line_end, + .start = offsets.tokenLocation(tree, payload).start, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2657,8 +2681,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, err_token).line_start, - .end = tree.tokenLocation(0, tree.lastToken(if_node.ast.else_expr)).line_end, + .start = offsets.tokenLocation(tree, err_token).start, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.else_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2687,8 +2711,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, while_node.ast.while_token).line_start, - .end = tree.tokenLocation(0, tree.lastToken(node_idx)).line_end, + .start = offsets.tokenLocation(tree, while_node.ast.while_token).start, + .end = offsets.tokenLocation(tree, tree.lastToken(node_idx)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2704,8 +2728,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, payload).line_start, 
- .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.then_expr)).line_end, + .start = offsets.tokenLocation(tree, payload).start, + .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2733,8 +2757,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, err_token).line_start, - .end = tree.tokenLocation(0, tree.lastToken(while_node.ast.else_expr)).line_end, + .start = offsets.tokenLocation(tree, err_token).start, + .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.else_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2760,8 +2784,8 @@ fn makeScopeInternal( var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ - .start = tree.tokenLocation(0, payload).line_start, - .end = tree.tokenLocation(0, tree.lastToken(switch_case.ast.target_expr)).line_end, + .start = offsets.tokenLocation(tree, payload).start, + .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, diff --git a/src/main.zig b/src/main.zig index a81fd5e..10d85bb 100644 --- a/src/main.zig +++ b/src/main.zig @@ -220,7 +220,12 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han for (tree.rootDecls()) |decl_idx| { const decl = tree.nodes.items(.tag)[decl_idx]; switch (decl) { - .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => blk: { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => blk: { var buf: [1]std.zig.ast.Node.Index = undefined; const func = analysis.fnProto(tree, decl_idx, &buf).?; if (func.extern_export_token != null) break :blk; @@ -367,7 +372,12 @@ fn nodeToCompletion( if (is_type_val) return; switch (node_tags[node]) { - .fn_proto, .fn_proto_multi, 
.fn_proto_one, .fn_decl => { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => { var buf: [1]std.zig.ast.Node.Index = undefined; const func = analysis.fnProto(tree, node, &buf).?; if (func.name_token) |name_token| { @@ -376,7 +386,7 @@ fn nodeToCompletion( const insert_text = if (use_snippets) blk: { // TODO Also check if we are dot accessing from a type val and dont skip in that case. const skip_self_param = if (func.ast.params.len > 0) param_check: { - const in_container = analysis.innermostContainer(handle, tree.tokenLocation(0, func.ast.fn_token).line_start); + const in_container = analysis.innermostContainer(handle, tree.tokens.items(.start)[func.ast.fn_token]); var it = func.iterate(tree); const param = it.next().?; @@ -603,7 +613,9 @@ fn hoverSymbol( tree.firstToken(param.type_expr); const last_token = tree.lastToken(param.type_expr); - const signature_str = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end]; + const start = offsets.tokenLocation(tree, first_token).start; + const end = offsets.tokenLocation(tree, last_token).end; + const signature_str = tree.source[start..end]; break :param_decl if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```\n{s}", .{ signature_str, doc_str }) else @@ -895,7 +907,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .label = tree.tokenSlice(param.name_token.?), .kind = .Constant, .documentation = doc, - .detail = tree.source[tree.tokenLocation(0, first_token).line_start..tree.tokenLocation(0, last_token).line_end], + .detail = tree.source[offsets.tokenLocation(tree, first_token).start..offsets.tokenLocation(tree, last_token).end], }); }, .pointer_payload => |payload| { diff --git a/src/offsets.zig b/src/offsets.zig index 40d40e0..418fe2f 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -1,5 +1,6 @@ const std = @import("std"); const types = @import("types.zig"); 
+const ast = std.zig.ast; pub const Encoding = enum { utf8, @@ -70,7 +71,7 @@ pub const TokenLocation = struct { } }; -pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: std.zig.ast.TokenIndex, encoding: Encoding) !TokenLocation { +pub fn tokenRelativeLocation(tree: ast.Tree, start_index: usize, token: ast.TokenIndex, encoding: Encoding) !TokenLocation { const start = tree.tokens.items(.start)[token]; var loc = TokenLocation{ @@ -108,14 +109,14 @@ pub fn tokenRelativeLocation(tree: std.zig.ast.Tree, start_index: usize, token: } /// Asserts the token is comprised of valid utf8 -pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encoding: Encoding) usize { - const token_loc = tree.tokenLocation(0, token); +pub fn tokenLength(tree: ast.Tree, token: ast.TokenIndex, encoding: Encoding) usize { + const token_loc = tokenLocation(tree, token); if (encoding == .utf8) - return token_loc.line_end - token_loc.line_start; + return token_loc.end - token_loc.start; - var i: usize = token_loc.line_start; + var i: usize = token_loc.start; var utf16_len: usize = 0; - while (i < token_loc.line_end) { + while (i < token_loc.end) { const n = std.unicode.utf8ByteSequenceLength(tree.source[i]) catch unreachable; const codepoint = std.unicode.utf8Decode(tree.source[i .. i + n]) catch unreachable; if (codepoint < 0x10000) { @@ -128,6 +129,28 @@ pub fn tokenLength(tree: std.zig.ast.Tree, token: std.zig.ast.TokenIndex, encodi return utf16_len; } +/// Token location inside source +pub const Loc = struct { + start: usize, + end: usize, +}; + +pub fn tokenLocation(tree: ast.Tree, token_index: ast.TokenIndex) Loc { + const start = tree.tokens.items(.start)[token_index]; + const tag = tree.tokens.items(.tag)[token_index]; + + // For some tokens, re-tokenization is needed to find the end. 
+ var tokenizer: std.zig.Tokenizer = .{ + .buffer = tree.source, + .index = start, + .pending_invalid_token = null, + }; + + const token = tokenizer.next(); + std.debug.assert(token.tag == tag); + return .{ .start = token.loc.start, .end = token.loc.end }; +} + pub fn documentRange(doc: types.TextDocument, encoding: Encoding) !types.Range { var line_idx: i64 = 0; var curr_line: []const u8 = doc.text; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 61594ed..87eaed9 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -64,12 +64,13 @@ const Builder = struct { } fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { + const starts = self.handle.tree.tokens.items(.start); const start_idx = if (self.current_token) |current_token| - self.handle.tree.tokenLocation(0, current_token).line_start + starts[current_token] else 0; - if (start_idx > self.handle.tree.tokenLocation(0, token).line_start) + if (start_idx > starts[token]) return; const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return; @@ -268,6 +269,8 @@ fn writeNodeTokens( maybe_node: ?ast.Node.Index, ) error{OutOfMemory}!void { if (maybe_node == null) return; + const node = maybe_node.?; + if (node == 0) return; const handle = builder.handle; const tree = handle.tree; @@ -275,9 +278,8 @@ fn writeNodeTokens( const token_tags = tree.tokens.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + if (node > datas.len) return; - const node = maybe_node.?; - if (node > node_tags.len) return; const tag = node_tags[node]; const main_token = main_tokens[node]; @@ -286,18 +288,16 @@ fn writeNodeTokens( defer arena.child_allocator.free(child_frame); switch (tag) { - .root => { - var gap_highlighter = GapHighlighter.init(builder, 0); - var buf: [2]ast.Node.Index = undefined; - for (analysis.declMembers(tree, .root, 0, &buf)) |child| 
{ - try gap_highlighter.next(child); - if (node_tags[child].isContainerField()) { - try writeContainerField(builder, arena, store, child, .field, child_frame); - } else { - try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, child }); - } - } - try gap_highlighter.end(@truncate(u32, tree.tokens.len) - 1); + .root => unreachable, + .container_field, + .container_field_align, + .container_field_init, + => try writeContainerField(builder, arena, store, node, .field, child_frame), + .@"errdefer" => { + if (datas[node].lhs != 0) + try writeToken(builder, datas[node].lhs, .variable); + + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, .block, .block_semicolon, @@ -381,12 +381,21 @@ fn writeNodeTokens( .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .tagged_union_two, + .tagged_union_two_trailing, => { var buf: [2]ast.Node.Index = undefined; const decl: ast.full.ContainerDecl = switch (tag) { .container_decl, .container_decl_trailing => tree.containerDecl(node), .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node), .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node), + .tagged_union, .tagged_union_trailing => tree.taggedUnion(node), + .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node), + .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(&buf, node), else => unreachable, }; @@ -632,7 +641,7 @@ fn writeNodeTokens( .handle = handle, })) |struct_type| switch (struct_type.type.data) { .other => |type_node| if (analysis.isContainer(struct_type.handle.tree.nodes.items(.tag)[type_node])) - fieldTokenType(type_node, handle) + fieldTokenType(type_node, struct_type.handle) else null, else => null, @@ -644,11 +653,9 @@ fn writeNodeTokens( try 
gap_highlighter.next(field_init); const init_token = tree.firstToken(field_init); - if (field_token_type) |tok_type| { - try writeToken(builder, init_token - 3, tok_type); - try writeToken(builder, init_token - 2, tok_type); - } - try writeToken(builder, init_token - 1, .operator); + try writeToken(builder, init_token - 3, field_token_type orelse .field); // '.' + try writeToken(builder, init_token - 2, field_token_type orelse .field); // name + try writeToken(builder, init_token - 1, .operator); // '=' try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, field_init }); } try gap_highlighter.end(tree.lastToken(node)); @@ -712,7 +719,6 @@ fn writeNodeTokens( .grouped_expression => { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - .@"return", .@"break", .@"continue", => { @@ -722,7 +728,7 @@ fn writeNodeTokens( if (datas[node].rhs != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .@"suspend" => { + .@"suspend", .@"return" => { try writeToken(builder, main_token, .keyword); if (datas[node].lhs != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); @@ -872,6 +878,8 @@ fn writeNodeTokens( if (data.rhs == 0) return; const rhs_str = tree.tokenSlice(data.rhs); + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, data.lhs }); + // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added // writeToken code. // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl @@ -935,7 +943,7 @@ fn writeNodeTokens( }); } - try writeToken(builder, main_token, .operator); + if (ptr_type.size == .One) try writeToken(builder, main_token, .operator); if (ptr_type.ast.sentinel != 0) { return try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, @@ -995,7 +1003,7 @@ fn writeNodeTokens( try writeToken(builder, main_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, - else => std.log.scoped(.semantic_tokens).debug("TODO: {s}", .{tag}), + .anyframe_literal => try writeToken(builder, main_token, .keyword), } } @@ -1003,7 +1011,16 @@ fn writeNodeTokens( pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); + // reverse the ast from the root declarations + var gap_highlighter = GapHighlighter.init(&builder, 0); + var buf: [2]ast.Node.Index = undefined; + for (analysis.declMembers(handle.tree, .root, 0, &buf)) |child| { + try gap_highlighter.next(child); + try writeNodeTokens(&builder, arena, store, child); + } + + try gap_highlighter.end(@truncate(u32, handle.tree.tokens.len) - 1); // pass root node, which always has index '0' - try writeNodeTokens(&builder, arena, store, 0); + // try writeNodeTokens(&builder, arena, store, 0); return builder.toOwnedSlice(); } From ac8a00342e9d43be2db06049e55e9527b80bbd82 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sat, 6 Mar 2021 20:55:59 +0100 Subject: [PATCH 23/36] All functionalities implemented. 
Also implemented ability to skip searching for references through std --- src/analysis.zig | 474 ++++++++++++++++++++++++++++++++++++--------- src/config.zig | 4 + src/main.zig | 58 +++++- src/references.zig | 78 ++++++-- src/rename.zig | 2 +- 5 files changed, 489 insertions(+), 127 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index f2e6fbb..a061294 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -35,15 +35,7 @@ pub fn getDocCommentTokenIndex(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenI if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1; }, - .error_value => idx -= 1, - .container_field, - .container_field_init, - .container_field_align, - => idx -= 1, - .test_decl => idx -= 1, - else => { - log.debug("Doc comment check for tag: {s}", .{tags[node]}); - }, + else => idx -= 1, } // Find first doc comment token @@ -103,9 +95,12 @@ pub fn collectDocComments( /// Gets a function signature (keywords, name, return value) pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { - const start = offsets.tokenLocation(tree, func.ast.fn_token).start; - const end = offsets.tokenLocation(tree, tree.nodes.items(.main_token)[func.ast.return_type]).end; - return tree.source[start..end]; + const start = offsets.tokenLocation(tree, func.ast.fn_token); + // return type can be 0 when user wrote incorrect fn signature + // to ensure we don't break, just end the signature at end of fn token + if (func.ast.return_type == 0) return tree.source[start.start..start.end]; + const end = offsets.tokenLocation(tree, tree.lastToken(func.ast.return_type)).end; + return tree.source[start.start..end]; } /// Gets a function snippet insert text @@ -240,12 +235,7 @@ pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { .container_field_align => tree.containerFieldAlign(node).ast.name_token, .identifier => main_token, - - // @TODO: Errors - // .error_=> { - // const tag = node.castTag(.ErrorTag).?; - // return 
tag.name_token; - // }, + .error_value => main_token + 2, // 'error'. // lhs of main token is name token, so use `node` - 1 .test_decl => if (tree.tokens.items(.tag)[main_token + 1] == .string_literal) @@ -328,9 +318,7 @@ fn resolveVarDeclAliasInternal( break :block NodeWithHandle{ .node = resolved_node, .handle = resolved.handle }; } else return null; - if (try lookupSymbolContainer(store, arena, container_node, tree.tokenSlice(datas[node_handle.node].rhs), false)) |inner_decl| { - return inner_decl; - } + return try lookupSymbolContainer(store, arena, container_node, tree.tokenSlice(datas[node_handle.node].rhs), false); } return null; } @@ -366,22 +354,70 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato return null; } +/// Returns `true` when the given `node` is one of the block tags +fn isBlock(tree: ast.Tree, node: ast.Node.Index) bool { + return switch (tree.nodes.items(.tag)[node]) { + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + => true, + else => false, + }; +} + +/// Returns `true` when the given `node` is one of the call tags +fn isCall(tree: ast.Tree, node: ast.Node.Index) bool { + return switch (tree.nodes.items(.tag)[node]) { + .call, + .call_comma, + .call_one, + .call_one_comma, + .async_call, + .async_call_comma, + .async_call_one, + .async_call_one_comma, + => true, + else => false, + }; +} + fn findReturnStatementInternal( tree: ast.Tree, - fn_decl: *ast.Node.FnProto, - base_node: *ast.Node, + fn_decl: ast.full.FnProto, + body: ast.Node.Index, already_found: *bool, -) ?*ast.Node.ControlFlowExpression { - var result: ?*ast.Node.ControlFlowExpression = null; - var child_idx: usize = 0; +) ?ast.Node.Index { + var result: ?ast.Node.Index = null; - while (base_node.iterate(child_idx)) |child_node| : (child_idx += 1) { - if (child_node.castTag(.Return)) |cfe| { - // If we are calling ourselves recursively, ignore this return. 
- if (cfe.getRHS()) |rhs| { - if (rhs.castTag(.Call)) |call_node| { - if (call_node.lhs.tag == .Identifier) { - if (std.mem.eql(u8, getDeclName(tree, call_node.lhs).?, getDeclName(tree, &fn_decl.base).?)) { + const node_tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + + if (!isBlock(tree, body)) return null; + + const statements: []const ast.Node.Index = switch (node_tags[body]) { + .block, .block_semicolon => tree.extra_data[datas[body].lhs..datas[body].rhs], + .block_two, .block_two_semicolon => blk: { + const statements = &[_]ast.Node.Index{ datas[body].lhs, datas[body].rhs }; + const len: usize = if (datas[body].lhs == 0) + @as(usize, 0) + else if (datas[body].rhs == 0) + @as(usize, 1) + else + @as(usize, 2); + break :blk statements[0..len]; + }, + else => unreachable, + }; + + for (statements) |child_idx| { + if (node_tags[child_idx] == .@"return") { + if (datas[child_idx].lhs != 0) { + const lhs = datas[child_idx].lhs; + if (isCall(tree, lhs)) { + const call_name = getDeclName(tree, datas[lhs].lhs); + if (call_name) |name| { + if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) { continue; } } @@ -390,18 +426,19 @@ fn findReturnStatementInternal( if (already_found.*) return null; already_found.* = true; - result = cfe; + result = child_idx; continue; } - result = findReturnStatementInternal(tree, fn_decl, child_node, already_found); + result = findReturnStatementInternal(tree, fn_decl, child_idx, already_found); } + return result; } -fn findReturnStatement(tree: ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.ControlFlowExpression { +fn findReturnStatement(tree: ast.Tree, fn_decl: ast.full.FnProto, body: ast.Node.Index) ?ast.Node.Index { var already_found = false; - return findReturnStatementInternal(tree, fn_decl, fn_decl.getBodyNode().?, &already_found); + return findReturnStatementInternal(tree, fn_decl, body, &already_found); } /// Resolves the return type of a function @@ -415,14 +452,25 @@ pub fn 
resolveReturnType( ) !?TypeWithHandle { const tree = handle.tree; if (isTypeFunction(tree, fn_decl) and fn_body != null) { - // @TODO: find return statement inside fn body of `type` (generic) functions + // If this is a type function and it only contains a single return statement that returns + // a container declaration, we will return that declaration. + const ret = findReturnStatement(tree, fn_decl, fn_body.?) orelse return null; + const data = tree.nodes.items(.data)[ret]; + if (data.lhs != 0) { + return try resolveTypeOfNodeInternal(store, arena, .{ + .node = data.lhs, + .handle = handle, + }, bound_type_params); + } + + return null; } if (fn_decl.ast.return_type == 0) return null; - return resolveTypeOfNodeInternal(store, arena, .{ + return ((try resolveTypeOfNodeInternal(store, arena, .{ .node = fn_decl.ast.return_type, .handle = handle, - }, bound_type_params); + }, bound_type_params)) orelse return null).instanceTypeVal(); } /// Resolves the child type of an optional type @@ -647,7 +695,7 @@ pub fn resolveTypeOfNodeInternal( switch (child.decl.*) { .ast_node => |n| { if (n == node) return null; - if (varDecl(tree, n)) |var_decl| { + if (varDecl(child.handle.tree, n)) |var_decl| { if (var_decl.ast.init_node != 0 and var_decl.ast.init_node == node) return null; } }, @@ -721,7 +769,7 @@ pub fn resolveTypeOfNodeInternal( const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ .node = call.ast.params[it.param_i - 1 - @boolToInt(has_self_param)], - .handle = decl.handle, + .handle = handle, }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; @@ -810,7 +858,7 @@ pub fn resolveTypeOfNodeInternal( arena, .{ .node = left_type_node, .handle = left_type.handle }, rhs_str, - left_type.type.is_type_val, + !left_type.type.is_type_val, )) |child| { return try child.resolveType(store, arena, bound_type_params); } else return null; @@ -829,13 +877,13 @@ pub fn resolveTypeOfNodeInternal( }, bound_type_params)) orelse 
return null; return try resolveUnwrapErrorType(store, arena, left_type, bound_type_params); }, - .error_union => return TypeWithHandle.typeVal(node_handle), .array_type, .array_type_sentinel, .optional_type, .ptr_type_aligned, .ptr_type, .ptr_type_bit_range, + .error_union, => return TypeWithHandle.typeVal(node_handle), .@"try" => { const rhs_type = (try resolveTypeOfNodeInternal(store, arena, .{ @@ -860,7 +908,11 @@ pub fn resolveTypeOfNodeInternal( .handle = rhs_type.handle, }; }, - .builtin_call, .builtin_call_comma, .builtin_call_two, .builtin_call_two_comma => { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { const data = datas[node]; const params = switch (node_tags[node]) { .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], @@ -935,15 +987,24 @@ pub fn resolveTypeOfNodeInternal( .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, => { return TypeWithHandle.typeVal(node_handle); }, - .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => { var buf: [1]ast.Node.Index = undefined; - const fn_proto = fnProto(tree, node, &buf).?; - // This is a function type - if (fn_proto.name_token == null) { + if (fnProto(tree, node, &buf).?.name_token == null) { return TypeWithHandle.typeVal(node_handle); } @@ -952,7 +1013,9 @@ pub fn resolveTypeOfNodeInternal( .handle = handle, }; }, - .multiline_string_literal, .string_literal => return TypeWithHandle{ + .multiline_string_literal, + .string_literal, + => return TypeWithHandle{ .type = .{ .data = .{ .other = node }, .is_type_val = false }, .handle = handle, }, @@ -1317,11 +1380,7 @@ pub fn nodeToString(tree: ast.Tree, node: ast.Node.Index) ?[]const u8 { .container_field => 
return tree.tokenSlice(tree.containerField(node).ast.name_token), .container_field_init => return tree.tokenSlice(tree.containerFieldInit(node).ast.name_token), .container_field_align => return tree.tokenSlice(tree.containerFieldAlign(node).ast.name_token), - // @TODO: Error tag name - // .ErrorTag => { - // const tag = node.castTag(.ErrorTag).?; - // return tree.tokenSlice(tag.name_token); - // }, + .error_value => return tree.tokenSlice(data[node].rhs), .identifier => return tree.tokenSlice(main_token), .fn_proto, .fn_proto_multi, @@ -2005,6 +2064,7 @@ fn iterateSymbolsContainerInternal( .label_decl => continue, else => {}, } + const decl = DeclWithHandle{ .decl = &entry.value, .handle = handle }; if (handle != orig_handle and !decl.isPublic()) continue; try callback(context, decl); @@ -2136,12 +2196,24 @@ fn resolveUse( if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle })) orelse continue; + const use_expr = (try resolveTypeOfNode( + store, + arena, + .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, + )) orelse continue; + const use_expr_node = switch (use_expr.type.data) { .other => |n| n, else => continue, }; - if (try lookupSymbolContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, symbol, false, use_trail)) |candidate| { + if (try lookupSymbolContainerInternal( + store, + arena, + .{ .node = use_expr_node, .handle = use_expr.handle }, + symbol, + false, + use_trail, + )) |candidate| { if (candidate.handle != handle and !candidate.isPublic()) { continue; } @@ -2183,24 +2255,24 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { - if (source_index >= scope.range.start and source_index < scope.range.end) { - if 
(scope.decls.getEntry(symbol)) |candidate| { - switch (candidate.value) { - .ast_node => |node| { - if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; - }, - .label_decl => continue, - else => {}, - } - return DeclWithHandle{ - .decl = &candidate.value, - .handle = handle, - }; + // if (source_index >= scope.range.start and source_index < scope.range.end) { + if (scope.decls.getEntry(symbol)) |candidate| { + switch (candidate.value) { + .ast_node => |node| { + if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; + }, + .label_decl => continue, + else => {}, } - - if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + return DeclWithHandle{ + .decl = &candidate.value, + .handle = handle, + }; } + if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + // } + if (scope.range.start > source_index) return null; } @@ -2235,7 +2307,7 @@ fn lookupSymbolContainerInternal( const token_tags = tree.tokens.items(.tag); const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (isContainer(node_tags[container]) and node_tags[container] != .root) + const is_enum = if (container != 0 and isContainer(node_tags[container])) token_tags[main_token] == .keyword_enum else false; @@ -2397,7 +2469,6 @@ pub fn declMembers(tree: ast.Tree, tag: ast.Node.Tag, node_idx: ast.Node.Index, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members, .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members, .root => tree.rootDecls(), - // @TODO: Fix error set declarations .error_set_decl => &[_]ast.Node.Index{}, else => unreachable, }; @@ -2425,6 +2496,7 @@ fn makeScopeInternal( tree: ast.Tree, node_idx: ast.Node.Index, ) error{OutOfMemory}!void { + // if (node_idx > tree.nodes.len) return; const tags = tree.nodes.items(.tag); const token_tags = 
tree.tokens.items(.tag); const data = tree.nodes.items(.data); @@ -2460,13 +2532,12 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, decl); const name = getDeclName(tree, decl) orelse continue; - // @TODO: implement tests - // if (decl.tag == .TestDecl) { - // try tests.append(decl); - // continue; - // } + if (tags[decl] == .test_decl) { + try tests.append(decl); + continue; + } - if (tags[decl] == .error_set_decl) { + if (node == .error_set_decl) { (try error_completions.addOne(allocator)).* = .{ .label = name, .kind = .Constant, @@ -2490,7 +2561,6 @@ fn makeScopeInternal( continue; } - // @TODO: We can probably just use node_idx directly instead of first transforming to container const container_decl: ?ast.full.ContainerDecl = switch (node) { .container_decl, .container_decl_trailing => tree.containerDecl(node_idx), .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx), @@ -2537,7 +2607,12 @@ fn makeScopeInternal( } switch (node) { - .fn_proto, .fn_proto_one, .fn_proto_simple, .fn_proto_multi, .fn_decl => |fn_tag| { + .fn_proto, + .fn_proto_one, + .fn_proto_simple, + .fn_proto_multi, + .fn_decl, + => |fn_tag| { var buf: [1]ast.Node.Index = undefined; const func = fnProto(tree, node_idx, &buf).?; @@ -2569,7 +2644,11 @@ fn makeScopeInternal( .test_decl => { return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); }, - .block, .block_semicolon, .block_two, .block_two_semicolon => { + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + => { const first_token = tree.firstToken(node_idx); const last_token = tree.lastToken(node_idx); @@ -2638,10 +2717,9 @@ fn makeScopeInternal( scopes.items[scope_idx].uses = uses.toOwnedSlice(); return; }, - .@"comptime", .@"nosuspend" => { - return try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].lhs); - }, - .@"if", 
.if_simple => { + .@"if", + .if_simple, + => { const if_node: ast.full.If = if (node == .@"if") tree.ifFull(node_idx) else @@ -2697,7 +2775,12 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, if_node.ast.else_expr); } }, - .@"while", .while_simple, .while_cont, .@"for", .for_simple => { + .@"while", + .while_simple, + .while_cont, + .@"for", + .for_simple, + => { const while_node: ast.full.While = switch (node) { .@"while" => tree.whileFull(node_idx), .while_simple => tree.whileSimple(node_idx), @@ -2773,7 +2856,9 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.ast.else_expr); } }, - .switch_case, .switch_case_one => { + .switch_case, + .switch_case_one, + => { const switch_case: ast.full.SwitchCase = switch (node) { .switch_case => tree.switchCase(node_idx), .switch_case_one => tree.switchCaseOne(node_idx), @@ -2809,7 +2894,11 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, switch_case.ast.target_expr); }, - .global_var_decl, .local_var_decl, .aligned_var_decl, .simple_var_decl => { + .global_var_decl, + .local_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { const var_decl = varDecl(tree, node_idx).?; if (var_decl.ast.type_node != 0) { try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, var_decl.ast.type_node); @@ -2819,13 +2908,206 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, var_decl.ast.init_node); } }, + .call, + .call_comma, + .call_one, + .call_one_comma, + .async_call, + .async_call_comma, + .async_call_one, + .async_call_one_comma, + => { + var buf: [1]ast.Node.Index = undefined; + const call: ast.full.Call = switch (node) { + .async_call, + .async_call_comma, + .call, + .call_comma, + => tree.callFull(node_idx), + .async_call_one, + .async_call_one_comma, + 
.call_one, + .call_one_comma, + => tree.callOne(&buf, node_idx), + else => unreachable, + }; + + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, call.ast.fn_expr); + for (call.ast.params) |param| + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, param); + }, + .struct_init, + .struct_init_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_one, + .struct_init_one_comma, + => { + var buf: [2]ast.Node.Index = undefined; + const struct_init: ast.full.StructInit = switch (node) { + .struct_init, .struct_init_comma => tree.structInit(node_idx), + .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node_idx), + .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node_idx), + .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node_idx), + else => unreachable, + }; + + if (struct_init.ast.type_expr != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, struct_init.ast.type_expr); + + for (struct_init.ast.fields) |field| { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, field); + } + }, + .array_init, + .array_init_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .array_init_one, + .array_init_one_comma, + => { + var buf: [2]ast.Node.Index = undefined; + const array_init: ast.full.ArrayInit = switch (node) { + .array_init, .array_init_comma => tree.arrayInit(node_idx), + .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node_idx), + .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node_idx), + .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node_idx), + else => unreachable, + }; + + if (array_init.ast.type_expr != 0) + try makeScopeInternal(allocator, scopes, error_completions, 
enum_completions, tree, array_init.ast.type_expr); + for (array_init.ast.elements) |elem| { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, elem); + } + }, + .container_field, + .container_field_align, + .container_field_init, + => { + const field = containerField(tree, node_idx).?; + + if (field.ast.type_expr != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, field.ast.type_expr); + if (field.ast.align_expr != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, field.ast.align_expr); + if (field.ast.value_expr != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, field.ast.value_expr); + }, + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { + const b_data = data[node_idx]; + const params = switch (node) { + .builtin_call, .builtin_call_comma => tree.extra_data[b_data.lhs..b_data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (b_data.lhs == 0) + &[_]ast.Node.Index{} + else if (b_data.rhs == 0) + &[_]ast.Node.Index{b_data.lhs} + else + &[_]ast.Node.Index{ b_data.lhs, b_data.rhs }, + else => unreachable, + }; + + for (params) |param| { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, param); + } + }, + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + => { + const ptr_type: ast.full.PtrType = ptrType(tree, node_idx).?; + if (ptr_type.ast.sentinel != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, ptr_type.ast.sentinel); + if (ptr_type.ast.align_node != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, ptr_type.ast.align_node); + if (ptr_type.ast.child_type != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, ptr_type.ast.child_type); + }, + .slice, + .slice_open, + .slice_sentinel, 
+ => { + const slice: ast.full.Slice = switch (node) { + .slice => tree.slice(node_idx), + .slice_open => tree.sliceOpen(node_idx), + .slice_sentinel => tree.sliceSentinel(node_idx), + else => unreachable, + }; + + if (slice.ast.sliced != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, slice.ast.sliced); + if (slice.ast.start != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, slice.ast.start); + if (slice.ast.end != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, slice.ast.end); + if (slice.ast.sentinel != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, slice.ast.sentinel); + }, + + // no scope + .@"asm", + .asm_simple, + .asm_output, + .asm_input, + .error_value, + .@"anytype", + .multiline_string_literal, + .string_literal, + .enum_literal, + .identifier, + .anyframe_type, + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .@"continue", + => {}, + .@"break", .@"defer" => { + if (data[node_idx].rhs != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); + }, + // all lhs kind of nodes + .@"return", + .@"resume", + .field_access, + .@"suspend", + .deref, + .@"try", + .@"await", + .optional_type, + .@"comptime", + .@"nosuspend", + .bool_not, + .negation, + .bit_not, + .negation_wrap, + .address_of, + .grouped_expression, + .unwrap_optional, + => { + if (data[node_idx].lhs != 0) { + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].lhs); + } + }, else => { - // log.debug("Implement makeScopeInternal for node type: '{s}'", .{node}); - // @TODO: Could we just do node_idx + 1 here? 
- // var child_idx: usize = 0; - // while (node.iterate(child_idx)) |child_node| : (child_idx += 1) { - // try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, child_node); - // } + if (data[node_idx].lhs != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].lhs); + if (data[node_idx].rhs != 0) + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].rhs); }, } } diff --git a/src/config.zig b/src/config.zig index 9039e59..346aa7d 100644 --- a/src/config.zig +++ b/src/config.zig @@ -25,3 +25,7 @@ enable_semantic_tokens: bool = true, /// Whether to enable `*` and `?` operators in completion lists operator_completions: bool = true, + +/// Skips references to std. This will improve lookup speeds. +/// Going to definition however will continue to work +skip_std_references: bool = true, diff --git a/src/main.zig b/src/main.zig index 10d85bb..11c12b5 100644 --- a/src/main.zig +++ b/src/main.zig @@ -366,6 +366,7 @@ fn nodeToCompletion( .arena = arena, .orig_handle = orig_handle, }; + logger.debug("eklafgaef", .{}); try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, !is_type_val); } @@ -427,7 +428,11 @@ fn nodeToCompletion( }); } }, - .global_var_decl, .local_var_decl, .aligned_var_decl, .simple_var_decl => { + .global_var_decl, + .local_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { const var_decl = analysis.varDecl(tree, node).?; const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const; @@ -448,7 +453,10 @@ fn nodeToCompletion( .detail = analysis.getVariableSignature(tree, var_decl), }); }, - .container_field, .container_field_align, .container_field_init => { + .container_field, + .container_field_align, + .container_field_init, + => { const field = analysis.containerField(tree, node).?; try list.append(.{ .label = handle.tree.tokenSlice(field.ast.name_token), 
@@ -457,13 +465,19 @@ fn nodeToCompletion( .detail = analysis.getContainerFieldSignature(handle.tree, field), }); }, - .array_type, .array_type_sentinel => { + .array_type, + .array_type_sentinel, + => { try list.append(.{ .label = "len", .kind = .Field, }); }, - .ptr_type, .ptr_type_aligned, .ptr_type_bit_range, .ptr_type_sentinel => { + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + => { const ptr_type = analysis.ptrType(tree, node).?; switch (ptr_type.size) { @@ -496,7 +510,7 @@ fn nodeToCompletion( .kind = .Field, }); }, - else => if (analysis.nodeToString(handle.tree, node)) |string| { + else => if (analysis.nodeToString(tree, node)) |string| { try list.append(.{ .label = string, .kind = .Field, @@ -828,10 +842,26 @@ fn renameDefinitionLabel(arena: *std.heap.ArenaAllocator, id: types.RequestId, h }); } -fn referencesDefinitionGlobal(arena: *std.heap.ArenaAllocator, id: types.RequestId, handle: *DocumentStore.Handle, pos_index: usize, include_decl: bool) !void { +fn referencesDefinitionGlobal( + arena: *std.heap.ArenaAllocator, + id: types.RequestId, + handle: *DocumentStore.Handle, + pos_index: usize, + include_decl: bool, + skip_std_references: bool, +) !void { const decl = (try getSymbolGlobal(arena, pos_index, handle)) orelse return try respondGeneric(id, null_result_response); var locs = std.ArrayList(types.Location).init(&arena.allocator); - try references.symbolReferences(arena, &document_store, decl, offset_encoding, include_decl, &locs, std.ArrayList(types.Location).append); + try references.symbolReferences( + arena, + &document_store, + decl, + offset_encoding, + include_decl, + &locs, + std.ArrayList(types.Location).append, + skip_std_references, + ); try send(arena, types.Response{ .id = id, .result = .{ .Locations = locs.items }, @@ -849,7 +879,7 @@ fn referencesDefinitionFieldAccess( ) !void { const decl = (try getSymbolFieldAccess(handle, arena, position, range, config)) orelse return try respondGeneric(id, 
null_result_response); var locs = std.ArrayList(types.Location).init(&arena.allocator); - try references.symbolReferences(arena, &document_store, decl, offset_encoding, include_decl, &locs, std.ArrayList(types.Location).append); + try references.symbolReferences(arena, &document_store, decl, offset_encoding, include_decl, &locs, std.ArrayList(types.Location).append, config.skip_std_references); try send(arena, types.Response{ .id = id, .result = .{ .Locations = locs.items }, @@ -885,7 +915,15 @@ fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenI fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) !void { const tree = decl_handle.handle.tree; switch (decl_handle.decl.*) { - .ast_node => |node| try nodeToCompletion(context.arena, context.completions, .{ .node = node, .handle = decl_handle.handle }, null, context.orig_handle, false, context.config.*), + .ast_node => |node| try nodeToCompletion( + context.arena, + context.completions, + .{ .node = node, .handle = decl_handle.handle }, + null, + context.orig_handle, + false, + context.config.*, + ), .param_decl => |param| { const doc_kind: types.MarkupContent.Kind = if (client_capabilities.completion_doc_supports_md) .Markdown else .PlainText; const doc = if (param.first_doc_comment) |doc_comments| @@ -1419,7 +1457,7 @@ fn referencesHandler(arena: *std.heap.ArenaAllocator, id: types.RequestId, req: const include_decl = req.params.context.includeDeclaration; switch (pos_context) { - .var_access => try referencesDefinitionGlobal(arena, id, handle, doc_position.absolute_index, include_decl), + .var_access => try referencesDefinitionGlobal(arena, id, handle, doc_position.absolute_index, include_decl, config.skip_std_references), .field_access => |range| try referencesDefinitionFieldAccess(arena, id, handle, doc_position, range, include_decl, config), .label => try referencesDefinitionLabel(arena, id, handle, doc_position.absolute_index, include_decl), 
else => try respondGeneric(id, null_result_response), diff --git a/src/references.zig b/src/references.zig index 951476b..b77749f 100644 --- a/src/references.zig +++ b/src/references.zig @@ -122,7 +122,11 @@ fn symbolReferencesInternal( for (analysis.declMembers(tree, node_tags[node], node, &buf)) |member| try symbolReferencesInternal(arena, store, .{ .node = member, .handle = handle }, decl, encoding, context, handler); }, - .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => { const var_decl = analysis.varDecl(tree, node).?; if (var_decl.ast.type_node != 0) { try symbolReferencesInternal(arena, store, .{ .node = var_decl.ast.type_node, .handle = handle }, decl, encoding, context, handler); @@ -131,12 +135,13 @@ fn symbolReferencesInternal( try symbolReferencesInternal(arena, store, .{ .node = var_decl.ast.init_node, .handle = handle }, decl, encoding, context, handler); } }, - // @TODO: Usingnamespace - // .Use => { - // const use = node.cast(ast.Node.Use).?; - // try symbolReferencesInternal(arena, store, .{ .node = use.expr, .handle = handle }, decl, encoding, context, handler); - // }, - .container_field, .container_field_align, .container_field_init => { + .@"usingnamespace" => { + try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); + }, + .container_field, + .container_field_align, + .container_field_init, + => { const field = analysis.containerField(tree, node).?; if (field.ast.type_expr != 0) { try symbolReferencesInternal(arena, store, .{ .node = field.ast.type_expr, .handle = handle }, decl, encoding, context, handler); @@ -152,7 +157,12 @@ fn symbolReferencesInternal( } } }, - .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl => { + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => { var buf: [1]ast.Node.Index = 
undefined; const fn_proto = analysis.fnProto(tree, node, &buf).?; var it = fn_proto.iterate(tree); @@ -189,7 +199,9 @@ fn symbolReferencesInternal( .@"nosuspend" => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); }, - .@"switch", .switch_comma => { + .@"switch", + .switch_comma, + => { // TODO When renaming a union(enum) field, also rename switch items that refer to it. try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); @@ -212,7 +224,12 @@ fn symbolReferencesInternal( for (case.ast.values) |val| try symbolReferencesInternal(arena, store, .{ .node = val, .handle = handle }, decl, encoding, context, handler); }, - .@"while", .while_simple, .while_cont, .for_simple, .@"for" => { + .@"while", + .while_simple, + .while_cont, + .for_simple, + .@"for", + => { const loop: ast.full.While = switch (node_tags[node]) { .@"while" => tree.whileFull(node), .while_simple => tree.whileSimple(node), @@ -230,7 +247,9 @@ fn symbolReferencesInternal( try symbolReferencesInternal(arena, store, .{ .node = loop.ast.else_expr, .handle = handle }, decl, encoding, context, handler); } }, - .@"if", .if_simple => { + .@"if", + .if_simple, + => { const if_node: ast.full.If = if (node_tags[node] == .@"if") tree.ifFull(node) else tree.ifSimple(node); try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler); @@ -239,11 +258,17 @@ fn symbolReferencesInternal( try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.else_expr, .handle = handle }, decl, encoding, context, handler); } }, - .array_type, .array_type_sentinel => { + .array_type, + .array_type_sentinel, + => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); try 
symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); }, - .ptr_type, .ptr_type_aligned, .ptr_type_bit_range, .ptr_type_sentinel => { + .ptr_type, + .ptr_type_aligned, + .ptr_type_bit_range, + .ptr_type_sentinel, + => { const ptr_type = analysis.ptrType(tree, node).?; if (ptr_type.ast.align_node != 0) { @@ -334,7 +359,10 @@ fn symbolReferencesInternal( try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); } }, - .slice, .slice_sentinel, .slice_open => |s| { + .slice, + .slice_sentinel, + .slice_open, + => |s| { const slice: ast.full.Slice = switch (s) { .slice => tree.slice(node), .slice_open => tree.sliceOpen(node), @@ -353,13 +381,18 @@ fn symbolReferencesInternal( try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); try symbolReferencesInternal(arena, store, .{ .node = datas[node].rhs, .handle = handle }, decl, encoding, context, handler); }, - .deref, .unwrap_optional => { + .deref, + .unwrap_optional, + => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); }, .grouped_expression => { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); }, - .@"return", .@"break", .@"continue" => { + .@"return", + .@"break", + .@"continue", + => { if (datas[node].lhs != 0) { try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler); } @@ -389,7 +422,9 @@ fn symbolReferencesInternal( for (params) |param| try symbolReferencesInternal(arena, store, .{ .node = param, .handle = handle }, decl, encoding, context, handler); }, - .@"asm", .asm_simple => |a| { + .@"asm", + .asm_simple, + => |a| { const _asm: ast.full.Asm = if (a == .@"asm") tree.asmFull(node) else 
tree.asmSimple(node); if (_asm.ast.items.len == 0) try symbolReferencesInternal(arena, store, .{ .node = _asm.ast.template, .handle = handle }, decl, encoding, context, handler); @@ -491,6 +526,7 @@ pub fn symbolReferences( include_decl: bool, context: anytype, comptime handler: anytype, + skip_std_references: bool, ) !void { std.debug.assert(decl_handle.decl.* != .label_decl); const curr_handle = decl_handle.handle; @@ -500,16 +536,18 @@ pub fn symbolReferences( var handles = std.ArrayList(*DocumentStore.Handle).init(&arena.allocator); var handle_it = store.handles.iterator(); while (handle_it.next()) |entry| { + if (skip_std_references and std.mem.indexOf(u8, entry.key, "std") != null) { + if (!include_decl or entry.value != curr_handle) + continue; + } try handles.append(entry.value); } for (handles.items) |handle| { if (include_decl and handle == curr_handle) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); - try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); } - // @TODO: make references across files working - // try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); + try symbolReferencesInternal(arena, store, .{ .node = 0, .handle = handle }, decl_handle, encoding, context, handler); } }, .param_decl => |param| { diff --git a/src/rename.zig b/src/rename.zig index 803b424..5d87cec 100644 --- a/src/rename.zig +++ b/src/rename.zig @@ -40,7 +40,7 @@ pub fn renameSymbol( .edits = edits, .allocator = &arena.allocator, .new_name = new_name, - }, refHandler); + }, refHandler, true); } pub fn renameLabel( From e2f4bbf2f3a396ae58ad28d7a28e0298fc7eab28 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 7 Mar 2021 14:51:47 +0100 Subject: [PATCH 24/36] Implement semantic tokens for regular comments --- src/analysis.zig | 17 ++++++-- src/config.zig | 2 +- src/main.zig | 2 +- src/offsets.zig | 4 +- 
src/references.zig | 2 +- src/semantic_tokens.zig | 94 +++++++++++++++++++++++++++++++++++++++-- 6 files changed, 110 insertions(+), 11 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index a061294..f25ba75 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1785,8 +1785,19 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod if (name.len == 0) return; - const start_loc = context.prev_loc.add(try offsets.tokenRelativeLocation(tree, context.prev_loc.offset, tree.firstToken(node), context.encoding)); - const end_loc = start_loc.add(try offsets.tokenRelativeLocation(tree, start_loc.offset, tree.lastToken(node), context.encoding)); + const starts = tree.tokens.items(.start); + const start_loc = context.prev_loc.add(try offsets.tokenRelativeLocation( + tree, + context.prev_loc.offset, + starts[tree.firstToken(node)], + context.encoding, + )); + const end_loc = start_loc.add(try offsets.tokenRelativeLocation( + tree, + start_loc.offset, + starts[tree.lastToken(node)], + context.encoding, + )); context.prev_loc = end_loc; const range = types.Range{ .start = .{ @@ -1908,7 +1919,7 @@ pub const DeclWithHandle = struct { pub fn location(self: DeclWithHandle, encoding: offsets.Encoding) !offsets.TokenLocation { const tree = self.handle.tree; - return try offsets.tokenRelativeLocation(tree, 0, self.nameToken(), encoding); + return try offsets.tokenRelativeLocation(tree, 0, tree.tokens.items(.start)[self.nameToken()], encoding); } fn isPublic(self: DeclWithHandle) bool { diff --git a/src/config.zig b/src/config.zig index 346aa7d..fe224e2 100644 --- a/src/config.zig +++ b/src/config.zig @@ -28,4 +28,4 @@ operator_completions: bool = true, /// Skips references to std. This will improve lookup speeds. 
/// Going to definition however will continue to work -skip_std_references: bool = true, +skip_std_references: bool = false, diff --git a/src/main.zig b/src/main.zig index 11c12b5..89b3b1d 100644 --- a/src/main.zig +++ b/src/main.zig @@ -554,7 +554,7 @@ fn gotoDefinitionSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, de const name_token = analysis.getDeclNameToken(handle.tree, node) orelse return try respondGeneric(id, null_result_response); - break :block offsets.tokenRelativeLocation(handle.tree, 0, name_token, offset_encoding) catch return; + break :block offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[name_token], offset_encoding) catch return; }, else => decl_handle.location(offset_encoding) catch return, }; diff --git a/src/offsets.zig b/src/offsets.zig index 418fe2f..8f38f56 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -71,8 +71,8 @@ pub const TokenLocation = struct { } }; -pub fn tokenRelativeLocation(tree: ast.Tree, start_index: usize, token: ast.TokenIndex, encoding: Encoding) !TokenLocation { - const start = tree.tokens.items(.start)[token]; +pub fn tokenRelativeLocation(tree: ast.Tree, start_index: usize, next_token_index: usize, encoding: Encoding) !TokenLocation { + const start = next_token_index; var loc = TokenLocation{ .line = 0, diff --git a/src/references.zig b/src/references.zig index b77749f..6c98824 100644 --- a/src/references.zig +++ b/src/references.zig @@ -14,7 +14,7 @@ fn tokenReference( context: anytype, comptime handler: anytype, ) !void { - const loc = offsets.tokenRelativeLocation(handle.tree, 0, tok, encoding) catch return; + const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return; try handler(context, types.Location{ .uri = handle.uri(), .range = .{ diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 87eaed9..a59eabd 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -48,11 +48,21 @@ 
pub const TokenModifiers = packed struct { } }; +const Comment = struct { + /// Length of the comment + length: u32, + /// Source index of the comment + start: u32, +}; + +const CommentList = std.ArrayList(Comment); + const Builder = struct { handle: *DocumentStore.Handle, current_token: ?ast.TokenIndex, arr: std.ArrayList(u32), encoding: offsets.Encoding, + comments: CommentList, fn init(allocator: *std.mem.Allocator, handle: *DocumentStore.Handle, encoding: offsets.Encoding) Builder { return Builder{ @@ -60,6 +70,7 @@ const Builder = struct { .current_token = null, .arr = std.ArrayList(u32).init(allocator), .encoding = encoding, + .comments = CommentList.init(allocator), }; } @@ -73,7 +84,21 @@ const Builder = struct { if (start_idx > starts[token]) return; - const delta_loc = offsets.tokenRelativeLocation(self.handle.tree, start_idx, token, self.encoding) catch return; + const delta_loc = if (self.findCommentBetween(start_idx, starts[token])) |comment| blk: { + const old_loc = self.handle.tree.tokenLocation(0, self.current_token orelse 0); + const comment_delta = offsets.tokenRelativeLocation(self.handle.tree, start_idx, comment.start, self.encoding) catch return; + + try self.arr.appendSlice(&[_]u32{ + @truncate(u32, comment_delta.line), + @truncate(u32, comment_delta.column), + comment.length, + @enumToInt(TokenType.comment), + 0, + }); + + break :blk offsets.tokenRelativeLocation(self.handle.tree, comment.start, starts[token], self.encoding) catch return; + } else offsets.tokenRelativeLocation(self.handle.tree, start_idx, starts[token], self.encoding) catch return; + try self.arr.appendSlice(&[_]u32{ @truncate(u32, delta_loc.line), @truncate(u32, delta_loc.column), @@ -87,6 +112,15 @@ const Builder = struct { fn toOwnedSlice(self: *Builder) []u32 { return self.arr.toOwnedSlice(); } + + /// Based on a given start and end index, returns a `Comment` between the positions + /// Returns `null` if none was fone + fn findCommentBetween(self: Builder, from: u32, to: 
u32) ?Comment { + return for (self.comments.items) |comment| { + if (comment.start > from and comment.start < to) + break comment; + } else null; + } }; fn writeToken( @@ -1011,8 +1045,12 @@ fn writeNodeTokens( pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { var builder = Builder.init(arena.child_allocator, handle, encoding); + // as line comments are not nodes, we parse the text then generate the tokens for them + try findComments(&builder, handle.tree.source, encoding); + // reverse the ast from the root declarations var gap_highlighter = GapHighlighter.init(&builder, 0); + var buf: [2]ast.Node.Index = undefined; for (analysis.declMembers(handle.tree, .root, 0, &buf)) |child| { try gap_highlighter.next(child); @@ -1020,7 +1058,57 @@ pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentS } try gap_highlighter.end(@truncate(u32, handle.tree.tokens.len) - 1); - // pass root node, which always has index '0' - // try writeNodeTokens(&builder, arena, store, 0); + return builder.toOwnedSlice(); } + +/// As the AST does not contain nodes for comments +/// this will parse through the entire file to search for comments +/// and generate semantic tokens for them +fn findComments(builder: *Builder, source: []const u8, encoding: offsets.Encoding) !void { + var state: enum { none, comment, doc_comment } = .none; + + var prev: u8 = 0; + var start: usize = 0; + for (source) |c, i| { + if (state == .comment and c == '/') { + state = .none; + continue; + } + + if (state == .none and c == '/' and prev == '/') { + state = .comment; + start = i - 1; + } + + if (c == '\n') { + if (state == .comment) { + state = .none; + + const len = if (encoding == .utf8) + i - start + else blk: { + var index: usize = start; + var utf16_len: usize = 0; + while (index < i) { + const n = std.unicode.utf8ByteSequenceLength(source[index]) catch unreachable; + const codepoint 
= std.unicode.utf8Decode(source[index .. index + n]) catch unreachable; + if (codepoint < 0x10000) { + utf16_len += 1; + } else { + utf16_len += 2; + } + index += n; + } + break :blk utf16_len; + }; + + try builder.comments.append(.{ + .length = @truncate(u32, len), + .start = @truncate(u32, start), + }); + } + } + prev = c; + } +} From 53c37765c0939ba016c498ca05b34a5d4bca2d26 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 7 Mar 2021 18:45:37 +0100 Subject: [PATCH 25/36] Fix inferred error set return types --- README.md | 1 + src/analysis.zig | 21 +++++++++++++++++++-- src/main.zig | 4 +--- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index dfbf335..5ae81d1 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,7 @@ The following options are currently available. | `build_runner_cache_path` | `?[]const u8` | `null` | Path to a directroy that will be used as zig's cache when running `zig run build_runner.zig ...`. `null` is equivalent to `${KnownFloders.Cache}/zls` | | `enable_semantic_tokens` | `bool` | `true` | Enables semantic token support when the client also supports it. | | `operator_completions` | `bool` | `true` | Enables `*` and `?` operators in completion lists. | +| `skip_std_references` | `bool` | `false` | When true, skips searching for references in std. Improves lookup speed for functions in user's code. Renaming and go-to-definition will continue to work as is. 
## Features diff --git a/src/analysis.zig b/src/analysis.zig index f25ba75..85fe845 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -467,8 +467,24 @@ pub fn resolveReturnType( } if (fn_decl.ast.return_type == 0) return null; - return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = fn_decl.ast.return_type, + const return_type = fn_decl.ast.return_type; + + const is_inferred_error = tree.tokens.items(.tag)[tree.firstToken(return_type) - 1] == .bang; + return if (is_inferred_error) block: { + const child_type = (try resolveTypeOfNodeInternal(store, arena, .{ + .node = return_type, + .handle = handle, + }, bound_type_params)) orelse return null; + const child_type_node = switch (child_type.type.data) { + .other => |n| n, + else => return null, + }; + break :block TypeWithHandle{ + .type = .{ .data = .{ .error_union = child_type_node }, .is_type_val = false }, + .handle = child_type.handle, + }; + } else ((try resolveTypeOfNodeInternal(store, arena, .{ + .node = return_type, .handle = handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } @@ -509,6 +525,7 @@ fn resolveUnwrapErrorType( }, .primitive, .slice, .pointer => return null, }; + if (rhs.handle.tree.nodes.items(.tag)[rhs_node] == .error_union) { return ((try resolveTypeOfNodeInternal(store, arena, .{ .node = rhs.handle.tree.nodes.items(.data)[rhs_node].rhs, diff --git a/src/main.zig b/src/main.zig index 89b3b1d..b96aded 100644 --- a/src/main.zig +++ b/src/main.zig @@ -366,7 +366,6 @@ fn nodeToCompletion( .arena = arena, .orig_handle = orig_handle, }; - logger.debug("eklafgaef", .{}); try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context, !is_type_val); } @@ -623,9 +622,8 @@ fn hoverSymbol( const first_token = param.first_doc_comment orelse param.comptime_noalias orelse param.name_token orelse - param.anytype_ellipsis3 orelse tree.firstToken(param.type_expr); - const last_token = tree.lastToken(param.type_expr); + const 
last_token = tree.lastToken(param.anytype_ellipsis3 orelse param.type_expr); const start = offsets.tokenLocation(tree, first_token).start; const end = offsets.tokenLocation(tree, last_token).end; From a078a62a375350d4f22ce24ceab7d56c02f826e7 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 7 Mar 2021 18:52:22 +0100 Subject: [PATCH 26/36] Semantic tokens: Allow for multiple line comments --- src/semantic_tokens.zig | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index a59eabd..32e43d4 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -76,7 +76,7 @@ const Builder = struct { fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { const starts = self.handle.tree.tokens.items(.start); - const start_idx = if (self.current_token) |current_token| + var start_idx = if (self.current_token) |current_token| starts[current_token] else 0; @@ -84,7 +84,7 @@ const Builder = struct { if (start_idx > starts[token]) return; - const delta_loc = if (self.findCommentBetween(start_idx, starts[token])) |comment| blk: { + const delta_loc = while (self.findCommentBetween(start_idx, starts[token])) |comment| { const old_loc = self.handle.tree.tokenLocation(0, self.current_token orelse 0); const comment_delta = offsets.tokenRelativeLocation(self.handle.tree, start_idx, comment.start, self.encoding) catch return; @@ -96,7 +96,7 @@ const Builder = struct { 0, }); - break :blk offsets.tokenRelativeLocation(self.handle.tree, comment.start, starts[token], self.encoding) catch return; + start_idx = comment.start; } else offsets.tokenRelativeLocation(self.handle.tree, start_idx, starts[token], self.encoding) catch return; try self.arr.appendSlice(&[_]u32{ From fd6b94bcc91565410916eed6910d1d58a714640f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sun, 7 Mar 2021 21:54:54 +0100 Subject: [PATCH 27/36] Fixes multiple small bugs: - Correct 
completion based on scope - Semantic tokens for while/if/else/for keywords - Fix crash on import path --- src/analysis.zig | 112 ++++++++++++++++++++++++---------------- src/main.zig | 4 +- src/semantic_tokens.zig | 9 +++- 3 files changed, 77 insertions(+), 48 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 85fe845..f4efe5f 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1456,39 +1456,45 @@ pub fn fnProto(tree: ast.Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?a pub fn getImportStr(tree: ast.Tree, node: ast.Node.Index, source_index: usize) ?[]const u8 { const node_tags = tree.nodes.items(.tag); var buf: [2]ast.Node.Index = undefined; - const decls = declMembers(tree, node_tags[node], node, &buf); - - for (decls) |decl_idx| { - if (!nodeContainsSourceIndex(tree, decl_idx, source_index)) { - continue; + if (isContainer(node_tags[node])) { + const decls = declMembers(tree, node_tags[node], node, &buf); + for (decls) |decl_idx| { + if (getImportStr(tree, decl_idx, source_index)) |name| { + return name; + } } + return null; + } else if (varDecl(tree, node)) |var_decl| { + return getImportStr(tree, var_decl.ast.init_node, source_index); + } else if (node_tags[node] == .@"usingnamespace") { + return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index); + } - if (isBuiltinCall(tree, decl_idx)) { - const builtin_token = tree.nodes.items(.main_token)[decl_idx]; - const call_name = tree.tokenSlice(builtin_token); + if (!nodeContainsSourceIndex(tree, node, source_index)) { + return null; + } - if (!std.mem.eql(u8, call_name, "@import")) continue; - const data = tree.nodes.items(.data)[decl_idx]; - const params = switch (node_tags[decl_idx]) { - .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], - .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) - &[_]ast.Node.Index{} - else if (data.rhs == 0) - &[_]ast.Node.Index{data.lhs} - else - &[_]ast.Node.Index{ data.lhs, data.rhs }, - else => 
unreachable, - }; + if (isBuiltinCall(tree, node)) { + const builtin_token = tree.nodes.items(.main_token)[node]; + const call_name = tree.tokenSlice(builtin_token); - if (params.len != 1) continue; + if (!std.mem.eql(u8, call_name, "@import")) return null; + const data = tree.nodes.items(.data)[node]; + const params = switch (node_tags[node]) { + .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs], + .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0) + &[_]ast.Node.Index{} + else if (data.rhs == 0) + &[_]ast.Node.Index{data.lhs} + else + &[_]ast.Node.Index{ data.lhs, data.rhs }, + else => unreachable, + }; - const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); - return import_str[1 .. import_str.len - 1]; - } + if (params.len != 1) return null; - if (getImportStr(tree, decl_idx, source_index)) |name| { - return name; - } + const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); + return import_str[1 .. import_str.len - 1]; } return null; @@ -2105,12 +2111,30 @@ fn iterateSymbolsContainerInternal( if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = tree.nodes.items(.data)[use].rhs, .handle = handle })) orelse continue; + const rhs = tree.nodes.items(.data)[use].rhs; + // rhs can be invalid so apply the following check to ensure + // we do not go out of bounds when resolving the type + if (rhs == 0 or rhs > tree.nodes.len) continue; + const use_expr = (try resolveTypeOfNode(store, arena, .{ + .node = tree.nodes.items(.data)[use].rhs, + .handle = orig_handle, + })) orelse continue; + const use_expr_node = switch (use_expr.type.data) { .other => |n| n, else => continue, }; - try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, orig_handle, callback, context, false, use_trail); + + try 
iterateSymbolsContainerInternal( + store, + arena, + .{ .node = use_expr_node, .handle = use_expr.handle }, + orig_handle, + callback, + context, + false, + use_trail, + ); } } } @@ -2283,23 +2307,23 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { - // if (source_index >= scope.range.start and source_index < scope.range.end) { - if (scope.decls.getEntry(symbol)) |candidate| { - switch (candidate.value) { - .ast_node => |node| { - if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; - }, - .label_decl => continue, - else => {}, + if (source_index >= scope.range.start and source_index < scope.range.end) { + if (scope.decls.getEntry(symbol)) |candidate| { + switch (candidate.value) { + .ast_node => |node| { + if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue; + }, + .label_decl => continue, + else => {}, + } + return DeclWithHandle{ + .decl = &candidate.value, + .handle = handle, + }; } - return DeclWithHandle{ - .decl = &candidate.value, - .handle = handle, - }; - } - if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; - // } + if (try resolveUse(store, arena, scope.uses, symbol, handle, use_trail)) |result| return result; + } if (scope.range.start > source_index) return null; } diff --git a/src/main.zig b/src/main.zig index b96aded..79be07c 100644 --- a/src/main.zig +++ b/src/main.zig @@ -622,8 +622,8 @@ fn hoverSymbol( const first_token = param.first_doc_comment orelse param.comptime_noalias orelse param.name_token orelse - tree.firstToken(param.type_expr); - const last_token = tree.lastToken(param.anytype_ellipsis3 orelse param.type_expr); + tree.firstToken(param.type_expr); // extern fn + const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr); const start = offsets.tokenLocation(tree, first_token).start; const end = offsets.tokenLocation(tree, 
last_token).end; diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 32e43d4..2e3b911 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -598,6 +598,7 @@ fn writeNodeTokens( try writeToken(builder, while_node.label_token, .label); try writeToken(builder, while_node.inline_token, .keyword); + try writeToken(builder, while_node.ast.while_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr }); try writeToken(builder, while_node.payload_token, .variable); if (while_node.ast.cont_expr != 0) @@ -607,8 +608,10 @@ fn writeNodeTokens( try writeToken(builder, while_node.error_token, .variable); - if (while_node.ast.else_expr != 0) + if (while_node.ast.else_expr != 0) { + try writeToken(builder, while_node.else_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr }); + } }, .@"if", .if_simple, @@ -622,8 +625,10 @@ fn writeNodeTokens( try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr }); try writeToken(builder, if_node.error_token, .variable); - if (if_node.ast.else_expr != 0) + if (if_node.ast.else_expr != 0) { + try writeToken(builder, if_node.else_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr }); + } }, .array_init, .array_init_comma, From 43ebfc73008c105baab433501a8a15c558b7f74e Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 8 Mar 2021 19:46:23 +0100 Subject: [PATCH 28/36] Work on completion for unwrapped slices/optionals in loops/ifs --- src/analysis.zig | 82 ++++++++++++++++++++++++++++++++++------------ src/main.zig | 23 +++++++------ src/references.zig | 2 +- 3 files changed, 73 insertions(+), 34 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index f4efe5f..128252e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -595,6 +595,7 @@ fn 
resolveBracketAccessType( const tags = tree.nodes.items(.tag); const tag = tags[lhs_node]; const data = tree.nodes.items(.data)[lhs_node]; + if (tag == .array_type or tag == .array_type_sentinel) { if (rhs == .Single) return ((try resolveTypeOfNodeInternal(store, arena, .{ @@ -605,11 +606,11 @@ fn resolveBracketAccessType( .type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false }, .handle = lhs.handle, }; - } else if (isPtrType(tree, lhs_node)) { - if (tags[data.rhs] == .array_type or tags[data.rhs] == .array_type_sentinel) { + } else if (ptrType(tree, lhs_node)) |ptr_type| { + if (ptr_type.size == .Slice) { if (rhs == .Single) { return ((try resolveTypeOfNodeInternal(store, arena, .{ - .node = tree.nodes.items(.data)[data.rhs].rhs, + .node = ptr_type.ast.child_type, .handle = lhs.handle, }, bound_type_params)) orelse return null).instanceTypeVal(); } @@ -1321,10 +1322,15 @@ pub fn getFieldAccessType( // Can't call a function type, we need a function type instance. if (current_type.type.is_type_val) return null; + const cur_tree = current_type.handle.tree; var buf: [1]ast.Node.Index = undefined; - if (fnProto(tree, current_type_node, &buf)) |func| { - const has_body = tree.nodes.items(.tag)[current_type_node] == .fn_decl; - const body = tree.nodes.items(.data)[current_type_node].rhs; + if (fnProto(cur_tree, current_type_node, &buf)) |func| { + // Check if the function has a body and if so, pass it + // so the type can be resolved if it's a generic function returning + // an anonymous struct + const has_body = cur_tree.nodes.items(.tag)[current_type_node] == .fn_decl; + const body = cur_tree.nodes.items(.data)[current_type_node].rhs; + if (try resolveReturnType(store, arena, func, current_type.handle, &bound_type_params, if (has_body) body else null)) |ret| { current_type = ret; // Skip to the right paren @@ -1911,16 +1917,16 @@ pub const Declaration = union(enum) { name: ast.TokenIndex, condition: ast.Node.Index, }, - // array_payload: struct { - // 
identifier: *ast.Node, - // array_expr: ast.full.ArrayType, - // }, + array_payload: struct { + identifier: ast.TokenIndex, + array_expr: ast.Node.Index, + }, switch_payload: struct { node: ast.TokenIndex, switch_expr: ast.Node.Index, items: []const ast.Node.Index, }, - label_decl: ast.TokenIndex, // .id is While, For or Block (firstToken will be the label) + label_decl: ast.TokenIndex, }; pub const DeclWithHandle = struct { @@ -1934,7 +1940,7 @@ pub const DeclWithHandle = struct { .ast_node => |n| getDeclNameToken(tree, n).?, .param_decl => |p| p.name_token.?, .pointer_payload => |pp| pp.name, - // .array_payload => |ap| ap.identifier.firstToken(), + .array_payload => |ap| ap.identifier, .switch_payload => |sp| sp.node + @boolToInt(token_tags[sp.node] == .asterisk), .label_decl => |ld| ld, }; @@ -1956,9 +1962,13 @@ pub const DeclWithHandle = struct { const tree = self.handle.tree; const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); - return switch (self.decl.*) { - .ast_node => |node| try resolveTypeOfNodeInternal(store, arena, .{ .node = node, .handle = self.handle }, bound_type_params), + .ast_node => |node| try resolveTypeOfNodeInternal( + store, + arena, + .{ .node = node, .handle = self.handle }, + bound_type_params, + ), .param_decl => |param_decl| { if (typeIsType(self.handle.tree, param_decl.type_expr)) { var bound_param_it = bound_type_params.iterator(); @@ -1988,6 +1998,16 @@ pub const DeclWithHandle = struct { }, bound_type_params)) orelse return null, bound_type_params, ), + .array_payload => |pay| try resolveBracketAccessType( + store, + arena, + (try resolveTypeOfNodeInternal(store, arena, .{ + .node = pay.array_expr, + .handle = self.handle, + }, bound_type_params)) orelse return null, + .Single, + bound_type_params, + ), .label_decl => return null, .switch_payload => |pay| { if (pay.items.len == 0) return null; @@ -2832,7 +2852,7 @@ fn makeScopeInternal( .while_cont, .@"for", .for_simple, - => { + => |tag| { 
const while_node: ast.full.While = switch (node) { .@"while" => tree.whileFull(node_idx), .while_simple => tree.whileSimple(node_idx), @@ -2841,6 +2861,9 @@ fn makeScopeInternal( .for_simple => tree.forSimple(node_idx), else => unreachable, }; + + const is_for = tag == .@"for" or tag == .for_simple; + if (while_node.label_token) |label| { std.debug.assert(token_tags[label] == .identifier); var scope = try scopes.addOne(allocator); @@ -2877,12 +2900,29 @@ fn makeScopeInternal( std.debug.assert(token_tags[name_token] == .identifier); const name = tree.tokenSlice(name_token); - try scope.decls.putNoClobber(name, .{ - .pointer_payload = .{ - .name = name_token, - .condition = while_node.ast.cond_expr, - }, - }); + try scope.decls.putNoClobber(name, if (is_for) + .{ + .array_payload = .{ + .identifier = name_token, + .array_expr = while_node.ast.cond_expr, + }, + } + else + .{ + .pointer_payload = .{ + .name = name_token, + .condition = while_node.ast.cond_expr, + }, + }); + + // for loop with index as well + if (token_tags[name_token + 1] == .comma) { + const index_token = name_token + 2; + std.debug.assert(token_tags[index_token] == .identifier); + if (try scope.decls.fetchPut(tree.tokenSlice(index_token), .{ .label_decl = index_token })) |existing| { + // TODO Record a redefinition error + } + } } try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, while_node.ast.then_expr); diff --git a/src/main.zig b/src/main.zig index 79be07c..a22ba35 100644 --- a/src/main.zig +++ b/src/main.zig @@ -637,10 +637,10 @@ fn hoverSymbol( try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{tree.tokenSlice(payload.name)}) else try std.fmt.allocPrint(&arena.allocator, "{s}", .{tree.tokenSlice(payload.name)}), - // .array_payload => |payload| if (hover_kind == .Markdown) - // try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.identifier.firstToken())}) - // else - // try 
std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier.firstToken())}), + .array_payload => |payload| if (hover_kind == .Markdown) + try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.identifier)}) + else + try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier)}), .switch_payload => |payload| if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{tree.tokenSlice(payload.node)}) else @@ -935,9 +935,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl const first_token = param.first_doc_comment orelse param.comptime_noalias orelse param.name_token orelse - param.anytype_ellipsis3 orelse tree.firstToken(param.type_expr); - const last_token = tree.lastToken(param.type_expr); + const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr); try context.completions.append(.{ .label = tree.tokenSlice(param.name_token.?), @@ -952,12 +951,12 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .kind = .Variable, }); }, - // .array_payload => |payload| { - // try context.completions.append(.{ - // .label = tree.tokenSlice(payload.identifier.firstToken()), - // .kind = .Variable, - // }); - // }, + .array_payload => |payload| { + try context.completions.append(.{ + .label = tree.tokenSlice(payload.identifier), + .kind = .Variable, + }); + }, .switch_payload => |payload| { try context.completions.append(.{ .label = tree.tokenSlice(tree.firstToken(payload.node)), diff --git a/src/references.zig b/src/references.zig index 6c98824..ea62022 100644 --- a/src/references.zig +++ b/src/references.zig @@ -585,7 +585,7 @@ pub fn symbolReferences( return; }; }, - .pointer_payload, .switch_payload => { + .pointer_payload, .switch_payload, .array_payload => { if (include_decl) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, 
handler); } From 20cb0b730736af98e4f48b85fc33f9031a7189a6 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Mon, 8 Mar 2021 20:31:02 +0100 Subject: [PATCH 29/36] Fix off by one scope end index --- src/analysis.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 128252e..a60dcc3 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2802,7 +2802,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end + 1, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2887,7 +2887,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end, + .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end + 1, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, From 858f3cb2824b2deeed422dfd5e427a715ae82aa4 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Tue, 9 Mar 2021 12:35:56 +0100 Subject: [PATCH 30/36] Completion of unions in switches and its tags --- src/analysis.zig | 70 ++++++++++++++++++++++++++---------------------- src/main.zig | 2 +- 2 files changed, 39 insertions(+), 33 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index a60dcc3..be6698a 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1941,7 +1941,7 @@ pub const DeclWithHandle = struct { .param_decl => |p| p.name_token.?, .pointer_payload => |pp| pp.name, .array_payload => |ap| ap.identifier, - .switch_payload => |sp| sp.node + @boolToInt(token_tags[sp.node] == .asterisk), + .switch_payload => |sp| sp.node, .label_decl => |ld| ld, }; } @@ -2948,43 +2948,49 @@ fn makeScopeInternal( try makeScopeInternal(allocator, scopes, 
error_completions, enum_completions, tree, while_node.ast.else_expr); } }, - .switch_case, - .switch_case_one, + .@"switch", + .switch_comma, => { - const switch_case: ast.full.SwitchCase = switch (node) { - .switch_case => tree.switchCase(node_idx), - .switch_case_one => tree.switchCaseOne(node_idx), - else => unreachable, - }; + const cond = data[node_idx].lhs; + const extra = tree.extraData(data[node_idx].rhs, ast.Node.SubRange); + const cases = tree.extra_data[extra.start..extra.end]; - if (switch_case.payload_token) |payload| { - var scope = try scopes.addOne(allocator); - scope.* = .{ - .range = .{ - .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end, - }, - .decls = std.StringHashMap(Declaration).init(allocator), - .uses = &.{}, - .tests = &.{}, - .data = .other, + for(cases)|case| { + const switch_case: ast.full.SwitchCase = switch (tags[case]) { + .switch_case => tree.switchCase(case), + .switch_case_one => tree.switchCaseOne(case), + else => continue, }; - errdefer scope.decls.deinit(); - // if payload is *name than get next token - const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); - const name = tree.tokenSlice(name_token); + if (switch_case.payload_token) |payload| { + var scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = offsets.tokenLocation(tree, payload).start, + .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end + 1, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + .uses = &.{}, + .tests = &.{}, + .data = .other, + }; + errdefer scope.decls.deinit(); - try scope.decls.putNoClobber(name, .{ - .switch_payload = .{ - .node = payload, - .switch_expr = switch_case.ast.target_expr, - .items = switch_case.ast.values, - }, - }); + // if payload is *name than get next token + const name_token = payload + @boolToInt(token_tags[payload] == .asterisk); + const name = 
tree.tokenSlice(name_token); + + try scope.decls.putNoClobber(name, .{ + .switch_payload = .{ + .node = name_token, + .switch_expr = cond, + .items = switch_case.ast.values, + }, + }); + } + + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, switch_case.ast.target_expr); } - - try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, switch_case.ast.target_expr); }, .global_var_decl, .local_var_decl, diff --git a/src/main.zig b/src/main.zig index a22ba35..7b9c626 100644 --- a/src/main.zig +++ b/src/main.zig @@ -959,7 +959,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl }, .switch_payload => |payload| { try context.completions.append(.{ - .label = tree.tokenSlice(tree.firstToken(payload.node)), + .label = tree.tokenSlice(payload.node), .kind = .Variable, }); }, From 3ac6c82b9a31f6287e0ca1f1633e39d26eb34ef6 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Tue, 9 Mar 2021 19:53:59 +0100 Subject: [PATCH 31/36] Completion for index label in for loops and fix for function snippets --- src/analysis.zig | 45 +++++++++++++++++++++++++++++++-------------- src/main.zig | 12 ++++++++++++ src/references.zig | 2 +- 3 files changed, 44 insertions(+), 15 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index be6698a..4d8b994 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -104,7 +104,12 @@ pub fn getFunctionSignature(tree: ast.Tree, func: ast.full.FnProto) []const u8 { } /// Gets a function snippet insert text -pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: ast.full.FnProto, skip_self_param: bool) ![]const u8 { +pub fn getFunctionSnippet( + allocator: *std.mem.Allocator, + tree: ast.Tree, + func: ast.full.FnProto, + skip_self_param: bool, +) ![]const u8 { const name_index = func.name_token orelse unreachable; var buffer = std.ArrayList(u8).init(allocator); @@ -118,11 +123,15 @@ pub fn getFunctionSnippet(allocator: 
*std.mem.Allocator, tree: ast.Tree, func: a const token_tags = tree.tokens.items(.tag); var it = func.iterate(tree); - while (it.next()) |param| { - if (skip_self_param and it.param_i - 1 == 0) continue; - if (it.param_i - 1 != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); + var i: usize = 0; + while (it.next()) |param| : (i += 1) { + if (skip_self_param and i == 0) continue; + if (i != @boolToInt(skip_self_param)) + try buffer.appendSlice(", ${") + else + try buffer.appendSlice("${"); - try buf_stream.print("{d}:", .{it.param_i}); + try buf_stream.print("{d}:", .{i + 1}); if (param.comptime_noalias) |token_index| { if (token_tags[token_index] == .keyword_comptime) @@ -141,7 +150,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a try buffer.appendSlice("anytype") else try buffer.appendSlice("..."); - } else { + } else if (param.type_expr != 0) { var curr_token = tree.firstToken(param.type_expr); var end_token = tree.lastToken(param.type_expr); while (curr_token <= end_token) : (curr_token += 1) { @@ -152,7 +161,8 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: ast.Tree, func: a try buffer.appendSlice(tree.tokenSlice(curr_token)); if (is_comma or tag == .keyword_const) try buffer.append(' '); } - } + } else unreachable; + try buffer.append('}'); } try buffer.append(')'); @@ -780,13 +790,14 @@ pub fn resolveTypeOfNodeInternal( // Bind type params to the expressions passed in txhe calls. 
const param_len = std.math.min(call.ast.params.len + @boolToInt(has_self_param), fn_decl.ast.params.len); - while (it.next()) |decl_param| { - if (it.param_i - 1 == 0 and has_self_param) continue; - if (it.param_i - 1 >= param_len) break; + var i: usize = 0; + while (it.next()) |decl_param| : (i += 1) { + if (i == 0 and has_self_param) continue; + if (i >= param_len) break; if (!typeIsType(decl.handle.tree, decl_param.type_expr)) continue; const call_param_type = (try resolveTypeOfNodeInternal(store, arena, .{ - .node = call.ast.params[it.param_i - 1 - @boolToInt(has_self_param)], + .node = call.ast.params[i - @boolToInt(has_self_param)], .handle = handle, }, bound_type_params)) orelse continue; if (!call_param_type.type.is_type_val) continue; @@ -1921,6 +1932,7 @@ pub const Declaration = union(enum) { identifier: ast.TokenIndex, array_expr: ast.Node.Index, }, + array_index: ast.TokenIndex, switch_payload: struct { node: ast.TokenIndex, switch_expr: ast.Node.Index, @@ -1941,6 +1953,7 @@ pub const DeclWithHandle = struct { .param_decl => |p| p.name_token.?, .pointer_payload => |pp| pp.name, .array_payload => |ap| ap.identifier, + .array_index => |ai| ai, .switch_payload => |sp| sp.node, .label_decl => |ld| ld, }; @@ -2008,6 +2021,10 @@ pub const DeclWithHandle = struct { .Single, bound_type_params, ), + .array_index => TypeWithHandle{ + .type = .{ .data = .primitive, .is_type_val = true }, + .handle = self.handle, + }, .label_decl => return null, .switch_payload => |pay| { if (pay.items.len == 0) return null; @@ -2024,7 +2041,7 @@ pub const DeclWithHandle = struct { if (scope.decls.getEntry(tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { switch (candidate.value) { .ast_node => |node| { - if (containerField(tree, node)) |container_field| { + if (containerField(switch_expr_type.handle.tree, node)) |container_field| { if (container_field.ast.type_expr != 0) { return ((try resolveTypeOfNodeInternal( store, @@ -2919,7 +2936,7 @@ fn makeScopeInternal( if 
(token_tags[name_token + 1] == .comma) { const index_token = name_token + 2; std.debug.assert(token_tags[index_token] == .identifier); - if (try scope.decls.fetchPut(tree.tokenSlice(index_token), .{ .label_decl = index_token })) |existing| { + if (try scope.decls.fetchPut(tree.tokenSlice(index_token), .{ .array_index = index_token })) |existing| { // TODO Record a redefinition error } } @@ -2955,7 +2972,7 @@ fn makeScopeInternal( const extra = tree.extraData(data[node_idx].rhs, ast.Node.SubRange); const cases = tree.extra_data[extra.start..extra.end]; - for(cases)|case| { + for (cases) |case| { const switch_case: ast.full.SwitchCase = switch (tags[case]) { .switch_case => tree.switchCase(case), .switch_case_one => tree.switchCaseOne(case), diff --git a/src/main.zig b/src/main.zig index 7b9c626..5909429 100644 --- a/src/main.zig +++ b/src/main.zig @@ -391,6 +391,8 @@ fn nodeToCompletion( var it = func.iterate(tree); const param = it.next().?; + if (param.type_expr == 0) break :param_check false; + if (try analysis.resolveTypeOfNode(&document_store, arena, .{ .node = param.type_expr, .handle = handle, @@ -641,6 +643,10 @@ fn hoverSymbol( try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload.identifier)}) else try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload.identifier)}), + .array_index => |payload| if (hover_kind == .Markdown) + try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{handle.tree.tokenSlice(payload)}) + else + try std.fmt.allocPrint(&arena.allocator, "{s}", .{handle.tree.tokenSlice(payload)}), .switch_payload => |payload| if (hover_kind == .Markdown) try std.fmt.allocPrint(&arena.allocator, "```zig\n{s}\n```", .{tree.tokenSlice(payload.node)}) else @@ -957,6 +963,12 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .kind = .Variable, }); }, + .array_index => |payload| { + try context.completions.append(.{ + .label = 
tree.tokenSlice(payload), + .kind = .Variable, + }); + }, .switch_payload => |payload| { try context.completions.append(.{ .label = tree.tokenSlice(payload.node), diff --git a/src/references.zig b/src/references.zig index ea62022..a10c4f3 100644 --- a/src/references.zig +++ b/src/references.zig @@ -585,7 +585,7 @@ pub fn symbolReferences( return; }; }, - .pointer_payload, .switch_payload, .array_payload => { + .pointer_payload, .switch_payload, .array_payload, .array_index => { if (include_decl) { try tokenReference(curr_handle, decl_handle.nameToken(), encoding, context, handler); } From 85937d48ca083d3a422512ac232fbfd7237b58c7 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 10 Mar 2021 09:29:25 +0100 Subject: [PATCH 32/36] Semantic token + scope fixes: - Highlights pipes of loops/elses payloads correctly - New ast structure counts last token as the variable itself and not the '}' so change scope checking to '<=' instead of '<' --- src/analysis.zig | 15 +++++++-------- src/semantic_tokens.zig | 20 +++++++++++++++++--- tests/sessions.zig | 36 +++++++++++++++++++++++------------- 3 files changed, 47 insertions(+), 24 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 4d8b994..4956db8 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2022,7 +2022,7 @@ pub const DeclWithHandle = struct { bound_type_params, ), .array_index => TypeWithHandle{ - .type = .{ .data = .primitive, .is_type_val = true }, + .type = .{ .data = .primitive, .is_type_val = false }, .handle = self.handle, }, .label_decl => return null, @@ -2220,7 +2220,7 @@ fn iterateSymbolsGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!void { for (handle.document_scope.scopes) |scope| { - if (source_index >= scope.range.start and source_index < scope.range.end) { + if (source_index >= scope.range.start and source_index <= scope.range.end) { var decl_it = scope.decls.iterator(); while (decl_it.next()) |entry| { if (entry.value == .ast_node and 
handle.tree.nodes.items(.tag)[entry.value.ast_node].isContainerField()) continue; @@ -2262,7 +2262,7 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty if (handle.document_scope.scopes.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle }); for (handle.document_scope.scopes[1..]) |scope| { - if (source_index >= scope.range.start and source_index < scope.range.end) { + if (source_index >= scope.range.start and source_index <= scope.range.end) { switch (scope.data) { .container => |node| current = node, else => {}, @@ -2344,7 +2344,7 @@ fn lookupSymbolGlobalInternal( use_trail: *std.ArrayList(ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { - if (source_index >= scope.range.start and source_index < scope.range.end) { + if (source_index >= scope.range.start and source_index <= scope.range.end) { if (scope.decls.getEntry(symbol)) |candidate| { switch (candidate.value) { .ast_node => |node| { @@ -2585,7 +2585,6 @@ fn makeScopeInternal( tree: ast.Tree, node_idx: ast.Node.Index, ) error{OutOfMemory}!void { - // if (node_idx > tree.nodes.len) return; const tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); const data = tree.nodes.items(.data); @@ -2819,7 +2818,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end + 1, + .end = offsets.tokenLocation(tree, tree.lastToken(if_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2904,7 +2903,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end + 1, + .end = offsets.tokenLocation(tree, tree.lastToken(while_node.ast.then_expr)).end, }, .decls = 
std.StringHashMap(Declaration).init(allocator), .uses = &.{}, @@ -2984,7 +2983,7 @@ fn makeScopeInternal( scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end + 1, + .end = offsets.tokenLocation(tree, tree.lastToken(switch_case.ast.target_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .uses = &.{}, diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 2e3b911..70bc475 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -600,16 +600,30 @@ fn writeNodeTokens( try writeToken(builder, while_node.inline_token, .keyword); try writeToken(builder, while_node.ast.while_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cond_expr }); - try writeToken(builder, while_node.payload_token, .variable); + if (while_node.payload_token) |payload| { + try writeToken(builder, payload - 1, .operator); + try writeToken(builder, payload, .variable); + var r_pipe = payload + 1; + if (token_tags[r_pipe] == .comma) { + r_pipe += 1; + try writeToken(builder, r_pipe, .variable); + r_pipe += 1; + } + try writeToken(builder, r_pipe, .operator); + } if (while_node.ast.cont_expr != 0) try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.cont_expr }); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.then_expr }); - try writeToken(builder, while_node.error_token, .variable); - if (while_node.ast.else_expr != 0) { try writeToken(builder, while_node.else_token, .keyword); + + if (while_node.error_token) |err_token| { + try writeToken(builder, err_token - 1, .operator); + try writeToken(builder, err_token, .variable); + try writeToken(builder, err_token + 1, .operator); + } try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, while_node.ast.else_expr }); } }, 
diff --git a/tests/sessions.zig b/tests/sessions.zig index 8cb96c5..0ba3a4b 100644 --- a/tests/sessions.zig +++ b/tests/sessions.zig @@ -4,13 +4,16 @@ const headerPkg = @import("header"); const suffix = if (std.builtin.os.tag == .windows) ".exe" else ""; const allocator = std.heap.page_allocator; -const initialize_message = \\{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":t
rue},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneProgress":true}},"trace":"off","workspaceFolders":[{"uri":"file://./tests", "name":"root"}]}} +const initialize_message = + 
\\{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"d
ynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneProgress":true}},"trace":"off","workspaceFolders":[{"uri":"file://./tests", "name":"root"}]}} ; -const initialized_message = \\{"jsonrpc":"2.0","method":"initialized","params":{}} +const initialized_message = + \\{"jsonrpc":"2.0","method":"initialized","params":{}} ; -const shutdown_message = \\{"jsonrpc":"2.0", "id":"STDWN", "method":"shutdown","params":{}} +const shutdown_message = + \\{"jsonrpc":"2.0", "id":"STDWN", "method":"shutdown","params":{}} ; fn sendRequest(req: []const u8, process: *std.ChildProcess) !void { @@ -22,7 +25,7 @@ fn readResponses(process: *std.ChildProcess, expected_responses: anytype) !void var seen = std.mem.zeroes([expected_responses.len]bool); while (true) { const header = 
headerPkg.readRequestHeader(allocator, process.stdout.?.reader()) catch |err| { - switch(err) { + switch (err) { error.EndOfStream => break, else => return err, } @@ -52,7 +55,7 @@ fn readResponses(process: *std.ChildProcess, expected_responses: anytype) !void } fn startZls() !*std.ChildProcess { - var process = try std.ChildProcess.init(&[_][]const u8{ "zig-cache/bin/zls" ++ suffix }, allocator); + var process = try std.ChildProcess.init(&[_][]const u8{"zig-cache/bin/zls" ++ suffix}, allocator); process.stdin_behavior = .Pipe; process.stdout_behavior = .Pipe; process.stderr_behavior = std.ChildProcess.StdIo.Inherit; @@ -89,10 +92,12 @@ test "Open file, ask for semantic tokens" { try sendRequest(initialize_message, process); try sendRequest(initialized_message, process); - const new_file_req = \\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file://./tests/test.zig","languageId":"zig","version":420,"text":"const std = @import(\"std\");"}}} + const new_file_req = + \\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file://./tests/test.zig","languageId":"zig","version":420,"text":"const std = @import(\"std\");"}}} ; try sendRequest(new_file_req, process); - const sem_toks_req = \\{"jsonrpc":"2.0","id":2,"method":"textDocument/semanticTokens/full","params":{"textDocument":{"uri":"file://./tests/test.zig"}}} + const sem_toks_req = + \\{"jsonrpc":"2.0","id":2,"method":"textDocument/semanticTokens/full","params":{"textDocument":{"uri":"file://./tests/test.zig"}}} ; try sendRequest(sem_toks_req, process); try sendRequest(shutdown_message, process); @@ -106,10 +111,12 @@ test "Requesting a completion in an empty file" { try sendRequest(initialize_message, process); try sendRequest(initialized_message, process); - const new_file_req = \\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":""}}} + const new_file_req = + 
\\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":""}}} ; try sendRequest(new_file_req, process); - const completion_req = \\{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":0,"character":0}}} + const completion_req = + \\{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":0,"character":0}}} ; try sendRequest(completion_req, process); try sendRequest(shutdown_message, process); @@ -121,19 +128,22 @@ test "Requesting a completion with no trailing whitespace" { try sendRequest(initialize_message, process); try sendRequest(initialized_message, process); - const new_file_req = \\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const std = @import(\"std\");\nc"}}} + const new_file_req = + \\{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const std = @import(\"std\");\nc"}}} ; try sendRequest(new_file_req, process); - const completion_req = \\{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}} + const completion_req = + \\{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}} ; try sendRequest(completion_req, process); try sendRequest(shutdown_message, process); try consumeOutputAndWait(process, .{ - \\{"jsonrpc":"2.0","id":2,"result":{"isIncomplete":false,"items":[]}} + \\{"jsonrpc":"2.0","id":2,"result":{"isIncomplete":false,"items":[{"label":"std","kind":21,"textEdit":null,"filterText":null,"insertText":null,"insertTextFormat":1,"detail":"const std 
= @import(\"std\")","documentation":null}]}} }); } -const initialize_message_offs = \\{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"offsetEncoding":["utf-16", "utf-8"],"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16
,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneProgress":true}},"trace":"off","workspaceFolders":null}} +const initialize_message_offs = + \\{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"offsetEncoding":["utf-16", 
"utf-8"],"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.
inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneProgress":true}},"trace":"off","workspaceFolders":null}} ; test "Requesting utf-8 offset encoding" { From 094b31be481ead8a0b53e48cf68af5f7ab2c722f Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 10 Mar 2021 17:04:14 +0100 Subject: [PATCH 33/36] Fix namespaces in containers --- src/analysis.zig | 108 +++++++++++++++++++++++++---------------------- 1 file changed, 57 insertions(+), 51 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 4956db8..c51c913 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2117,62 +2117,55 @@ fn iterateSymbolsContainerInternal( const token_tags = tree.tokens.items(.tag); const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (isContainer(node_tags[container]) and node_tags[container] != .root) - token_tags[main_token] == .keyword_enum - else - false; + const is_enum = token_tags[main_token] == 
.keyword_enum; - if (findContainerScope(container_handle)) |container_scope| { - var decl_it = container_scope.decls.iterator(); - while (decl_it.next()) |entry| { - switch (entry.value) { - .ast_node => |node| { - if (node_tags[node].isContainerField()) { - if (!instance_access and !is_enum) continue; - if (instance_access and is_enum) continue; - } - }, - .label_decl => continue, - else => {}, - } + const container_scope = findContainerScope(container_handle) orelse return; - const decl = DeclWithHandle{ .decl = &entry.value, .handle = handle }; - if (handle != orig_handle and !decl.isPublic()) continue; - try callback(context, decl); + var decl_it = container_scope.decls.iterator(); + while (decl_it.next()) |entry| { + switch (entry.value) { + .ast_node => |node| { + if (node_tags[node].isContainerField()) { + if (!instance_access and !is_enum) continue; + if (instance_access and is_enum) continue; + } + }, + .label_decl => continue, + else => {}, } - for (container_scope.uses) |use| { - const use_token = tree.nodes.items(.main_token)[use]; - const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub; - if (handle != orig_handle and !is_pub) continue; - if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; - try use_trail.append(use); + const decl = DeclWithHandle{ .decl = &entry.value, .handle = handle }; + if (handle != orig_handle and !decl.isPublic()) continue; + try callback(context, decl); + } - const rhs = tree.nodes.items(.data)[use].rhs; - // rhs can be invalid so apply the following check to ensure - // we do not go out of bounds when resolving the type - if (rhs == 0 or rhs > tree.nodes.len) continue; - const use_expr = (try resolveTypeOfNode(store, arena, .{ - .node = tree.nodes.items(.data)[use].rhs, - .handle = orig_handle, - })) orelse continue; + for (container_scope.uses) |use| { + const use_token = tree.nodes.items(.main_token)[use]; + const is_pub = use_token > 0 and token_tags[use_token - 1] == 
.keyword_pub; + if (handle != orig_handle and !is_pub) continue; + if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + try use_trail.append(use); - const use_expr_node = switch (use_expr.type.data) { - .other => |n| n, - else => continue, - }; + const lhs = tree.nodes.items(.data)[use].lhs; + const use_expr = (try resolveTypeOfNode(store, arena, .{ + .node = lhs, + .handle = handle, + })) orelse continue; - try iterateSymbolsContainerInternal( - store, - arena, - .{ .node = use_expr_node, .handle = use_expr.handle }, - orig_handle, - callback, - context, - false, - use_trail, - ); - } + const use_expr_node = switch (use_expr.type.data) { + .other => |n| n, + else => continue, + }; + try iterateSymbolsContainerInternal( + store, + arena, + .{ .node = use_expr_node, .handle = use_expr.handle }, + orig_handle, + callback, + context, + false, + use_trail, + ); } } @@ -2232,12 +2225,25 @@ fn iterateSymbolsGlobalInternal( if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); - const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle })) orelse continue; + const use_expr = (try resolveTypeOfNode( + store, + arena, + .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, + )) orelse continue; const use_expr_node = switch (use_expr.type.data) { .other => |n| n, else => continue, }; - try iterateSymbolsContainerInternal(store, arena, .{ .node = use_expr_node, .handle = use_expr.handle }, handle, callback, context, false, use_trail); + try iterateSymbolsContainerInternal( + store, + arena, + .{ .node = use_expr_node, .handle = use_expr.handle }, + handle, + callback, + context, + false, + use_trail, + ); } } From ef1b3c95f176811bcbd2a3f727135ace552cb005 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 11 Mar 2021 13:59:09 +0100 Subject: [PATCH 34/36] Completion fixes: - Fixes namespace completions. 
Uses pointers instead of indices to ensure they are unique - Adds .ptr completion to slices and `*` to many pointer --- src/analysis.zig | 56 ++++++++++++++++++++++++------------------------ src/main.zig | 8 +++++-- 2 files changed, 34 insertions(+), 30 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index c51c913..6a3b5e7 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2107,7 +2107,7 @@ fn iterateSymbolsContainerInternal( comptime callback: anytype, context: anytype, instance_access: bool, - use_trail: *std.ArrayList(ast.Node.Index), + use_trail: *std.ArrayList(*const ast.Node.Index), ) error{OutOfMemory}!void { const container = container_handle.node; const handle = container_handle.handle; @@ -2140,13 +2140,13 @@ fn iterateSymbolsContainerInternal( } for (container_scope.uses) |use| { - const use_token = tree.nodes.items(.main_token)[use]; + const use_token = tree.nodes.items(.main_token)[use.*]; const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub; if (handle != orig_handle and !is_pub) continue; - if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + if (std.mem.indexOfScalar(*const ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); - const lhs = tree.nodes.items(.data)[use].lhs; + const lhs = tree.nodes.items(.data)[use.*].lhs; const use_expr = (try resolveTypeOfNode(store, arena, .{ .node = lhs, .handle = handle, @@ -2178,7 +2178,7 @@ pub fn iterateSymbolsContainer( context: anytype, instance_access: bool, ) error{OutOfMemory}!void { - var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); + var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator); return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail); } @@ -2210,7 +2210,7 @@ fn iterateSymbolsGlobalInternal( source_index: usize, comptime callback: anytype, context: anytype, - use_trail: 
*std.ArrayList(ast.Node.Index), + use_trail: *std.ArrayList(*const ast.Node.Index), ) error{OutOfMemory}!void { for (handle.document_scope.scopes) |scope| { if (source_index >= scope.range.start and source_index <= scope.range.end) { @@ -2222,13 +2222,13 @@ fn iterateSymbolsGlobalInternal( } for (scope.uses) |use| { - if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + if (std.mem.indexOfScalar(*const ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); const use_expr = (try resolveTypeOfNode( store, arena, - .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, + .{ .node = handle.tree.nodes.items(.data)[use.*].lhs, .handle = handle }, )) orelse continue; const use_expr_node = switch (use_expr.type.data) { .other => |n| n, @@ -2259,7 +2259,7 @@ pub fn iterateSymbolsGlobal( comptime callback: anytype, context: anytype, ) error{OutOfMemory}!void { - var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); + var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator); return try iterateSymbolsGlobalInternal(store, arena, handle, source_index, callback, context, &use_trail); } @@ -2282,19 +2282,19 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty fn resolveUse( store: *DocumentStore, arena: *std.heap.ArenaAllocator, - uses: []const ast.Node.Index, + uses: []const *const ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle, - use_trail: *std.ArrayList(ast.Node.Index), + use_trail: *std.ArrayList(*const ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (uses) |use| { - if (std.mem.indexOfScalar(ast.Node.Index, use_trail.items, use) != null) continue; + if (std.mem.indexOfScalar(*const ast.Node.Index, use_trail.items, use) != null) continue; try use_trail.append(use); const use_expr = (try resolveTypeOfNode( store, arena, - .{ .node = handle.tree.nodes.items(.data)[use].lhs, .handle = handle }, + .{ .node 
= handle.tree.nodes.items(.data)[use.*].lhs, .handle = handle }, )) orelse continue; const use_expr_node = switch (use_expr.type.data) { @@ -2347,7 +2347,7 @@ fn lookupSymbolGlobalInternal( handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize, - use_trail: *std.ArrayList(ast.Node.Index), + use_trail: *std.ArrayList(*const ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { for (handle.document_scope.scopes) |scope| { if (source_index >= scope.range.start and source_index <= scope.range.end) { @@ -2381,7 +2381,7 @@ pub fn lookupSymbolGlobal( symbol: []const u8, source_index: usize, ) error{OutOfMemory}!?DeclWithHandle { - var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); + var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator); return try lookupSymbolGlobalInternal(store, arena, handle, symbol, source_index, &use_trail); } @@ -2393,7 +2393,7 @@ fn lookupSymbolContainerInternal( /// If true, we are looking up the symbol like we are accessing through a field access /// of an instance of the type, otherwise as a field access of the type value itself. instance_access: bool, - use_trail: *std.ArrayList(ast.Node.Index), + use_trail: *std.ArrayList(*const ast.Node.Index), ) error{OutOfMemory}!?DeclWithHandle { const container = container_handle.node; const handle = container_handle.handle; @@ -2402,10 +2402,7 @@ fn lookupSymbolContainerInternal( const token_tags = tree.tokens.items(.tag); const main_token = tree.nodes.items(.main_token)[container]; - const is_enum = if (container != 0 and isContainer(node_tags[container])) - token_tags[main_token] == .keyword_enum - else - false; + const is_enum = token_tags[main_token] == .keyword_enum; if (findContainerScope(container_handle)) |container_scope| { if (container_scope.decls.getEntry(symbol)) |candidate| { @@ -2438,7 +2435,7 @@ pub fn lookupSymbolContainer( /// of an instance of the type, otherwise as a field access of the type value itself. 
instance_access: bool, ) error{OutOfMemory}!?DeclWithHandle { - var use_trail = std.ArrayList(ast.Node.Index).init(&arena.allocator); + var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator); return try lookupSymbolContainerInternal(store, arena, container_handle, symbol, instance_access, &use_trail); } @@ -2496,7 +2493,7 @@ pub const Scope = struct { range: SourceRange, decls: std.StringHashMap(Declaration), tests: []const ast.Node.Index, - uses: []const ast.Node.Index, + uses: []const *const ast.Node.Index, data: Data, }; @@ -2609,7 +2606,7 @@ fn makeScopeInternal( .data = .{ .container = node_idx }, }; const scope_idx = scopes.items.len - 1; - var uses = std.ArrayList(ast.Node.Index).init(allocator); + var uses = std.ArrayList(*const ast.Node.Index).init(allocator); var tests = std.ArrayList(ast.Node.Index).init(allocator); errdefer { @@ -2618,9 +2615,10 @@ fn makeScopeInternal( tests.deinit(); } - for (ast_decls) |decl| { + for (ast_decls) |*ptr_decl| { + const decl = ptr_decl.*; if (tags[decl] == .@"usingnamespace") { - try uses.append(decl); + try uses.append(ptr_decl); continue; } @@ -2771,7 +2769,7 @@ fn makeScopeInternal( .data = .{ .block = node_idx }, }; var scope_idx = scopes.items.len - 1; - var uses = std.ArrayList(ast.Node.Index).init(allocator); + var uses = std.ArrayList(*const ast.Node.Index).init(allocator); errdefer { scopes.items[scope_idx].decls.deinit(); @@ -2793,9 +2791,10 @@ fn makeScopeInternal( else => unreachable, }; - for (statements) |idx| { + for (statements) |*ptr_stmt| { + const idx = ptr_stmt.*; if (tags[idx] == .@"usingnamespace") { - try uses.append(idx); + try uses.append(ptr_stmt); continue; } @@ -3218,6 +3217,7 @@ fn makeScopeInternal( .address_of, .grouped_expression, .unwrap_optional, + .@"usingnamespace", => { if (data[node_idx].lhs != 0) { try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, data[node_idx].lhs); diff --git a/src/main.zig b/src/main.zig index 
5909429..879fe11 100644 --- a/src/main.zig +++ b/src/main.zig @@ -482,13 +482,17 @@ fn nodeToCompletion( const ptr_type = analysis.ptrType(tree, node).?; switch (ptr_type.size) { - .One, .C => if (config.operator_completions) { + .One, .C, .Many => if (config.operator_completions) { try list.append(.{ .label = "*", .kind = .Operator, }); }, - .Many, .Slice => return list.append(.{ .label = "len", .kind = .Field }), + .Slice => { + try list.append(.{ .label = "ptr", .kind = .Field }); + try list.append(.{ .label = "len", .kind = .Field }); + return; + }, } if (unwrapped) |actual_type| { From 7a2cd418a87e7b3f02bec9d38e1fb7d24f18d50d Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 11 Mar 2021 20:45:05 +0100 Subject: [PATCH 35/36] Scope fixes + semantic token colors: - Creates a scope for errdefer and adds the payload as a declaration (Previously lhs was a token index meaning we had an out of bounds error due too high of an integer) - Semantic highlighting corrected for error payloads in loops/ifs and errdefers --- src/analysis.zig | 25 ++++++++++++++++++++++++- src/semantic_tokens.zig | 32 +++++++++++++++++++++++++------- 2 files changed, 49 insertions(+), 8 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 6a3b5e7..f4e697e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1202,7 +1202,7 @@ fn maybeCollectImport(tree: ast.Tree, builtin_call: ast.Node.Index, arr: *std.Ar &[_]ast.Node.Index{ data.lhs, data.rhs }, else => unreachable, }; - if (params.len > 1) return; + if (params.len != 1) return; if (tags[params[0]] != .string_literal) return; @@ -3171,6 +3171,29 @@ fn makeScopeInternal( if (slice.ast.sentinel != 0) try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, slice.ast.sentinel); }, + .@"errdefer" => { + const expr = data[node_idx].rhs; + if (data[node_idx].lhs != 0) { + const payload_token = data[node_idx].lhs; + var scope = try scopes.addOne(allocator); + scope.* = .{ + .range = .{ + .start = 
offsets.tokenLocation(tree, payload_token).start, + .end = offsets.tokenLocation(tree, tree.lastToken(expr)).end, + }, + .decls = std.StringHashMap(Declaration).init(allocator), + .uses = &.{}, + .tests = &.{}, + .data = .other, + }; + errdefer scope.decls.deinit(); + + const name = tree.tokenSlice(payload_token); + try scope.decls.putNoClobber(name, .{ .ast_node = expr }); + } + + try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, expr); + }, // no scope .@"asm", diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 70bc475..58df675 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -328,8 +328,14 @@ fn writeNodeTokens( .container_field_init, => try writeContainerField(builder, arena, store, node, .field, child_frame), .@"errdefer" => { - if (datas[node].lhs != 0) - try writeToken(builder, datas[node].lhs, .variable); + try writeToken(builder, main_token, .keyword); + + if (datas[node].lhs != 0) { + const payload_tok = datas[node].lhs; + try writeToken(builder, payload_tok - 1, .operator); + try writeToken(builder, payload_tok, .variable); + try writeToken(builder, payload_tok + 1, .operator); + } try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, @@ -635,12 +641,22 @@ fn writeNodeTokens( try writeToken(builder, if_node.ast.if_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr }); - try writeToken(builder, if_node.payload_token, .variable); + if (if_node.payload_token) |payload| { + // if (?x) |x| + try writeToken(builder, payload - 1, .operator); // | + try writeToken(builder, payload, .variable); // x + try writeToken(builder, payload + 1, .operator); // | + } try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.then_expr }); - try writeToken(builder, if_node.error_token, .variable); if (if_node.ast.else_expr != 0) { try 
writeToken(builder, if_node.else_token, .keyword); + if (if_node.error_token) |err_token| { + // else |err| + try writeToken(builder, err_token - 1, .operator); // | + try writeToken(builder, err_token, .variable); // err + try writeToken(builder, err_token + 1, .operator); // | + } try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.else_expr }); } }, @@ -763,12 +779,14 @@ fn writeNodeTokens( try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].rhs }); }, - .deref, - .unwrap_optional, - => { + .deref => { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); try writeToken(builder, main_token, .operator); }, + .unwrap_optional => { + try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); + try writeToken(builder, main_token + 1, .operator); + }, .grouped_expression => { try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, datas[node].lhs }); }, From 0d85668ce471ffa7db1ab96330ead6f325ea9c45 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Fri, 12 Mar 2021 11:56:51 +0100 Subject: [PATCH 36/36] Do not offer completion for function labels --- src/analysis.zig | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index f4e697e..435814d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1285,9 +1285,13 @@ pub fn getFieldAccessType( .period => { const after_period = tokenizer.next(); switch (after_period.tag) { - .eof => return FieldAccessReturn{ - .original = current_type, - .unwrapped = try resolveDerefType(store, arena, current_type, &bound_type_params), + .eof => { + // function labels cannot be dot accessed + if (current_type.isFunc()) return null; + return FieldAccessReturn{ + .original = current_type, 
+ .unwrapped = try resolveDerefType(store, arena, current_type, &bound_type_params), + }; }, .identifier => { if (after_period.loc.end == tokenizer.buffer.len) {