From 04edc7a2d53708fb3167b9873f4666cfb00e4bfa Mon Sep 17 00:00:00 2001
From: Meghan Denny
Date: Sat, 2 Oct 2021 16:39:24 -0700
Subject: [PATCH] remove all use of `usingnamespace`

---
 src/analysis.zig        | 115 ++++++++++++++++++++--------------------
 src/main.zig            |  19 +++----
 src/references.zig      |  19 ++++---
 src/semantic_tokens.zig |  31 ++++++-----
 src/signature_help.zig  |   9 ++--
 5 files changed, 95 insertions(+), 98 deletions(-)

diff --git a/src/analysis.zig b/src/analysis.zig
index f231aa3..98242c4 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -4,8 +4,7 @@ const Ast = std.zig.Ast;
 const types = @import("./types.zig");
 const offsets = @import("./offsets.zig");
 const log = std.log.scoped(.analysis);
-const Analysis = @This();
-usingnamespace @import("./ast.zig");
+const ast = @import("./ast.zig");
 
 var using_trail: std.ArrayList([*]const u8) = undefined;
 var resolve_trail: std.ArrayList(NodeWithHandle) = undefined;
@@ -88,7 +87,7 @@ pub fn getFunctionSignature(tree: Ast, func: Ast.full.FnProto) []const u8 {
     const start = offsets.tokenLocation(tree, func.ast.fn_token);
 
     const end = if (func.ast.return_type != 0)
-        offsets.tokenLocation(tree, Analysis.lastToken(tree, func.ast.return_type))
+        offsets.tokenLocation(tree, ast.lastToken(tree, func.ast.return_type))
     else
         start;
     return tree.source[start.start..end.end];
@@ -138,7 +137,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Ast, func: Ast.fu
                 try buffer.appendSlice("...");
             } else if (param.type_expr != 0) {
                 var curr_token = tree.firstToken(param.type_expr);
-                var end_token = Analysis.lastToken(tree, param.type_expr);
+                var end_token = ast.lastToken(tree, param.type_expr);
                 while (curr_token <= end_token) : (curr_token += 1) {
                     const tag = token_tags[curr_token];
                     const is_comma = tag == .comma;
@@ -192,14 +191,14 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
 
 pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 {
     const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
-    const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, var_decl.ast.init_node)).end;
+    const end = offsets.tokenLocation(tree, ast.lastToken(tree, var_decl.ast.init_node)).end;
     return tree.source[start..end];
 }
 
 pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 {
     const start = offsets.tokenLocation(tree, field.ast.name_token).start;
     const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
-    const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, end_node)).end;
+    const end = offsets.tokenLocation(tree, ast.lastToken(tree, end_node)).end;
     return tree.source[start..end];
 }
 
@@ -258,7 +257,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
         .fn_decl,
         => blk: {
             var params: [1]Ast.Node.Index = undefined;
-            break :blk Analysis.fnProto(tree, node, &params).?.name_token;
+            break :blk ast.fnProto(tree, node, &params).?.name_token;
         },
 
         // containers
@@ -288,7 +287,7 @@ fn getDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
 
 fn isContainerDecl(decl_handle: DeclWithHandle) bool {
     return switch (decl_handle.decl.*) {
-        .ast_node => |inner_node| Analysis.isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]),
+        .ast_node => |inner_node| ast.isContainer(decl_handle.handle.tree.nodes.items(.tag)[inner_node]),
         else => false,
     };
 }
@@ -315,7 +314,7 @@ fn resolveVarDeclAliasInternal(store: *DocumentStore, arena: *std.heap.ArenaAllo
     if (node_tags[node_handle.node] == .field_access) {
         const lhs = datas[node_handle.node].lhs;
 
-        const container_node = if (Analysis.isBuiltinCall(tree, lhs)) block: {
+        const container_node = if (ast.isBuiltinCall(tree, lhs)) block: {
             if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import"))
                 return null;
 
@@ -330,7 +329,7 @@ fn resolveVarDeclAliasInternal(store: *DocumentStore, arena: *std.heap.ArenaAllo
                 .other => |n| n,
                 else => return null,
             };
-            if (!Analysis.isContainer(resolved.handle.tree, resolved_node)) return null;
+            if (!ast.isContainer(resolved.handle.tree, resolved_node)) return null;
             break :block NodeWithHandle{ .node = resolved_node, .handle = resolved.handle };
         } else return null;
 
@@ -352,7 +351,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
     const token_tags = tree.tokens.items(.tag);
     const node_tags = tree.nodes.items(.tag);
 
-    if (Analysis.varDecl(handle.tree, decl)) |var_decl| {
+    if (ast.varDecl(handle.tree, decl)) |var_decl| {
         if (var_decl.ast.init_node == 0) return null;
         const base_exp = var_decl.ast.init_node;
         if (token_tags[var_decl.ast.mut_token] != .keyword_const) return null;
@@ -407,7 +406,7 @@ fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.N
         if (node_tags[child_idx] == .@"return") {
             if (datas[child_idx].lhs != 0) {
                 const lhs = datas[child_idx].lhs;
-                if (Analysis.isCall(tree, lhs)) {
+                if (ast.isCall(tree, lhs)) {
                     const call_name = getDeclName(tree, datas[lhs].lhs);
                     if (call_name) |name| {
                         if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) {
@@ -536,7 +535,7 @@ fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, dere
     const token_tag = tree.tokens.items(.tag)[main_token];
 
     if (isPtrType(tree, deref_node)) {
-        const ptr_type = Analysis.ptrType(tree, deref_node).?;
+        const ptr_type = ast.ptrType(tree, deref_node).?;
         switch (token_tag) {
             .asterisk => {
                 return ((try resolveTypeOfNodeInternal(store, arena, .{
@@ -573,7 +572,7 @@ fn resolveBracketAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocat
             .type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false },
             .handle = lhs.handle,
         };
-    } else if (Analysis.ptrType(tree, lhs_node)) |ptr_type| {
+    } else if (ast.ptrType(tree, lhs_node)) |ptr_type| {
         if (ptr_type.size == .Slice) {
             if (rhs == .Single) {
                 return ((try resolveTypeOfNodeInternal(store, arena, .{
@@ -655,7 +654,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
         .simple_var_decl,
         .aligned_var_decl,
         => {
-            const var_decl = Analysis.varDecl(tree, node).?;
+            const var_decl = ast.varDecl(tree, node).?;
             if (var_decl.ast.type_node != 0) {
                 const decl_type = .{ .node = var_decl.ast.type_node, .handle = handle };
                 if (try resolveTypeOfNodeInternal(store, arena, decl_type, bound_type_params)) |typ|
@@ -685,7 +684,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
                 switch (child.decl.*) {
                     .ast_node => |n| {
                         if (n == node) return null;
-                        if (Analysis.varDecl(child.handle.tree, n)) |var_decl| {
+                        if (ast.varDecl(child.handle.tree, n)) |var_decl| {
                             if (var_decl.ast.init_node == node)
                                 return null;
                         }
@@ -706,7 +705,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
         .async_call_one_comma,
         => {
             var params: [1]Ast.Node.Index = undefined;
-            const call = Analysis.callFull(tree, node, &params) orelse unreachable;
+            const call = ast.callFull(tree, node, &params) orelse unreachable;
 
             const callee = .{ .node = call.ast.fn_expr, .handle = handle };
             const decl = (try resolveTypeOfNodeInternal(store,
arena, callee, bound_type_params)) orelse @@ -718,7 +717,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl else => return null, }; var buf: [1]Ast.Node.Index = undefined; - const func_maybe = Analysis.fnProto(decl.handle.tree, decl_node, &buf); + const func_maybe = ast.fnProto(decl.handle.tree, decl_node, &buf); if (func_maybe) |fn_decl| { var expected_params = fn_decl.ast.params.len; @@ -949,7 +948,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl => { var buf: [1]Ast.Node.Index = undefined; // This is a function type - if (Analysis.fnProto(tree, node, &buf).?.name_token == null) { + if (ast.fnProto(tree, node, &buf).?.name_token == null) { return TypeWithHandle.typeVal(node_handle); } @@ -1032,9 +1031,9 @@ pub const TypeWithHandle = struct { const tree = self.handle.tree; const node = self.type.data.other; const tags = tree.nodes.items(.tag); - if (Analysis.isContainer(tree, node)) { + if (ast.isContainer(tree, node)) { var buf: [2]Ast.Node.Index = undefined; - for (Analysis.declMembers(tree, node, &buf)) |child| { + for (ast.declMembers(tree, node, &buf)) |child| { if (tags[child].isContainerField()) return false; } } @@ -1057,7 +1056,7 @@ pub const TypeWithHandle = struct { var buf: [1]Ast.Node.Index = undefined; const tree = self.handle.tree; return switch (self.type.data) { - .other => |n| if (Analysis.fnProto(tree, n, &buf)) |fn_proto| blk: { + .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: { break :blk isTypeFunction(tree, fn_proto); } else false, else => false, @@ -1068,7 +1067,7 @@ pub const TypeWithHandle = struct { var buf: [1]Ast.Node.Index = undefined; const tree = self.handle.tree; return switch (self.type.data) { - .other => |n| if (Analysis.fnProto(tree, n, &buf)) |fn_proto| blk: { + .other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: { break :blk isGenericFunction(tree, fn_proto); } else false, else => false, @@ -1231,7 +1230,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator if (current_type.type.is_type_val) return null; const cur_tree = current_type.handle.tree; var buf: [1]Ast.Node.Index = undefined; - if (Analysis.fnProto(cur_tree, current_type_node, &buf)) |func| { + if (ast.fnProto(cur_tree, current_type_node, &buf)) |func| { // Check if the function has a body and if so, pass it // so the type can be resolved if it's a generic function returning // an anonymous struct @@ -1292,13 +1291,13 @@ pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool { .local_var_decl, .simple_var_decl, .aligned_var_decl, - => Analysis.varDecl(tree, node).?.visib_token != null, + => ast.varDecl(tree, node).?.visib_token != null, .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl, - => Analysis.fnProto(tree, node, &buf).?.visib_token != null, + => ast.fnProto(tree, node, &buf).?.visib_token != null, else => true, }; } @@ -1318,7 +1317,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { .fn_proto_one, .fn_proto_simple, .fn_decl, - => if (Analysis.fnProto(tree, node, &buf).?.name_token) |name| + => if (ast.fnProto(tree, node, &buf).?.name_token) |name| return tree.tokenSlice(name), .field_access => return tree.tokenSlice(data[node].rhs), .call, @@ -1341,22 +1340,22 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) bool { const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start; - const last_token = 
offsets.tokenLocation(tree, Analysis.lastToken(tree, node)).end; + const last_token = offsets.tokenLocation(tree, ast.lastToken(tree, node)).end; return source_index >= first_token and source_index <= last_token; } pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 { const node_tags = tree.nodes.items(.tag); var buf: [2]Ast.Node.Index = undefined; - if (Analysis.isContainer(tree, node)) { - const decls = Analysis.declMembers(tree, node, &buf); + if (ast.isContainer(tree, node)) { + const decls = ast.declMembers(tree, node, &buf); for (decls) |decl_idx| { if (getImportStr(tree, decl_idx, source_index)) |name| { return name; } } return null; - } else if (Analysis.varDecl(tree, node)) |var_decl| { + } else if (ast.varDecl(tree, node)) |var_decl| { return getImportStr(tree, var_decl.ast.init_node, source_index); } else if (node_tags[node] == .@"usingnamespace") { return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index); @@ -1366,7 +1365,7 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con return null; } - if (Analysis.isBuiltinCall(tree, node)) { + if (ast.isBuiltinCall(tree, node)) { const builtin_token = tree.nodes.items(.main_token)[node]; const call_name = tree.tokenSlice(builtin_token); @@ -1712,7 +1711,7 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Ast, child: Ast.Node.Ind .tagged_union_two_trailing, => { var buf: [2]Ast.Node.Index = undefined; - for (Analysis.declMembers(tree, child, &buf)) |member| + for (ast.declMembers(tree, child, &buf)) |member| try addOutlineNodes(allocator, tree, member, context); return; }, @@ -1746,7 +1745,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Ast, node: As const end_loc = start_loc.add(try offsets.tokenRelativeLocation( tree, start_loc.offset, - starts[Analysis.lastToken(tree, node)], + starts[ast.lastToken(tree, node)], context.encoding, )); context.prev_loc = end_loc; @@ -1800,13 +1799,13 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Ast, node: As .encoding = context.encoding, }; - if (Analysis.isContainer(tree, node)) { + if (ast.isContainer(tree, node)) { var buf: [2]Ast.Node.Index = undefined; - for (Analysis.declMembers(tree, node, &buf)) |child| + for (ast.declMembers(tree, node, &buf)) |child| try addOutlineNodes(allocator, tree, child, &child_context); } - if (Analysis.varDecl(tree, node)) |var_decl| { + if (ast.varDecl(tree, node)) |var_decl| { if (var_decl.ast.init_node != 0) try addOutlineNodes(allocator, tree, var_decl.ast.init_node, &child_context); } @@ -1951,7 +1950,7 @@ pub const DeclWithHandle = struct { if (scope.decls.getEntry(tree.tokenSlice(main_tokens[pay.items[0]]))) |candidate| { switch (candidate.value_ptr.*) { .ast_node => |node| { - if (Analysis.containerField(switch_expr_type.handle.tree, node)) |container_field| { + if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| { if (container_field.ast.type_expr != 0) { return ((try resolveTypeOfNodeInternal( store, @@ -1977,7 +1976,7 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope { const container = container_handle.node; const handle = container_handle.handle; - if (!Analysis.isContainer(handle.tree, container)) return null; + if (!ast.isContainer(handle.tree, container)) return null; // Find the container scope. 
return for (handle.document_scope.scopes) |*scope| { @@ -2405,7 +2404,7 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: Ast) !DocumentScop fn nodeSourceRange(tree: Ast, node: Ast.Node.Index) SourceRange { const loc_start = offsets.tokenLocation(tree, tree.firstToken(node)); - const loc_end = offsets.tokenLocation(tree, Analysis.lastToken(tree, node)); + const loc_end = offsets.tokenLocation(tree, ast.lastToken(tree, node)); return SourceRange{ .start = loc_start.start, @@ -2430,7 +2429,7 @@ fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx const node_tag = tags[node_idx]; var buf: [2]Ast.Node.Index = undefined; - const ast_decls = Analysis.declMembers(tree, node_idx, &buf); + const ast_decls = ast.declMembers(tree, node_idx, &buf); (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), @@ -2581,7 +2580,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .fn_decl, => |fn_tag| { var buf: [1]Ast.Node.Index = undefined; - const func = Analysis.fnProto(tree, node_idx, &buf).?; + const func = ast.fnProto(tree, node_idx, &buf).?; (try scopes.addOne(allocator)).* = .{ .range = nodeSourceRange(tree, node_idx), @@ -2628,7 +2627,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .block_two_semicolon, => { const first_token = tree.firstToken(node_idx); - const last_token = Analysis.lastToken(tree, node_idx); + const last_token = ast.lastToken(tree, node_idx); // if labeled block if (token_tags[first_token] == .identifier) { @@ -2681,7 +2680,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ } try makeScopeInternal(allocator, context, idx); - if (Analysis.varDecl(tree, idx)) |var_decl| { + if (ast.varDecl(tree, idx)) |var_decl| { const name = tree.tokenSlice(var_decl.ast.mut_token + 1); if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { _ = existing; @@ -2696,14 +2695,14 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .@"if", .if_simple, => { - const if_node = Analysis.ifFull(tree, node_idx); + const if_node = ast.ifFull(tree, node_idx); if (if_node.payload_token) |payload| { var scope = try scopes.addOne(allocator); scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, if_node.ast.then_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2731,7 +2730,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, err_token).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, if_node.ast.else_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, if_node.ast.else_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2754,7 +2753,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, tree.firstToken(catch_expr)).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, catch_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, catch_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2776,7 +2775,7 @@ fn makeScopeInternal(allocator: 
*std.mem.Allocator, context: ScopeContext, node_ .@"for", .for_simple, => { - const while_node = Analysis.whileAst(tree, node_idx).?; + const while_node = ast.whileAst(tree, node_idx).?; const is_for = node_tag == .@"for" or node_tag == .for_simple; if (while_node.label_token) |label| { @@ -2785,7 +2784,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, while_node.ast.while_token).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, node_idx)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, node_idx)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2800,7 +2799,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, while_node.ast.then_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.then_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2845,7 +2844,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, err_token).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, while_node.ast.else_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, while_node.ast.else_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2877,7 +2876,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, switch_case.ast.target_expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, switch_case.ast.target_expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, @@ -2911,7 +2910,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .aligned_var_decl, .simple_var_decl, => { - const var_decl = Analysis.varDecl(tree, node_idx).?; + const var_decl = ast.varDecl(tree, node_idx).?; if (var_decl.ast.type_node != 0) { try makeScopeInternal(allocator, context, var_decl.ast.type_node); } @@ -2930,7 +2929,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .async_call_one_comma, => { var buf: [1]Ast.Node.Index = undefined; - const call = Analysis.callFull(tree, node_idx, &buf).?; + const call = ast.callFull(tree, node_idx, &buf).?; try makeScopeInternal(allocator, context, call.ast.fn_expr); for (call.ast.params) |param| @@ -2989,7 +2988,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .container_field_align, .container_field_init, => { - const field = Analysis.containerField(tree, node_idx).?; + const field = ast.containerField(tree, node_idx).?; try makeScopeInternal(allocator, context, field.ast.type_expr); try makeScopeInternal(allocator, context, field.ast.align_expr); @@ -3021,7 +3020,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type: Ast.full.PtrType = Analysis.ptrType(tree, node_idx).?; + const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node_idx).?; try makeScopeInternal(allocator, context, ptr_type.ast.sentinel); try 
makeScopeInternal(allocator, context, ptr_type.ast.align_node); @@ -3050,7 +3049,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_ scope.* = .{ .range = .{ .start = offsets.tokenLocation(tree, payload_token).start, - .end = offsets.tokenLocation(tree, Analysis.lastToken(tree, expr)).end, + .end = offsets.tokenLocation(tree, ast.lastToken(tree, expr)).end, }, .decls = std.StringHashMap(Declaration).init(allocator), .data = .other, diff --git a/src/main.zig b/src/main.zig index 10faf9b..376b981 100644 --- a/src/main.zig +++ b/src/main.zig @@ -7,6 +7,7 @@ const readRequestHeader = @import("./header.zig").readRequestHeader; const requests = @import("./requests.zig"); const types = @import("./types.zig"); const analysis = @import("./analysis.zig"); +const ast = @import("./ast.zig"); const references = @import("./references.zig"); const rename = @import("./rename.zig"); const offsets = @import("./offsets.zig"); @@ -245,7 +246,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han .fn_decl, => blk: { var buf: [1]Ast.Node.Index = undefined; - const func = analysis.fnProto(tree, decl_idx, &buf).?; + const func = ast.fnProto(tree, decl_idx, &buf).?; if (func.extern_export_inline_token != null) break :blk; if (config.warn_style) { @@ -370,7 +371,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types. else null; - if (analysis.isContainer(handle.tree, node)) { + if (ast.isContainer(handle.tree, node)) { const context = DeclToCompletionContext{ .completions = list, .config = &config, @@ -399,7 +400,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types. .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const func = analysis.fnProto(tree, node, &buf).?; + const func = ast.fnProto(tree, node, &buf).?; if (func.name_token) |name_token| { const use_snippets = config.enable_snippets and client_capabilities.supports_snippets; const insert_text = if (use_snippets) blk: { @@ -425,7 +426,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types. .aligned_var_decl, .simple_var_decl, => { - const var_decl = analysis.varDecl(tree, node).?; + const var_decl = ast.varDecl(tree, node).?; const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const; if (try analysis.resolveVarDeclAlias(&document_store, arena, node_handle)) |result| { @@ -451,7 +452,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types. .container_field_align, .container_field_init, => { - const field = analysis.containerField(tree, node).?; + const field = ast.containerField(tree, node).?; try list.append(.{ .label = handle.tree.tokenSlice(field.ast.name_token), .kind = .Field, @@ -476,7 +477,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types. 
.ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = analysis.ptrType(tree, node).?; + const ptr_type = ast.ptrType(tree, node).?; switch (ptr_type.size) { .One, .C, .Many => if (config.operator_completions) { @@ -619,11 +620,11 @@ fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle var buf: [1]Ast.Node.Index = undefined; - if (analysis.varDecl(tree, node)) |var_decl| { + if (ast.varDecl(tree, node)) |var_decl| { break :def analysis.getVariableSignature(tree, var_decl); - } else if (analysis.fnProto(tree, node, &buf)) |fn_proto| { + } else if (ast.fnProto(tree, node, &buf)) |fn_proto| { break :def analysis.getFunctionSignature(tree, fn_proto); - } else if (analysis.containerField(tree, node)) |field| { + } else if (ast.containerField(tree, node)) |field| { break :def analysis.getContainerFieldSignature(tree, field); } else { break :def analysis.nodeToString(tree, node) orelse diff --git a/src/references.zig b/src/references.zig index 84dfe3c..e75cdd0 100644 --- a/src/references.zig +++ b/src/references.zig @@ -5,8 +5,7 @@ const analysis = @import("./analysis.zig"); const types = @import("./types.zig"); const offsets = @import("./offsets.zig"); const log = std.log.scoped(.references); -const Reference = @This(); -usingnamespace @import("./ast.zig"); +const ast = @import("./ast.zig"); fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void { const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return; @@ -101,7 +100,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .error_set_decl, => { var buf: [2]Ast.Node.Index = undefined; - for (Reference.declMembers(tree, node, &buf)) |member| + for (ast.declMembers(tree, node, &buf)) |member| try symbolReferencesInternal(arena, store, .{ .node = member, .handle = handle }, decl, encoding, context, handler); }, .global_var_decl, @@ -109,7 +108,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .simple_var_decl, .aligned_var_decl, => { - const var_decl = Reference.varDecl(tree, node).?; + const var_decl = ast.varDecl(tree, node).?; if (var_decl.ast.type_node != 0) { try symbolReferencesInternal(arena, store, .{ .node = var_decl.ast.type_node, .handle = handle }, decl, encoding, context, handler); } @@ -124,7 +123,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .container_field_align, .container_field_init, => { - const field = Reference.containerField(tree, node).?; + const field = ast.containerField(tree, node).?; if (field.ast.type_expr != 0) { try symbolReferencesInternal(arena, store, .{ .node = field.ast.type_expr, .handle = handle }, decl, encoding, context, handler); } @@ -146,7 +145,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const fn_proto = Reference.fnProto(tree, node, &buf).?; + const fn_proto = ast.fnProto(tree, node, &buf).?; var it = fn_proto.iterate(tree); while (it.next()) |param| { if (param.type_expr != 0) @@ -212,7 +211,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .for_simple, .@"for", => { - const loop = Reference.whileAst(tree, node).?; + const loop = ast.whileAst(tree, node).?; try symbolReferencesInternal(arena, store, .{ .node = loop.ast.cond_expr, .handle = handle }, decl, encoding, context, 
handler); if (loop.ast.cont_expr != 0) { try symbolReferencesInternal(arena, store, .{ .node = loop.ast.cont_expr, .handle = handle }, decl, encoding, context, handler); @@ -225,7 +224,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .@"if", .if_simple, => { - const if_node = Reference.ifFull(tree, node); + const if_node = ast.ifFull(tree, node); try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler); try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.then_expr, .handle = handle }, decl, encoding, context, handler); @@ -244,7 +243,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = Reference.ptrType(tree, node).?; + const ptr_type = ast.ptrType(tree, node).?; if (ptr_type.ast.align_node != 0) { try symbolReferencesInternal(arena, store, .{ .node = ptr_type.ast.align_node, .handle = handle }, decl, encoding, context, handler); @@ -547,7 +546,7 @@ pub fn symbolReferences(arena: *std.heap.ArenaAllocator, store: *DocumentStore, switch (scope.data) { .function => |proto| { var buf: [1]Ast.Node.Index = undefined; - const fn_proto = Reference.fnProto(curr_handle.tree, proto, &buf).?; + const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?; var it = fn_proto.iterate(curr_handle.tree); while (it.next()) |candidate| { if (std.meta.eql(candidate, param)) { diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 0320760..053ade1 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -4,8 +4,7 @@ const DocumentStore = @import("./DocumentStore.zig"); const analysis = @import("./analysis.zig"); const Ast = std.zig.Ast; const log = std.log.scoped(.semantic_tokens); -const SemanticToken = @This(); -usingnamespace @import("./ast.zig"); +const ast = @import("./ast.zig"); pub const TokenType = enum(u32) { type, @@ -322,7 +321,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .simple_var_decl, .aligned_var_decl, => { - const var_decl = SemanticToken.varDecl(tree, node).?; + const var_decl = ast.varDecl(tree, node).?; if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx| try writeDocComments(builder, tree, comment_idx); @@ -431,7 +430,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .fn_decl, => { var buf: [1]Ast.Node.Index = undefined; - const fn_proto: Ast.full.FnProto = SemanticToken.fnProto(tree, node, &buf).?; + const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buf).?; if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs| try writeDocComments(builder, tree, docs); @@ -523,7 +522,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .for_simple, .@"for", => { - const while_node = SemanticToken.whileAst(tree, node).?; + const while_node = ast.whileAst(tree, node).?; try writeToken(builder, while_node.label_token, .label); try writeToken(builder, while_node.inline_token, .keyword); try writeToken(builder, while_node.ast.while_token, .keyword); @@ -557,7 +556,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .@"if", .if_simple, => { - const if_node = SemanticToken.ifFull(tree, node); + const if_node = ast.ifFull(tree, node); try writeToken(builder, if_node.ast.if_token, .keyword); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, 
if_node.ast.cond_expr }); @@ -629,7 +628,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .node = struct_init.ast.type_expr, .handle = handle, })) |struct_type| switch (struct_type.type.data) { - .other => |type_node| if (SemanticToken.isContainer(struct_type.handle.tree, type_node)) + .other => |type_node| if (ast.isContainer(struct_type.handle.tree, type_node)) fieldTokenType(type_node, struct_type.handle) else null, @@ -665,8 +664,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, call.ast.fn_expr }); if (builder.previous_token) |prev| { - if (prev != SemanticToken.lastToken(tree, call.ast.fn_expr) and token_tags[SemanticToken.lastToken(tree, call.ast.fn_expr)] == .identifier) { - try writeToken(builder, SemanticToken.lastToken(tree, call.ast.fn_expr), .function); + if (prev != ast.lastToken(tree, call.ast.fn_expr) and token_tags[ast.lastToken(tree, call.ast.fn_expr)] == .identifier) { + try writeToken(builder, ast.lastToken(tree, call.ast.fn_expr), .function); } } for (call.ast.params) |param| try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, param }); @@ -684,7 +683,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sliced }); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.start }); - try writeToken(builder, SemanticToken.lastToken(tree, slice.ast.start) + 1, .operator); + try writeToken(builder, ast.lastToken(tree, slice.ast.start) + 1, .operator); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.end }); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, slice.ast.sentinel }); @@ -889,7 +888,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D switch (decl_type.decl.*) { .ast_node => |decl_node| { if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) { - const tok_type: ?TokenType = if (SemanticToken.isContainer(lhs_type.handle.tree, left_type_node)) + const tok_type: ?TokenType = if (ast.isContainer(lhs_type.handle.tree, left_type_node)) fieldTokenType(decl_node, lhs_type.handle) else if (left_type_node == 0) TokenType.field @@ -915,7 +914,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D .ptr_type_bit_range, .ptr_type_sentinel, => { - const ptr_type = SemanticToken.ptrType(tree, node).?; + const ptr_type = ast.ptrType(tree, node).?; if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and main_token == main_tokens[ptr_type.ast.child_type]) @@ -983,7 +982,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, node: Ast.Node.Index, field_token_type: ?TokenType, child_frame: anytype) !void { const tree = builder.handle.tree; - const container_field = SemanticToken.containerField(tree, node).?; + const container_field = ast.containerField(tree, node).?; const base = tree.nodes.items(.main_token)[node]; const tokens = tree.tokens.items(.tag); @@ -1003,9 +1002,9 @@ fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store if (container_field.ast.value_expr != 0) block: { const eq_tok: Ast.TokenIndex = if 
(container_field.ast.align_expr != 0) - SemanticToken.lastToken(tree, container_field.ast.align_expr) + 2 + ast.lastToken(tree, container_field.ast.align_expr) + 2 else if (container_field.ast.type_expr != 0) - SemanticToken.lastToken(tree, container_field.ast.type_expr) + 1 + ast.lastToken(tree, container_field.ast.type_expr) + 1 else break :block; @@ -1021,7 +1020,7 @@ pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentS // reverse the ast from the root declarations var buf: [2]Ast.Node.Index = undefined; - for (SemanticToken.declMembers(handle.tree, 0, &buf)) |child| { + for (ast.declMembers(handle.tree, 0, &buf)) |child| { writeNodeTokens(&builder, arena, store, child) catch |err| switch (err) { error.MovedBackwards => break, else => |e| return e, diff --git a/src/signature_help.zig b/src/signature_help.zig index eec61fd..83089c8 100644 --- a/src/signature_help.zig +++ b/src/signature_help.zig @@ -6,8 +6,7 @@ const types = @import("./types.zig"); const Ast = std.zig.Ast; const Token = std.zig.Token; const identifierFromPosition = @import("./main.zig").identifierFromPosition; -const SignatureHelp = @This(); -usingnamespace @import("./ast.zig"); +const ast = @import("./ast.zig"); fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, commas: u32, skip_self_param: bool, handle: *DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation { const ParameterInformation = types.SignatureInformation.ParameterInformation; @@ -51,7 +50,7 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.Arena if (param_label_start == 0) param_label_start = token_starts[tree.firstToken(param.type_expr)]; - const last_param_tok = SignatureHelp.lastToken(tree, param.type_expr); + const last_param_tok = ast.lastToken(tree, param.type_expr); param_label_end = token_starts[last_param_tok] + tree.tokenSlice(last_param_tok).len; } const param_label = tree.source[param_label_start..param_label_end]; @@ -274,7 +273,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl }; var buf: [1]Ast.Node.Index = undefined; - if (SignatureHelp.fnProto(type_handle.handle.tree, node, &buf)) |proto| { + if (ast.fnProto(type_handle.handle.tree, node, &buf)) |proto| { return try fnProtoToSignatureInfo( document_store, arena, @@ -326,7 +325,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl } } - if (SignatureHelp.fnProto(res_handle.tree, node, &buf)) |proto| { + if (ast.fnProto(res_handle.tree, node, &buf)) |proto| { return try fnProtoToSignatureInfo( document_store, arena,
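Illustration of the pattern being swapped (a minimal, self-contained sketch written against the Zig semantics this tree targeted at the time, roughly 0.9; `std.math` merely stands in for the project's `./ast.zig`, so nothing below is zls code):

    const std = @import("std");

    // Before: `usingnamespace` splices another container's public declarations
    // into this file's namespace, and call sites reach them through a `@This()`
    // alias, which is the role `Analysis`, `Reference`, `SemanticToken` and
    // `SignatureHelp` played above.
    const Self = @This();
    usingnamespace std.math;

    // After: an ordinary named import constant, qualified explicitly at every
    // call site, which is all the hunks in this patch substitute.
    const math = std.math;

    test "both spellings resolve to the same declaration" {
        try std.testing.expectEqual(@as(u32, 7), Self.max(@as(u32, 3), 7));
        try std.testing.expectEqual(@as(u32, 7), math.max(@as(u32, 3), 7));
    }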