From c39a4eb2ab2bc99df61a849ef7a0bb5719fee08e Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Thu, 15 Dec 2022 19:03:09 +0100 Subject: [PATCH] simplify & refactor analysis code (#823) --- src/Server.zig | 51 +++++--------------- src/analysis.zig | 119 +++++++++++++++++++---------------------------- 2 files changed, 60 insertions(+), 110 deletions(-) diff --git a/src/Server.zig b/src/Server.zig index 61d2a84..7c38d92 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -872,11 +872,7 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO const end = offsets.tokenToLoc(tree, last_token).end; break :def tree.source[start..end]; }, - .pointer_payload => |payload| tree.tokenSlice(payload.name), - .array_payload => |payload| handle.tree.tokenSlice(payload.identifier), - .array_index => |payload| handle.tree.tokenSlice(payload), - .switch_payload => |payload| tree.tokenSlice(payload.node), - .label_decl => |label_decl| tree.tokenSlice(label_decl.label), + .pointer_payload, .array_payload, .array_index, .switch_payload, .label_decl => tree.tokenSlice(decl_handle.nameToken()), }; var bound_type_params = analysis.BoundTypeParams{}; @@ -1160,43 +1156,18 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl .insertTextFormat = .PlainText, }); }, - .pointer_payload => |payload| { + .pointer_payload, + .array_payload, + .array_index, + .switch_payload, + .label_decl, + => { + const name = tree.tokenSlice(decl_handle.nameToken()); + try context.completions.append(allocator, .{ - .label = tree.tokenSlice(payload.name), + .label = name, .kind = .Variable, - .insertText = tree.tokenSlice(payload.name), - .insertTextFormat = .PlainText, - }); - }, - .array_payload => |payload| { - try context.completions.append(allocator, .{ - .label = tree.tokenSlice(payload.identifier), - .kind = .Variable, - .insertText = tree.tokenSlice(payload.identifier), - .insertTextFormat = .PlainText, - }); - }, - .array_index => |payload| { - try context.completions.append(allocator, .{ - .label = tree.tokenSlice(payload), - .kind = .Variable, - .insertText = tree.tokenSlice(payload), - .insertTextFormat = .PlainText, - }); - }, - .switch_payload => |payload| { - try context.completions.append(allocator, .{ - .label = tree.tokenSlice(payload.node), - .kind = .Variable, - .insertText = tree.tokenSlice(payload.node), - .insertTextFormat = .PlainText, - }); - }, - .label_decl => |label_decl| { - try context.completions.append(allocator, .{ - .label = tree.tokenSlice(label_decl.label), - .kind = .Variable, - .insertText = tree.tokenSlice(label_decl.label), + .insertText = name, .insertTextFormat = .PlainText, }); }, diff --git a/src/analysis.zig b/src/analysis.zig index 65d287a..a559d97 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -275,10 +275,11 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { const main_token = tree.nodes.items(.main_token)[node]; return switch (tags[node]) { // regular declaration names. 
+ 1 to mut token because name comes after 'const'/'var' - .local_var_decl => tree.localVarDecl(node).ast.mut_token + 1, - .global_var_decl => tree.globalVarDecl(node).ast.mut_token + 1, - .simple_var_decl => tree.simpleVarDecl(node).ast.mut_token + 1, - .aligned_var_decl => tree.alignedVarDecl(node).ast.mut_token + 1, + .local_var_decl, + .global_var_decl, + .simple_var_decl, + .aligned_var_decl, + => ast.varDecl(tree, node).?.ast.mut_token + 1, // function declaration names .fn_proto, .fn_proto_multi, @@ -291,20 +292,9 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { }, // containers - .container_field => blk: { - const field = tree.containerField(node); - if (field.ast.tuple_like) break :blk null; - break :blk field.ast.main_token; - }, - .container_field_init => blk: { - const field = tree.containerFieldInit(node); - if (field.ast.tuple_like) break :blk null; - break :blk field.ast.main_token; - }, - .container_field_align => blk: { - const field = tree.containerFieldAlign(node); - if (field.ast.tuple_like) break :blk null; - break :blk field.ast.main_token; + .container_field, .container_field_init, .container_field_align => { + const field = ast.containerField(tree, node).?.ast; + return if (field.tuple_like) null else field.main_token; }, .identifier => main_token, @@ -1410,45 +1400,39 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { const data = tree.nodes.items(.data); const main_token = tree.nodes.items(.main_token)[node]; var buf: [1]Ast.Node.Index = undefined; - switch (tree.nodes.items(.tag)[node]) { - .container_field => { - const field = tree.containerField(node).ast; + return switch (tree.nodes.items(.tag)[node]) { + .container_field, + .container_field_init, + .container_field_align, + => { + const field = ast.containerField(tree, node).?.ast; return if (field.tuple_like) null else tree.tokenSlice(field.main_token); }, - .container_field_init => { - const field = tree.containerFieldInit(node).ast; - return if (field.tuple_like) null else tree.tokenSlice(field.main_token); - }, - .container_field_align => { - const field = tree.containerFieldAlign(node).ast; - return if (field.tuple_like) null else tree.tokenSlice(field.main_token); - }, - .error_value => return tree.tokenSlice(data[node].rhs), - .identifier => return tree.tokenSlice(main_token), + .error_value => tree.tokenSlice(data[node].rhs), + .identifier => tree.tokenSlice(main_token), .fn_proto, .fn_proto_multi, .fn_proto_one, .fn_proto_simple, .fn_decl, - => if (ast.fnProto(tree, node, &buf).?.name_token) |name| - return tree.tokenSlice(name), - .field_access => return ast.tokenSlice(tree, data[node].rhs) catch return null, + => if (ast.fnProto(tree, node, &buf).?.name_token) |name| tree.tokenSlice(name) else null, + .field_access => tree.tokenSlice(data[node].rhs), .call, .call_comma, .async_call, .async_call_comma, - => return tree.tokenSlice(tree.callFull(node).ast.lparen - 1), + => tree.tokenSlice(tree.callFull(node).ast.lparen - 1), .call_one, .call_one_comma, .async_call_one, .async_call_one_comma, - => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), - .test_decl => if (data[node].lhs != 0) - return tree.tokenSlice(data[node].lhs), - else => |tag| log.debug("INVALID: {}", .{tag}), - } - - return null; + => tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), + .test_decl => if (data[node].lhs != 0) tree.tokenSlice(data[node].lhs) else null, + else => |tag| { + log.debug("INVALID: {}", .{tag}); + return null; + }, + }; } fn 
nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) bool { @@ -1669,38 +1653,33 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in } } - return block: { - if (stack.popOrNull()) |state| { - switch (state.ctx) { - .empty => {}, - .label => |filled| { - // We need to check this because the state could be a filled - // label if only a space follows it - if (!filled or line[line.len - 1] != ' ') { - break :block state.ctx; - } - }, - else => break :block state.ctx, - } + if (stack.popOrNull()) |state| { + switch (state.ctx) { + .empty => {}, + .label => |filled| { + // We need to check this because the state could be a filled + // label if only a space follows it + if (!filled or line[line.len - 1] != ' ') { + return state.ctx; + } + }, + else => return state.ctx, } + } - if (line.len == 0) return .empty; + if (line.len == 0) return .empty; - var held_line = try allocator.dupeZ(u8, offsets.locToSlice(text, line_loc)); - defer allocator.free(held_line); + var held_line = try allocator.dupeZ(u8, offsets.locToSlice(text, line_loc)); + defer allocator.free(held_line); - switch (line[0]) { - 'a'...'z', 'A'...'Z', '_', '@' => {}, - else => break :block .empty, - } - var tokenizer = std.zig.Tokenizer.init(held_line); - const tok = tokenizer.next(); - if (tok.tag == .identifier) { - break :block PositionContext{ .var_access = tok.loc }; - } else { - break :block .empty; - } - }; + switch (line[0]) { + 'a'...'z', 'A'...'Z', '_', '@' => {}, + else => return .empty, + } + var tokenizer = std.zig.Tokenizer.init(held_line); + const tok = tokenizer.next(); + + return if (tok.tag == .identifier) PositionContext{ .var_access = tok.loc } else .empty; } fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
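
Note on the main pattern in this patch: several switch prongs in Server.zig and getDeclNameToken differed only in how they dug a name token out of each payload, and the refactor routes them all through a single accessor (DeclWithHandle.nameToken, ast.varDecl, ast.containerField) so the prongs can be merged. The following is a minimal, self-contained sketch of that idea; the Decl union, its payload shapes, and the nameToken helper are illustrative stand-ins, not the real zls types.

    const std = @import("std");

    // Illustrative stand-ins only: the real code works with DeclWithHandle
    // and Ast.TokenIndex; the payload shapes here are simplified.
    const Decl = union(enum) {
        pointer_payload: struct { name: u32 },
        array_payload: struct { identifier: u32 },
        array_index: u32,
        label_decl: struct { label: u32 },

        // One accessor resolves the name token for every variant, which is
        // what lets callers collapse their per-variant prongs into one.
        fn nameToken(self: Decl) u32 {
            return switch (self) {
                .pointer_payload => |p| p.name,
                .array_payload => |p| p.identifier,
                .array_index => |token| token,
                .label_decl => |l| l.label,
            };
        }
    };

    test "merged prongs share one body" {
        const d: Decl = .{ .array_index = 42 };
        // All variants go through the shared helper, so the prongs can be
        // listed together with a single body and no per-variant capture.
        const token = switch (d) {
            .pointer_payload, .array_payload, .array_index, .label_decl => d.nameToken(),
        };
        try std.testing.expectEqual(@as(u32, 42), token);
    }

Because every variant resolves its token through the same helper, call sites such as hoverSymbol and declToCompletion no longer need one capture and one body per variant.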
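
The nodeToString and getPositionContext hunks apply a second pattern: a statement switch (or labeled block) with a trailing `return null` becomes a single returned switch expression, with the fallback prong logging and returning null directly. A hedged sketch of that conversion, using hypothetical names (Tag, nameOfStmt, nameOfExpr) rather than zls code:

    const std = @import("std");
    const log = std.log.scoped(.example);

    // Hypothetical tag set; the real code switches on Ast.Node.Tag.
    const Tag = enum { identifier, string_literal, keyword_fn, builtin };

    // Before: a statement switch where every prong returns on its own and a
    // trailing `return null` handles the fallthrough.
    fn nameOfStmt(tag: Tag) ?[]const u8 {
        switch (tag) {
            .identifier => return "identifier",
            .string_literal => return "string",
            .keyword_fn => return "fn",
            else => |t| log.debug("INVALID: {}", .{t}),
        }
        return null;
    }

    // After: the switch itself is the returned expression and the fallback
    // prong logs, then returns null, mirroring the patched nodeToString.
    fn nameOfExpr(tag: Tag) ?[]const u8 {
        return switch (tag) {
            .identifier => "identifier",
            .string_literal => "string",
            .keyword_fn => "fn",
            else => |t| {
                log.debug("INVALID: {}", .{t});
                return null;
            },
        };
    }

    test "both forms agree" {
        try std.testing.expectEqualStrings("fn", nameOfStmt(.keyword_fn).?);
        try std.testing.expectEqualStrings("fn", nameOfExpr(.keyword_fn).?);
        try std.testing.expect(nameOfStmt(.builtin) == null);
        try std.testing.expect(nameOfExpr(.builtin) == null);
    }

The behavior is unchanged; the expression form just removes the scattered `return` statements and the labeled-block plumbing, which is the whole point of the getPositionContext rewrite as well.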