simplify & refactor analysis code (#823)

Techatrix 2022-12-15 19:03:09 +01:00 committed by GitHub
parent 3526f5fb84
commit c39a4eb2ab
2 changed files with 60 additions and 110 deletions


@@ -872,11 +872,7 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
             const end = offsets.tokenToLoc(tree, last_token).end;
             break :def tree.source[start..end];
         },
-        .pointer_payload => |payload| tree.tokenSlice(payload.name),
-        .array_payload => |payload| handle.tree.tokenSlice(payload.identifier),
-        .array_index => |payload| handle.tree.tokenSlice(payload),
-        .switch_payload => |payload| tree.tokenSlice(payload.node),
-        .label_decl => |label_decl| tree.tokenSlice(label_decl.label),
+        .pointer_payload, .array_payload, .array_index, .switch_payload, .label_decl => tree.tokenSlice(decl_handle.nameToken()),
     };
 
     var bound_type_params = analysis.BoundTypeParams{};
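
Note on the hunk above: the five per-variant prongs collapse into one because the name lookup now goes through analysis.DeclWithHandle.nameToken(). That helper is not part of this hunk; the following is only a sketch of what such an accessor plausibly looks like, built from the variant and field names visible in the removed lines (the surrounding Declaration shape is an assumption, not taken from this commit).

    const std = @import("std");
    const Ast = std.zig.Ast;

    // Assumed shape of the declaration union; only the variants touched by
    // the removed prongs are modelled here.
    const Declaration = union(enum) {
        pointer_payload: struct { name: Ast.TokenIndex },
        array_payload: struct { identifier: Ast.TokenIndex },
        array_index: Ast.TokenIndex,
        switch_payload: struct { node: Ast.TokenIndex },
        label_decl: struct { label: Ast.TokenIndex },
    };

    // One place that knows which field of each variant holds the name token,
    // so call sites such as hoverSymbol no longer switch on the fields themselves.
    fn nameToken(decl: Declaration) Ast.TokenIndex {
        return switch (decl) {
            .pointer_payload => |payload| payload.name,
            .array_payload => |payload| payload.identifier,
            .array_index => |token| token,
            .switch_payload => |payload| payload.node,
            .label_decl => |label_decl| label_decl.label,
        };
    }

Routing both hoverSymbol and declToCompletion (next hunk) through one accessor keeps the two call sites from drifting apart when a variant's layout changes.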
@@ -1160,43 +1156,18 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
                 .insertTextFormat = .PlainText,
             });
         },
-        .pointer_payload => |payload| {
-            try context.completions.append(allocator, .{
-                .label = tree.tokenSlice(payload.name),
-                .kind = .Variable,
-                .insertText = tree.tokenSlice(payload.name),
-                .insertTextFormat = .PlainText,
-            });
-        },
-        .array_payload => |payload| {
-            try context.completions.append(allocator, .{
-                .label = tree.tokenSlice(payload.identifier),
-                .kind = .Variable,
-                .insertText = tree.tokenSlice(payload.identifier),
-                .insertTextFormat = .PlainText,
-            });
-        },
-        .array_index => |payload| {
-            try context.completions.append(allocator, .{
-                .label = tree.tokenSlice(payload),
-                .kind = .Variable,
-                .insertText = tree.tokenSlice(payload),
-                .insertTextFormat = .PlainText,
-            });
-        },
-        .switch_payload => |payload| {
-            try context.completions.append(allocator, .{
-                .label = tree.tokenSlice(payload.node),
-                .kind = .Variable,
-                .insertText = tree.tokenSlice(payload.node),
-                .insertTextFormat = .PlainText,
-            });
-        },
-        .label_decl => |label_decl| {
-            try context.completions.append(allocator, .{
-                .label = tree.tokenSlice(label_decl.label),
-                .kind = .Variable,
-                .insertText = tree.tokenSlice(label_decl.label),
+        .pointer_payload,
+        .array_payload,
+        .array_index,
+        .switch_payload,
+        .label_decl,
+        => {
+            const name = tree.tokenSlice(decl_handle.nameToken());
+            try context.completions.append(allocator, .{
+                .label = name,
+                .kind = .Variable,
+                .insertText = name,
                 .insertTextFormat = .PlainText,
             });
         },
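
One detail worth noting in the new prong above: the shared body reads the token through decl_handle.nameToken() instead of a |payload| capture. Zig only allows a capture on a multi-tag prong when every listed variant carries the same payload type, which these variants do not. Hoisting the slice into `name` also avoids calling tree.tokenSlice twice for .label and .insertText. A standalone illustration with invented types (not ZLS code):

    const std = @import("std");

    const Decl = union(enum) {
        pointer_payload: struct { name: u32 },
        array_index: u32,

        fn nameToken(self: Decl) u32 {
            return switch (self) {
                .pointer_payload => |p| p.name,
                .array_index => |t| t,
            };
        }
    };

    test "grouped prong without a capture" {
        const d = Decl{ .array_index = 7 };
        const label = switch (d) {
            // The two variants carry different payload types, so this grouped
            // prong cannot bind |payload|; it calls the accessor instead.
            .pointer_payload, .array_index => d.nameToken(),
        };
        try std.testing.expectEqual(@as(u32, 7), label);
    }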


@@ -275,10 +275,11 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
     const main_token = tree.nodes.items(.main_token)[node];
     return switch (tags[node]) {
         // regular declaration names. + 1 to mut token because name comes after 'const'/'var'
-        .local_var_decl => tree.localVarDecl(node).ast.mut_token + 1,
-        .global_var_decl => tree.globalVarDecl(node).ast.mut_token + 1,
-        .simple_var_decl => tree.simpleVarDecl(node).ast.mut_token + 1,
-        .aligned_var_decl => tree.alignedVarDecl(node).ast.mut_token + 1,
+        .local_var_decl,
+        .global_var_decl,
+        .simple_var_decl,
+        .aligned_var_decl,
+        => ast.varDecl(tree, node).?.ast.mut_token + 1,
         // function declaration names
         .fn_proto,
         .fn_proto_multi,
@@ -291,20 +292,9 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
         },
         // containers
-        .container_field => blk: {
-            const field = tree.containerField(node);
-            if (field.ast.tuple_like) break :blk null;
-            break :blk field.ast.main_token;
-        },
-        .container_field_init => blk: {
-            const field = tree.containerFieldInit(node);
-            if (field.ast.tuple_like) break :blk null;
-            break :blk field.ast.main_token;
-        },
-        .container_field_align => blk: {
-            const field = tree.containerFieldAlign(node);
-            if (field.ast.tuple_like) break :blk null;
-            break :blk field.ast.main_token;
-        },
+        .container_field, .container_field_init, .container_field_align => {
+            const field = ast.containerField(tree, node).?.ast;
+            return if (field.tuple_like) null else field.main_token;
+        },
         .identifier => main_token,
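
Both rewrites in this file so far lean on wrapper helpers from ZLS's ast module (ast.varDecl in the previous hunk, ast.containerField here and again in nodeToString below) instead of calling the tag-specific std.zig.Ast accessors at every call site. Those helpers are not shown in this commit; the sketch below is what ast.containerField is assumed to do, and ast.varDecl would be the analogous wrapper over localVarDecl/globalVarDecl/simpleVarDecl/alignedVarDecl.

    const std = @import("std");
    const Ast = std.zig.Ast;

    // Assumed shape of the ast.containerField helper: fold the three
    // container-field node tags into one Ast.full.ContainerField value,
    // returning null for any other tag.
    fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
        return switch (tree.nodes.items(.tag)[node]) {
            .container_field => tree.containerField(node),
            .container_field_init => tree.containerFieldInit(node),
            .container_field_align => tree.containerFieldAlign(node),
            else => null,
        };
    }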
@@ -1410,45 +1400,39 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
     const data = tree.nodes.items(.data);
     const main_token = tree.nodes.items(.main_token)[node];
     var buf: [1]Ast.Node.Index = undefined;
-    switch (tree.nodes.items(.tag)[node]) {
-        .container_field => {
-            const field = tree.containerField(node).ast;
-            return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
-        },
-        .container_field_init => {
-            const field = tree.containerFieldInit(node).ast;
-            return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
-        },
-        .container_field_align => {
-            const field = tree.containerFieldAlign(node).ast;
-            return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
-        },
-        .error_value => return tree.tokenSlice(data[node].rhs),
-        .identifier => return tree.tokenSlice(main_token),
+    return switch (tree.nodes.items(.tag)[node]) {
+        .container_field,
+        .container_field_init,
+        .container_field_align,
+        => {
+            const field = ast.containerField(tree, node).?.ast;
+            return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
+        },
+        .error_value => tree.tokenSlice(data[node].rhs),
+        .identifier => tree.tokenSlice(main_token),
         .fn_proto,
         .fn_proto_multi,
         .fn_proto_one,
         .fn_proto_simple,
         .fn_decl,
-        => if (ast.fnProto(tree, node, &buf).?.name_token) |name|
-            return tree.tokenSlice(name),
-        .field_access => return ast.tokenSlice(tree, data[node].rhs) catch return null,
+        => if (ast.fnProto(tree, node, &buf).?.name_token) |name| tree.tokenSlice(name) else null,
+        .field_access => tree.tokenSlice(data[node].rhs),
         .call,
         .call_comma,
         .async_call,
         .async_call_comma,
-        => return tree.tokenSlice(tree.callFull(node).ast.lparen - 1),
+        => tree.tokenSlice(tree.callFull(node).ast.lparen - 1),
         .call_one,
         .call_one_comma,
         .async_call_one,
         .async_call_one_comma,
-        => return tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1),
-        .test_decl => if (data[node].lhs != 0)
-            return tree.tokenSlice(data[node].lhs),
-        else => |tag| log.debug("INVALID: {}", .{tag}),
-    }
-
-    return null;
+        => tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1),
+        .test_decl => if (data[node].lhs != 0) tree.tokenSlice(data[node].lhs) else null,
+        else => |tag| {
+            log.debug("INVALID: {}", .{tag});
+            return null;
+        },
+    };
 }
 
 fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) bool {
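
The nodeToString change above is mostly mechanical: the switch statement whose prongs each contained a return becomes a single `return switch (...)` expression, so every prong must now yield a ?[]const u8, and the trailing `return null;` moves into the else prong. A small standalone example of the same rewrite (names invented):

    const std = @import("std");

    const Tag = enum { identifier, call, other };

    // Statement form: each prong returns on its own, with a fallback after the switch.
    fn nameOfStatement(tag: Tag) ?[]const u8 {
        switch (tag) {
            .identifier => return "identifier",
            .call => return "call",
            else => {},
        }
        return null;
    }

    // Expression form, as in the rewritten nodeToString: the switch itself is
    // the returned value, so every prong must produce a ?[]const u8.
    fn nameOfExpression(tag: Tag) ?[]const u8 {
        return switch (tag) {
            .identifier => "identifier",
            .call => "call",
            else => null,
        };
    }

    test "both forms agree" {
        try std.testing.expectEqualStrings("call", nameOfExpression(.call).?);
        try std.testing.expect(nameOfStatement(.other) == null);
        try std.testing.expect(nameOfExpression(.other) == null);
    }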
@@ -1669,38 +1653,33 @@ pub fn getPositionContext(allocator: std.mem.Allocator, text: []const u8, doc_in
         }
     }
 
-    return block: {
-        if (stack.popOrNull()) |state| {
-            switch (state.ctx) {
-                .empty => {},
-                .label => |filled| {
-                    // We need to check this because the state could be a filled
-                    // label if only a space follows it
-                    if (!filled or line[line.len - 1] != ' ') {
-                        break :block state.ctx;
-                    }
-                },
-                else => break :block state.ctx,
-            }
-        }
+    if (stack.popOrNull()) |state| {
+        switch (state.ctx) {
+            .empty => {},
+            .label => |filled| {
+                // We need to check this because the state could be a filled
+                // label if only a space follows it
+                if (!filled or line[line.len - 1] != ' ') {
+                    return state.ctx;
+                }
+            },
+            else => return state.ctx,
+        }
+    }
 
-        if (line.len == 0) return .empty;
+    if (line.len == 0) return .empty;
 
-        var held_line = try allocator.dupeZ(u8, offsets.locToSlice(text, line_loc));
-        defer allocator.free(held_line);
+    var held_line = try allocator.dupeZ(u8, offsets.locToSlice(text, line_loc));
+    defer allocator.free(held_line);
 
-        switch (line[0]) {
-            'a'...'z', 'A'...'Z', '_', '@' => {},
-            else => break :block .empty,
-        }
-        var tokenizer = std.zig.Tokenizer.init(held_line);
-        const tok = tokenizer.next();
-        if (tok.tag == .identifier) {
-            break :block PositionContext{ .var_access = tok.loc };
-        } else {
-            break :block .empty;
-        }
-    };
+    switch (line[0]) {
+        'a'...'z', 'A'...'Z', '_', '@' => {},
+        else => return .empty,
+    }
+    var tokenizer = std.zig.Tokenizer.init(held_line);
+    const tok = tokenizer.next();
+
+    return if (tok.tag == .identifier) PositionContext{ .var_access = tok.loc } else .empty;
 }
 
 fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
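
The getPositionContext hunk above drops the `return block: { ... }` wrapper in favour of plain early returns; when the labeled block spans the whole rest of the function, every `break :block value` is equivalent to `return value`. A tiny standalone before/after (invented function, not ZLS code):

    const std = @import("std");

    // Before: a labeled block funnels every outcome through `break :blk`.
    fn classifyWithBlock(c: u8) u8 {
        return blk: {
            if (c == ' ') break :blk 0;
            if (c >= '0' and c <= '9') break :blk 1;
            break :blk 2;
        };
    }

    // After: with the block gone, each `break :blk x` becomes `return x`.
    fn classifyWithReturns(c: u8) u8 {
        if (c == ' ') return 0;
        if (c >= '0' and c <= '9') return 1;
        return 2;
    }

    test "labeled block and early returns are equivalent here" {
        for ([_]u8{ ' ', '5', 'a' }) |c| {
            try std.testing.expectEqual(classifyWithBlock(c), classifyWithReturns(c));
        }
    }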