Add anytype resolution based on call references (#1067)
parent c217502670
commit 8b5c649805

159 src/analysis.zig
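What the change does: when a function parameter is declared as anytype, ZLS now gathers the function's call sites (via the new callsiteReferences helper, imported from "features/references.zig") and resolves the parameter's possible types from the argument passed at each call, deduplicated and stored as an `either` type whose entries carry a "uri:line:column" descriptor. A minimal sketch of the situation this targets, with hypothetical names; the exact types reported depend on ZLS's own resolveTypeOfNode analysis:

    fn add(a: anytype, b: anytype) @TypeOf(a) {
        // After this commit, ZLS can resolve `a` here from the call sites below,
        // roughly a u32 and a comptime float, surfaced as an "either" of
        // call-site types for hover and completion.
        return a + b;
    }

    test "call sites that feed anytype resolution" {
        _ = add(@as(u32, 1), @as(u32, 2));
        _ = add(1.5, 2.5);
    }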
@@ -9,6 +9,7 @@ const ast = @import("ast.zig");
const tracy = @import("tracy.zig");
const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
const InternPool = ComptimeInterpreter.InternPool;
const references = @import("features/references.zig");

const Analyser = @This();

@@ -1162,6 +1163,77 @@ pub const TypeWithHandle = struct {
    type: Type,
    handle: *const DocumentStore.Handle,

    const Context = struct {
        // Note that we don't hash/equate descriptors to remove
        // duplicates

        fn hashType(hasher: *std.hash.Wyhash, ty: Type) void {
            hasher.update(&.{ @boolToInt(ty.is_type_val), @enumToInt(ty.data) });

            switch (ty.data) {
                .pointer,
                .slice,
                .error_union,
                .other,
                .primitive,
                => |idx| hasher.update(&std.mem.toBytes(idx)),
                .either => |entries| {
                    for (entries) |e| {
                        hasher.update(e.descriptor);
                        hasher.update(e.type_with_handle.handle.uri);
                        hashType(hasher, e.type_with_handle.type);
                    }
                },
                .array_index => {},
                .@"comptime" => {
                    // TODO
                },
            }
        }

        pub fn hash(self: @This(), item: TypeWithHandle) u64 {
            _ = self;
            var hasher = std.hash.Wyhash.init(0);
            hashType(&hasher, item.type);
            hasher.update(item.handle.uri);
            return hasher.final();
        }

        pub fn eql(self: @This(), a: TypeWithHandle, b: TypeWithHandle) bool {
            _ = self;

            if (!std.mem.eql(u8, a.handle.uri, b.handle.uri)) return false;
            if (a.type.is_type_val != b.type.is_type_val) return false;
            if (@enumToInt(a.type.data) != @enumToInt(b.type.data)) return false;

            switch (a.type.data) {
                inline .pointer,
                .slice,
                .error_union,
                .other,
                .primitive,
                => |a_idx, name| {
                    if (a_idx != @field(b.type.data, @tagName(name))) return false;
                },
                .either => |a_entries| {
                    const b_entries = b.type.data.either;

                    if (a_entries.len != b_entries.len) return false;
                    for (a_entries, b_entries) |ae, be| {
                        if (!std.mem.eql(u8, ae.descriptor, be.descriptor)) return false;
                        if (!eql(.{}, ae.type_with_handle, be.type_with_handle)) return false;
                    }
                },
                .array_index => {},
                .@"comptime" => {
                    // TODO
                },
            }

            return true;
        }
    };

    pub fn typeVal(node_handle: NodeWithHandle) TypeWithHandle {
        return .{
            .type = .{

@@ -1172,7 +1244,10 @@ pub const TypeWithHandle = struct {
        };
    }

    pub const Deduplicator = std.HashMapUnmanaged(TypeWithHandle, void, TypeWithHandle.Context, std.hash_map.default_max_load_percentage);

    /// Resolves possible types of a type (single for all except array_index and either)
    /// Drops duplicates
    pub fn getAllTypesWithHandles(ty: TypeWithHandle, arena: std.mem.Allocator) ![]const TypeWithHandle {
        var all_types = std.ArrayListUnmanaged(TypeWithHandle){};
        try ty.getAllTypesWithHandlesArrayList(arena, &all_types);

@@ -1866,6 +1941,7 @@ pub const Declaration = union(enum) {
    /// Function parameter
    param_payload: struct {
        param: Ast.full.FnProto.Param,
        param_idx: u16,
        func: Ast.Node.Index,
    },
    pointer_payload: struct {

@@ -1888,12 +1964,20 @@ pub const Declaration = union(enum) {
    },
    /// always an identifier
    error_token: Ast.Node.Index,

    pub fn eql(a: Declaration, b: Declaration) bool {
        return std.meta.eql(a, b);
    }
};

pub const DeclWithHandle = struct {
    decl: *Declaration,
    handle: *const DocumentStore.Handle,

    pub fn eql(a: DeclWithHandle, b: DeclWithHandle) bool {
        return a.decl.eql(b.decl.*) and std.mem.eql(u8, a.handle.uri, b.handle.uri);
    }

    pub fn nameToken(self: DeclWithHandle) Ast.TokenIndex {
        const tree = self.handle.tree;
        return switch (self.decl.*) {

@@ -1924,6 +2008,71 @@ pub const DeclWithHandle = struct {
                .{ .node = node, .handle = self.handle },
            ),
            .param_payload => |pay| {
                // handle anytype
                if (pay.param.type_expr == 0) {
                    var func_decl = Declaration{ .ast_node = pay.func };

                    var func_buf: [1]Ast.Node.Index = undefined;
                    const func = tree.fullFnProto(&func_buf, pay.func).?;

                    var func_params_len: usize = 0;

                    var it = func.iterate(&tree);
                    while (ast.nextFnParam(&it)) |_| {
                        func_params_len += 1;
                    }

                    var refs = try references.callsiteReferences(analyser.arena.allocator(), analyser, .{
                        .decl = &func_decl,
                        .handle = self.handle,
                    }, false, false, false);

                    // TODO: Set `workspace` to true; current problems
                    // - we gather dependencies, not dependents
                    // - stack overflow due to cyclic anytype resolution(?)

                    var possible = std.ArrayListUnmanaged(Type.EitherEntry){};
                    var deduplicator = TypeWithHandle.Deduplicator{};
                    defer deduplicator.deinit(analyser.gpa);

                    for (refs.items) |ref| {
                        var handle = analyser.store.getOrLoadHandle(ref.uri).?;

                        var call_buf: [1]Ast.Node.Index = undefined;
                        var call = handle.tree.fullCall(&call_buf, ref.call_node).?;

                        const real_param_idx = if (func_params_len != 0 and pay.param_idx != 0 and call.ast.params.len == func_params_len - 1)
                            pay.param_idx - 1
                        else
                            pay.param_idx;

                        if (real_param_idx >= call.ast.params.len) continue;

                        if (try analyser.resolveTypeOfNode(.{
                            // TODO?: this is a """heuristic based approach"""
                            // perhaps it would be better to use proper self detection
                            // maybe it'd be a perf issue and this is fine?
                            // you figure it out future contributor <3
                            .node = call.ast.params[real_param_idx],
                            .handle = handle,
                        })) |ty| {
                            var gop = try deduplicator.getOrPut(analyser.gpa, ty);
                            if (gop.found_existing) continue;

                            var loc = offsets.tokenToPosition(handle.tree, main_tokens[call.ast.params[real_param_idx]], .@"utf-8");
                            try possible.append(analyser.arena.allocator(), .{ // TODO: Dedup
                                .type_with_handle = ty,
                                .descriptor = try std.fmt.allocPrint(analyser.arena.allocator(), "{s}:{d}:{d}", .{ handle.uri, loc.line + 1, loc.character + 1 }),
                            });
                        }
                    }

                    return TypeWithHandle{
                        .type = .{ .data = .{ .either = try possible.toOwnedSlice(analyser.arena.allocator()) }, .is_type_val = false },
                        .handle = self.handle,
                    };
                }

                const param_decl = pay.param;
                if (isMetaType(self.handle.tree, param_decl.type_expr)) {
                    var bound_param_it = analyser.bound_type_params.iterator();

@@ -2662,6 +2811,11 @@ fn makeScopeInternal(context: ScopeContext, node_idx: Ast.Node.Index) error{OutO
            );
            defer context.popScope();

            // NOTE: We count the param index ourselves
            // as param_i stops counting; TODO: change this

            var param_index: usize = 0;

            var it = func.iterate(&tree);
            while (ast.nextFnParam(&it)) |param| {
                // Add parameter decls

@@ -2669,12 +2823,13 @@ fn makeScopeInternal(context: ScopeContext, node_idx: Ast.Node.Index) error{OutO
                    try scopes.items(.decls)[scope_index].put(
                        allocator,
                        tree.tokenSlice(name_token),
                        .{ .param_payload = .{ .param = param, .func = node_idx } },
                        .{ .param_payload = .{ .param = param, .param_idx = @intCast(u16, param_index), .func = node_idx } },
                    );
                }
                // Visit parameter types to pick up any error sets and enum
                // completions
                try makeScopeInternal(context, param.type_expr);
                param_index += 1;
            }

            if (fn_tag == .fn_decl) blk: {

@@ -2895,6 +3050,8 @@ fn makeScopeInternal(context: ScopeContext, node_idx: Ast.Node.Index) error{OutO
                    .items = switch_case.ast.values,
                },
            });

            try makeScopeInternal(context, switch_case.ast.target_expr);
        } else {
            try makeScopeInternal(context, switch_case.ast.target_expr);
        }

src/features/references.zig

@@ -112,7 +112,7 @@ const Builder = struct {
                    starts[identifier_token],
                )) orelse return;

                if (std.meta.eql(builder.decl_handle, child)) {
                if (builder.decl_handle.eql(child)) {
                    try builder.add(handle, identifier_token);
                }
            },

@@ -132,7 +132,7 @@ const Builder = struct {
                    !left_type.type.is_type_val,
                )) orelse return;

                if (std.meta.eql(builder.decl_handle, child)) {
                if (builder.decl_handle.eql(child)) {
                    try builder.add(handle, datas[node].rhs);
                }
            },

@@ -141,6 +141,53 @@ const Builder = struct {
    }
};

fn gatherReferences(
    allocator: std.mem.Allocator,
    analyser: *Analyser,
    curr_handle: *const DocumentStore.Handle,
    skip_std_references: bool,
    include_decl: bool,
    builder: anytype,
    handle_behavior: enum { get, get_or_load },
) !void {
    var dependencies = std.StringArrayHashMapUnmanaged(void){};
    defer {
        for (dependencies.keys()) |uri| {
            allocator.free(uri);
        }
        dependencies.deinit(allocator);
    }

    for (analyser.store.handles.values()) |handle| {
        if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
            if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
                continue;
        }

        var handle_dependencies = std.ArrayListUnmanaged([]const u8){};
        defer handle_dependencies.deinit(allocator);
        try analyser.store.collectDependencies(allocator, handle.*, &handle_dependencies);

        try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
        for (handle_dependencies.items) |uri| {
            var gop = dependencies.getOrPutAssumeCapacity(uri);
            if (gop.found_existing) {
                allocator.free(uri);
            }
        }
    }

    for (dependencies.keys()) |uri| {
        if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
        const handle = switch (handle_behavior) {
            .get => analyser.store.getHandle(uri),
            .get_or_load => analyser.store.getOrLoadHandle(uri),
        } orelse continue;

        try builder.collectReferences(handle, 0);
    }
}

pub fn symbolReferences(
    allocator: std.mem.Allocator,
    analyser: *Analyser,

@@ -180,41 +227,7 @@ pub fn symbolReferences(

            if (decl_handle.decl.* != .ast_node or !workspace) return builder.locations;

            var dependencies = std.StringArrayHashMapUnmanaged(void){};
            defer {
                for (dependencies.keys()) |uri| {
                    allocator.free(uri);
                }
                dependencies.deinit(allocator);
            }

            for (analyser.store.handles.values()) |handle| {
                if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
                    if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
                        continue;
                }

                var handle_dependencies = std.ArrayListUnmanaged([]const u8){};
                defer {
                    for (handle_dependencies.items) |uri| {
                        allocator.free(uri);
                    }
                    handle_dependencies.deinit(allocator);
                }
                try analyser.store.collectDependencies(allocator, handle.*, &handle_dependencies);

                try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
                for (handle_dependencies.items) |uri| {
                    dependencies.putAssumeCapacity(uri, {});
                }
            }

            for (dependencies.keys()) |uri| {
                if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
                const handle = analyser.store.getHandle(uri) orelse continue;

                try builder.collectReferences(handle, 0);
            }
            try gatherReferences(allocator, analyser, curr_handle, skip_std_references, include_decl, &builder, .get);
        },
        .param_payload => |payload| blk: {
            // Rename the param tok.

@@ -243,3 +256,137 @@ pub fn symbolReferences(

    return builder.locations;
}

pub const Callsite = struct {
    uri: []const u8,
    call_node: Ast.Node.Index,
};

const CallBuilder = struct {
    allocator: std.mem.Allocator,
    callsites: std.ArrayListUnmanaged(Callsite) = .{},
    /// this is the declaration we are searching for
    decl_handle: Analyser.DeclWithHandle,
    analyser: *Analyser,

    const Context = struct {
        builder: *CallBuilder,
        handle: *const DocumentStore.Handle,
    };

    pub fn deinit(self: *CallBuilder) void {
        self.callsites.deinit(self.allocator);
    }

    pub fn add(self: *CallBuilder, handle: *const DocumentStore.Handle, call_node: Ast.Node.Index) error{OutOfMemory}!void {
        try self.callsites.append(self.allocator, .{
            .uri = handle.uri,
            .call_node = call_node,
        });
    }

    fn collectReferences(self: *CallBuilder, handle: *const DocumentStore.Handle, node: Ast.Node.Index) error{OutOfMemory}!void {
        const context = Context{
            .builder = self,
            .handle = handle,
        };
        try ast.iterateChildrenRecursive(handle.tree, node, &context, error{OutOfMemory}, referenceNode);
    }

    fn referenceNode(self: *const Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!void {
        const builder = self.builder;
        const handle = self.handle;

        const node_tags = tree.nodes.items(.tag);
        const datas = tree.nodes.items(.data);
        // const token_tags = tree.tokens.items(.tag);
        const starts = tree.tokens.items(.start);

        switch (node_tags[node]) {
            .call,
            .call_comma,
            .async_call,
            .async_call_comma,
            .call_one,
            .call_one_comma,
            .async_call_one,
            .async_call_one_comma,
            => {
                var buf: [1]Ast.Node.Index = undefined;
                var call = tree.fullCall(&buf, node).?;

                const called_node = call.ast.fn_expr;

                switch (node_tags[called_node]) {
                    .identifier => {
                        const identifier_token = Analyser.getDeclNameToken(tree, called_node).?;

                        const child = (try builder.analyser.lookupSymbolGlobal(
                            handle,
                            offsets.tokenToSlice(tree, identifier_token),
                            starts[identifier_token],
                        )) orelse return;

                        if (builder.decl_handle.eql(child)) {
                            try builder.add(handle, node);
                        }
                    },
                    .field_access => {
                        const left_type = try builder.analyser.resolveFieldAccessLhsType(
                            (try builder.analyser.resolveTypeOfNode(.{ .node = datas[called_node].lhs, .handle = handle })) orelse return,
                        );

                        const left_type_node = switch (left_type.type.data) {
                            .other => |n| n,
                            else => return,
                        };

                        const child = (try builder.analyser.lookupSymbolContainer(
                            .{ .node = left_type_node, .handle = left_type.handle },
                            offsets.tokenToSlice(tree, datas[called_node].rhs),
                            !left_type.type.is_type_val,
                        )) orelse return;

                        if (builder.decl_handle.eql(child)) {
                            try builder.add(handle, node);
                        }
                    },
                    else => {},
                }
            },
            else => {},
        }
    }
};

pub fn callsiteReferences(
    allocator: std.mem.Allocator,
    analyser: *Analyser,
    decl_handle: Analyser.DeclWithHandle,
    /// add `decl_handle` as a reference
    include_decl: bool,
    /// exclude references from the std library
    skip_std_references: bool,
    /// search other files for references
    workspace: bool,
) error{OutOfMemory}!std.ArrayListUnmanaged(Callsite) {
    std.debug.assert(decl_handle.decl.* == .ast_node);

    var builder = CallBuilder{
        .allocator = allocator,
        .analyser = analyser,
        .decl_handle = decl_handle,
    };
    errdefer builder.deinit();

    const curr_handle = decl_handle.handle;
    if (include_decl) try builder.add(curr_handle, decl_handle.nameToken());

    try builder.collectReferences(curr_handle, 0);

    if (!workspace) return builder.callsites;

    try gatherReferences(allocator, analyser, curr_handle, skip_std_references, include_decl, &builder, .get_or_load);

    return builder.callsites;
}