const std = @import("std");
const Ast = std.zig.Ast;
const DocumentStore = @import("DocumentStore.zig");
const analysis = @import("analysis.zig");
const types = @import("lsp.zig");
const offsets = @import("offsets.zig");
const log = std.log.scoped(.zls_references);
const ast = @import("ast.zig");

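/// Collects the location of a block label and of every `break` / `continue`
/// that uses that label, by scanning the tokens of the labelled block.
/// The label declaration itself is included when `include_decl` is set.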
pub fn labelReferences(
    allocator: std.mem.Allocator,
    decl: analysis.DeclWithHandle,
    encoding: offsets.Encoding,
    include_decl: bool,
) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
    std.debug.assert(decl.decl.* == .label_decl); // use `symbolReferences` instead
    const handle = decl.handle;
    const tree = handle.tree;
    const token_tags = tree.tokens.items(.tag);

    // Find the while / for / block the label belongs to, then look for breaks and continues that reference it.
    // This case can be implemented just by scanning tokens.
    const first_tok = decl.decl.label_decl.label;
    const last_tok = ast.lastToken(tree, decl.decl.label_decl.block);

    var locations = std.ArrayListUnmanaged(types.Location){};
    errdefer locations.deinit(allocator);

    if (include_decl) {
        // The first token is always going to be the label
        try locations.append(allocator, .{
            .uri = handle.uri,
            .range = offsets.tokenToRange(handle.tree, first_tok, encoding),
        });
    }

    var curr_tok = first_tok + 1;
    while (curr_tok < last_tok - 2) : (curr_tok += 1) {
        const curr_id = token_tags[curr_tok];

        if (curr_id != .keyword_break and curr_id != .keyword_continue) continue;
        if (token_tags[curr_tok + 1] != .colon) continue;
        if (token_tags[curr_tok + 2] != .identifier) continue;

        if (!std.mem.eql(u8, tree.tokenSlice(curr_tok + 2), tree.tokenSlice(first_tok))) continue;

        try locations.append(allocator, .{
            .uri = handle.uri,
            .range = offsets.tokenToRange(handle.tree, curr_tok + 2, encoding),
        });
    }

    return locations;
}

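/// Accumulates the locations of every node that resolves to the declaration
/// being searched for (`decl_handle`).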
const Builder = struct {
    allocator: std.mem.Allocator,
    locations: std.ArrayListUnmanaged(types.Location) = .{},
    /// this is the declaration we are searching for
    decl_handle: analysis.DeclWithHandle,
    store: *DocumentStore,
    encoding: offsets.Encoding,

    const Context = struct {
        builder: *Builder,
        handle: *const DocumentStore.Handle,
    };

    pub fn deinit(self: *Builder) void {
        self.locations.deinit(self.allocator);
    }

    pub fn add(self: *Builder, handle: *const DocumentStore.Handle, token_index: Ast.TokenIndex) error{OutOfMemory}!void {
        try self.locations.append(self.allocator, .{
            .uri = handle.uri,
            .range = offsets.tokenToRange(handle.tree, token_index, self.encoding),
        });
    }

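    /// Recursively walks all children of `node` in `handle` and records matching
    /// references via `referenceNode`.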
    fn collectReferences(self: *Builder, handle: *const DocumentStore.Handle, node: Ast.Node.Index) error{OutOfMemory}!void {
        const context = Context{
            .builder = self,
            .handle = handle,
        };
        try ast.iterateChildrenRecursive(handle.tree, node, &context, error{OutOfMemory}, referenceNode);
    }

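    /// Visitor callback for `collectReferences`: resolves `identifier` and
    /// `field_access` nodes and records their location when they resolve to
    /// the searched-for declaration.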
    fn referenceNode(self: *const Context, node: Ast.Node.Index) error{OutOfMemory}!void {
        const builder = self.builder;
        const handle = self.handle;

        const node_tags = handle.tree.nodes.items(.tag);
        const datas = handle.tree.nodes.items(.data);
        const main_tokens = handle.tree.nodes.items(.main_token);
        const starts = handle.tree.tokens.items(.start);

        switch (node_tags[node]) {
            .identifier => {
                const identifier_token = main_tokens[node];

                const child = (try analysis.lookupSymbolGlobal(
                    builder.store,
                    handle,
                    offsets.tokenToSlice(handle.tree, identifier_token),
                    starts[identifier_token],
                )) orelse return;

                if (std.meta.eql(builder.decl_handle, child)) {
                    try builder.add(handle, identifier_token);
                }
            },
            .field_access => {
                var bound_type_params = analysis.BoundTypeParams{};
                defer bound_type_params.deinit(builder.store.allocator);
                const left_type = try analysis.resolveFieldAccessLhsType(
                    builder.store,
                    (try analysis.resolveTypeOfNodeInternal(
                        builder.store,
                        .{ .node = datas[node].lhs, .handle = handle },
                        &bound_type_params,
                    )) orelse return,
                    &bound_type_params,
                );

                const left_type_node = switch (left_type.type.data) {
                    .other => |n| n,
                    else => return,
                };

                const child = (try analysis.lookupSymbolContainer(
                    builder.store,
                    .{ .node = left_type_node, .handle = left_type.handle },
                    offsets.tokenToSlice(handle.tree, datas[node].rhs),
                    !left_type.type.is_type_val,
                )) orelse return;

                if (std.meta.eql(builder.decl_handle, child)) {
                    try builder.add(handle, datas[node].rhs);
                }
            },
            else => {},
        }
    }
};

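/// Collects every reference to `decl_handle`. References are always collected from
/// the declaration's own document; for `ast_node` declarations with `workspace` set,
/// the other documents known to the store are searched as well.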
pub fn symbolReferences(
    allocator: std.mem.Allocator,
    store: *DocumentStore,
    decl_handle: analysis.DeclWithHandle,
    encoding: offsets.Encoding,
    /// add `decl_handle` as a reference
    include_decl: bool,
    /// exclude references from the std library
    skip_std_references: bool,
    /// search other files for references
    workspace: bool,
) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
    std.debug.assert(decl_handle.decl.* != .label_decl); // use `labelReferences` instead

    var builder = Builder{
        .allocator = allocator,
        .store = store,
        .decl_handle = decl_handle,
        .encoding = encoding,
    };
    errdefer builder.deinit();

    const curr_handle = decl_handle.handle;
    if (include_decl) try builder.add(curr_handle, decl_handle.nameToken());

    switch (decl_handle.decl.*) {
        .ast_node,
        .pointer_payload,
        .switch_payload,
        .array_payload,
        .array_index,
        => {
            try builder.collectReferences(curr_handle, 0);

            if (decl_handle.decl.* != .ast_node or !workspace) return builder.locations;

            var dependencies = std.StringArrayHashMapUnmanaged(void){};
            defer {
                for (dependencies.keys()) |uri| {
                    allocator.free(uri);
                }
                dependencies.deinit(allocator);
            }

            for (store.handles.values()) |handle| {
                if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
                    if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
                        continue;
                }

                var handle_dependencies = std.ArrayListUnmanaged([]const u8){};
                defer {
                    for (handle_dependencies.items) |uri| {
                        allocator.free(uri);
                    }
                    handle_dependencies.deinit(allocator);
                }
                try store.collectDependencies(allocator, handle.*, &handle_dependencies);

                try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
                for (handle_dependencies.items) |uri| {
                    if (dependencies.contains(uri)) continue;
                    // `handle_dependencies` frees its strings in the defer above, so store a
                    // copy that `dependencies` owns and frees in its own defer.
                    dependencies.putAssumeCapacity(try allocator.dupe(u8, uri), {});
                }
            }

            for (dependencies.keys()) |uri| {
                if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
                const handle = store.getHandle(uri) orelse continue;

                try builder.collectReferences(handle, 0);
            }
        },
        .param_payload => |payload| blk: {
            // Find the function that declares this parameter and collect references inside its body.
            for (curr_handle.document_scope.scopes.items(.data)) |scope_data| {
                if (scope_data != .function) continue;

                const proto = scope_data.function;

                var buf: [1]Ast.Node.Index = undefined;
                const fn_proto = curr_handle.tree.fullFnProto(&buf, proto).?;

                var it = fn_proto.iterate(&curr_handle.tree);
                while (ast.nextFnParam(&it)) |candidate| {
                    if (!std.meta.eql(candidate, payload.param)) continue;

                    if (curr_handle.tree.nodes.items(.tag)[proto] != .fn_decl) break :blk;
                    try builder.collectReferences(curr_handle, curr_handle.tree.nodes.items(.data)[proto].rhs);
                    break :blk;
                }
            }
            log.warn("Could not find param decl's function", .{});
        },
        .label_decl => unreachable, // handled separately by labelReferences
        .error_token => {},
    }

    return builder.locations;
}