Merge branch 'master' into intern-pool

This commit is contained in:
Techarix 2023-02-08 20:10:17 +01:00
commit 6278880f42
16 changed files with 427 additions and 547 deletions

View File

@ -61,14 +61,22 @@ jobs:
repository: "ziglang/zig"
fetch-depth: 0
- name: Checkout zls
- name: Checkout zls (non-PR)
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v3
with:
path: zls
fetch-depth: 0
submodules: true
ref: ${{ github.event.pull_request.head.sha || github.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
- name: Checkout zls (PR)
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v3
with:
path: zls
fetch-depth: 0
submodules: true
ref: "refs/pull/${{ github.event.number }}/merge"
- name: Build zls
run: |
@ -88,7 +96,7 @@ jobs:
run: |
cd $GITHUB_WORKSPACE/sus
pwd
zig build -Drelease-fast
zig build -Doptimize=ReleaseFast
- name: Run sus
continue-on-error: true

View File

@ -54,7 +54,7 @@ jobs:
echo "Building target ${target}..."
if [ "${GITHUB_REF##*/}" == "master" ]; then
echo "Building safe"
zig build -Dtarget=${target} -Drelease-safe --prefix artifacts/${target}/
zig build -Dtarget=${target} -Doptimize=ReleaseSafe --prefix artifacts/${target}/
else
echo "Building debug as action is not running on master"
zig build -Dtarget=${target} --prefix artifacts/${target}/

View File

@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1524+efa25e7d5") catch return; // build API changes
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1570+693b12f8e") catch return; // addPackage -> addModule
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
@ -112,17 +112,13 @@ pub fn build(b: *std.build.Builder) !void {
const KNOWN_FOLDERS_DEFAULT_PATH = "src/known-folders/known-folders.zig";
const known_folders_path = b.option([]const u8, "known-folders", "Path to known-folders package (default: " ++ KNOWN_FOLDERS_DEFAULT_PATH ++ ")") orelse KNOWN_FOLDERS_DEFAULT_PATH;
exe.addPackage(.{
.name = "known-folders",
.source = .{ .path = known_folders_path },
});
const known_folders_module = b.createModule(.{ .source_file = .{ .path = known_folders_path } });
exe.addModule("known-folders", known_folders_module);
const TRES_DEFAULT_PATH = "src/tres/tres.zig";
const tres_path = b.option([]const u8, "tres", "Path to tres package (default: " ++ TRES_DEFAULT_PATH ++ ")") orelse TRES_DEFAULT_PATH;
exe.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
});
const tres_module = b.createModule(.{ .source_file = .{ .path = tres_path } });
exe.addModule("tres", tres_module);
const check_submodules_step = CheckSubmodulesStep.init(b, &.{
known_folders_path,
@ -156,10 +152,7 @@ pub fn build(b: *std.build.Builder) !void {
.name = "zls_gen",
.root_source_file = .{ .path = "src/config_gen/config_gen.zig" },
});
gen_exe.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
});
gen_exe.addModule("tres", tres_module);
const gen_cmd = gen_exe.run();
gen_cmd.addArgs(&.{
@ -203,15 +196,19 @@ pub fn build(b: *std.build.Builder) !void {
});
}
tests.addPackage(.{
.name = "zls",
.source = .{ .path = "src/zls.zig" },
.dependencies = exe.packages.items,
});
tests.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
const build_options_module = exe_options.createModule();
const zls_module = b.createModule(.{
.source_file = .{ .path = "src/zls.zig" },
.dependencies = &.{
.{ .name = "known-folders", .module = known_folders_module },
.{ .name = "tres", .module = tres_module },
.{ .name = "build_options", .module = build_options_module },
},
});
tests.addModule("zls", zls_module);
tests.addModule("tres", tres_module);
test_step.dependOn(&tests.step);
var src_tests = b.addTest(.{

View File

@ -1,5 +1,21 @@
{
"nodes": {
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"locked": {
"lastModified": 1667395993,
@ -53,11 +69,11 @@
"known-folders": {
"flake": false,
"locked": {
"lastModified": 1659425144,
"narHash": "sha256-xMgdOKwWqBmw7avcioqQQrrPU1MjzlBMtNjqPfOEtDQ=",
"lastModified": 1672993516,
"narHash": "sha256-PG0dyaAZyLsEqo38HQ59r40eXnlqrmsBoXjDRmteQuw=",
"owner": "ziglibs",
"repo": "known-folders",
"rev": "24845b0103e611c108d6bc334231c464e699742c",
"rev": "6b37490ac7285133bf09441850b8102c9728a776",
"type": "github"
},
"original": {
@ -68,11 +84,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1672057183,
"narHash": "sha256-GN7/10DNNvs1FPj9tlZA2qgNdFuYKKuS3qlHTqAxasQ=",
"lastModified": 1675523619,
"narHash": "sha256-jHvkAwkbAj1s0O5riHMghSDHh9iz8AwLTbXQuVQKQcg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b139e44d78c36c69bcbb825b20dbfa51e7738347",
"rev": "5a211d5e8d18b20b5a2b22157266cc00f8c4f3b9",
"type": "github"
},
"original": {
@ -95,11 +111,11 @@
"tres": {
"flake": false,
"locked": {
"lastModified": 1672008284,
"narHash": "sha256-AtM9SV56PEud1MfbKDZMU2FlsNrI46PkcFQh3yMcDX0=",
"lastModified": 1672378490,
"narHash": "sha256-SH2ajnLeQFv/anUbv8PppdIR80Jh6QUAkDPs05RIXXc=",
"owner": "ziglibs",
"repo": "tres",
"rev": "16774b94efa61757a5302a690837dfb8cf750a11",
"rev": "cd1c321a86225569e3bab1b81f897626af538a5e",
"type": "github"
},
"original": {
@ -110,17 +126,18 @@
},
"zig-overlay": {
"inputs": {
"flake-compat": "flake-compat",
"flake-utils": "flake-utils_2",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1672142864,
"narHash": "sha256-uXljuSZK8DP5c4o9u+gcF+Yc3dKYH1wsHmDpWcFBVRQ=",
"lastModified": 1675556800,
"narHash": "sha256-OfruD3OiVbeQZrcrr/4iFo3Xv7mQkO7HYS+TcTNYY9E=",
"owner": "mitchellh",
"repo": "zig-overlay",
"rev": "16e9191142d2a13d7870c03e500842321a466a74",
"rev": "778b043b184d53b656b4812ddc41a37f29a51a4d",
"type": "github"
},
"original": {

View File

@ -38,7 +38,7 @@
dontInstall = true;
buildPhase = ''
mkdir -p $out
zig build install -Dcpu=baseline -Drelease-safe=true -Ddata_version=master -Dtres=${tres}/tres.zig -Dknown-folders=${known-folders}/known-folders.zig --prefix $out
zig build install -Dcpu=baseline -Doptimize=ReleaseSafe -Ddata_version=master -Dtres=${tres}/tres.zig -Dknown-folders=${known-folders}/known-folders.zig --prefix $out
'';
XDG_CACHE_HOME = ".cache";
};

View File

@ -82,6 +82,9 @@ pub const Handle = struct {
}
self.import_uris.deinit(allocator);
for (self.cimports.items(.source)) |source| {
allocator.free(source);
}
self.cimports.deinit(allocator);
}
};
@ -200,7 +203,7 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
self.allocator.free(handle.text);
handle.text = new_text;
var new_tree = try std.zig.parse(self.allocator, handle.text);
var new_tree = try Ast.parse(self.allocator, handle.text, .zig);
handle.tree.deinit(self.allocator);
handle.tree = new_tree;
@ -212,16 +215,26 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
for (handle.import_uris.items) |import_uri| {
self.allocator.free(import_uri);
}
const old_import_count = handle.import_uris.items.len;
const new_import_count = new_import_uris.items.len;
handle.import_uris.deinit(self.allocator);
handle.import_uris = new_import_uris;
var new_cimports = try self.collectCIncludes(handle.*);
const old_cimport_count = handle.cimports.len;
const new_cimport_count = new_cimports.len;
for (handle.cimports.items(.source)) |source| {
self.allocator.free(source);
}
handle.cimports.deinit(self.allocator);
handle.cimports = new_cimports;
// an include could have been removed but it would increase latency
// try self.garbageCollectionImports();
// try self.garbageCollectionCImports();
if (old_import_count != new_import_count or
old_cimport_count != new_cimport_count)
{
self.garbageCollectionImports() catch {};
self.garbageCollectionCImports() catch {};
}
}
pub fn applySave(self: *DocumentStore, handle: *const Handle) !void {
@ -242,7 +255,7 @@ pub fn applySave(self: *DocumentStore, handle: *const Handle) !void {
}
/// The `DocumentStore` represents a graph structure where every
/// handle/document is a node and every `@import` & `@cImport` represent
/// handle/document is a node and every `@import` and `@cImport` represents
/// a directed edge.
/// We can remove every document which cannot be reached from
/// another document that is `open` (see `Handle.open`)
@ -253,97 +266,103 @@ fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void {
var arena = std.heap.ArenaAllocator.init(self.allocator);
defer arena.deinit();
var reachable_handles = std.StringHashMapUnmanaged(void){};
defer reachable_handles.deinit(arena.allocator());
var reachable = try std.DynamicBitSetUnmanaged.initEmpty(arena.allocator(), self.handles.count());
var queue = std.ArrayListUnmanaged(Uri){};
for (self.handles.values()) |handle| {
for (self.handles.values()) |handle, handle_index| {
if (!handle.open) continue;
try reachable_handles.put(arena.allocator(), handle.uri, {});
reachable.set(handle_index);
try self.collectDependencies(arena.allocator(), handle.*, &queue);
}
while (queue.popOrNull()) |uri| {
const gop = try reachable_handles.getOrPut(arena.allocator(), uri);
if (gop.found_existing) continue;
const handle_index = self.handles.getIndex(uri) orelse continue;
if (reachable.isSet(handle_index)) continue;
reachable.set(handle_index);
const handle = self.handles.get(uri) orelse continue;
const handle = self.handles.values()[handle_index];
try self.collectDependencies(arena.allocator(), handle.*, &queue);
}
var i: usize = 0;
while (i < self.handles.count()) {
const handle = self.handles.values()[i];
if (reachable_handles.contains(handle.uri)) {
i += 1;
continue;
}
var it = reachable.iterator(.{
.kind = .unset,
.direction = .reverse,
});
while (it.next()) |handle_index| {
const handle = self.handles.values()[handle_index];
log.debug("Closing document {s}", .{handle.uri});
var kv = self.handles.fetchSwapRemove(handle.uri).?;
kv.value.deinit(self.allocator);
self.allocator.destroy(kv.value);
self.handles.swapRemoveAt(handle_index);
handle.deinit(self.allocator);
self.allocator.destroy(handle);
}
}
/// see `garbageCollectionImports`
fn garbageCollectionCImports(self: *DocumentStore) error{OutOfMemory}!void {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
if (self.cimports.count() == 0) return;
var reachable_hashes = std.AutoArrayHashMapUnmanaged(Hash, void){};
defer reachable_hashes.deinit(self.allocator);
var reachable = try std.DynamicBitSetUnmanaged.initEmpty(self.allocator, self.cimports.count());
defer reachable.deinit(self.allocator);
for (self.handles.values()) |handle| {
for (handle.cimports.items(.hash)) |hash| {
try reachable_hashes.put(self.allocator, hash, {});
const index = self.cimports.getIndex(hash).?;
reachable.set(index);
}
}
var i: usize = 0;
while (i < self.cimports.count()) {
const hash = self.cimports.keys()[i];
if (reachable_hashes.contains(hash)) {
i += 1;
continue;
}
var kv = self.cimports.fetchSwapRemove(hash).?;
const message = switch (kv.value) {
var it = reachable.iterator(.{
.kind = .unset,
.direction = .reverse,
});
while (it.next()) |cimport_index| {
var result = self.cimports.values()[cimport_index];
const message = switch (result) {
.failure => "",
.success => |uri| uri,
};
log.debug("Destroying cimport {s}", .{message});
kv.value.deinit(self.allocator);
self.cimports.swapRemoveAt(cimport_index);
result.deinit(self.allocator);
}
}
/// see `garbageCollectionImports`
fn garbageCollectionBuildFiles(self: *DocumentStore) error{OutOfMemory}!void {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
var reachable_build_files = std.StringHashMapUnmanaged(void){};
defer reachable_build_files.deinit(self.allocator);
if (self.build_files.count() == 0) return;
var reachable = try std.DynamicBitSetUnmanaged.initEmpty(self.allocator, self.build_files.count());
defer reachable.deinit(self.allocator);
for (self.handles.values()) |handle| {
const build_file_uri = handle.associated_build_file orelse continue;
const build_file_index = self.build_files.getIndex(build_file_uri).?;
try reachable_build_files.put(self.allocator, build_file_uri, {});
reachable.set(build_file_index);
}
var i: usize = 0;
while (i < self.build_files.count()) {
const hash = self.build_files.keys()[i];
if (reachable_build_files.contains(hash)) {
i += 1;
continue;
}
var kv = self.build_files.fetchSwapRemove(hash).?;
log.debug("Destroying build file {s}", .{kv.value.uri});
kv.value.deinit(self.allocator);
var it = reachable.iterator(.{
.kind = .unset,
.direction = .reverse,
});
while (it.next()) |build_file_index| {
var build_file = self.build_files.values()[build_file_index];
log.debug("Destroying build file {s}", .{build_file.uri});
self.build_files.swapRemoveAt(build_file_index);
build_file.deinit(self.allocator);
}
}
@ -623,7 +642,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro
var duped_uri = try self.allocator.dupe(u8, uri);
errdefer self.allocator.free(duped_uri);
var tree = try std.zig.parse(self.allocator, text);
var tree = try Ast.parse(self.allocator, text, .zig);
errdefer tree.deinit(self.allocator);
var nodes = tree.nodes.toMultiArrayList();
@ -815,12 +834,11 @@ pub fn collectDependencies(
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
try dependencies.ensureUnusedCapacity(allocator, handle.import_uris.items.len);
try dependencies.ensureUnusedCapacity(allocator, handle.import_uris.items.len + handle.cimports.len);
for (handle.import_uris.items) |uri| {
dependencies.appendAssumeCapacity(try allocator.dupe(u8, uri));
}
try dependencies.ensureUnusedCapacity(allocator, handle.cimports.len);
for (handle.cimports.items(.hash)) |hash| {
const result = store.cimports.get(hash) orelse continue;
switch (result) {
@ -850,7 +868,7 @@ pub fn resolveCImport(self: *DocumentStore, handle: Handle, node: Ast.Node.Index
if (!std.process.can_spawn) return null;
const index = std.mem.indexOfScalar(Ast.Node.Index, handle.cimports.items(.node), node).?;
const index = std.mem.indexOfScalar(Ast.Node.Index, handle.cimports.items(.node), node) orelse return null;
const hash: Hash = handle.cimports.items(.hash)[index];

View File

@ -1029,6 +1029,38 @@ fn gotoDefinitionGlobal(
return try server.gotoDefinitionSymbol(decl, resolve_alias);
}
/// Handles goto-definition on a builtin call token.
/// Only `@cImport` is resolvable to a location: it jumps to the URI the
/// document store recorded for this cimport (presumably the translated C
/// source — confirm against `DocumentStore.resolveCImport`). Every other
/// builtin, and any unresolved `@cImport`, yields `null`.
fn gotoDefinitionBuiltin(
server: *Server,
handle: *const DocumentStore.Handle,
loc: offsets.Loc,
) error{OutOfMemory}!?types.Location {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
// Slice of the builtin token starting at `loc.start`, including the leading '@'.
const name = offsets.tokenIndexToSlice(handle.tree.source, loc.start);
if (std.mem.eql(u8, name, "@cImport")) {
// Find the handle's cimport entry whose node's main token begins exactly
// at the requested position; bail out if the cursor isn't on one.
const index = for (handle.cimports.items(.node)) |cimport_node, index| {
const main_token = handle.tree.nodes.items(.main_token)[cimport_node];
if (loc.start == offsets.tokenToIndex(handle.tree, main_token)) break index;
} else return null;
const hash = handle.cimports.items(.hash)[index];
// The cimport may not have been processed (yet); nothing to jump to then.
const result = server.document_store.cimports.get(hash) orelse return null;
switch (result) {
.failure => return null,
// Point at the very start (0:0) of the resolved file.
.success => |uri| return types.Location{
.uri = uri,
.range = .{
.start = .{ .line = 0, .character = 0 },
.end = .{ .line = 0, .character = 0 },
},
},
}
}
return null;
}
fn hoverDefinitionLabel(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
@ -1044,24 +1076,47 @@ fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *const Docu
const name = identifierFromPosition(pos_index, handle.*);
if (name.len == 0) return null;
for (data.builtins) |builtin| {
const builtin = for (data.builtins) |builtin| {
if (std.mem.eql(u8, builtin.name[1..], name)) {
return types.Hover{
.contents = .{
.MarkupContent = .{
.kind = .markdown,
.value = try std.fmt.allocPrint(
server.arena.allocator(),
"```zig\n{s}\n```\n{s}",
.{ builtin.signature, builtin.documentation },
),
},
},
};
break builtin;
}
} else return null;
var contents: std.ArrayListUnmanaged(u8) = .{};
var writer = contents.writer(server.arena.allocator());
if (std.mem.eql(u8, name, "cImport")) blk: {
const index = for (handle.cimports.items(.node)) |cimport_node, index| {
const main_token = handle.tree.nodes.items(.main_token)[cimport_node];
const cimport_loc = offsets.tokenToLoc(handle.tree, main_token);
if (cimport_loc.start <= pos_index and pos_index <= cimport_loc.end) break index;
} else break :blk;
const source = handle.cimports.items(.source)[index];
try writer.print(
\\```c
\\{s}
\\```
\\
, .{source});
}
return null;
try writer.print(
\\```zig
\\{s}
\\```
\\{s}
, .{ builtin.signature, builtin.documentation });
return types.Hover{
.contents = .{
.MarkupContent = .{
.kind = .markdown,
.value = contents.items,
},
},
};
}
fn hoverDefinitionGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
@ -2205,6 +2260,7 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
return switch (pos_context) {
.builtin => |loc| try server.gotoDefinitionBuiltin(handle, loc),
.var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
.field_access => |loc| try server.gotoDefinitionFieldAccess(handle, source_index, loc, resolve_alias),
.import_string_literal => try server.gotoDefinitionString(source_index, handle),

View File

@ -39,6 +39,7 @@ pub fn getDocComments(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.In
.aligned_var_decl,
.simple_var_decl,
.container_field_init,
.container_field,
=> {
if (getDocCommentTokenIndex(tokens, base)) |doc_comment_index|
return try collectDocComments(allocator, tree, doc_comment_index, format, false);
@ -58,6 +59,7 @@ pub fn getDocCommentTokenIndex(tokens: []const std.zig.Token.Tag, base_token: As
if (tokens[idx] == .keyword_extern and idx > 0) idx -= 1;
if (tokens[idx] == .keyword_export and idx > 0) idx -= 1;
if (tokens[idx] == .keyword_inline and idx > 0) idx -= 1;
if (tokens[idx] == .identifier and idx > 0) idx -= 1;
if (tokens[idx] == .keyword_pub and idx > 0) idx -= 1;
// Find first doc comment token

View File

@ -1395,14 +1395,18 @@ pub fn iterateChildren(
var buffer: [1]Node.Index = undefined;
const fn_proto = tree.fullFnProto(&buffer, node).?;
for (fn_proto.ast.params) |child| {
try callback(context, child);
var it = fn_proto.iterate(&tree);
while (nextFnParam(&it)) |param| {
try callback(context, param.type_expr);
}
try callback(context, fn_proto.ast.align_expr);
try callback(context, fn_proto.ast.addrspace_expr);
try callback(context, fn_proto.ast.section_expr);
try callback(context, fn_proto.ast.callconv_expr);
try callback(context, fn_proto.ast.return_type);
if (node_tags[node] == .fn_decl) {
try callback(context, node_data[node].rhs);
}
},
.container_decl_arg,

View File

@ -1249,6 +1249,13 @@ pub const builtins = [_]Builtin{
\\Converts a pointer of one type to a pointer of another type.
\\
\\[Optional Pointers](https://ziglang.org/documentation/master/#Optional-Pointers) are allowed. Casting an optional pointer which is [null](https://ziglang.org/documentation/master/#null) to a non-optional pointer invokes safety-checked [Undefined Behavior](https://ziglang.org/documentation/master/#Undefined-Behavior).
\\
\\`@ptrCast` cannot be used for:
\\
\\ - Removing `const` or `volatile` qualifier, use [@qualCast](https://ziglang.org/documentation/master/#qualCast).
\\ - Changing pointer address space, use [@addrSpaceCast](https://ziglang.org/documentation/master/#addrSpaceCast).
\\ - Increasing pointer alignment, use [@alignCast](https://ziglang.org/documentation/master/#alignCast).
\\ - Casting a non-slice pointer to a slice, use slicing syntax `ptr[start..end]`.
,
.arguments = &.{
"comptime DestType: type",
@ -1268,6 +1275,18 @@ pub const builtins = [_]Builtin{
"value: anytype",
},
},
.{
.name = "@qualCast",
.signature = "@qualCast(comptime DestType: type, value: anytype) DestType",
.snippet = "@qualCast(${1:comptime DestType: type}, ${2:value: anytype})",
.documentation =
\\Remove `const` or `volatile` qualifier from a pointer.
,
.arguments = &.{
"comptime DestType: type",
"value: anytype",
},
},
.{
.name = "@rem",
.signature = "@rem(numerator: T, denominator: T) T",
@ -1613,8 +1632,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Performs the square root of a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1627,8 +1645,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Sine trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1641,8 +1658,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Cosine trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1655,8 +1671,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Tangent trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1669,8 +1684,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Base-e exponential function on a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1683,8 +1697,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Base-2 exponential function on a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1697,8 +1710,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the natural logarithm of a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1711,8 +1723,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the logarithm to the base 2 of a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1725,8 +1736,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the logarithm to the base 10 of a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1739,8 +1749,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the absolute value of a floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1753,8 +1762,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the largest integral value not greater than the given floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1767,8 +1775,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Returns the smallest integral value not less than the given floating point number. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1781,8 +1788,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Rounds the given floating point number to an integer, towards zero. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",
@ -1795,8 +1801,7 @@ pub const builtins = [_]Builtin{
.documentation =
\\Rounds the given floating point number to an integer, away from zero. Uses a dedicated hardware instruction when available.
\\
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats, with the caveat that
\\[some float operations are not yet implemented for all float types](https://github.com/ziglang/zig/issues/4026).
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
,
.arguments = &.{
"value: anytype",

View File

@ -13,7 +13,7 @@ pub fn labelReferences(
encoding: offsets.Encoding,
include_decl: bool,
) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
std.debug.assert(decl.decl.* == .label_decl);
std.debug.assert(decl.decl.* == .label_decl); // use `symbolReferences` instead
const handle = decl.handle;
const tree = handle.tree;
const token_tags = tree.tokens.items(.tag);
@ -55,409 +55,127 @@ pub fn labelReferences(
const Builder = struct {
arena: *std.heap.ArenaAllocator,
locations: std.ArrayListUnmanaged(types.Location),
locations: std.ArrayListUnmanaged(types.Location) = .{},
/// this is the declaration we are searching for
decl_handle: analysis.DeclWithHandle,
store: *DocumentStore,
decl: analysis.DeclWithHandle,
encoding: offsets.Encoding,
pub fn init(arena: *std.heap.ArenaAllocator, store: *DocumentStore, decl: analysis.DeclWithHandle, encoding: offsets.Encoding) Builder {
return Builder{
.arena = arena,
.locations = .{},
.store = store,
.decl = decl,
.encoding = encoding,
};
}
const Context = struct {
builder: *Builder,
handle: *const DocumentStore.Handle,
};
pub fn add(self: *Builder, handle: *const DocumentStore.Handle, token_index: Ast.TokenIndex) !void {
pub fn add(self: *Builder, handle: *const DocumentStore.Handle, token_index: Ast.TokenIndex) error{OutOfMemory}!void {
try self.locations.append(self.arena.allocator(), .{
.uri = handle.uri,
.range = offsets.tokenToRange(handle.tree, token_index, self.encoding),
});
}
};
fn symbolReferencesInternal(
builder: *Builder,
node: Ast.Node.Index,
handle: *const DocumentStore.Handle,
is_root: bool,
) error{OutOfMemory}!void {
const tree = handle.tree;
if (!is_root and node == 0 or node > tree.nodes.len) return;
const node_tags = tree.nodes.items(.tag);
const datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const starts = tree.tokens.items(.start);
switch (node_tags[node]) {
.block,
.block_semicolon,
.block_two,
.block_two_semicolon,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(tree, node, &buffer).?;
for (statements) |stmt|
try symbolReferencesInternal(builder, stmt, handle, false);
},
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
=> {
var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node).?;
for (container_decl.ast.members) |member|
try symbolReferencesInternal(builder, member, handle, false);
},
.error_set_decl => {},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = tree.fullVarDecl(node).?;
try symbolReferencesInternal(builder, var_decl.ast.type_node, handle, false);
try symbolReferencesInternal(builder, var_decl.ast.init_node, handle, false);
},
.container_field,
.container_field_align,
.container_field_init,
=> {
const field = tree.fullContainerField(node).?;
try symbolReferencesInternal(builder, field.ast.type_expr, handle, false);
try symbolReferencesInternal(builder, field.ast.value_expr, handle, false);
},
.identifier => {
const child = (try analysis.lookupSymbolGlobal(builder.store, builder.arena, handle, tree.getNodeSource(node), starts[main_tokens[node]])) orelse return;
if (std.meta.eql(builder.decl, child)) try builder.add(handle, main_tokens[node]);
},
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = tree.fullFnProto(&buf, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param| {
try symbolReferencesInternal(builder, param.type_expr, handle, false);
}
try symbolReferencesInternal(builder, fn_proto.ast.return_type, handle, false);
try symbolReferencesInternal(builder, fn_proto.ast.align_expr, handle, false);
try symbolReferencesInternal(builder, fn_proto.ast.section_expr, handle, false);
try symbolReferencesInternal(builder, fn_proto.ast.callconv_expr, handle, false);
if (node_tags[node] == .fn_decl) {
try symbolReferencesInternal(builder, datas[node].rhs, handle, false);
}
},
.@"switch",
.switch_comma,
=> {
// TODO When renaming a union(enum) field, also rename switch items that refer to it.
try symbolReferencesInternal(builder, datas[node].lhs, handle, false);
const extra = tree.extraData(datas[node].rhs, Ast.Node.SubRange);
const cases = tree.extra_data[extra.start..extra.end];
for (cases) |case| {
try symbolReferencesInternal(builder, case, handle, false);
}
},
.switch_case_one,
.switch_case_inline_one,
.switch_case,
.switch_case_inline,
=> {
const case = tree.fullSwitchCase(node).?;
try symbolReferencesInternal(builder, case.ast.target_expr, handle, false);
for (case.ast.values) |val|
try symbolReferencesInternal(builder, val, handle, false);
},
.@"while",
.while_simple,
.while_cont,
.for_simple,
.@"for",
=> {
const loop = ast.fullWhile(tree, node).?;
try symbolReferencesInternal(builder, loop.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.cont_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.else_expr, handle, false);
},
.@"if",
.if_simple,
=> {
const if_node = ast.fullIf(tree, node).?;
try symbolReferencesInternal(builder, if_node.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.else_expr, handle, false);
},
.ptr_type,
.ptr_type_aligned,
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.align_node != 0) {
try symbolReferencesInternal(builder, ptr_type.ast.align_node, handle, false);
if (node_tags[node] == .ptr_type_bit_range) {
try symbolReferencesInternal(builder, ptr_type.ast.bit_range_start, handle, false);
try symbolReferencesInternal(builder, ptr_type.ast.bit_range_end, handle, false);
}
}
try symbolReferencesInternal(builder, ptr_type.ast.sentinel, handle, false);
try symbolReferencesInternal(builder, ptr_type.ast.child_type, handle, false);
},
.array_init,
.array_init_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const array_init = tree.fullArrayInit(&buf, node).?;
try symbolReferencesInternal(builder, array_init.ast.type_expr, handle, false);
for (array_init.ast.elements) |e|
try symbolReferencesInternal(builder, e, handle, false);
},
.struct_init,
.struct_init_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_one,
.struct_init_one_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;
try symbolReferencesInternal(builder, struct_init.ast.type_expr, handle, false);
for (struct_init.ast.fields) |field|
try symbolReferencesInternal(builder, field, handle, false);
},
.call,
.call_comma,
.call_one,
.call_one_comma,
.async_call,
.async_call_comma,
.async_call_one,
.async_call_one_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
const call = tree.fullCall(&buf, node).?;
try symbolReferencesInternal(builder, call.ast.fn_expr, handle, false);
for (call.ast.params) |param| {
try symbolReferencesInternal(builder, param, handle, false);
}
},
.slice,
.slice_sentinel,
.slice_open,
=> {
const slice: Ast.full.Slice = tree.fullSlice(node).?;
try symbolReferencesInternal(builder, slice.ast.sliced, handle, false);
try symbolReferencesInternal(builder, slice.ast.start, handle, false);
try symbolReferencesInternal(builder, slice.ast.end, handle, false);
try symbolReferencesInternal(builder, slice.ast.sentinel, handle, false);
},
.builtin_call,
.builtin_call_comma,
.builtin_call_two,
.builtin_call_two_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(tree, node, &buffer).?;
for (params) |param|
try symbolReferencesInternal(builder, param, handle, false);
},
.@"asm",
.asm_simple,
=> |tag| {
const full_asm: Ast.full.Asm = if (tag == .@"asm") tree.asmFull(node) else tree.asmSimple(node);
if (full_asm.ast.items.len == 0)
try symbolReferencesInternal(builder, full_asm.ast.template, handle, false);
for (full_asm.inputs) |input|
try symbolReferencesInternal(builder, input, handle, false);
for (full_asm.outputs) |output|
try symbolReferencesInternal(builder, output, handle, false);
},
// TODO implement references for asm
.asm_output => {},
.asm_input => {},
.field_access => {
try symbolReferencesInternal(builder, datas[node].lhs, handle, false);
var bound_type_params = analysis.BoundTypeParams{};
const left_type = try analysis.resolveFieldAccessLhsType(
builder.store,
builder.arena,
(try analysis.resolveTypeOfNodeInternal(builder.store, builder.arena, .{
.node = datas[node].lhs,
.handle = handle,
}, &bound_type_params)) orelse return,
&bound_type_params,
);
const left_type_node = switch (left_type.type.data) {
.other => |n| n,
else => return,
};
const child = (try analysis.lookupSymbolContainer(
builder.store,
builder.arena,
.{ .node = left_type_node, .handle = left_type.handle },
tree.tokenSlice(datas[node].rhs),
!left_type.type.is_type_val,
)) orelse return;
if (std.meta.eql(child, builder.decl)) try builder.add(handle, datas[node].rhs);
},
.@"usingnamespace",
.unwrap_optional,
.bool_not,
.negation,
.bit_not,
.negation_wrap,
.address_of,
.@"try",
.@"await",
.optional_type,
.deref,
.@"suspend",
.@"resume",
.@"continue",
.@"break",
.@"return",
.grouped_expression,
.@"comptime",
.@"nosuspend",
=> try symbolReferencesInternal(builder, datas[node].lhs, handle, false),
.test_decl,
.@"errdefer",
.@"defer",
.anyframe_type,
=> try symbolReferencesInternal(builder, datas[node].rhs, handle, false),
.equal_equal,
.bang_equal,
.less_than,
.greater_than,
.less_or_equal,
.greater_or_equal,
.assign_mul,
.assign_div,
.assign_mod,
.assign_add,
.assign_sub,
.assign_shl,
.assign_shl_sat,
.assign_shr,
.assign_bit_and,
.assign_bit_xor,
.assign_bit_or,
.assign_mul_wrap,
.assign_add_wrap,
.assign_sub_wrap,
.assign_mul_sat,
.assign_add_sat,
.assign_sub_sat,
.assign,
.merge_error_sets,
.mul,
.div,
.mod,
.array_mult,
.mul_wrap,
.mul_sat,
.add,
.sub,
.array_cat,
.add_wrap,
.sub_wrap,
.add_sat,
.sub_sat,
.shl,
.shl_sat,
.shr,
.bit_and,
.bit_xor,
.bit_or,
.@"orelse",
.bool_and,
.bool_or,
.array_type,
.array_type_sentinel,
.array_access,
.@"catch",
.switch_range,
.error_union,
=> {
try symbolReferencesInternal(builder, datas[node].lhs, handle, false);
try symbolReferencesInternal(builder, datas[node].rhs, handle, false);
},
.anyframe_literal,
.char_literal,
.number_literal,
.unreachable_literal,
.enum_literal,
.string_literal,
.multiline_string_literal,
.error_value,
=> {},
fn collectReferences(self: *Builder, handle: *const DocumentStore.Handle, node: Ast.Node.Index) error{OutOfMemory}!void {
const context = Context{
.builder = self,
.handle = handle,
};
try ast.iterateChildrenRecursive(handle.tree, node, &context, error{OutOfMemory}, referenceNode);
}
}
fn referenceNode(self: *const Context, node: Ast.Node.Index) error{OutOfMemory}!void {
const builder = self.builder;
const handle = self.handle;
const node_tags = handle.tree.nodes.items(.tag);
const datas = handle.tree.nodes.items(.data);
const main_tokens = handle.tree.nodes.items(.main_token);
const starts = handle.tree.tokens.items(.start);
switch (node_tags[node]) {
.identifier => {
const identifier_token = main_tokens[node];
const child = (try analysis.lookupSymbolGlobal(
builder.store,
builder.arena,
handle,
offsets.tokenToSlice(handle.tree, identifier_token),
starts[identifier_token],
)) orelse return;
if (std.meta.eql(builder.decl_handle, child)) {
try builder.add(handle, identifier_token);
}
},
.field_access => {
var bound_type_params = analysis.BoundTypeParams{};
const left_type = try analysis.resolveFieldAccessLhsType(
builder.store,
builder.arena,
(try analysis.resolveTypeOfNodeInternal(
builder.store,
builder.arena,
.{ .node = datas[node].lhs, .handle = handle },
&bound_type_params,
)) orelse return,
&bound_type_params,
);
const left_type_node = switch (left_type.type.data) {
.other => |n| n,
else => return,
};
const child = (try analysis.lookupSymbolContainer(
builder.store,
builder.arena,
.{ .node = left_type_node, .handle = left_type.handle },
offsets.tokenToSlice(handle.tree, datas[node].rhs),
!left_type.type.is_type_val,
)) orelse return;
if (std.meta.eql(builder.decl_handle, child)) {
try builder.add(handle, datas[node].rhs);
}
},
else => {},
}
}
};
pub fn symbolReferences(
arena: *std.heap.ArenaAllocator,
store: *DocumentStore,
decl_handle: analysis.DeclWithHandle,
encoding: offsets.Encoding,
/// add `decl_handle` as a references
include_decl: bool,
/// exclude references from the std library
skip_std_references: bool,
/// search other files for references
workspace: bool,
) error{OutOfMemory}!std.ArrayListUnmanaged(types.Location) {
std.debug.assert(decl_handle.decl.* != .label_decl);
std.debug.assert(decl_handle.decl.* != .label_decl); // use `labelReferences` instead
var builder = Builder.init(arena, store, decl_handle, encoding);
var builder = Builder{
.arena = arena,
.store = store,
.decl_handle = decl_handle,
.encoding = encoding,
};
const curr_handle = decl_handle.handle;
if (include_decl) try builder.add(curr_handle, decl_handle.nameToken());
switch (decl_handle.decl.*) {
.ast_node => {
try symbolReferencesInternal(&builder, 0, curr_handle, true);
.ast_node,
.pointer_payload,
.switch_payload,
.array_payload,
.array_index,
=> {
try builder.collectReferences(curr_handle, 0);
if (!workspace) return builder.locations;
if (decl_handle.decl.* != .ast_node or !workspace) return builder.locations;
var dependencies = std.StringArrayHashMapUnmanaged(void){};
@ -476,24 +194,14 @@ pub fn symbolReferences(
}
for (dependencies.keys()) |uri| {
if (std.mem.eql(u8, uri, curr_handle.uri)) continue;
const handle = store.getHandle(uri) orelse continue;
if (std.mem.eql(u8, handle.uri, curr_handle.uri)) continue;
try symbolReferencesInternal(&builder, 0, handle, true);
try builder.collectReferences(handle, 0);
}
},
.pointer_payload,
.switch_payload,
.array_payload,
.array_index,
=> {
try symbolReferencesInternal(&builder, 0, curr_handle, true);
return builder.locations;
},
.param_payload => |pay| blk: {
.param_payload => |payload| blk: {
// Rename the param tok.
const param = pay.param;
for (curr_handle.document_scope.scopes.items(.data)) |scope_data| {
if (scope_data != .function) continue;
@ -504,10 +212,10 @@ pub fn symbolReferences(
var it = fn_proto.iterate(&curr_handle.tree);
while (ast.nextFnParam(&it)) |candidate| {
if (!std.meta.eql(candidate, param)) continue;
if (!std.meta.eql(candidate, payload.param)) continue;
if (curr_handle.tree.nodes.items(.tag)[proto] != .fn_decl) break :blk;
try symbolReferencesInternal(&builder, curr_handle.tree.nodes.items(.data)[proto].rhs, curr_handle, false);
try builder.collectReferences(curr_handle, curr_handle.tree.nodes.items(.data)[proto].rhs);
break :blk;
}
}

View File

@ -164,8 +164,15 @@ fn processStep(
try processIncludeDirs(allocator, include_dirs, install_exe.artifact.include_dirs.items);
try processPkgConfig(allocator, include_dirs, install_exe.artifact);
for (install_exe.artifact.packages.items) |pkg| {
try processPackage(allocator, packages, pkg);
if (@hasField(LibExeObjStep, "modules")) {
var modules_it = install_exe.artifact.modules.iterator();
while (modules_it.next()) |module_entry| {
try processModule(allocator, packages, module_entry);
}
} else { // assuming @hasField(LibExeObjStep, "packages")
for (install_exe.artifact.packages.items) |pkg| {
try processPackage(allocator, packages, pkg);
}
}
} else if (step.cast(LibExeObjStep)) |exe| {
if (exe.root_src) |src| {
@ -177,8 +184,15 @@ fn processStep(
}
try processIncludeDirs(allocator, include_dirs, exe.include_dirs.items);
try processPkgConfig(allocator, include_dirs, exe);
for (exe.packages.items) |pkg| {
try processPackage(allocator, packages, pkg);
if (@hasField(LibExeObjStep, "modules")) {
var modules_it = exe.modules.iterator();
while (modules_it.next()) |module_entry| {
try processModule(allocator, packages, module_entry);
}
} else { // assuming @hasField(LibExeObjStep, "packages")
for (exe.packages.items) |pkg| {
try processPackage(allocator, packages, pkg);
}
}
} else {
for (step.dependencies.items) |unknown_step| {
@ -187,6 +201,30 @@ fn processStep(
}
}
fn processModule(
allocator: std.mem.Allocator,
packages: *std.ArrayListUnmanaged(BuildConfig.Pkg),
module: std.StringArrayHashMap(*std.Build.Module).Entry,
) !void {
for (packages.items) |package| {
if (std.mem.eql(u8, package.name, module.key_ptr.*)) return;
}
const maybe_path = switch (module.value_ptr.*.source_file) {
.path => |path| path,
.generated => |generated| generated.path,
};
if (maybe_path) |path| {
try packages.append(allocator, .{ .name = module.key_ptr.*, .path = path });
}
var deps_it = module.value_ptr.*.dependencies.iterator();
while (deps_it.next()) |module_dep| {
try processModule(allocator, packages, module_dep);
}
}
fn processPackage(
allocator: std.mem.Allocator,
packages: *std.ArrayListUnmanaged(BuildConfig.Pkg),

View File

@ -53,7 +53,7 @@ fn testConvertCInclude(cimport_source: []const u8, expected: []const u8) !void {
const source: [:0]u8 = try std.fmt.allocPrintZ(allocator, "const c = {s};", .{cimport_source});
defer allocator.free(source);
var ast = try std.zig.parse(allocator, source);
var ast = try Ast.parse(allocator, source, .zig);
defer ast.deinit(allocator);
const main_tokens = ast.nodes.items(.main_token);

View File

@ -11,9 +11,6 @@ const offsets = zls.offsets;
const allocator: std.mem.Allocator = std.testing.allocator;
// TODO fix references so that we can stop skipping these tests
const skip_references_tests = true;
test "references" {
try testReferences(
\\const <0> = 0;
@ -59,7 +56,7 @@ test "references - local scope" {
\\ return <0> + bar;
\\}
);
if (skip_references_tests) return error.SkipZigTest;
if (true) return error.SkipZigTest; // TODO
try testReferences(
\\const foo = blk: {
\\ _ = blk: {
@ -73,6 +70,32 @@ test "references - local scope" {
);
}
test "references - struct field access" {
if (true) return error.SkipZigTest; // TODO
try testReferences(
\\const S = struct {placeholder: u32 = 3};
\\pub fn foo() bool {
\\ const s: S = .{};
\\ return s.<0> == s.<0>;
\\}
);
}
test "references - struct decl access" {
try testReferences(
\\const S = struct {
\\ fn <0>() void {}
\\};
\\pub fn foo() bool {
\\ const s: S = .{};
\\ s.<0>();
\\ s.<0>();
\\ <1>();
\\}
\\fn <1>() void {}
);
}
test "references - while continue expression" {
try testReferences(
\\ pub fn foo() void {
@ -83,7 +106,7 @@ test "references - while continue expression" {
}
test "references - label" {
if (skip_references_tests) return error.SkipZigTest;
if (true) return error.SkipZigTest; // TODO
try testReferences(
\\const foo = <0>: {
\\ break :<0> 0;
@ -106,6 +129,8 @@ fn testReferences(source: []const u8) !void {
try ctx.requestDidOpen(file_uri, phr.new_source);
try std.testing.expect(phr.locations.len != 0);
var i: usize = 0;
while (i < phr.locations.len) : (i += 1) {
const var_loc = phr.locations.items(.old)[i];
@ -120,6 +145,14 @@ fn testReferences(source: []const u8) !void {
const response = try ctx.requestGetResponse(?[]types.Location, "textDocument/references", params);
var error_builder = ErrorBuilder.init(allocator, phr.new_source);
defer error_builder.deinit();
errdefer {
const note_loc = phr.locations.items(.new)[i];
error_builder.msgAtLoc("asked for references here", note_loc, .info, .{}) catch {};
error_builder.writeDebug();
}
const locations: []types.Location = response.result orelse {
std.debug.print("Server returned `null` as the result\n", .{});
return error.InvalidResponse;
@ -150,14 +183,6 @@ fn testReferences(source: []const u8) !void {
};
defer allocator.free(expected_locs);
var error_builder = ErrorBuilder.init(allocator, phr.new_source);
defer error_builder.deinit();
errdefer {
const note_loc = phr.locations.items(.new)[i];
error_builder.msgAtLoc("asked for references here", note_loc, .info, .{}) catch {};
error_builder.writeDebug();
}
// keeps track of expected locations that have been given by the server
// used to detect double references and missing references
var visited = try std.DynamicBitSetUnmanaged.initEmpty(allocator, expected_locs.len);

View File

@ -47,7 +47,7 @@ fn testNodesAtLoc(source: []const u8) !void {
const new_source = try allocator.dupeZ(u8, ccp.new_source);
defer allocator.free(new_source);
var tree = try std.zig.parse(allocator, new_source);
var tree = try std.zig.Ast.parse(allocator, new_source, .zig);
defer tree.deinit(allocator);
const nodes = try ast.nodesAtLoc(allocator, tree, inner_loc);

View File

@ -4,6 +4,8 @@ const zls = @import("zls");
const types = zls.types;
const offsets = zls.offsets;
const Ast = std.zig.Ast;
test "offsets - index <-> Position" {
try testIndexPosition("", 0, 0, .{ 0, 0, 0 });
@ -116,8 +118,8 @@ fn testIndexPosition(text: []const u8, index: usize, line: u32, characters: [3]u
try std.testing.expectEqual(index, offsets.positionToIndex(text, position32, .@"utf-32"));
}
fn testTokenToLoc(text: [:0]const u8, token_index: std.zig.Ast.TokenIndex, start: usize, end: usize) !void {
var tree = try std.zig.parse(std.testing.allocator, text);
fn testTokenToLoc(text: [:0]const u8, token_index: Ast.TokenIndex, start: usize, end: usize) !void {
var tree = try Ast.parse(std.testing.allocator, text, .zig);
defer tree.deinit(std.testing.allocator);
const actual = offsets.tokenToLoc(tree, token_index);