Testing improvements (#662)

Techatrix 2022-09-18 22:47:06 +00:00 committed by GitHub
parent b3078c36dd
commit 7f4f002380
10 changed files with 510 additions and 111 deletions

View File

@ -117,8 +117,6 @@ pub fn build(b: *std.build.Builder) !void {
var tests = b.addTest("tests/tests.zig");
tests.use_stage1 = true;
tests.addPackage(.{ .name = "zls", .source = .{ .path = "src/zls.zig" }, .dependencies = exe.packages.items });
tests.addPackage(.{ .name = "helper", .source = .{ .path = "tests/helper.zig" } });
tests.addPackage(.{ .name = "context", .source = .{ .path = "tests/context.zig" } });
tests.setBuildMode(.Debug);
tests.setTarget(target);
test_step.dependOn(&tests.step);

View File

@ -94,10 +94,7 @@ pub fn tokenLength(tree: Ast, token_index: Ast.TokenIndex, encoding: Encoding) u
}
pub fn rangeLength(text: []const u8, range: types.Range, encoding: Encoding) usize {
const loc: Loc = .{
.start = positionToIndex(text, range.start, encoding),
.end = positionToIndex(text, range.end, encoding),
};
const loc = rangeToLoc(text, range, encoding);
return locLength(text, loc, encoding);
}
@ -163,6 +160,17 @@ pub fn locToRange(text: []const u8, loc: Loc, encoding: Encoding) types.Range {
};
}
pub fn rangeToSlice(text: []const u8, range: types.Range, encoding: Encoding) []const u8 {
return locToSlice(text, rangeToLoc(text, range, encoding));
}
pub fn rangeToLoc(text: []const u8, range: types.Range, encoding: Encoding) Loc {
return .{
.start = positionToIndex(text, range.start, encoding),
.end = positionToIndex(text, range.end, encoding),
};
}
pub fn nodeToLoc(tree: Ast, node: Ast.Node.Index) Loc {
return .{ .start = tokenToIndex(tree, tree.firstToken(node)), .end = tokenToLoc(tree, ast.lastToken(tree, node)).end };
}
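The new range helpers simply compose the existing position/index conversions. A minimal usage sketch, not part of the commit (the sample text, range, and encoding are made up; it assumes the zls package is wired up as in build.zig above):
const std = @import("std");
const zls = @import("zls");
const offsets = zls.offsets;
const types = zls.types;
test "rangeToLoc and rangeToSlice agree" {
    const text = "const foo = 42;";
    // An LSP range covering "foo" on line 0, columns 6..9 (UTF-16 code units here).
    const range = types.Range{
        .start = .{ .line = 0, .character = 6 },
        .end = .{ .line = 0, .character = 9 },
    };
    const loc = offsets.rangeToLoc(text, range, .utf16);
    try std.testing.expectEqual(@as(usize, 6), loc.start);
    try std.testing.expectEqual(@as(usize, 9), loc.end);
    // rangeToSlice is shorthand for slicing `text` with the converted Loc.
    try std.testing.expectEqualStrings("foo", offsets.rangeToSlice(text, range, .utf16));
}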

View File

@ -516,7 +516,7 @@ pub fn symbolReferences(
try imports.resize(arena.allocator(), 0);
}
},
.param_decl => |param| {
.param_decl => |param| blk: {
// Rename the param tok.
for (curr_handle.document_scope.scopes.items) |scope| {
if (scope.data != .function) continue;
@ -530,9 +530,9 @@ pub fn symbolReferences(
while (ast.nextFnParam(&it)) |candidate| {
if (!std.meta.eql(candidate, param)) continue;
if (curr_handle.tree.nodes.items(.tag)[proto] != .fn_decl) break;
if (curr_handle.tree.nodes.items(.tag)[proto] != .fn_decl) break :blk;
try symbolReferencesInternal(&builder, curr_handle.tree.nodes.items(.data)[proto].rhs, curr_handle);
break;
break :blk;
}
}
log.warn("Could not find param decl's function", .{});

tests/ErrorBuilder.zig Normal file
View File

@ -0,0 +1,99 @@
const std = @import("std");
const zls = @import("zls");
const offsets = zls.offsets;
const ErrorBuilder = @This();
allocator: std.mem.Allocator,
items: std.ArrayListUnmanaged(MsgItem) = .{},
source: []const u8,
pub fn init(allocator: std.mem.Allocator, source: []const u8) ErrorBuilder {
return ErrorBuilder{
.allocator = allocator,
.source = source,
};
}
pub fn deinit(builder: *ErrorBuilder) void {
for (builder.items.items) |item| {
builder.allocator.free(item.message);
}
builder.items.deinit(builder.allocator);
}
pub fn msgAtLoc(builder: *ErrorBuilder, comptime fmt: []const u8, loc: offsets.Loc, level: std.log.Level, args: anytype) !void {
try builder.items.append(builder.allocator, .{
.loc = loc,
.level = level,
.message = try std.fmt.allocPrint(builder.allocator, fmt, args),
});
}
pub fn msgAtIndex(builder: *ErrorBuilder, comptime fmt: []const u8, index: usize, level: std.log.Level, args: anytype) !void {
return msgAtLoc(builder, fmt, .{ .start = index, .end = index }, level, args);
}
pub fn hasMessages(builder: *ErrorBuilder) bool {
return builder.items.items.len != 0;
}
pub fn write(builder: *ErrorBuilder, writer: anytype) !void {
if (!builder.hasMessages()) return;
std.sort.sort(MsgItem, builder.items.items, builder, ErrorBuilder.lessThan);
try writer.writeByte('\n');
var start: usize = 0;
for (builder.items.items) |item| {
const line = offsets.lineLocAtIndex(builder.source, item.loc.start);
defer start = line.end;
try writer.writeAll(builder.source[start..line.end]);
try writer.writeByte('\n');
{
var i: usize = line.start;
while (i < item.loc.start) : (i += 1) try writer.writeByte(' ');
while (i < item.loc.end) : (i += 1) try writer.writeByte('^');
if (item.loc.start == item.loc.end) try writer.writeByte('^');
}
const level_txt: []const u8 = switch (item.level) {
.err => "error",
.warn => "warning",
.info => "info",
.debug => "debug",
};
try writer.print(" {s}: {s}", .{ level_txt, item.message });
}
try writer.writeAll(builder.source[start..builder.source.len]);
try writer.writeByte('\n');
}
pub fn writeDebug(builder: *ErrorBuilder) void {
if (!builder.hasMessages()) return;
std.debug.getStderrMutex().lock();
defer std.debug.getStderrMutex().unlock();
nosuspend builder.write(std.io.getStdErr().writer()) catch return;
}
const MsgItem = struct {
loc: offsets.Loc,
level: std.log.Level,
message: []const u8,
};
fn lessThan(builder: *ErrorBuilder, lhs: MsgItem, rhs: MsgItem) bool {
const is_less = lhs.loc.start < rhs.loc.start;
const text = if (is_less) builder.source[lhs.loc.start..rhs.loc.start] else builder.source[rhs.loc.start..lhs.loc.start];
// report messages on the same line in reverse order
if (std.mem.indexOfScalar(u8, text, '\n') == null) {
return !is_less;
}
return is_less;
}
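A usage sketch for the new ErrorBuilder, mirroring how the inlay hint and reference tests below drive it; the import path and the sample source and location are assumptions for illustration:
const std = @import("std");
const zls = @import("zls");
const offsets = zls.offsets;
const ErrorBuilder = @import("ErrorBuilder.zig"); // "../ErrorBuilder.zig" from tests/lsp_features/

test "ErrorBuilder usage sketch" {
    const source = "const foo = 42;";
    var error_builder = ErrorBuilder.init(std.testing.allocator, source);
    defer error_builder.deinit();
    // If the test fails, print every collected message underneath its source line.
    errdefer error_builder.writeDebug();

    // Attach an error message to "foo" (byte offsets 6..9 in `source`).
    const loc: offsets.Loc = .{ .start = 6, .end = 9 };
    try error_builder.msgAtLoc("expected a different name, found `{s}`", loc, .err, .{source[loc.start..loc.end]});

    try std.testing.expect(error_builder.hasMessages());
}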

View File

@ -3,6 +3,8 @@ const zls = @import("zls");
const headerPkg = zls.header;
const Server = zls.Server;
const types = zls.types;
const requests = zls.requests;
const initialize_msg =
\\{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneProgress":true}},"trace":"off","workspaceFolders":[{"uri":"file://./tests", "name":"root"}]}
@ -101,4 +103,58 @@ pub const Context = struct {
try std.testing.expectEqualStrings(expected, result_json);
}
// helper
pub fn requestDidOpen(self: *Context, uri: []const u8, source: []const u8) !void {
const open_document = requests.OpenDocument{
.params = .{
.textDocument = .{
.uri = uri,
// .languageId = "zig",
// .version = 420,
.text = source,
},
},
};
const params = try std.json.stringifyAlloc(allocator, open_document.params, .{});
defer allocator.free(params);
try self.request("textDocument/didOpen", params, null);
}
pub fn Response(comptime Result: type) type {
return struct {
jsonrpc: []const u8,
id: types.RequestId,
result: Result,
pub fn deinit(self: @This()) void {
const parse_options = std.json.ParseOptions{
.allocator = allocator,
.ignore_unknown_fields = true,
};
std.json.parseFree(@This(), self, parse_options);
}
};
}
pub fn requestGetResponse(self: *Context, comptime Result: type, method: []const u8, request_struct: anytype) !Response(Result) {
const params = try std.json.stringifyAlloc(allocator, request_struct.params, .{});
defer allocator.free(params);
const response_bytes = try self.requestAlloc(method, params);
defer allocator.free(response_bytes);
const parse_options = std.json.ParseOptions{
.allocator = allocator,
.ignore_unknown_fields = true,
};
var token_stream = std.json.TokenStream.init(response_bytes);
const response = try std.json.parse(Response(Result), &token_stream, parse_options);
errdefer std.json.parseFree(Response(Result), response, parse_options);
// TODO validate jsonrpc and id
return response;
}
};
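Taken together, requestDidOpen and requestGetResponse reduce a feature test to opening a document and issuing one typed request. A sketch of that pattern (the URI, source, and position are placeholders, and the import path assumes a sibling test file under tests/):
const std = @import("std");
const zls = @import("zls");
const Context = @import("context.zig").Context;
const types = zls.types;
const requests = zls.requests;

test "Context request helpers sketch" {
    var ctx = try Context.init();
    defer ctx.deinit();

    try ctx.requestDidOpen("file:///test.zig", "const foo = 42;\nconst bar = foo;");

    const request = requests.References{
        .params = .{
            .textDocument = .{ .uri = "file:///test.zig" },
            .position = .{ .line = 0, .character = 7 },
            .context = .{ .includeDeclaration = true },
        },
    };
    // The result type mirrors what the server may send, including `null`.
    const response = try ctx.requestGetResponse(?[]types.Location, "textDocument/references", request);
    defer response.deinit();
    if (response.result) |locations| {
        for (locations) |location| {
            try std.testing.expectEqualStrings("file:///test.zig", location.uri);
        }
    }
}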

View File

@ -1,73 +1,75 @@
const std = @import("std");
const zls = @import("zls");
pub const Placeholder = struct {
loc: Loc,
pub const Loc = std.zig.Token.Loc;
pub fn placeholderSlice(self: Placeholder, source: []const u8) []const u8 {
return source[self.loc.start..self.loc.end];
}
};
const offsets = zls.offsets;
/// returns an array of all placeholder locations
pub fn collectPlaceholder(allocator: std.mem.Allocator, source: []const u8) ![]Placeholder {
var placeholders = std.ArrayListUnmanaged(Placeholder){};
errdefer placeholders.deinit(allocator);
pub fn collectPlaceholderLocs(allocator: std.mem.Allocator, source: []const u8) ![]offsets.Loc {
var locations = std.ArrayListUnmanaged(offsets.Loc){};
errdefer locations.deinit(allocator);
var source_index: usize = 0;
while (std.mem.indexOfScalarPos(u8, source, source_index, '<')) |start_index| {
const end_index = std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid;
const end_index = 1 + (std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid);
try placeholders.append(allocator, .{ .loc = .{
try locations.append(allocator, .{
.start = start_index,
.end = end_index,
} });
});
source_index = end_index + 1;
source_index = end_index;
}
return placeholders.toOwnedSlice(allocator);
return locations.toOwnedSlice(allocator);
}
/// returns `source` without any placeholders
pub fn clearPlaceholders(allocator: std.mem.Allocator, source: []const u8) ![]const u8 {
/// returns `source` where every placeholder is replaced with `new_name`
pub fn replacePlaceholders(allocator: std.mem.Allocator, source: []const u8, new_name: []const u8) ![]const u8 {
var output = std.ArrayListUnmanaged(u8){};
errdefer output.deinit(allocator);
var source_index: usize = 0;
while (std.mem.indexOfScalarPos(u8, source, source_index, '<')) |start_index| {
try output.appendSlice(allocator, source[source_index..start_index]);
try output.appendSlice(allocator, new_name);
source_index = std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid;
source_index += 1;
source_index = 1 + (std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid);
}
try output.appendSlice(allocator, source[source_index..source.len]);
return output.toOwnedSlice(allocator);
}
const CollectClearPlaceholdersResult = struct {
/// placeholders relative to the `source` parameter in `collectClearPlaceholders`
placeholders: []Placeholder,
/// placeholders locations to `source`
placeholder_locations: []usize,
/// source without any placeholders
source: []const u8,
/// returns `source` without any placeholders
pub fn clearPlaceholders(allocator: std.mem.Allocator, source: []const u8) ![]const u8 {
return replacePlaceholders(allocator, source, "");
}
pub fn deinit(self: @This(), allocator: std.mem.Allocator) void {
allocator.free(self.placeholders);
allocator.free(self.placeholder_locations);
allocator.free(self.source);
const CollectPlaceholdersResult = struct {
/// list of all placeholders with their old and new locations
locations: std.MultiArrayList(LocPair),
/// equivalent to calling `replacePlaceholders(source, new_name)`
new_source: []const u8,
pub const LocPair = struct {
/// placeholder location relative to the `source` parameter
old: offsets.Loc,
/// placeholder location relative to `new_source`
new: offsets.Loc,
};
pub fn deinit(self: *@This(), allocator: std.mem.Allocator) void {
self.locations.deinit(allocator);
allocator.free(self.new_source);
}
};
/// see `CollectPlaceholdersResult`
pub fn collectClearPlaceholders(allocator: std.mem.Allocator, source: []const u8) !CollectClearPlaceholdersResult {
var placeholders = std.ArrayListUnmanaged(Placeholder){};
errdefer placeholders.deinit(allocator);
pub fn collectClearPlaceholders(allocator: std.mem.Allocator, source: []const u8) !CollectPlaceholdersResult {
return collectReplacePlaceholders(allocator, source, "");
}
var locations = std.ArrayListUnmanaged(usize){};
pub fn collectReplacePlaceholders(allocator: std.mem.Allocator, source: []const u8, new_name: []const u8) !CollectPlaceholdersResult {
var locations = std.MultiArrayList(CollectPlaceholdersResult.LocPair){};
errdefer locations.deinit(allocator);
var new_source = std.ArrayListUnmanaged(u8){};
@ -76,27 +78,84 @@ pub fn collectClearPlaceholders(allocator: std.mem.Allocator, source: []const u8
var source_index: usize = 0;
var new_source_index: usize = 0;
while (std.mem.indexOfScalarPos(u8, source, source_index, '<')) |start_index| {
const end_index = std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid;
const end_index = 1 + (std.mem.indexOfScalarPos(u8, source, start_index + 1, '>') orelse return error.Invalid);
const placeholder = Placeholder{ .loc = .{
.start = start_index + 1,
const old_loc: offsets.Loc = .{
.start = start_index,
.end = end_index,
} };
};
defer source_index = old_loc.end;
try placeholders.append(allocator, placeholder);
const text = source[source_index..start_index];
new_source_index = new_source_index + (start_index - source_index);
try locations.append(allocator, new_source_index);
try new_source.appendSlice(allocator, source[source_index..start_index]);
const new_loc: offsets.Loc = .{
.start = new_source_index + text.len,
.end = new_source_index + text.len + new_name.len,
};
defer new_source_index = new_loc.end;
source_index = end_index + 1;
try locations.append(allocator, .{
.old = old_loc,
.new = new_loc,
});
try new_source.appendSlice(allocator, text);
try new_source.appendSlice(allocator, new_name);
}
try new_source.appendSlice(allocator, source[source_index..source.len]);
return CollectClearPlaceholdersResult{
.placeholders = placeholders.toOwnedSlice(allocator),
.placeholder_locations = locations.toOwnedSlice(allocator),
.source = new_source.toOwnedSlice(allocator),
return CollectPlaceholdersResult{
.locations = locations,
.new_source = new_source.toOwnedSlice(allocator),
};
}
fn testCollectReplacePlaceholders(
source: []const u8,
expected_source: []const u8,
expected_old_locs: []const offsets.Loc,
expected_new_locs: []const offsets.Loc,
) !void {
const allocator = std.testing.allocator;
const new_name = "foo";
var result = try collectReplacePlaceholders(allocator, source, new_name);
defer result.deinit(allocator);
const expected_old_locs2 = try collectPlaceholderLocs(allocator, source);
defer allocator.free(expected_old_locs2);
const expected_source2 = try replacePlaceholders(allocator, source, new_name);
defer allocator.free(expected_source2);
try std.testing.expectEqualStrings(expected_source, expected_source2);
try std.testing.expectEqualSlices(offsets.Loc, expected_old_locs, expected_old_locs2);
try std.testing.expectEqualStrings(expected_source, result.new_source);
try std.testing.expectEqualSlices(offsets.Loc, expected_old_locs, result.locations.items(.old));
try std.testing.expectEqualSlices(offsets.Loc, expected_new_locs, result.locations.items(.new));
}
test "helper - collectReplacePlaceholders" {
try testCollectReplacePlaceholders("", "", &.{}, &.{});
try testCollectReplacePlaceholders("text", "text", &.{}, &.{});
try testCollectReplacePlaceholders("<>", "foo", &.{
.{ .start = 0, .end = 2 },
}, &.{
.{ .start = 0, .end = 3 },
});
try testCollectReplacePlaceholders("a<>b", "afoob", &.{
.{ .start = 1, .end = 3 },
}, &.{
.{ .start = 1, .end = 4 },
});
try testCollectReplacePlaceholders("<><>", "foofoo", &.{
.{ .start = 0, .end = 2 },
.{ .start = 2, .end = 4 },
}, &.{
.{ .start = 0, .end = 3 },
.{ .start = 3, .end = 6 },
});
}

View File

@ -1,8 +1,9 @@
const std = @import("std");
const zls = @import("zls");
const helper = @import("helper");
const Context = @import("context").Context;
const helper = @import("../helper.zig");
const Context = @import("../context.zig").Context;
const ErrorBuilder = @import("../ErrorBuilder.zig");
const types = zls.types;
const offsets = zls.offsets;
@ -66,84 +67,70 @@ test "inlayhints - builtin call" {
}
fn testInlayHints(source: []const u8) !void {
const phr = try helper.collectClearPlaceholders(allocator, source);
var phr = try helper.collectClearPlaceholders(allocator, source);
defer phr.deinit(allocator);
var ctx = try Context.init();
defer ctx.deinit();
const open_document = requests.OpenDocument{
.params = .{
.textDocument = .{
.uri = "file:///test.zig",
// .languageId = "zig",
// .version = 420,
.text = phr.source,
},
},
};
const did_open_method = try std.json.stringifyAlloc(allocator, open_document.params, .{});
defer allocator.free(did_open_method);
try ctx.request("textDocument/didOpen", did_open_method, null);
try ctx.requestDidOpen("file:///test.zig", phr.new_source);
const range = types.Range{
.start = types.Position{ .line = 0, .character = 0 },
.end = offsets.indexToPosition(phr.source, phr.source.len, .utf16),
.end = offsets.indexToPosition(phr.new_source, phr.new_source.len, .utf16),
};
const method = try std.json.stringifyAlloc(allocator, .{
.textDocument = .{
.uri = "file:///test.zig",
},
.range = range,
}, .{});
defer allocator.free(method);
const response_bytes = try ctx.requestAlloc("textDocument/inlayHint", method);
defer allocator.free(response_bytes);
const InlayHint = struct {
position: types.Position,
label: []const u8,
kind: types.InlayHintKind,
};
const Response = struct {
jsonrpc: []const u8,
id: types.RequestId,
result: []InlayHint,
const request = requests.InlayHint{
.params = .{
.textDocument = .{ .uri = "file:///test.zig" },
.range = range,
},
};
const parse_options = std.json.ParseOptions{
.allocator = allocator,
.ignore_unknown_fields = true,
const response = try ctx.requestGetResponse(?[]InlayHint, "textDocument/inlayHint", request);
defer response.deinit();
const hints: []InlayHint = response.result orelse {
std.debug.print("Server returned `null` as the result\n", .{});
return error.InvalidResponse;
};
var token_stream = std.json.TokenStream.init(response_bytes);
var response = try std.json.parse(Response, &token_stream, parse_options);
defer std.json.parseFree(Response, response, parse_options);
const hints = response.result;
var error_builder = ErrorBuilder.init(allocator, phr.new_source);
defer error_builder.deinit();
errdefer error_builder.writeDebug();
try std.testing.expectEqual(phr.placeholder_locations.len, hints.len);
var i: usize = 0;
outer: while (i < phr.locations.len) : (i += 1) {
const old_loc = phr.locations.items(.old)[i];
const new_loc = phr.locations.items(.new)[i];
outer: for (phr.placeholder_locations) |loc, i| {
const name = phr.placeholders[i].placeholderSlice(source);
const expected_name = offsets.locToSlice(source, old_loc);
const expected_label = expected_name[1 .. expected_name.len - 1]; // convert <name> to name
const position = offsets.indexToPosition(phr.source, loc, .utf16);
const position = offsets.indexToPosition(phr.new_source, new_loc.start, ctx.server.offset_encoding);
for (hints) |hint| {
if (position.line != hint.position.line or position.character != hint.position.character) continue;
const actual_label = hint.label[0 .. hint.label.len - 1]; // exclude :
try std.testing.expect(hint.label.len != 0);
const trimmedLabel = hint.label[0 .. hint.label.len - 1]; // exclude :
try std.testing.expectEqualStrings(name, trimmedLabel);
try std.testing.expectEqual(types.InlayHintKind.Parameter, hint.kind);
if (!std.mem.eql(u8, expected_label, actual_label)) {
try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", new_loc, .err, .{ expected_label, actual_label });
}
if (hint.kind != types.InlayHintKind.Parameter) {
try error_builder.msgAtLoc("hint kind should be `{s}` but got `{s}`!", new_loc, .err, .{ @tagName(types.InlayHintKind.Parameter), @tagName(hint.kind) });
}
continue :outer;
}
std.debug.print("Placeholder '{s}' at {}:{} (line:colon) not found!", .{ name, position.line, position.character });
return error.PlaceholderNotFound;
try error_builder.msgAtLoc("expected hint `{s}` here", new_loc, .err, .{expected_label});
}
if (error_builder.hasMessages()) return error.InvalidResponse;
}

View File

@ -0,0 +1,189 @@
const std = @import("std");
const zls = @import("zls");
const helper = @import("../helper.zig");
const Context = @import("../context.zig").Context;
const ErrorBuilder = @import("../ErrorBuilder.zig");
const types = zls.types;
const requests = zls.requests;
const offsets = zls.offsets;
const allocator: std.mem.Allocator = std.testing.allocator;
// TODO fix references so that we can stop skipping these tests
const skip_references_tests = true;
test "references" {
if (skip_references_tests) return error.SkipZigTest;
try testReferences(
\\const <0> = 0;
\\const foo = <0>;
);
try testReferences(
\\var <0> = 0;
\\var foo = <0>;
);
try testReferences(
\\const <0> = struct {};
\\var foo: <0> = <0>{};
);
try testReferences(
\\const <0> = enum {};
\\var foo: <0> = undefined;
);
try testReferences(
\\const <0> = union {};
\\var foo: <0> = <0>{};
);
try testReferences(
\\fn <0>() void {}
\\var foo = <0>();
);
try testReferences(
\\const <0> = error{};
\\fn bar() <0>!void {}
);
}
test "references - global scope" {
if (skip_references_tests) return error.SkipZigTest;
try testReferences(
\\const foo = <0>;
\\const <0> = 0;
\\const bar = <0>;
);
}
test "references - local scope" {
try testReferences(
\\fn foo(<0>: u32, bar: u32) void {
\\ return <0> + bar;
\\}
);
if (skip_references_tests) return error.SkipZigTest;
try testReferences(
\\const foo = blk: {
\\ _ = blk: {
\\ const <0> = 0;
\\ break :blk <0>;
\\ };
\\ const <1> = 0;
\\ break :blk <1>;
\\};
\\const bar = foo;
);
}
test "references - label" {
if (skip_references_tests) return error.SkipZigTest;
try testReferences(
\\const foo = <0>: {
\\ break :<0> 0;
\\};
);
}
fn testReferences(source: []const u8) !void {
const file_uri = "file:///test.zig";
const new_name = "placeholder";
var phr = try helper.collectReplacePlaceholders(allocator, source, new_name);
defer phr.deinit(allocator);
var ctx = try Context.init();
defer ctx.deinit();
try ctx.requestDidOpen(file_uri, phr.new_source);
var i: usize = 0;
while (i < phr.locations.len) : (i += 1) {
const var_loc = phr.locations.items(.old)[i];
const var_name = offsets.locToSlice(source, var_loc);
const var_loc_middle = var_loc.start + (var_loc.end - var_loc.start) / 2;
const request = requests.References{
.params = .{
.textDocument = .{ .uri = file_uri },
.position = offsets.indexToPosition(source, var_loc_middle, ctx.server.offset_encoding),
.context = .{ .includeDeclaration = true },
},
};
const response = try ctx.requestGetResponse(?[]types.Location, "textDocument/references", request);
defer response.deinit();
const locations: []types.Location = response.result orelse {
std.debug.print("Server returned `null` as the result\n", .{});
return error.InvalidResponse;
};
for (locations) |response_location| {
const actual_name = offsets.rangeToSlice(phr.new_source, response_location.range, ctx.server.offset_encoding);
try std.testing.expectEqualStrings(file_uri, response_location.uri);
try std.testing.expectEqualStrings(new_name, actual_name);
}
// collect all new placeholder locations with the given name
const expected_locs: []offsets.Loc = blk: {
var locs = std.ArrayListUnmanaged(offsets.Loc){};
errdefer locs.deinit(allocator);
var j: usize = 0;
while (j < phr.locations.len) : (j += 1) {
const old_loc = phr.locations.items(.old)[j];
const new_loc = phr.locations.items(.new)[j];
const old_loc_name = offsets.locToSlice(source, old_loc);
if (!std.mem.eql(u8, var_name, old_loc_name)) continue;
try locs.append(allocator, new_loc);
}
break :blk locs.toOwnedSlice(allocator);
};
defer allocator.free(expected_locs);
var error_builder = ErrorBuilder.init(allocator, phr.new_source);
defer error_builder.deinit();
errdefer {
const note_loc = phr.locations.items(.new)[i];
error_builder.msgAtLoc("asked for references here", note_loc, .info, .{}) catch {};
error_builder.writeDebug();
}
// keeps track of expected locations that have been given by the server
// used to detect double references and missing references
var visited = try std.DynamicBitSetUnmanaged.initEmpty(allocator, expected_locs.len);
defer visited.deinit(allocator);
for (locations) |response_location| {
const actual_loc = offsets.rangeToLoc(phr.new_source, response_location.range, ctx.server.offset_encoding);
const index = found_index: {
for (expected_locs) |expected_loc, idx| {
if (expected_loc.start != actual_loc.start) continue;
if (expected_loc.end != actual_loc.end) continue;
break :found_index idx;
}
try error_builder.msgAtLoc("server returned unexpected reference!", actual_loc, .err, .{});
return error.UnexpectedReference;
};
if (visited.isSet(index)) {
try error_builder.msgAtLoc("server returned duplicate reference!", actual_loc, .err, .{});
return error.DuplicateReference;
} else {
visited.set(index);
}
}
var has_unvisited = false;
var unvisited_it = visited.iterator(.{ .kind = .unset });
while (unvisited_it.next()) |index| {
try error_builder.msgAtLoc("expected reference here!", expected_locs[index], .err, .{});
has_unvisited = true;
}
if (has_unvisited) return error.ExpectedReference;
}
}

View File

@ -1,7 +1,7 @@
const std = @import("std");
const zls = @import("zls");
const Context = @import("context").Context;
const Context = @import("../context.zig").Context;
const requests = zls.requests;

View File

@ -1,5 +1,7 @@
comptime {
_ = @import("helper.zig");
_ = @import("sessions.zig");
_ = @import("utility/offsets.zig");
_ = @import("utility/position_context.zig");
_ = @import("utility/uri.zig");
@ -11,6 +13,7 @@ comptime {
// LSP features
_ = @import("lsp_features/semantic_tokens.zig");
_ = @import("lsp_features/inlay_hints.zig");
_ = @import("lsp_features/references.zig");
// Language features
_ = @import("language_features/cimport.zig");