Merge branch 'master' into intern-pool

Techarix 2023-02-03 23:19:40 +01:00
commit ef0cfadf8a
22 changed files with 3397 additions and 862 deletions


@ -67,6 +67,8 @@ jobs:
path: zls
fetch-depth: 0
submodules: true
ref: ${{ github.event.pull_request.head.sha || github.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
- name: Build zls
run: |


@ -50,7 +50,7 @@ You can replace `master` with a specific zig version like `0.10.0`. Which versio
### Configuration Options
You can configure zls by editing your `zls.json` configuration file.
Running `zls --show-config-path` will a path to an already existing `zls.json` or a path to the local configuration folder instead.
Running `zls --show-config-path` will show a path to an already existing `zls.json` or a path to the local configuration folder instead.
zls will look for a `zls.json` configuration file in multiple locations with the following priority:
- In the local configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders/blob/master/RESOURCES.md#folder-list))
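For reference, a minimal `zls.json` could look like the following; the option names are taken from the `Config` fields referenced elsewhere in this commit, and the exact set of supported options may differ between zls versions:

```json
{
    "inlay_hints_show_builtin": true,
    "inlay_hints_exclude_single_argument": true,
    "inlay_hints_hide_redundant_param_names": false
}
```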
@ -112,8 +112,7 @@ When `value` is present, the option will be passed the same as in `zig build -Dn
## Features
`zls` supports most language features, including simple type function support, using namespace, payload capture type resolution, custom packages, `cImport` and others.
Currently there is no support for compile time evaluation.
`zls` supports most language features, including simple type function support, using namespace, payload capture type resolution, custom packages, cImport and others. Support for comptime and semantic analysis is Work-in-Progress.
The following LSP features are supported:
- Completions
@ -123,8 +122,11 @@ The following LSP features are supported:
- Find references
- Rename symbol
- Formatting using `zig fmt`
- Semantic token highlighting (implemented by a few clients including VS Code, kak and emacs lsp-mode)
- Inlay hints (implemented by VS Code)
- Semantic token highlighting
- Inlay hints
- Code actions
- Selection ranges
- Folding regions
## Related Projects


@ -7,15 +7,22 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1254+1f8f79cd5") catch return; // add helper functions to std.zig.Ast
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1524+efa25e7d5") catch return; // build API changes
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
}
const target = b.standardTargetOptions(.{});
const mode = b.standardReleaseOptions();
const exe = b.addExecutable("zls", "src/main.zig");
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const exe = b.addExecutable(.{
.name = "zls",
.root_source_file = .{ .path = "src/main.zig" },
.target = target,
.optimize = optimize,
});
const exe_options = b.addOptions();
exe.addOptions("build_options", exe_options);
@ -105,11 +112,17 @@ pub fn build(b: *std.build.Builder) !void {
const KNOWN_FOLDERS_DEFAULT_PATH = "src/known-folders/known-folders.zig";
const known_folders_path = b.option([]const u8, "known-folders", "Path to known-folders package (default: " ++ KNOWN_FOLDERS_DEFAULT_PATH ++ ")") orelse KNOWN_FOLDERS_DEFAULT_PATH;
exe.addPackage(.{ .name = "known-folders", .source = .{ .path = known_folders_path } });
exe.addPackage(.{
.name = "known-folders",
.source = .{ .path = known_folders_path },
});
const TRES_DEFAULT_PATH = "src/tres/tres.zig";
const tres_path = b.option([]const u8, "tres", "Path to tres package (default: " ++ TRES_DEFAULT_PATH ++ ")") orelse TRES_DEFAULT_PATH;
exe.addPackage(.{ .name = "tres", .source = .{ .path = tres_path } });
exe.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
});
const check_submodules_step = CheckSubmodulesStep.init(b, &.{
known_folders_path,
@ -137,12 +150,16 @@ pub fn build(b: *std.build.Builder) !void {
}
}
exe.setTarget(target);
exe.setBuildMode(mode);
exe.install();
const gen_exe = b.addExecutable("zls_gen", "src/config_gen/config_gen.zig");
gen_exe.addPackage(.{ .name = "tres", .source = .{ .path = tres_path } });
const gen_exe = b.addExecutable(.{
.name = "zls_gen",
.root_source_file = .{ .path = "src/config_gen/config_gen.zig" },
});
gen_exe.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
});
const gen_cmd = gen_exe.run();
gen_cmd.addArgs(&.{
@ -165,7 +182,13 @@ pub fn build(b: *std.build.Builder) !void {
"test-filter",
"Skip tests that do not match filter",
);
var tests = b.addTest("tests/tests.zig");
var tests = b.addTest(.{
.root_source_file = .{ .path = "tests/tests.zig" },
.target = target,
.optimize = .Debug,
});
tests.setFilter(test_filter);
if (coverage) {
@ -180,35 +203,42 @@ pub fn build(b: *std.build.Builder) !void {
});
}
tests.addPackage(.{ .name = "zls", .source = .{ .path = "src/zls.zig" }, .dependencies = exe.packages.items });
tests.addPackage(.{ .name = "tres", .source = .{ .path = tres_path } });
tests.setBuildMode(.Debug);
tests.setTarget(target);
tests.addPackage(.{
.name = "zls",
.source = .{ .path = "src/zls.zig" },
.dependencies = exe.packages.items,
});
tests.addPackage(.{
.name = "tres",
.source = .{ .path = tres_path },
});
test_step.dependOn(&tests.step);
var src_tests = b.addTest("src/zls.zig");
var src_tests = b.addTest(.{
.root_source_file = .{ .path = "src/zls.zig" },
.target = target,
.optimize = .Debug,
});
src_tests.setFilter(test_filter);
src_tests.setBuildMode(.Debug);
src_tests.setTarget(target);
test_step.dependOn(&src_tests.step);
}
const CheckSubmodulesStep = struct {
step: std.build.Step,
builder: *std.build.Builder,
step: std.Build.Step,
builder: *std.Build,
submodules: []const []const u8,
pub fn init(builder: *std.build.Builder, submodules: []const []const u8) *CheckSubmodulesStep {
pub fn init(builder: *std.Build, submodules: []const []const u8) *CheckSubmodulesStep {
var self = builder.allocator.create(CheckSubmodulesStep) catch unreachable;
self.* = CheckSubmodulesStep{
.builder = builder,
.step = std.build.Step.init(.custom, "Check Submodules", builder.allocator, make),
.step = std.Build.Step.init(.custom, "Check Submodules", builder.allocator, make),
.submodules = builder.allocator.dupe([]const u8, submodules) catch unreachable,
};
return self;
}
fn make(step: *std.build.Step) anyerror!void {
fn make(step: *std.Build.Step) anyerror!void {
const self = @fieldParentPtr(CheckSubmodulesStep, "step", step);
for (self.submodules) |path| {
const access = std.fs.accessAbsolute(self.builder.pathFromRoot(path), .{});


@ -626,9 +626,19 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro
var tree = try std.zig.parse(self.allocator, text);
errdefer tree.deinit(self.allocator);
var nodes = tree.nodes.toMultiArrayList();
try nodes.setCapacity(self.allocator, nodes.len);
tree.nodes = nodes.slice();
var tokens = tree.tokens.toMultiArrayList();
try tokens.setCapacity(self.allocator, tokens.len);
tree.tokens = tokens.slice();
var document_scope = try analysis.makeDocumentScope(self.allocator, tree);
errdefer document_scope.deinit(self.allocator);
try document_scope.scopes.setCapacity(self.allocator, document_scope.scopes.len);
break :blk Handle{
.open = open,
.uri = duped_uri,


@ -14,6 +14,7 @@ const offsets = @import("offsets.zig");
const semantic_tokens = @import("semantic_tokens.zig");
const inlay_hints = @import("inlay_hints.zig");
const code_actions = @import("code_actions.zig");
const folding_range = @import("folding_range.zig");
const shared = @import("shared.zig");
const Ast = std.zig.Ast;
const tracy = @import("tracy.zig");
@ -2470,6 +2471,7 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
const hover_kind: types.MarkupKind = if (server.client_capabilities.hover_supports_md) .markdown else .plaintext;
const loc = offsets.rangeToLoc(handle.text, request.range, server.offset_encoding);
// TODO cache hints per document
// because the function could be stored in a different document
@ -2480,27 +2482,44 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
server.config.*,
&server.document_store,
handle,
request.range,
loc,
hover_kind,
server.offset_encoding,
);
// and only convert and return all hints in range for every request
var visible_hints = hints;
const helper = struct {
fn lessThan(_: void, lhs: inlay_hints.InlayHint, rhs: inlay_hints.InlayHint) bool {
return lhs.token_index < rhs.token_index;
}
};
// small_hints should roughly be sorted by position
std.sort.sort(inlay_hints.InlayHint, hints, {}, helper.lessThan);
var last_index: usize = 0;
var last_position: types.Position = .{ .line = 0, .character = 0 };
var converted_hints = try server.arena.allocator().alloc(types.InlayHint, hints.len);
for (hints) |hint, i| {
if (isPositionBefore(hint.position, request.range.start)) continue;
visible_hints = hints[i..];
break;
}
for (visible_hints) |hint, i| {
if (isPositionBefore(hint.position, request.range.end)) continue;
visible_hints = visible_hints[0..i];
break;
const index = offsets.tokenToIndex(handle.tree, hint.token_index);
const position = offsets.advancePosition(
handle.tree.source,
last_position,
last_index,
index,
server.offset_encoding,
);
defer last_index = index;
defer last_position = position;
converted_hints[i] = types.InlayHint{
.position = position,
.label = .{ .string = hint.label },
.kind = hint.kind,
.tooltip = .{ .MarkupContent = hint.tooltip },
.paddingLeft = false,
.paddingRight = true,
};
}
return visible_hints;
return converted_hints;
}
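The handler above sorts the hints by token index and then converts each byte index to an LSP position incrementally, handing the previously computed (index, position) pair to `offsets.advancePosition` so that only the source between consecutive hints has to be scanned. A minimal sketch of that kind of incremental conversion, assuming a byte-counting (`utf-8`) offset encoding and a hypothetical local `Position` type; this is illustrative only, not the actual zls `offsets.advancePosition` implementation:

```zig
const std = @import("std");

const Position = struct { line: u32, character: u32 };

/// Advance a Position known to correspond to byte offset `last_index`
/// forward to byte offset `index`, scanning only the bytes in between.
/// Assumes a utf-8 offset encoding, where `character` counts bytes.
fn advancePositionUtf8(source: []const u8, last_position: Position, last_index: usize, index: usize) Position {
    std.debug.assert(last_index <= index);
    var position = last_position;
    for (source[last_index..index]) |c| {
        if (c == '\n') {
            position.line += 1;
            position.character = 0;
        } else {
            position.character += 1;
        }
    }
    return position;
}
```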
fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]types.CodeAction {
@ -2538,250 +2557,10 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
const Token = std.zig.Token;
const Node = Ast.Node;
const allocator = server.arena.allocator();
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
const allocator = server.arena.allocator();
const helper = struct {
const Inclusivity = enum { inclusive, exclusive };
fn addTokRange(
p_ranges: *std.ArrayList(types.FoldingRange),
tree: Ast,
start: Ast.TokenIndex,
end: Ast.TokenIndex,
end_reach: Inclusivity,
) std.mem.Allocator.Error!void {
if (tree.tokensOnSameLine(start, end)) return;
std.debug.assert(start <= end);
const start_index = offsets.tokenToIndex(tree, start);
const end_index = offsets.tokenToIndex(tree, end);
const start_line = std.mem.count(u8, tree.source[0..start_index], "\n");
const end_line = start_line + std.mem.count(u8, tree.source[start_index..end_index], "\n");
try p_ranges.append(.{
.startLine = @intCast(u32, start_line),
.endLine = @intCast(u32, end_line) - @boolToInt(end_reach == .exclusive),
});
}
};
// Used to store the result
var ranges = std.ArrayList(types.FoldingRange).init(allocator);
const token_tags: []const Token.Tag = handle.tree.tokens.items(.tag);
const node_tags: []const Node.Tag = handle.tree.nodes.items(.tag);
if (token_tags.len == 0) return null;
if (token_tags[0] == .container_doc_comment) {
var tok: Ast.TokenIndex = 1;
while (tok < token_tags.len) : (tok += 1) {
if (token_tags[tok] != .container_doc_comment) {
break;
}
}
if (tok > 1) { // each container doc comment has its own line, so each one counts for a line
try ranges.append(.{
.startLine = 0,
.endLine = tok - 1,
});
}
}
for (node_tags) |node_tag, i| {
const node = @intCast(Node.Index, i);
switch (node_tag) {
.root => continue,
// only fold the expression pertaining to the if statement, and the else statement, each respectively.
// TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures.
.@"if",
.if_simple,
=> {
const if_full = ast.fullIf(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr);
try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (if_full.ast.else_expr == 0) continue;
const start_tok_2 = if_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, if_full.ast.else_expr);
try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
// same as if/else
.@"for",
.for_simple,
.@"while",
.while_cont,
.while_simple,
=> {
const loop_full = ast.fullWhile(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr);
try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (loop_full.ast.else_expr == 0) continue;
const start_tok_2 = loop_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, loop_full.ast.else_expr);
try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
.global_var_decl,
.simple_var_decl,
.aligned_var_decl,
.container_field_init,
.container_field_align,
.container_field,
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> decl_node_blk: {
doc_comment_range: {
const first_tok: Ast.TokenIndex = handle.tree.firstToken(node);
if (first_tok == 0) break :doc_comment_range;
const end_doc_tok = first_tok - 1;
if (token_tags[end_doc_tok] != .doc_comment) break :doc_comment_range;
var start_doc_tok = end_doc_tok;
while (start_doc_tok != 0) {
if (token_tags[start_doc_tok - 1] != .doc_comment) break;
start_doc_tok -= 1;
}
try helper.addTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive);
}
// Function prototype folding regions
var buffer: [1]Node.Index = undefined;
const fn_proto = handle.tree.fullFnProto(&buffer, node) orelse
break :decl_node_blk;
const list_start_tok: Ast.TokenIndex = fn_proto.lparen;
const list_end_tok: Ast.TokenIndex = ast.lastToken(handle.tree, fn_proto.ast.proto_node);
if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk;
try helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive);
var it = fn_proto.iterate(&handle.tree);
while (ast.nextFnParam(&it)) |param| {
const doc_start_tok = param.first_doc_comment orelse continue;
var doc_end_tok = doc_start_tok;
while (token_tags[doc_end_tok + 1] == .doc_comment)
doc_end_tok += 1;
try helper.addTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive);
}
},
.@"catch",
.@"orelse",
.multiline_string_literal,
// TODO: Similar to condition expressions in control flow structures, should folding multiline grouped expressions be enabled?
// .grouped_expression,
=> {
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive);
},
// most other trivial cases can go through here.
else => {
switch (node_tag) {
.array_init,
.array_init_one,
.array_init_dot_two,
.array_init_one_comma,
.array_init_dot_two_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init_comma,
.struct_init,
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init_comma,
.@"switch",
.switch_comma,
=> {},
else => disallow_fold: {
if (ast.isBlock(handle.tree, node))
break :disallow_fold;
if (ast.isCall(handle.tree, node))
break :disallow_fold;
if (ast.isBuiltinCall(handle.tree, node))
break :disallow_fold;
if (ast.isContainer(handle.tree, node) and node_tag != .root)
break :disallow_fold;
continue; // no conditions met, continue iterating without adding this potential folding range
},
}
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive);
},
}
}
// Iterate over the source code and look for code regions with #region #endregion
{
// We add opened folding regions to a stack as we go and pop one off when we find a closing brace.
// As an optimization we start with a reasonable capacity, which should work well in most cases since
// people will almost never have nesting that deep.
var stack = try std.ArrayList(u32).initCapacity(allocator, 10);
var i: usize = 0;
var lines_count: u32 = 0;
while (i < handle.tree.source.len) : (i += 1) {
const slice = handle.tree.source[i..];
if (slice[0] == '\n') {
lines_count += 1;
}
if (std.mem.startsWith(u8, slice, "//#region")) {
try stack.append(lines_count);
}
if (std.mem.startsWith(u8, slice, "//#endregion") and stack.items.len > 0) {
const start_line = stack.pop();
const end_line = lines_count;
// Add brace pairs but discard those from the same line, no need to waste memory on them
if (start_line != end_line) {
try ranges.append(.{
.startLine = start_line,
.endLine = end_line,
});
}
}
}
}
return ranges.items;
return try folding_range.generateFoldingRanges(allocator, handle.tree, server.offset_encoding);
}
pub const SelectionRange = struct {


@ -3,6 +3,7 @@
//! when there are parser errors.
const std = @import("std");
const offsets = @import("offsets.zig");
const Ast = std.zig.Ast;
const Node = Ast.Node;
const full = Ast.full;
@ -254,29 +255,29 @@ pub fn forFull(tree: Ast, node: Node.Index) full.While {
pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
return switch (tree.nodes.items(.tag)[node]) {
.ptr_type_aligned => tree.ptrTypeAligned(node),
.ptr_type_sentinel => tree.ptrTypeSentinel(node),
.ptr_type => tree.ptrType(node),
.ptr_type_bit_range => tree.ptrTypeBitRange(node),
.ptr_type_aligned => ptrTypeAligned(tree, node),
.ptr_type_sentinel => ptrTypeSentinel(tree, node),
.ptr_type => ptrTypeSimple(tree, node),
.ptr_type_bit_range => ptrTypeBitRange(tree, node),
else => null,
};
}
pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
return switch (tree.nodes.items(.tag)[node]) {
.if_simple => tree.ifSimple(node),
.@"if" => tree.ifFull(node),
.if_simple => ifSimple(tree, node),
.@"if" => ifFull(tree, node),
else => null,
};
}
pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
return switch (tree.nodes.items(.tag)[node]) {
.while_simple => tree.whileSimple(node),
.while_cont => tree.whileCont(node),
.@"while" => tree.whileFull(node),
.for_simple => tree.forSimple(node),
.@"for" => tree.forFull(node),
.while_simple => whileSimple(tree, node),
.while_cont => whileCont(tree, node),
.@"while" => whileFull(tree, node),
.for_simple => forSimple(tree, node),
.@"for" => forFull(tree, node),
else => null,
};
}
@ -543,7 +544,9 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
n = tree.extra_data[cases.end - 1]; // last case
}
},
.container_decl_arg => {
.container_decl_arg,
.container_decl_arg_trailing,
=> {
const members = tree.extraData(datas[n].rhs, Node.SubRange);
if (members.end - members.start == 0) {
end_offset += 3; // for the rparen + lbrace + rbrace
@ -567,7 +570,6 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
},
.array_init_comma,
.struct_init_comma,
.container_decl_arg_trailing,
.switch_comma,
=> {
if (datas[n].rhs != 0) {
@ -974,32 +976,6 @@ pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
};
}
pub fn isCall(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.call,
.call_comma,
.call_one,
.call_one_comma,
.async_call,
.async_call_comma,
.async_call_one,
.async_call_one_comma,
=> true,
else => false,
};
}
pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.block_two,
.block_two_semicolon,
.block,
.block_semicolon,
=> true,
else => false,
};
}
/// returns a list of parameters
pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
const node_data = tree.nodes.items(.data);
@ -1139,3 +1115,452 @@ pub fn nextFnParam(it: *Ast.full.FnProto.Iterator) ?Ast.full.FnProto.Param {
it.tok_flag = false;
}
}
/// calls `callback` for every child of the given node.
/// see `nodeChildrenAlloc` for an allocating variant.
/// children are passed to the callback in the order in which they appear in the source text
pub fn iterateChildren(
tree: Ast,
node: Ast.Node.Index,
context: anytype,
comptime Error: type,
comptime callback: fn (@TypeOf(context), Ast.Node.Index) Error!void,
) Error!void {
const node_tags = tree.nodes.items(.tag);
const node_data = tree.nodes.items(.data);
if (node >= tree.nodes.len) return;
const tag = node_tags[node];
switch (tag) {
.@"usingnamespace",
.field_access,
.unwrap_optional,
.bool_not,
.negation,
.bit_not,
.negation_wrap,
.address_of,
.@"try",
.@"await",
.optional_type,
.deref,
.@"suspend",
.@"resume",
.@"return",
.grouped_expression,
.@"comptime",
.@"nosuspend",
.asm_simple,
=> {
try callback(context, node_data[node].lhs);
},
.test_decl,
.@"errdefer",
.@"defer",
.@"break",
.anyframe_type,
=> {
try callback(context, node_data[node].rhs);
},
.@"catch",
.equal_equal,
.bang_equal,
.less_than,
.greater_than,
.less_or_equal,
.greater_or_equal,
.assign_mul,
.assign_div,
.assign_mod,
.assign_add,
.assign_sub,
.assign_shl,
.assign_shl_sat,
.assign_shr,
.assign_bit_and,
.assign_bit_xor,
.assign_bit_or,
.assign_mul_wrap,
.assign_add_wrap,
.assign_sub_wrap,
.assign_mul_sat,
.assign_add_sat,
.assign_sub_sat,
.assign,
.merge_error_sets,
.mul,
.div,
.mod,
.array_mult,
.mul_wrap,
.mul_sat,
.add,
.sub,
.array_cat,
.add_wrap,
.sub_wrap,
.add_sat,
.sub_sat,
.shl,
.shl_sat,
.shr,
.bit_and,
.bit_xor,
.bit_or,
.@"orelse",
.bool_and,
.bool_or,
.array_type,
.array_access,
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
.switch_range,
.builtin_call_two,
.builtin_call_two_comma,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.container_field_init,
.container_field_align,
.block_two,
.block_two_semicolon,
.error_union,
=> {
try callback(context, node_data[node].lhs);
try callback(context, node_data[node].rhs);
},
.root,
.array_init_dot,
.array_init_dot_comma,
.struct_init_dot,
.struct_init_dot_comma,
.builtin_call,
.builtin_call_comma,
.container_decl,
.container_decl_trailing,
.tagged_union,
.tagged_union_trailing,
.block,
.block_semicolon,
=> {
for (tree.extra_data[node_data[node].lhs..node_data[node].rhs]) |child| {
try callback(context, child);
}
},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = tree.fullVarDecl(node).?.ast;
try callback(context, var_decl.type_node);
try callback(context, var_decl.align_node);
try callback(context, var_decl.addrspace_node);
try callback(context, var_decl.section_node);
try callback(context, var_decl.init_node);
},
.array_type_sentinel => {
const array_type = tree.arrayTypeSentinel(node).ast;
try callback(context, array_type.elem_count);
try callback(context, array_type.sentinel);
try callback(context, array_type.elem_type);
},
.ptr_type_aligned,
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
=> {
const ptr_type = fullPtrType(tree, node).?.ast;
try callback(context, ptr_type.sentinel);
try callback(context, ptr_type.align_node);
try callback(context, ptr_type.bit_range_start);
try callback(context, ptr_type.bit_range_end);
try callback(context, ptr_type.addrspace_node);
try callback(context, ptr_type.child_type);
},
.slice_open,
.slice,
.slice_sentinel,
=> {
const slice = tree.fullSlice(node).?;
try callback(context, slice.ast.sliced);
try callback(context, slice.ast.start);
try callback(context, slice.ast.end);
try callback(context, slice.ast.sentinel);
},
.array_init,
.array_init_comma,
=> {
const array_init = tree.arrayInit(node).ast;
try callback(context, array_init.type_expr);
for (array_init.elements) |child| {
try callback(context, child);
}
},
.struct_init,
.struct_init_comma,
=> {
const struct_init = tree.structInit(node).ast;
try callback(context, struct_init.type_expr);
for (struct_init.fields) |child| {
try callback(context, child);
}
},
.call,
.call_comma,
.async_call,
.async_call_comma,
=> {
const call = tree.callFull(node).ast;
try callback(context, call.fn_expr);
for (call.params) |child| {
try callback(context, child);
}
},
.@"switch",
.switch_comma,
=> {
const cond = node_data[node].lhs;
const extra = tree.extraData(node_data[node].rhs, Ast.Node.SubRange);
const cases = tree.extra_data[extra.start..extra.end];
try callback(context, cond);
for (cases) |child| {
try callback(context, child);
}
},
.switch_case_one,
.switch_case_inline_one,
.switch_case,
.switch_case_inline,
=> {
const switch_case = tree.fullSwitchCase(node).?.ast;
for (switch_case.values) |child| {
try callback(context, child);
}
try callback(context, switch_case.target_expr);
},
.while_simple,
.while_cont,
.@"while",
.for_simple,
.@"for",
=> {
const while_ast = fullWhile(tree, node).?.ast;
try callback(context, while_ast.cond_expr);
try callback(context, while_ast.cont_expr);
try callback(context, while_ast.then_expr);
try callback(context, while_ast.else_expr);
},
.@"if",
.if_simple,
=> {
const if_ast = fullIf(tree, node).?.ast;
try callback(context, if_ast.cond_expr);
try callback(context, if_ast.then_expr);
try callback(context, if_ast.else_expr);
},
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
.fn_decl,
=> {
var buffer: [1]Node.Index = undefined;
const fn_proto = tree.fullFnProto(&buffer, node).?;
for (fn_proto.ast.params) |child| {
try callback(context, child);
}
try callback(context, fn_proto.ast.align_expr);
try callback(context, fn_proto.ast.addrspace_expr);
try callback(context, fn_proto.ast.section_expr);
try callback(context, fn_proto.ast.callconv_expr);
try callback(context, fn_proto.ast.return_type);
},
.container_decl_arg,
.container_decl_arg_trailing,
=> {
const decl = tree.containerDeclArg(node).ast;
try callback(context, decl.arg);
for (decl.members) |child| {
try callback(context, child);
}
},
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
=> {
const decl = tree.taggedUnionEnumTag(node).ast;
try callback(context, decl.arg);
for (decl.members) |child| {
try callback(context, child);
}
},
.container_field => {
const field = tree.containerField(node).ast;
try callback(context, field.type_expr);
try callback(context, field.align_expr);
try callback(context, field.value_expr);
},
.@"asm" => {
const asm_ast = tree.asmFull(node).ast;
try callback(context, asm_ast.template);
for (asm_ast.items) |child| {
try callback(context, child);
}
},
.asm_output,
.asm_input,
=> {}, // TODO
.@"continue",
.anyframe_literal,
.char_literal,
.number_literal,
.unreachable_literal,
.identifier,
.enum_literal,
.string_literal,
.multiline_string_literal,
.error_set_decl,
.error_value,
=> {},
}
}
/// recursively calls `callback` for every child of the given node.
/// see `nodeChildrenRecursiveAlloc` for an allocating variant.
pub fn iterateChildrenRecursive(
tree: Ast,
node: Ast.Node.Index,
context: anytype,
comptime Error: type,
comptime callback: fn (@TypeOf(context), Ast.Node.Index) Error!void,
) Error!void {
const RecursiveContext = struct {
tree: Ast,
context: @TypeOf(context),
fn recursive_callback(self: @This(), child_node: Ast.Node.Index) Error!void {
if (child_node == 0) return;
try callback(self.context, child_node);
try iterateChildrenRecursive(self.tree, child_node, self.context, Error, callback);
}
};
try iterateChildren(tree, node, RecursiveContext{
.tree = tree,
.context = context,
}, Error, RecursiveContext.recursive_callback);
}
/// returns the children of the given node.
/// see `iterateChildren` for a callback variant
/// caller owns the returned memory
pub fn nodeChildrenAlloc(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}![]Ast.Node.Index {
const Context = struct {
children: *std.ArrayList(Ast.Node.Index),
fn callback(self: @This(), child_node: Ast.Node.Index) error{OutOfMemory}!void {
if (child_node == 0) return;
try self.children.append(child_node);
}
};
var children = std.ArrayList(Ast.Node.Index).init(allocator);
errdefer children.deinit();
try iterateChildren(tree, node, Context{ .children = &children }, error{OutOfMemory}, Context.callback);
return children.toOwnedSlice();
}
/// returns the children of the given node.
/// see `iterateChildrenRecursive` for a callback variant
/// caller owns the returned memory
pub fn nodeChildrenRecursiveAlloc(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}![]Ast.Node.Index {
const Context = struct {
children: *std.ArrayList(Ast.Node.Index),
fn callback(self: @This(), child_node: Ast.Node.Index) error{OutOfMemory}!void {
if (child_node == 0) return;
try self.children.append(child_node);
}
};
var children = std.ArrayList(Ast.Node.Index).init(allocator);
errdefer children.deinit();
try iterateChildrenRecursive(tree, node, Context{ .children = &children }, error{OutOfMemory}, Context.callback);
return children.toOwnedSlice();
}
/// returns a list of nodes that together encloses the given source code range
/// caller owns the returned memory
pub fn nodesAtLoc(allocator: std.mem.Allocator, tree: Ast, loc: offsets.Loc) error{OutOfMemory}![]Ast.Node.Index {
std.debug.assert(loc.start <= loc.end and loc.end <= tree.source.len);
var nodes = std.ArrayListUnmanaged(Ast.Node.Index){};
errdefer nodes.deinit(allocator);
var parent: Ast.Node.Index = 0; // root node
try nodes.ensureTotalCapacity(allocator, 32);
while (true) {
const children = try nodeChildrenAlloc(allocator, tree, parent);
defer allocator.free(children);
var children_loc: ?offsets.Loc = null;
for (children) |child_node| {
const child_loc = offsets.nodeToLoc(tree, child_node);
const merge_child = offsets.locIntersect(loc, child_loc) or offsets.locInside(child_loc, loc);
if (merge_child) {
children_loc = if (children_loc) |l| offsets.locMerge(l, child_loc) else child_loc;
try nodes.append(allocator, child_node);
} else {
if (nodes.items.len != 0) break;
}
}
if (children_loc == null or !offsets.locInside(loc, children_loc.?)) {
nodes.clearRetainingCapacity();
nodes.appendAssumeCapacity(parent); // capacity is never 0
return try nodes.toOwnedSlice(allocator);
}
if (nodes.items.len == 1) {
parent = nodes.items[0];
nodes.clearRetainingCapacity();
} else {
return try nodes.toOwnedSlice(allocator);
}
}
}
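As a hypothetical usage sketch (the wrapper function, `gpa`, and the log call are illustrative and not part of this commit), the nodes enclosing a byte range can be collected and inspected like this:

```zig
const std = @import("std");
const ast = @import("ast.zig");
const offsets = @import("offsets.zig");

/// Hypothetical helper: log every node that together covers `loc`.
fn logNodesAtLoc(gpa: std.mem.Allocator, tree: std.zig.Ast, loc: offsets.Loc) !void {
    const covering = try ast.nodesAtLoc(gpa, tree, loc);
    defer gpa.free(covering);
    for (covering) |node| {
        std.log.debug("covering node {d}: '{s}'", .{ node, tree.getNodeSource(node) });
    }
}
```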


@ -531,7 +531,14 @@ fn collectBuiltinData(allocator: std.mem.Allocator, version: []const u8, langref
switch (tokenizer.next().id) {
.Separator => {
std.debug.assert(tokenizer.next().id == .TagContent);
std.debug.assert(tokenizer.next().id == .BracketClose);
switch (tokenizer.next().id) {
.Separator => {
std.debug.assert(tokenizer.next().id == .TagContent);
std.debug.assert(tokenizer.next().id == .BracketClose);
},
.BracketClose => {},
else => unreachable,
}
},
.BracketClose => {},
else => unreachable,
@ -865,7 +872,7 @@ const Response = union(enum) {
fn httpGET(allocator: std.mem.Allocator, uri: std.Uri) !Response {
var client = std.http.Client{ .allocator = allocator };
defer client.deinit(allocator);
defer client.deinit();
try client.ca_bundle.rescan(allocator);
var request = try client.request(uri, .{}, .{});

src/data/0.10.1.zig (new file, 2066 lines): diff suppressed because it is too large.


@ -9,4 +9,5 @@ pub usingnamespace switch (build_options.data_version) {
.@"0.9.0" => @import("0.9.0.zig"),
.@"0.9.1" => @import("0.9.1.zig"),
.@"0.10.0" => @import("0.10.0.zig"),
.@"0.10.1" => @import("0.10.1.zig"),
};


@ -1600,7 +1600,7 @@ pub const builtins = [_]Builtin{
\\ try expect(src.line == 9);
\\ try expect(src.column == 17);
\\ try expect(std.mem.endsWith(u8, src.fn_name, "doTheTest"));
\\ try expect(std.mem.endsWith(u8, src.file, "source_location.zig"));
\\ try expect(std.mem.endsWith(u8, src.file, "test_src_builtin.zig"));
\\}
\\```
,
@ -1914,7 +1914,7 @@ pub const builtins = [_]Builtin{
\\ - [enum](https://ziglang.org/documentation/master/#enum)
\\ - [Enum Literals](https://ziglang.org/documentation/master/#Enum-Literals)
\\ - [union](https://ziglang.org/documentation/master/#union)
\\`@Type` is not available for [Functions](https://ziglang.org/documentation/master/#Functions).
\\ - [Functions](https://ziglang.org/documentation/master/#Functions)
,
.arguments = &.{
"comptime info: std.builtin.Type",

src/folding_range.zig (new file, 313 lines)

@ -0,0 +1,313 @@
const std = @import("std");
const ast = @import("ast.zig");
const types = @import("lsp.zig");
const offsets = @import("offsets.zig");
const Ast = std.zig.Ast;
const FoldingRange = struct {
loc: offsets.Loc,
kind: ?types.FoldingRangeKind = null,
};
const Inclusivity = enum { inclusive, exclusive };
const Builder = struct {
allocator: std.mem.Allocator,
locations: std.ArrayListUnmanaged(FoldingRange),
tree: Ast,
encoding: offsets.Encoding,
pub fn deinit(builder: *Builder) void {
builder.locations.deinit(builder.allocator);
}
pub fn add(
builder: *Builder,
kind: ?types.FoldingRangeKind,
start: Ast.TokenIndex,
end: Ast.TokenIndex,
start_reach: Inclusivity,
end_reach: Inclusivity,
) error{OutOfMemory}!void {
if (builder.tree.tokensOnSameLine(start, end)) return;
std.debug.assert(start <= end);
const start_loc = offsets.tokenToLoc(builder.tree, start);
const end_loc = offsets.tokenToLoc(builder.tree, end);
try builder.locations.append(builder.allocator, .{
.loc = .{
.start = if (start_reach == .exclusive) start_loc.end else start_loc.start,
.end = if (end_reach == .exclusive) end_loc.start else end_loc.end,
},
.kind = kind,
});
}
pub fn addNode(
builder: *Builder,
kind: ?types.FoldingRangeKind,
node: Ast.Node.Index,
start_reach: Inclusivity,
end_reach: Inclusivity,
) error{OutOfMemory}!void {
try builder.add(kind, builder.tree.firstToken(node), ast.lastToken(builder.tree, node), start_reach, end_reach);
}
pub fn getRanges(builder: Builder) error{OutOfMemory}![]types.FoldingRange {
var result = try builder.allocator.alloc(types.FoldingRange, builder.locations.items.len);
errdefer builder.allocator.free(result);
for (result) |*r, i| {
r.* = .{
.startLine = undefined,
.endLine = undefined,
.kind = builder.locations.items[i].kind,
};
}
const Item = struct {
output: *types.FoldingRange,
input: *const FoldingRange,
where: enum { start, end },
const Self = @This();
fn getInputIndex(self: Self) usize {
return switch (self.where) {
.start => self.input.loc.start,
.end => self.input.loc.end,
};
}
fn lessThan(_: void, lhs: Self, rhs: Self) bool {
return lhs.getInputIndex() < rhs.getInputIndex();
}
};
// one item for every start and end position
var items = try builder.allocator.alloc(Item, builder.locations.items.len * 2);
defer builder.allocator.free(items);
for (builder.locations.items) |*folding_range, i| {
items[2 * i + 0] = .{ .output = &result[i], .input = folding_range, .where = .start };
items[2 * i + 1] = .{ .output = &result[i], .input = folding_range, .where = .end };
}
// sort items based on their source position
std.sort.sort(Item, items, {}, Item.lessThan);
var last_index: usize = 0;
var last_position: types.Position = .{ .line = 0, .character = 0 };
for (items) |item| {
const index = item.getInputIndex();
const position = offsets.advancePosition(builder.tree.source, last_position, last_index, index, builder.encoding);
defer last_index = index;
defer last_position = position;
switch (item.where) {
.start => {
item.output.startLine = position.line;
item.output.startCharacter = position.character;
},
.end => {
item.output.endLine = position.line;
item.output.endCharacter = position.character;
},
}
}
return result;
}
};
pub fn generateFoldingRanges(allocator: std.mem.Allocator, tree: Ast, encoding: offsets.Encoding) error{OutOfMemory}![]types.FoldingRange {
var builder = Builder{
.allocator = allocator,
.locations = .{},
.tree = tree,
.encoding = encoding,
};
defer builder.deinit();
const token_tags = tree.tokens.items(.tag);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
var start_doc_comment: ?Ast.TokenIndex = null;
var end_doc_comment: ?Ast.TokenIndex = null;
for (token_tags) |tag, i| {
const token = @intCast(Ast.TokenIndex, i);
switch (tag) {
.doc_comment,
.container_doc_comment,
=> {
if (start_doc_comment == null) {
start_doc_comment = token;
end_doc_comment = token;
} else {
end_doc_comment = token;
}
},
else => {
if (start_doc_comment != null and end_doc_comment != null) {
try builder.add(.comment, start_doc_comment.?, end_doc_comment.?, .inclusive, .inclusive);
start_doc_comment = null;
end_doc_comment = null;
}
},
}
}
// TODO add folding range normal comments
// TODO add folding range for top level `@Import()`
for (node_tags) |node_tag, i| {
const node = @intCast(Ast.Node.Index, i);
switch (node_tag) {
.root => continue,
// TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures.
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
// .fn_decl
=> {
var buffer: [1]Ast.Node.Index = undefined;
const fn_proto = tree.fullFnProto(&buffer, node).?;
const list_start_tok = fn_proto.lparen;
const list_end_tok = ast.lastToken(tree, node) -| 1;
if (list_start_tok > list_end_tok) continue; // Incomplete, ie `fn a()`
try builder.add(null, list_start_tok, list_end_tok, .exclusive, .exclusive);
},
.block_two,
.block_two_semicolon,
.block,
.block_semicolon,
=> {
try builder.addNode(null, node, .exclusive, .exclusive);
},
.@"switch",
.switch_comma,
=> {
const lhs = tree.nodes.items(.data)[node].lhs;
const start_tok = ast.lastToken(tree, lhs) + 2; // skip the rparen and land on the lbrace
const end_tok = ast.lastToken(tree, node);
try builder.add(null, start_tok, end_tok, .exclusive, .exclusive);
},
.switch_case_one,
.switch_case_inline_one,
.switch_case,
.switch_case_inline,
=> {
const switch_case = tree.fullSwitchCase(node).?.ast;
if (switch_case.values.len >= 4) {
const first_value = tree.firstToken(switch_case.values[0]);
const last_value = ast.lastToken(tree, switch_case.values[switch_case.values.len - 1]);
try builder.add(null, first_value, last_value, .inclusive, .inclusive);
}
},
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buffer, node).?;
if (container_decl.ast.members.len != 0) {
const first_member = container_decl.ast.members[0];
const start_tok = tree.firstToken(first_member) -| 1;
const end_tok = ast.lastToken(tree, node);
try builder.add(null, start_tok, end_tok, .exclusive, .exclusive);
}
},
.call,
.call_comma,
.call_one,
.call_one_comma,
.async_call,
.async_call_comma,
.async_call_one,
.async_call_one_comma,
=> {
const lparen = main_tokens[node];
try builder.add(null, lparen, ast.lastToken(tree, node), .exclusive, .exclusive);
},
// everything after here is mostly untested
.array_init,
.array_init_one,
.array_init_dot_two,
.array_init_one_comma,
.array_init_dot_two_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init_comma,
.struct_init,
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init_comma,
.builtin_call,
.builtin_call_comma,
.builtin_call_two,
.builtin_call_two_comma,
.multiline_string_literal,
.error_set_decl,
.test_decl,
=> {
try builder.addNode(null, node, .inclusive, .inclusive);
},
else => {},
}
}
// We add opened folding regions to a stack as we go and pop one off when we find a closing brace.
var stack = std.ArrayListUnmanaged(usize){};
var i: usize = 0;
while (std.mem.indexOfPos(u8, tree.source, i, "//#")) |possible_region| {
defer i = possible_region + "//#".len;
if (std.mem.startsWith(u8, tree.source[possible_region..], "//#region")) {
try stack.append(allocator, possible_region);
} else if (std.mem.startsWith(u8, tree.source[possible_region..], "//#endregion")) {
const start_index = stack.popOrNull() orelse break; // null means there are more endregions than regions
const end_index = offsets.lineLocAtIndex(tree.source, possible_region).end;
const is_same_line = std.mem.indexOfScalar(u8, tree.source[start_index..end_index], '\n') == null;
if (is_same_line) continue;
try builder.locations.append(allocator, .{
.loc = .{
.start = start_index,
.end = end_index,
},
.kind = .region,
});
}
}
return try builder.getRanges();
}
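The loop above scans the source for `//#region` and `//#endregion` comment markers and reports each matched pair that spans more than one line as a folding range of kind `.region`. An illustrative source fragment that would produce one such range:

```zig
//#region imports
const std = @import("std");
const builtin = @import("builtin");
//#endregion
```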


@ -16,123 +16,112 @@ const Config = @import("Config.zig");
/// non-configurable at runtime
pub const inlay_hints_exclude_builtins: []const []const u8 = &.{};
/// max number of children in a declaration/array-init/struct-init or similar
/// that will not get a visibility check
pub const inlay_hints_max_inline_children = 12;
/// checks whether node is inside the range
fn isNodeInRange(tree: Ast, node: Ast.Node.Index, range: types.Range) bool {
const endLocation = tree.tokenLocation(0, ast.lastToken(tree, node));
if (endLocation.line < range.start.line) return false;
const beginLocation = tree.tokenLocation(0, tree.firstToken(node));
if (beginLocation.line > range.end.line) return false;
return true;
}
pub const InlayHint = struct {
token_index: Ast.TokenIndex,
label: []const u8,
kind: types.InlayHintKind,
tooltip: types.MarkupContent,
};
const Builder = struct {
arena: std.mem.Allocator,
arena: *std.heap.ArenaAllocator,
store: *DocumentStore,
config: *const Config,
handle: *const DocumentStore.Handle,
hints: std.ArrayListUnmanaged(types.InlayHint),
hints: std.ArrayListUnmanaged(InlayHint),
hover_kind: types.MarkupKind,
encoding: offsets.Encoding,
fn appendParameterHint(self: *Builder, position: types.Position, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void {
// TODO allocation could be avoided by extending InlayHint.jsonStringify
fn appendParameterHint(self: *Builder, token_index: Ast.TokenIndex, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void {
// adding tooltip_noalias & tooltip_comptime to InlayHint should be enough
const tooltip_text = blk: {
if (tooltip.len == 0) break :blk "";
const prefix = if (tooltip_noalias) if (tooltip_comptime) "noalias comptime " else "noalias " else if (tooltip_comptime) "comptime " else "";
if (self.hover_kind == .markdown) {
break :blk try std.fmt.allocPrint(self.arena, "```zig\n{s}{s}\n```", .{ prefix, tooltip });
break :blk try std.fmt.allocPrint(self.arena.allocator(), "```zig\n{s}{s}\n```", .{ prefix, tooltip });
}
break :blk try std.fmt.allocPrint(self.arena, "{s}{s}", .{ prefix, tooltip });
break :blk try std.fmt.allocPrint(self.arena.allocator(), "{s}{s}", .{ prefix, tooltip });
};
try self.hints.append(self.arena, .{
.position = position,
.label = .{ .string = label },
.kind = types.InlayHintKind.Parameter,
.tooltip = .{ .MarkupContent = .{
try self.hints.append(self.arena.allocator(), .{
.token_index = token_index,
.label = try std.fmt.allocPrint(self.arena.allocator(), "{s}:", .{label}),
.kind = .Parameter,
.tooltip = .{
.kind = self.hover_kind,
.value = tooltip_text,
} },
.paddingLeft = false,
.paddingRight = true,
},
});
}
fn toOwnedSlice(self: *Builder) error{OutOfMemory}![]types.InlayHint {
return self.hints.toOwnedSlice(self.arena);
fn toOwnedSlice(self: *Builder) error{OutOfMemory}![]InlayHint {
return self.hints.toOwnedSlice(self.arena.allocator());
}
};
/// `call` is the function call
/// `decl_handle` should be a function prototype
/// writes parameter hints into `builder.hints`
fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, call: Ast.full.Call, decl_handle: analysis.DeclWithHandle) !void {
fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: analysis.DeclWithHandle) !void {
const handle = builder.handle;
const tree = handle.tree;
const decl = decl_handle.decl;
const decl_tree = decl_handle.handle.tree;
switch (decl.*) {
.ast_node => |fn_node| {
var buffer: [1]Ast.Node.Index = undefined;
if (decl_tree.fullFnProto(&buffer, fn_node)) |fn_proto| {
var i: usize = 0;
var it = fn_proto.iterate(&decl_tree);
const fn_node = switch (decl.*) {
.ast_node => |fn_node| fn_node,
else => return,
};
if (try analysis.hasSelfParam(arena, store, decl_handle.handle, fn_proto)) {
_ = ast.nextFnParam(&it);
}
var buffer: [1]Ast.Node.Index = undefined;
const fn_proto = decl_tree.fullFnProto(&buffer, fn_node) orelse return;
while (ast.nextFnParam(&it)) |param| : (i += 1) {
if (i >= call.ast.params.len) break;
const name_token = param.name_token orelse continue;
const name = decl_tree.tokenSlice(name_token);
var i: usize = 0;
var it = fn_proto.iterate(&decl_tree);
if (builder.config.inlay_hints_hide_redundant_param_names or builder.config.inlay_hints_hide_redundant_param_names_last_token) {
const last_param_token = tree.lastToken(call.ast.params[i]);
const param_name = tree.tokenSlice(last_param_token);
if (try analysis.hasSelfParam(builder.arena, builder.store, decl_handle.handle, fn_proto)) {
_ = ast.nextFnParam(&it);
}
if (std.mem.eql(u8, param_name, name)) {
if (tree.firstToken(call.ast.params[i]) == last_param_token) {
if (builder.config.inlay_hints_hide_redundant_param_names)
continue;
} else {
if (builder.config.inlay_hints_hide_redundant_param_names_last_token)
continue;
}
}
}
while (ast.nextFnParam(&it)) |param| : (i += 1) {
if (i >= call.ast.params.len) break;
const name_token = param.name_token orelse continue;
const name = decl_tree.tokenSlice(name_token);
const token_tags = decl_tree.tokens.items(.tag);
if (builder.config.inlay_hints_hide_redundant_param_names or builder.config.inlay_hints_hide_redundant_param_names_last_token) {
const last_param_token = tree.lastToken(call.ast.params[i]);
const param_name = tree.tokenSlice(last_param_token);
const no_alias = if (param.comptime_noalias) |t| token_tags[t] == .keyword_noalias or token_tags[t - 1] == .keyword_noalias else false;
const comp_time = if (param.comptime_noalias) |t| token_tags[t] == .keyword_comptime or token_tags[t - 1] == .keyword_comptime else false;
const tooltip = if (param.anytype_ellipsis3) |token|
if (token_tags[token] == .keyword_anytype) "anytype" else ""
else
offsets.nodeToSlice(decl_tree, param.type_expr);
try builder.appendParameterHint(
offsets.tokenToPosition(tree, tree.firstToken(call.ast.params[i]), builder.encoding),
name,
tooltip,
no_alias,
comp_time,
);
if (std.mem.eql(u8, param_name, name)) {
if (tree.firstToken(call.ast.params[i]) == last_param_token) {
if (builder.config.inlay_hints_hide_redundant_param_names)
continue;
} else {
if (builder.config.inlay_hints_hide_redundant_param_names_last_token)
continue;
}
}
},
else => {},
}
const token_tags = decl_tree.tokens.items(.tag);
const no_alias = if (param.comptime_noalias) |t| token_tags[t] == .keyword_noalias or token_tags[t - 1] == .keyword_noalias else false;
const comp_time = if (param.comptime_noalias) |t| token_tags[t] == .keyword_comptime or token_tags[t - 1] == .keyword_comptime else false;
const tooltip = if (param.anytype_ellipsis3) |token|
if (token_tags[token] == .keyword_anytype) "anytype" else ""
else
offsets.nodeToSlice(decl_tree, param.type_expr);
try builder.appendParameterHint(
tree.firstToken(call.ast.params[i]),
name,
tooltip,
no_alias,
comp_time,
);
}
}
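For illustration (a hypothetical call site, not code from this commit): with `inlay_hints_hide_redundant_param_names` enabled, the check above suppresses the hint when the argument is a lone identifier with the same name as the parameter, and keeps it otherwise:

```zig
fn scale(bar: f32) f32 {
    return bar * 2.0;
}

test "redundant parameter name hints" {
    const bar: f32 = 1.5;
    // argument is the single identifier `bar`, matching the parameter name,
    // so no `bar:` hint is emitted when redundant names are hidden
    _ = scale(bar);
    // argument differs from the parameter name, so a `bar:` hint is shown
    _ = scale(3.0);
}
```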
@ -164,7 +153,7 @@ fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, argum
}
try builder.appendParameterHint(
offsets.tokenToPosition(tree, tree.firstToken(parameters[i]), builder.encoding),
tree.firstToken(parameters[i]),
label orelse "",
std.mem.trim(u8, type_expr, " \t\n"),
no_alias,
@ -174,7 +163,7 @@ fn writeBuiltinHint(builder: *Builder, parameters: []const Ast.Node.Index, argum
}
/// takes an Ast.full.Call (a function call), analyzes its function expression, finds its declaration and writes parameter hints into `builder.hints`
fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, call: Ast.full.Call) !void {
fn writeCallNodeHint(builder: *Builder, call: Ast.full.Call) !void {
if (call.ast.params.len == 0) return;
if (builder.config.inlay_hints_exclude_single_argument and call.ast.params.len == 1) return;
@ -187,14 +176,11 @@ fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
switch (node_tags[call.ast.fn_expr]) {
.identifier => {
const location = tree.tokenLocation(0, main_tokens[call.ast.fn_expr]);
const source_index = offsets.tokenToIndex(tree, main_tokens[call.ast.fn_expr]);
const name = offsets.tokenToSlice(tree, main_tokens[call.ast.fn_expr]);
const absolute_index = location.line_start + location.column;
const name = tree.tokenSlice(main_tokens[call.ast.fn_expr]);
if (try analysis.lookupSymbolGlobal(store, arena, handle, name, absolute_index)) |decl_handle| {
try writeCallHint(builder, arena, store, call, decl_handle);
if (try analysis.lookupSymbolGlobal(builder.store, builder.arena, handle, name, source_index)) |decl_handle| {
try writeCallHint(builder, call, decl_handle);
}
},
.field_access => {
@ -205,23 +191,23 @@ fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
const start = offsets.tokenToIndex(tree, lhsToken);
const rhs_loc = offsets.tokenToLoc(tree, rhsToken);
var held_range = try arena.allocator().dupeZ(u8, handle.text[start..rhs_loc.end]);
var held_range = try builder.arena.allocator().dupeZ(u8, handle.text[start..rhs_loc.end]);
var tokenizer = std.zig.Tokenizer.init(held_range);
// note: we have the ast node, traversing it would probably yield better results
// than trying to re-tokenize and re-parse it
if (try analysis.getFieldAccessType(store, arena, handle, rhs_loc.end, &tokenizer)) |result| {
if (try analysis.getFieldAccessType(builder.store, builder.arena, handle, rhs_loc.end, &tokenizer)) |result| {
const container_handle = result.unwrapped orelse result.original;
switch (container_handle.type.data) {
.other => |container_handle_node| {
if (try analysis.lookupSymbolContainer(
store,
arena,
builder.store,
builder.arena,
.{ .node = container_handle_node, .handle = container_handle.handle },
tree.tokenSlice(rhsToken),
true,
)) |decl_handle| {
try writeCallHint(builder, arena, store, call, decl_handle);
try writeCallHint(builder, call, decl_handle);
}
},
else => {},
@ -234,44 +220,18 @@ fn writeCallNodeHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
}
}
/// HACK self-hosted has not implemented async yet
fn callWriteNodeInlayHint(allocator: std.mem.Allocator, args: anytype) error{OutOfMemory}!void {
if (zig_builtin.zig_backend == .other or zig_builtin.zig_backend == .stage1) {
const FrameSize = @sizeOf(@Frame(writeNodeInlayHint));
var child_frame = try allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
defer allocator.free(child_frame);
return await @asyncCall(child_frame, {}, writeNodeInlayHint, args);
} else {
// TODO find a non recursive solution
return @call(.auto, writeNodeInlayHint, args);
}
}
/// iterates over the ast and writes parameter hints into `builder.hints` for every function call and builtin call
/// nodes outside the given range are excluded
fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?Ast.Node.Index, range: types.Range) error{OutOfMemory}!void {
const node = maybe_node orelse return;
fn writeNodeInlayHint(
builder: *Builder,
node: Ast.Node.Index,
) error{OutOfMemory}!void {
const handle = builder.handle;
const tree = handle.tree;
const node_tags = tree.nodes.items(.tag);
const node_data = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
// std.log.info("max: {d} | curr: {d}", .{ node_data.len, node });
// if (node == 0 or node >= node_data.len) return;
if (node == 0) return;
// std.log.info("tag: {any}", .{node_tags[node]});
// std.log.info("src: {s}", .{tree.getNodeSource(node)});
var allocator = arena.allocator();
const tag = node_tags[node];
// NOTE traversing the ast instead of iterating over all nodes allows using visibility
// checks based on the given range which reduce runtimes by orders of magnitude for large files
switch (tag) {
.root => unreachable,
.call_one,
.call_one_comma,
.async_call_one,
@ -283,406 +243,61 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
=> {
var params: [1]Ast.Node.Index = undefined;
const call = tree.fullCall(&params, node).?;
try writeCallNodeHint(builder, arena, store, call);
for (call.ast.params) |param| {
if (call.ast.params.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, param, range)) continue;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, param, range });
}
try writeCallNodeHint(builder, call);
},
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
=> blk: {
var buffer: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(tree, node, &buffer).?;
if (builder.config.inlay_hints_show_builtin and params.len > 1) {
const name = tree.tokenSlice(main_tokens[node]);
if (!builder.config.inlay_hints_show_builtin or params.len <= 1) break :blk;
outer: for (data.builtins) |builtin| {
if (!std.mem.eql(u8, builtin.name, name)) continue;
const name = tree.tokenSlice(main_tokens[node]);
for (inlay_hints_exclude_builtins) |builtin_name| {
if (std.mem.eql(u8, builtin_name, name)) break :outer;
}
outer: for (data.builtins) |builtin| {
if (!std.mem.eql(u8, builtin.name, name)) continue;
try writeBuiltinHint(builder, params, builtin.arguments);
}
}
for (params) |param| {
if (params.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, param, range)) continue;
for (inlay_hints_exclude_builtins) |builtin_name| {
if (std.mem.eql(u8, builtin_name, name)) break :outer;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, param, range });
try writeBuiltinHint(builder, params, builtin.arguments);
}
},
.optional_type,
.array_type,
.@"continue",
.anyframe_type,
.anyframe_literal,
.char_literal,
.number_literal,
.unreachable_literal,
.identifier,
.enum_literal,
.string_literal,
.multiline_string_literal,
.error_set_decl,
=> {},
.array_type_sentinel => {
const array_type = tree.arrayTypeSentinel(node);
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_type.ast.sentinel, range });
},
.ptr_type_aligned,
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
=> {
const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.sentinel != 0) {
return try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.sentinel, range });
}
if (ptr_type.ast.align_node != 0) {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.align_node, range });
if (ptr_type.ast.bit_range_start != 0) {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.bit_range_start, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.bit_range_end, range });
}
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.child_type, range });
},
.@"usingnamespace",
.field_access,
.unwrap_optional,
.bool_not,
.negation,
.bit_not,
.negation_wrap,
.address_of,
.@"try",
.@"await",
.deref,
.@"suspend",
.@"resume",
.@"return",
.grouped_expression,
.@"comptime",
.@"nosuspend",
=> try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].lhs, range }),
.test_decl,
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
.@"errdefer",
.@"defer",
.@"break",
=> try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].rhs, range }),
.@"catch",
.equal_equal,
.bang_equal,
.less_than,
.greater_than,
.less_or_equal,
.greater_or_equal,
.assign_mul,
.assign_div,
.assign_mod,
.assign_add,
.assign_sub,
.assign_shl,
.assign_shl_sat,
.assign_shr,
.assign_bit_and,
.assign_bit_xor,
.assign_bit_or,
.assign_mul_wrap,
.assign_add_wrap,
.assign_sub_wrap,
.assign_mul_sat,
.assign_add_sat,
.assign_sub_sat,
.assign,
.merge_error_sets,
.mul,
.div,
.mod,
.array_mult,
.mul_wrap,
.mul_sat,
.add,
.sub,
.array_cat,
.add_wrap,
.sub_wrap,
.add_sat,
.sub_sat,
.shl,
.shl_sat,
.shr,
.bit_and,
.bit_xor,
.bit_or,
.@"orelse",
.bool_and,
.bool_or,
.array_access,
.switch_range,
.error_union,
=> {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].lhs, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].rhs, range });
},
.slice_open,
.slice,
.slice_sentinel,
=> {
const slice: Ast.full.Slice = tree.fullSlice(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sliced, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.start, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.end, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sentinel, range });
},
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init,
.array_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_init.ast.type_expr, range });
for (array_init.ast.elements) |elem| {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, elem, range });
}
},
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init,
.struct_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, struct_init.ast.type_expr, range });
for (struct_init.ast.fields) |field_init| {
if (struct_init.ast.fields.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, field_init, range)) continue;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, field_init, range });
}
},
.@"switch",
.switch_comma,
=> {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].lhs, range });
const extra = tree.extraData(node_data[node].rhs, Ast.Node.SubRange);
const cases = tree.extra_data[extra.start..extra.end];
for (cases) |case_node| {
if (cases.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, case_node, range)) continue;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, case_node, range });
}
},
.switch_case_one,
.switch_case,
.switch_case_inline_one,
.switch_case_inline,
=> {
const switch_case = tree.fullSwitchCase(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, switch_case.ast.target_expr, range });
},
.while_simple,
.while_cont,
.@"while",
.for_simple,
.@"for",
=> {
const while_node = ast.fullWhile(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cont_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.then_expr, range });
if (while_node.ast.else_expr != 0) {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.else_expr, range });
}
},
.if_simple,
.@"if",
=> {
const if_node = ast.fullIf(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.then_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.else_expr, range });
},
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
.fn_decl,
=> {
var buffer: [1]Ast.Node.Index = undefined;
const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buffer, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param_decl| {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, param_decl.type_expr, range });
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, fn_proto.ast.align_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, fn_proto.ast.addrspace_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, fn_proto.ast.section_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, fn_proto.ast.callconv_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, fn_proto.ast.return_type, range });
if (tag == .fn_decl) {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].rhs, range });
}
},
.container_decl,
.container_decl_trailing,
.container_decl_two,
.container_decl_two_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
=> {
var buffer: [2]Ast.Node.Index = undefined;
const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, decl.ast.arg, range });
for (decl.ast.members) |child| {
if (decl.ast.members.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, child, range)) continue;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, child, range });
}
},
.container_field_init,
.container_field_align,
.container_field,
=> {
const container_field = tree.fullContainerField(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.value_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.align_expr, range });
},
.block_two,
.block_two_semicolon,
=> {
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].lhs, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, node_data[node].rhs, range });
},
.block,
.block_semicolon,
=> {
const subrange = tree.extra_data[node_data[node].lhs..node_data[node].rhs];
for (subrange) |child| {
if (subrange.len > inlay_hints_max_inline_children) {
if (!isNodeInRange(tree, child, range)) continue;
}
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, child, range });
}
},
.asm_simple,
.@"asm",
.asm_output,
.asm_input,
=> {
const asm_node: Ast.full.Asm = tree.fullAsm(node) orelse return;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, asm_node.ast.template, range });
},
.error_value => {},
else => {},
}
}
/// creates a list of `InlayHint`'s from the given document
/// only parameter hints are created
/// only hints in the given loc are created
pub fn writeRangeInlayHint(
    arena: *std.heap.ArenaAllocator,
    config: Config,
    store: *DocumentStore,
    handle: *const DocumentStore.Handle,
    loc: offsets.Loc,
    hover_kind: types.MarkupKind,
    encoding: offsets.Encoding,
) error{OutOfMemory}![]InlayHint {
    var builder: Builder = .{
        .arena = arena,
        .store = store,
        .config = &config,
        .handle = handle,
        .hints = .{},
        .hover_kind = hover_kind,
        .encoding = encoding,
    };

    const nodes = try ast.nodesAtLoc(arena.allocator(), handle.tree, loc);

    for (nodes) |child| {
        try writeNodeInlayHint(&builder, child);
        try ast.iterateChildrenRecursive(handle.tree, child, &builder, error{OutOfMemory}, writeNodeInlayHint);
    }

    return try builder.toOwnedSlice();
}
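// A hypothetical caller, to illustrate how the new `loc`-based entry point is meant to be
// driven; the handler name, the `server` fields and the range-to-Loc conversion step are
// assumptions for illustration only and are not part of this change:
//
//     fn handleInlayHint(server: *Server, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, request_range: types.Range) ![]InlayHint {
//         // convert the LSP range of the request into a byte-offset Loc first
//         const loc = offsets.rangeToLoc(handle.text, request_range, server.offset_encoding);
//         return try writeRangeInlayHint(arena, server.config.*, &server.document_store, handle, loc, .markdown, server.offset_encoding);
//     }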

View File

@ -240,6 +240,29 @@ pub fn convertRangeEncoding(text: []const u8, range: types.Range, from_encoding:
};
}
// returns true if a and b intersect
pub fn locIntersect(a: Loc, b: Loc) bool {
std.debug.assert(a.start <= a.end and b.start <= b.end);
const a_start_in_b = b.start <= a.start and a.start <= b.end;
const a_end_in_b = b.start <= a.end and a.end <= b.end;
return a_start_in_b or a_end_in_b;
}
// returns true if `inner` is fully contained inside `outer`
pub fn locInside(inner: Loc, outer: Loc) bool {
std.debug.assert(inner.start <= inner.end and outer.start <= outer.end);
return outer.start <= inner.start and inner.end <= outer.end;
}
// returns the union of a and b
pub fn locMerge(a: Loc, b: Loc) Loc {
std.debug.assert(a.start <= a.end and b.start <= b.end);
return .{
.start = @min(a.start, b.start),
.end = @max(a.end, b.end),
};
}
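// A quick usage sketch for the three helpers above (illustrative only, not part of this
// change); `Loc` is the usual `{ start, end }` byte range:
test "locIntersect / locInside / locMerge" {
    const a = Loc{ .start = 2, .end = 8 };
    const b = Loc{ .start = 5, .end = 10 };
    try std.testing.expect(locIntersect(a, b)); // the ranges overlap on [5, 8]
    try std.testing.expect(!locInside(a, b)); // a starts before b
    try std.testing.expect(locInside(Loc{ .start = 5, .end = 8 }, b));
    try std.testing.expectEqual(Loc{ .start = 2, .end = 10 }, locMerge(a, b));
}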
// Helper functions
/// advance `position` which starts at `from_index` to `to_index` accounting for line breaks

View File

@ -74,9 +74,7 @@ const Builder = struct {
const starts = tree.tokens.items(.start);
const next_start = starts[token];
        if (next_start < self.previous_position) return;
if (self.previous_token) |prev| {
// Highlight gaps between AST nodes. These can contain comments or malformed code.
@ -182,6 +180,8 @@ const Builder = struct {
}
fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void {
if (start < self.previous_position) return;
const text = self.handle.tree.source[self.previous_position..start];
const delta = offsets.indexToPosition(text, text.len, self.encoding);
@ -261,13 +261,8 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan
}
}
/// HACK self-hosted has not implemented async yet
fn callWriteNodeTokens(allocator: std.mem.Allocator, args: anytype) error{OutOfMemory}!void {
if (zig_builtin.zig_backend == .other or zig_builtin.zig_backend == .stage1) {
const FrameSize = @sizeOf(@Frame(writeNodeTokens));
var child_frame = try allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
@ -280,7 +275,7 @@ fn callWriteNodeTokens(allocator: std.mem.Allocator, args: anytype) WriteTokensE
}
}
fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMemory}!void {
const node = maybe_node orelse return;
const handle = builder.handle;
@ -1010,10 +1005,7 @@ pub fn writeAllSemanticTokens(
// reverse the ast from the root declarations
for (handle.tree.rootDecls()) |child| {
        try writeNodeTokens(&builder, child);
}
try builder.finish();
return builder.toOwnedSlice();

View File

@ -9,4 +9,5 @@ pub const ZigVersion = enum {
@"0.9.0",
@"0.9.1",
@"0.10.0",
@"0.10.1",
};

View File

@ -48,13 +48,26 @@ pub fn main() !void {
return error.InvalidArgs;
};
    const builder = blk: {
        // Zig 0.11.0-dev.1524+
        if (@hasDecl(std, "Build")) {
            const host = try std.zig.system.NativeTargetInfo.detect(.{});
            break :blk try Builder.create(
                allocator,
                zig_exe,
                build_root,
                cache_root,
                global_cache_root,
                host,
            );
        } else break :blk try Builder.create(
            allocator,
            zig_exe,
            build_root,
            cache_root,
            global_cache_root,
        );
    };
defer builder.destroy();

View File

@ -1,6 +1,7 @@
// Used by tests as a package, can be used by tools such as
// zigbot9001 to take advantage of zls' tools
pub const ast = @import("ast.zig");
pub const analysis = @import("analysis.zig");
pub const Header = @import("Header.zig");
pub const debug = @import("debug.zig");
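// With `ast` now re-exported, an external consumer can reach helpers such as
// `nodesAtLoc` directly; a minimal sketch (assuming the package is wired up under the
// name `zls`, as in the test suite):
//
//     const zls = @import("zls");
//
//     var tree = try std.zig.parse(std.testing.allocator, "const foo = 42;");
//     defer tree.deinit(std.testing.allocator);
//     const nodes = try zls.ast.nodesAtLoc(std.testing.allocator, tree, .{ .start = 6, .end = 9 });
//     defer std.testing.allocator.free(nodes);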

View File

@ -11,34 +11,197 @@ const types = zls.types;
const allocator: std.mem.Allocator = std.testing.allocator;
test "foldingRange - empty" {
try testFoldingRange("", "[]");
try testFoldingRange("", &.{});
}
test "foldingRange - smoke" {
test "foldingRange - doc comment" {
try testFoldingRange(
\\/// hello
\\/// world
\\var foo = 5;
, &.{
.{ .startLine = 0, .startCharacter = 0, .endLine = 1, .endCharacter = 9, .kind = .comment },
});
}
test "foldingRange - region" {
try testFoldingRange(
\\const foo = 0;
\\//#region
\\const bar = 1;
\\//#endregion
\\const baz = 2;
, &.{
.{ .startLine = 1, .startCharacter = 0, .endLine = 3, .endCharacter = 12, .kind = .region },
});
try testFoldingRange(
\\//#region
\\const foo = 0;
\\//#region
\\const bar = 1;
\\//#endregion
\\const baz = 2;
\\//#endregion
, &.{
.{ .startLine = 2, .startCharacter = 0, .endLine = 4, .endCharacter = 12, .kind = .region },
.{ .startLine = 0, .startCharacter = 0, .endLine = 6, .endCharacter = 12, .kind = .region },
});
}
test "foldingRange - if" {
try testFoldingRange(
\\const foo = if (false) {
\\
\\};
, &.{
.{ .startLine = 0, .startCharacter = 24, .endLine = 2, .endCharacter = 0 },
});
try testFoldingRange(
\\const foo = if (false) {
\\
\\} else {
\\
\\};
, &.{
.{ .startLine = 0, .startCharacter = 24, .endLine = 2, .endCharacter = 0 },
.{ .startLine = 2, .startCharacter = 8, .endLine = 4, .endCharacter = 0 },
});
}
test "foldingRange - for/while" {
try testFoldingRange(
\\const foo = for ("") |_| {
\\
\\};
, &.{
.{ .startLine = 0, .startCharacter = 26, .endLine = 2, .endCharacter = 0 },
});
try testFoldingRange(
\\const foo = while (true) {
\\
\\};
, &.{
.{ .startLine = 0, .startCharacter = 26, .endLine = 2, .endCharacter = 0 },
});
}
test "foldingRange - switch" {
try testFoldingRange(
\\const foo = switch (5) {
\\ 0 => {},
\\ 1 => {}
\\};
, &.{
.{ .startLine = 0, .startCharacter = 24, .endLine = 3, .endCharacter = 0 },
});
try testFoldingRange(
\\const foo = switch (5) {
\\ 0 => {},
\\ 1 => {},
\\};
, &.{
.{ .startLine = 0, .startCharacter = 24, .endLine = 3, .endCharacter = 0 },
});
}
test "foldingRange - function" {
try testFoldingRange(
\\fn main() u32 {
\\ return 1 + 1;
\\}
,
\\[{"startLine":0,"endLine":1}]
);
}
test "foldingRange - #801" {
, &.{
.{ .startLine = 0, .startCharacter = 15, .endLine = 2, .endCharacter = 0 },
});
try testFoldingRange(
\\fn score(c: u8) !u32 {
\\ return switch(c) {
\\ 'a'...'z' => c - 'a',
\\ 'A'...'Z' => c - 'A',
\\ _ => error
\\ };
\\fn main(
\\ a: ?u32,
\\) u32 {
\\ return 1 + 1;
\\}
,
\\[{"startLine":1,"endLine":4},{"startLine":0,"endLine":5}]
);
, &.{
.{ .startLine = 0, .startCharacter = 8, .endLine = 2, .endCharacter = 0 },
.{ .startLine = 2, .startCharacter = 7, .endLine = 4, .endCharacter = 0 },
});
}
test "foldingRange - function with doc comment" {
try testFoldingRange(
\\/// this is
\\/// a function
\\fn foo(
\\ /// this is a parameter
\\ a: u32,
\\ ///
\\ /// this is another parameter
\\ b: u32,
\\) void {}
, &.{
.{ .startLine = 0, .startCharacter = 0, .endLine = 1, .endCharacter = 14, .kind = .comment },
.{ .startLine = 5, .startCharacter = 4, .endLine = 6, .endCharacter = 33, .kind = .comment },
.{ .startLine = 2, .startCharacter = 7, .endLine = 8, .endCharacter = 0 },
});
}
test "foldingRange - container decl" {
try testFoldingRange(
\\const Foo = struct {
\\ alpha: u32,
\\ beta: []const u8,
\\};
, &.{
.{ .startLine = 0, .startCharacter = 20, .endLine = 3, .endCharacter = 0 },
});
try testFoldingRange(
\\const Foo = packed struct(u32) {
\\ alpha: u16,
\\ beta: u16,
\\};
, &.{
// .{ .startLine = 0, .startCharacter = 32, .endLine = 3, .endCharacter = 0 }, // TODO
.{ .startLine = 0, .startCharacter = 32, .endLine = 2, .endCharacter = 11 },
});
try testFoldingRange(
\\const Foo = union {
\\ alpha: u32,
\\ beta: []const u8,
\\};
, &.{
.{ .startLine = 0, .startCharacter = 19, .endLine = 3, .endCharacter = 0 },
});
try testFoldingRange(
\\const Foo = union(enum) {
\\ alpha: u32,
\\ beta: []const u8,
\\};
, &.{
.{ .startLine = 0, .startCharacter = 25, .endLine = 3, .endCharacter = 0 },
});
}
test "foldingRange - call" {
try testFoldingRange(
\\extern fn foo(a: bool, b: ?usize) void;
\\const result = foo(
\\ false,
\\ null,
\\);
, &.{
.{ .startLine = 1, .startCharacter = 19, .endLine = 4, .endCharacter = 0 },
});
}
test "foldingRange - multi-line string literal" {
try testFoldingRange(
\\const foo =
\\ \\hello
\\ \\world
\\;
, &.{
.{ .startLine = 1, .startCharacter = 4, .endLine = 3, .endCharacter = 0 },
});
}
fn testFoldingRange(source: []const u8, expect: []const types.FoldingRange) !void {
var ctx = try Context.init();
defer ctx.deinit();
@ -53,16 +216,16 @@ fn testFoldingRange(source: []const u8, expect: []const u8) !void {
const response = try ctx.requestGetResponse(?[]types.FoldingRange, "textDocument/foldingRange", params);
    var actual = std.ArrayListUnmanaged(u8){};
    defer actual.deinit(allocator);

    var expected = std.ArrayListUnmanaged(u8){};
    defer expected.deinit(allocator);

    const options = std.json.StringifyOptions{ .emit_null_optional_fields = false, .whitespace = .{ .indent = .None } };

    try tres.stringify(response.result, options, actual.writer(allocator));
    try tres.stringify(expect, options, expected.writer(allocator));

    try std.testing.expectEqualStrings(expected.items, actual.items);
}

View File

@ -120,7 +120,10 @@ fn testInlayHints(source: []const u8) !void {
for (hints) |hint| {
if (position.line != hint.position.line or position.character != hint.position.character) continue;
        if (!std.mem.endsWith(u8, hint.label, ":")) {
            try error_builder.msgAtLoc("label `{s}` must end with a colon!", new_loc, .err, .{hint.label});
        }

        const actual_label = hint.label[0 .. hint.label.len - 1];
if (!std.mem.eql(u8, expected_label, actual_label)) {
try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", new_loc, .err, .{ expected_label, actual_label });

View File

@ -32,6 +32,18 @@ test "semantic tokens - comments" {
// TODO more tests
}
test "semantic tokens - string literals" {
// https://github.com/zigtools/zls/issues/921
try testSemanticTokens(
\\"
\\"",//
\\"":
,
// no idea if this output is correct but at least it doesn't crash
&.{ 1, 3, 3, 8, 0, 1, 0, 2, 4, 0, 0, 0, 2, 9, 0 },
);
}
const file_uri = switch (builtin.os.tag) {
.windows => "file:///C:/test.zig",
else => "file:///test.zig",

View File

@ -1,6 +1,7 @@
comptime {
_ = @import("helper.zig");
_ = @import("utility/ast.zig");
_ = @import("utility/offsets.zig");
_ = @import("utility/position_context.zig");
_ = @import("utility/uri.zig");

tests/utility/ast.zig Normal file
View File

@ -0,0 +1,76 @@
const std = @import("std");
const zls = @import("zls");
const helper = @import("../helper.zig");
const Context = @import("../context.zig").Context;
const ErrorBuilder = @import("../ErrorBuilder.zig");
const types = zls.types;
const offsets = zls.offsets;
const ast = zls.ast;
const allocator = std.testing.allocator;
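// The `<outer>`/`<inner>` markers in the sources below are placeholders that are stripped
// before parsing: the two `<inner>` markers delimit the Loc passed to `ast.nodesAtLoc`,
// and the two `<outer>` markers delimit the span the returned nodes are expected to cover.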
test "nodesAtLoc" {
try testNodesAtLoc(
\\<outer>const<inner> foo<inner> = 5<outer>;
);
try testNodesAtLoc(
\\<outer>const f<inner>oo = 5;
\\var bar = <inner>2<outer>;
);
try testNodesAtLoc(
\\const foo = <outer>5<inner> +<inner> 2<outer>;
);
try testNodesAtLoc(
\\<outer><inner>fn foo(alpha: u32) void {}
\\const _ = foo(5);<inner><outer>
);
}
fn testNodesAtLoc(source: []const u8) !void {
var ccp = try helper.collectClearPlaceholders(allocator, source);
defer ccp.deinit(allocator);
const old_locs = ccp.locations.items(.old);
const locs = ccp.locations.items(.new);
std.debug.assert(ccp.locations.len == 4);
std.debug.assert(std.mem.eql(u8, offsets.locToSlice(source, old_locs[0]), "<outer>"));
std.debug.assert(std.mem.eql(u8, offsets.locToSlice(source, old_locs[1]), "<inner>"));
std.debug.assert(std.mem.eql(u8, offsets.locToSlice(source, old_locs[2]), "<inner>"));
std.debug.assert(std.mem.eql(u8, offsets.locToSlice(source, old_locs[3]), "<outer>"));
const inner_loc = offsets.Loc{ .start = locs[1].start, .end = locs[2].start };
const outer_loc = offsets.Loc{ .start = locs[0].start, .end = locs[3].end };
const new_source = try allocator.dupeZ(u8, ccp.new_source);
defer allocator.free(new_source);
var tree = try std.zig.parse(allocator, new_source);
defer tree.deinit(allocator);
const nodes = try ast.nodesAtLoc(allocator, tree, inner_loc);
defer allocator.free(nodes);
const actual_loc = offsets.Loc{
.start = offsets.nodeToLoc(tree, nodes[0]).start,
.end = offsets.nodeToLoc(tree, nodes[nodes.len - 1]).end,
};
var error_builder = ErrorBuilder.init(allocator, new_source);
defer error_builder.deinit();
errdefer error_builder.writeDebug();
if (outer_loc.start != actual_loc.start) {
try error_builder.msgAtIndex("actual start here", actual_loc.start, .err, .{});
try error_builder.msgAtIndex("expected start here", outer_loc.start, .err, .{});
return error.LocStartMismatch;
}
if (outer_loc.end != actual_loc.end) {
try error_builder.msgAtIndex("actual end here", actual_loc.end, .err, .{});
try error_builder.msgAtIndex("expected end here", outer_loc.end, .err, .{});
return error.LocEndMismatch;
}
}