Merge branch 'master' of https://github.com/SuperAuguste/zls
This commit is contained in: commit b08ff62595

README.md (16 lines changed)
@@ -14,6 +14,7 @@ Zig Language Server, or `zls`, is a language server for Zig. The Zig wiki states
 - [VSCode](#vscode)
 - [Sublime Text 3](#sublime-text-3)
 - [Kate](#kate)
+- [Neovim/Vim8](#neovimvim8)
 - [Related Projects](#related-projects)
 - [License](#license)
 
@@ -100,6 +101,21 @@ Install the `zls-vscode` extension from [here](https://github.com/zigtools/zls-v
 }
 ```
 
+### Neovim/Vim8
+
+- Install the CoC engine from [here](https://github.com/neoclide/coc.nvim).
+- Issue `:CocConfig` from within your Vim editor, and add the following snippet:
+```json
+{
+    "languageserver": {
+        "zls" : {
+            "command": "command_or_path_to_zls",
+            "filetypes": ["zig"]
+        }
+    }
+}
+```
+
 ## Related Projects
 - [`sublime-zig-language` by @prime31](https://github.com/prime31/sublime-zig-language)
   - Supports basic language features
build.zig (20 lines changed)
@@ -34,7 +34,7 @@ pub fn config(step: *std.build.Step) anyerror!void {
 
     std.debug.warn("Successfully saved configuration options!\n", .{});
 
-    const editor = try zinput.askSelectOne("Which code editor do you use?", enum { VSCode, Sublime, Kate, Other });
+    const editor = try zinput.askSelectOne("Which code editor do you use?", enum { VSCode, Sublime, Kate, Neovim, Vim8, Other });
     std.debug.warn("\n", .{});
 
     switch (editor) {
@@ -78,6 +78,20 @@ pub fn config(step: *std.build.Step) anyerror!void {
                 \\}}
             , .{});
         },
+        .Neovim, .Vim8 => {
+            std.debug.warn(
+                \\To use ZLS in Neovim/Vim8, we recommend using CoC engine. You can get it from 'https://github.com/neoclide/coc.nvim'.
+                \\Then, simply issue cmd from Neovim/Vim8 `:CocConfig`, and add this to your CoC config:
+                \\{{
+                \\  "languageserver": {{
+                \\    "zls" : {{
+                \\      "command": "command_or_path_to_zls",
+                \\      "filetypes": ["zig"]
+                \\    }}
+                \\  }}
+                \\}}
+            , .{});
+        },
         .Other => {
             std.debug.warn(
                 \\We might not *officially* support your editor, but you can definitely still use ZLS!
@@ -138,8 +152,8 @@ pub fn build(b: *std.build.Builder) !void {
     test_step.dependOn(&exe.step);
 
     var unit_tests = b.addTest("tests/unit_tests.zig");
-    unit_tests.addPackage(.{ .name = "analysis", .path = "src/analysis.zig" });
-    unit_tests.addPackage(.{ .name = "types", .path = "src/types.zig" });
+    unit_tests.addPackage(.{ .name = "analysis", .path = "src/analysis.zig" });
+    unit_tests.addPackage(.{ .name = "types", .path = "src/types.zig" });
     unit_tests.setBuildMode(.Debug);
     test_step.dependOn(&unit_tests.step);
 }
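One detail worth noting in the snippet above: the JSON braces are written as `{{` and `}}` because the multiline string is passed through the `std.debug.warn` format machinery, which treats doubled braces as escapes for literal `{` and `}`. A tiny stand-alone illustration of that escaping, a sketch only (it uses `std.debug.print`, which newer Zig versions provide in place of `warn`):

```zig
const std = @import("std");

pub fn main() void {
    // "{{" and "}}" in a format string come out as literal "{" and "}",
    // which is why build.zig doubles them inside the printed JSON sample.
    // (std.debug.print is the newer name for what this commit calls warn.)
    std.debug.print(
        \\{{
        \\  "languageserver": {{ "zls": {{ "command": "zls" }} }}
        \\}}
        \\
    , .{});
}
```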
src/analysis.zig (107 lines changed)
@@ -278,7 +278,7 @@ fn resolveReturnType(analysis_ctx: *AnalysisContext, fn_decl: *ast.Node.FnProto)
     if (ret.rhs) |rhs|
         if (resolveTypeOfNode(analysis_ctx, rhs)) |res_rhs| switch (res_rhs.id) {
             .ContainerDecl => {
-                analysis_ctx.onContainer(res_rhs.cast(ast.Node.ContainerDecl).?) catch return null;
+                analysis_ctx.onContainer(res_rhs) catch return null;
                 return res_rhs;
             },
             else => return null,
@@ -413,6 +413,15 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                 else => decl,
             };
         },
+        .ErrorSetDecl => {
+            const set = node.cast(ast.Node.ErrorSetDecl).?;
+            var i: usize = 0;
+            while (set.iterate(i)) |decl| : (i += 1) {
+                // TODO handle errors better?
+                analysis_ctx.error_completions.add(analysis_ctx.tree(), decl) catch {};
+            }
+            return node;
+        },
         .SuffixOp => {
             const suffix_op = node.cast(ast.Node.SuffixOp).?;
             switch (suffix_op.op) {
@@ -504,10 +513,24 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             };
         },
         .ContainerDecl => {
-            analysis_ctx.onContainer(node.cast(ast.Node.ContainerDecl).?) catch return null;
+            analysis_ctx.onContainer(node) catch return null;
+
+            const container = node.cast(ast.Node.ContainerDecl).?;
+            const kind = analysis_ctx.tree().token_ids[container.kind_token];
+
+            if (kind == .Keyword_struct or (kind == .Keyword_union and container.init_arg_expr == .None)) {
+                return node;
+            }
+
+            var i: usize = 0;
+            while (container.iterate(i)) |decl| : (i += 1) {
+                if (decl.id != .ContainerField) continue;
+                // TODO handle errors better?
+                analysis_ctx.enum_completions.add(analysis_ctx.tree(), decl) catch {};
+            }
             return node;
         },
-        .MultilineStringLiteral, .StringLiteral, .ErrorSetDecl, .FnProto => return node,
+        .MultilineStringLiteral, .StringLiteral, .FnProto => return node,
         else => std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}),
     }
     return null;
@@ -553,6 +576,17 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !
     }
 }
 
+fn checkForContainerAndResolveFieldAccessLhsType(analysis_ctx: *AnalysisContext, node: *ast.Node) *ast.Node {
+    const current_node = resolveFieldAccessLhsType(analysis_ctx, node);
+
+    if (current_node.id == .ContainerDecl or current_node.id == .Root) {
+        // TODO: Handle errors
+        analysis_ctx.onContainer(current_node) catch {};
+    }
+
+    return current_node;
+}
+
 pub fn getFieldAccessTypeNode(
     analysis_ctx: *AnalysisContext,
     tokenizer: *std.zig.Tokenizer,
@@ -577,7 +611,7 @@ pub fn getFieldAccessTypeNode(
             .Identifier => {
                 if (after_period.loc.end == tokenizer.buffer.len) return resolveFieldAccessLhsType(analysis_ctx, current_node);
 
-                current_node = resolveFieldAccessLhsType(analysis_ctx, current_node);
+                current_node = checkForContainerAndResolveFieldAccessLhsType(analysis_ctx, current_node);
                 if (getChild(analysis_ctx.tree(), current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end])) |child| {
                     if (resolveTypeOfNode(analysis_ctx, child)) |child_type| {
                         current_node = child_type;
@@ -651,8 +685,8 @@ pub fn getFieldAccessTypeNode(
             },
         }
 
-        if (current_node.cast(ast.Node.ContainerDecl)) |container_decl| {
-            analysis_ctx.onContainer(container_decl) catch return null;
+        if (current_node.id == .ContainerDecl or current_node.id == .Root) {
+            analysis_ctx.onContainer(current_node) catch return null;
         }
     }
 
@@ -706,12 +740,10 @@ pub fn declsFromIndexInternal(
     decls: *std.ArrayList(*ast.Node),
     tree: *ast.Tree,
     node: *ast.Node,
-    container: **ast.Node,
     source_index: usize,
 ) error{OutOfMemory}!void {
     switch (node.id) {
         .Root, .ContainerDecl => {
-            container.* = node;
             var node_idx: usize = 0;
             while (node.iterate(node_idx)) |child_node| : (node_idx += 1) {
                 // Skip over container fields, we can only dot access those.
@@ -721,7 +753,7 @@ pub fn declsFromIndexInternal(
                 // If the cursor is in a variable decls it will insert itself anyway, we don't need to take care of it.
                 if ((is_contained and child_node.id != .VarDecl) or !is_contained) try decls.append(child_node);
                 if (is_contained) {
-                    try declsFromIndexInternal(arena, decls, tree, child_node, container, source_index);
+                    try declsFromIndexInternal(arena, decls, tree, child_node, source_index);
                 }
             }
         },
@@ -759,41 +791,41 @@ pub fn declsFromIndexInternal(
 
             if (func.body_node) |body_node| {
                 if (!nodeContainsSourceIndex(tree, body_node, source_index)) return;
-                try declsFromIndexInternal(arena, decls, tree, body_node, container, source_index);
+                try declsFromIndexInternal(arena, decls, tree, body_node, source_index);
             }
         },
         .TestDecl => {
             const test_decl = node.cast(ast.Node.TestDecl).?;
             if (!nodeContainsSourceIndex(tree, test_decl.body_node, source_index)) return;
-            try declsFromIndexInternal(arena, decls, tree, test_decl.body_node, container, source_index);
+            try declsFromIndexInternal(arena, decls, tree, test_decl.body_node, source_index);
         },
         .Block => {
             var inode_idx: usize = 0;
             while (node.iterate(inode_idx)) |inode| : (inode_idx += 1) {
                 if (nodeComesAfterSourceIndex(tree, inode, source_index)) return;
-                try declsFromIndexInternal(arena, decls, tree, inode, container, source_index);
+                try declsFromIndexInternal(arena, decls, tree, inode, source_index);
             }
         },
         .Comptime => {
             const comptime_stmt = node.cast(ast.Node.Comptime).?;
             if (nodeComesAfterSourceIndex(tree, comptime_stmt.expr, source_index)) return;
-            try declsFromIndexInternal(arena, decls, tree, comptime_stmt.expr, container, source_index);
+            try declsFromIndexInternal(arena, decls, tree, comptime_stmt.expr, source_index);
         },
         .If => {
             const if_node = node.cast(ast.Node.If).?;
             if (nodeContainsSourceIndex(tree, if_node.body, source_index)) {
                 if (if_node.payload) |payload| {
-                    try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                    try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                 }
-                return try declsFromIndexInternal(arena, decls, tree, if_node.body, container, source_index);
+                return try declsFromIndexInternal(arena, decls, tree, if_node.body, source_index);
             }
 
             if (if_node.@"else") |else_node| {
                 if (nodeContainsSourceIndex(tree, else_node.body, source_index)) {
                     if (else_node.payload) |payload| {
-                        try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                        try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                     }
-                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, container, source_index);
+                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, source_index);
                 }
             }
         },
@@ -801,33 +833,33 @@ pub fn declsFromIndexInternal(
             const while_node = node.cast(ast.Node.While).?;
             if (nodeContainsSourceIndex(tree, while_node.body, source_index)) {
                 if (while_node.payload) |payload| {
-                    try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                    try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                 }
-                return try declsFromIndexInternal(arena, decls, tree, while_node.body, container, source_index);
+                return try declsFromIndexInternal(arena, decls, tree, while_node.body, source_index);
             }
 
             if (while_node.@"else") |else_node| {
                 if (nodeContainsSourceIndex(tree, else_node.body, source_index)) {
                     if (else_node.payload) |payload| {
-                        try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                        try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                     }
-                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, container, source_index);
+                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, source_index);
                 }
             }
         },
         .For => {
             const for_node = node.cast(ast.Node.For).?;
             if (nodeContainsSourceIndex(tree, for_node.body, source_index)) {
-                try declsFromIndexInternal(arena, decls, tree, for_node.payload, container, source_index);
-                return try declsFromIndexInternal(arena, decls, tree, for_node.body, container, source_index);
+                try declsFromIndexInternal(arena, decls, tree, for_node.payload, source_index);
+                return try declsFromIndexInternal(arena, decls, tree, for_node.body, source_index);
            }
 
             if (for_node.@"else") |else_node| {
                 if (nodeContainsSourceIndex(tree, else_node.body, source_index)) {
                     if (else_node.payload) |payload| {
-                        try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                        try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                     }
-                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, container, source_index);
+                    return try declsFromIndexInternal(arena, decls, tree, else_node.body, source_index);
                 }
             }
         },
@@ -837,9 +869,9 @@ pub fn declsFromIndexInternal(
                 const case_node = case.*.cast(ast.Node.SwitchCase).?;
                 if (nodeContainsSourceIndex(tree, case_node.expr, source_index)) {
                     if (case_node.payload) |payload| {
-                        try declsFromIndexInternal(arena, decls, tree, payload, container, source_index);
+                        try declsFromIndexInternal(arena, decls, tree, payload, source_index);
                     }
-                    return try declsFromIndexInternal(arena, decls, tree, case_node.expr, container, source_index);
+                    return try declsFromIndexInternal(arena, decls, tree, case_node.expr, source_index);
                 }
             }
         },
@@ -857,7 +889,7 @@ pub fn declsFromIndexInternal(
             try decls.append(node);
             if (node.cast(ast.Node.VarDecl).?.init_node) |child| {
                 if (nodeContainsSourceIndex(tree, child, source_index)) {
-                    try declsFromIndexInternal(arena, decls, tree, child, container, source_index);
+                    try declsFromIndexInternal(arena, decls, tree, child, source_index);
                 }
             }
         },
@@ -872,10 +904,8 @@ pub fn addChildrenNodes(decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, node:
     }
 }
 
-pub fn declsFromIndex(arena: *std.heap.ArenaAllocator, decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, source_index: usize) !*ast.Node {
-    var result = &tree.root_node.base;
-    try declsFromIndexInternal(arena, decls, tree, &tree.root_node.base, &result, source_index);
-    return result;
+pub fn declsFromIndex(arena: *std.heap.ArenaAllocator, decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, source_index: usize) !void {
+    try declsFromIndexInternal(arena, decls, tree, &tree.root_node.base, source_index);
 }
 
 fn nodeContainsSourceIndex(tree: *ast.Tree, node: *ast.Node, source_index: usize) bool {
@@ -924,6 +954,7 @@ pub const PositionContext = union(enum) {
     string_literal: SourceRange,
     field_access: SourceRange,
     var_access: SourceRange,
+    global_error_set,
     enum_literal,
     other,
     empty,
@@ -938,6 +969,7 @@ pub const PositionContext = union(enum) {
             .enum_literal => null,
             .other => null,
             .empty => null,
+            .global_error_set => null,
         };
     }
 };
@@ -1010,6 +1042,7 @@ pub fn documentPositionContext(allocator: *std.mem.Allocator, document: types.Te
                 .enum_literal => curr_ctx.ctx = .empty,
                 .field_access => {},
                 .other => {},
+                .global_error_set => {},
                 else => curr_ctx.ctx = .{
                     .field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok),
                 },
@@ -1032,6 +1065,7 @@ pub fn documentPositionContext(allocator: *std.mem.Allocator, document: types.Te
                     (try peek(&stack)).ctx = .empty;
                 }
             },
+            .Keyword_error => curr_ctx.ctx = .global_error_set,
             else => curr_ctx.ctx = .empty,
         }
 
@@ -1065,14 +1099,13 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, children: *std.ArrayList(types
             try addOutlineNodes(allocator, children, tree, cchild);
             return;
         },
-        else => {}
+        else => {},
     }
     _ = try children.append(try getDocumentSymbolsInternal(allocator, tree, child));
 }
 
 fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) anyerror!types.DocumentSymbol {
     // const symbols = std.ArrayList(types.DocumentSymbol).init(allocator);
 
     const start_loc = tree.tokenLocation(0, node.firstToken());
     const end_loc = tree.tokenLocation(0, node.lastToken());
     const range = types.Range{
@@ -1083,14 +1116,14 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
         .end = .{
             .line = @intCast(i64, end_loc.line),
             .character = @intCast(i64, end_loc.column),
-        }
+        },
     };
 
     if (getDeclName(tree, node) == null) {
         std.debug.warn("NULL NAME: {}\n", .{node.id});
     }
 
-    // TODO: Get my lazy bum to fix detail newlines
+    // TODO: Get my lazy bum to fix detail newlines
     return types.DocumentSymbol{
         .name = getDeclName(tree, node) orelse "no_name",
         // .detail = (try getDocComments(allocator, tree, node)) orelse "",
@@ -1099,7 +1132,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
             .FnProto => .Function,
             .VarDecl => .Variable,
             .ContainerField => .Field,
-            else => .Variable
+            else => .Variable,
         },
         .range = range,
         .selectionRange = range,
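Several of the new paths above (the `.ErrorSetDecl` and `.ContainerDecl` cases) collect child declarations with the index-based `iterate` idiom of the old `std.zig.ast` API: call `iterate(i)` until it returns `null`, bumping `i` in the `while` continue expression. A minimal, self-contained sketch of that loop shape, using a hypothetical stand-in container rather than zls's real AST types:

```zig
const std = @import("std");

// Hypothetical stand-in for an AST container; only the iterate() shape matters.
const FakeContainer = struct {
    fields: []const []const u8,

    fn iterate(self: FakeContainer, index: usize) ?[]const u8 {
        if (index >= self.fields.len) return null;
        return self.fields[index];
    }
};

test "index-based iterate loop, as used when gathering completions" {
    const container = FakeContainer{ .fields = &[_][]const u8{ "a", "b", "c" } };

    var count: usize = 0;
    var i: usize = 0;
    while (container.iterate(i)) |decl| : (i += 1) {
        _ = decl; // zls would add each decl to its completion store here
        count += 1;
    }
    try std.testing.expectEqual(@as(usize, 3), count);
}
```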
src/document_store.zig

@@ -30,12 +30,57 @@ pub const Handle = struct {
     }
 };
 
+pub const TagStore = struct {
+    values: std.StringHashMap(void),
+    completions: std.ArrayListUnmanaged(types.CompletionItem),
+
+    pub fn init(allocator: *std.mem.Allocator) TagStore {
+        return .{
+            .values = std.StringHashMap(void).init(allocator),
+            .completions = .{},
+        };
+    }
+
+    pub fn deinit(self: *TagStore) void {
+        const alloc = self.values.allocator;
+        for (self.completions.items) |item| {
+            alloc.free(item.label);
+            if (item.documentation) |some| alloc.free(some.value);
+        }
+        self.values.deinit();
+        self.completions.deinit(self.values.allocator);
+    }
+
+    pub fn add(self: *TagStore, tree: *std.zig.ast.Tree, tag: *std.zig.ast.Node) !void {
+        const name = analysis.nodeToString(tree, tag).?;
+        if (self.values.contains(name)) return;
+        const alloc = self.values.allocator;
+        const item = types.CompletionItem{
+            .label = try std.mem.dupe(alloc, u8, name),
+            .kind = .Constant,
+            .documentation = if (try analysis.getDocComments(alloc, tree, tag)) |docs|
+                .{
+                    .kind = .Markdown,
+                    .value = docs,
+                }
+            else
+                null,
+        };
+
+        try self.values.putNoClobber(item.label, {});
+        try self.completions.append(self.values.allocator, item);
+    }
+};
+
 allocator: *std.mem.Allocator,
 handles: std.StringHashMap(*Handle),
 has_zig: bool,
 build_files: std.ArrayListUnmanaged(*BuildFile),
 build_runner_path: []const u8,
+
+error_completions: TagStore,
+enum_completions: TagStore,
 
 pub fn init(
     self: *DocumentStore,
     allocator: *std.mem.Allocator,
@@ -47,6 +92,8 @@ pub fn init(
     self.has_zig = has_zig;
     self.build_files = .{};
     self.build_runner_path = build_runner_path;
+    self.error_completions = TagStore.init(allocator);
+    self.enum_completions = TagStore.init(allocator);
 }
 
 const LoadPackagesContext = struct {
@@ -70,7 +117,7 @@ fn loadPackages(context: LoadPackagesContext) !void {
     // open it and handle the error for file not found.
     var file_exists = true;
     check_file_exists: {
-        var fhandle = std.fs.cwd().openFile(target_path, .{.read = true, .write = false }) catch |err| switch (err) {
+        var fhandle = std.fs.cwd().openFile(target_path, .{ .read = true, .write = false }) catch |err| switch (err) {
             error.FileNotFound => {
                 file_exists = false;
                 break :check_file_exists;
@@ -179,7 +226,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
             .allocator = self.allocator,
             .build_runner_path = self.build_runner_path,
         }) catch |err| {
-            std.debug.warn("Failed to load packages of build file {} (error: {})\n", .{build_file.uri, err});
+            std.debug.warn("Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
         };
     } else if (self.has_zig and !in_std) associate_build_file: {
         // Look into build files to see if we already have one that fits
@@ -425,7 +472,7 @@ pub fn applyChanges(
                 .allocator = self.allocator,
                 .build_runner_path = self.build_runner_path,
             }) catch |err| {
-                std.debug.warn("Failed to load packages of build file {} (error: {})\n", .{build_file.uri, err});
+                std.debug.warn("Failed to load packages of build file {} (error: {})\n", .{ build_file.uri, err });
             };
         }
     }
@@ -478,6 +525,8 @@ pub const AnalysisContext = struct {
     scope_nodes: []*std.zig.ast.Node,
     in_container: *std.zig.ast.Node,
     std_uri: ?[]const u8,
+    error_completions: *TagStore,
+    enum_completions: *TagStore,
 
     pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
         return self.handle.tree;
@@ -490,12 +539,14 @@ pub const AnalysisContext = struct {
         self.in_container = &self.tree().root_node.base;
     }
 
-    pub fn onContainer(self: *AnalysisContext, container: *std.zig.ast.Node.ContainerDecl) !void {
-        if (self.in_container != &container.base) {
-            self.in_container = &container.base;
+    pub fn onContainer(self: *AnalysisContext, container: *std.zig.ast.Node) !void {
+        std.debug.assert(container.id == .ContainerDecl or container.id == .Root);
+
+        if (self.in_container != container) {
+            self.in_container = container;
 
             var scope_nodes = std.ArrayList(*std.zig.ast.Node).fromOwnedSlice(&self.arena.allocator, self.scope_nodes);
-            try analysis.addChildrenNodes(&scope_nodes, self.tree(), &container.base);
+            try analysis.addChildrenNodes(&scope_nodes, self.tree(), container);
             self.scope_nodes = scope_nodes.items;
         }
     }
@@ -584,6 +635,8 @@ pub const AnalysisContext = struct {
             .scope_nodes = try std.mem.dupe(&self.arena.allocator, *std.zig.ast.Node, self.scope_nodes),
             .in_container = self.in_container,
             .std_uri = self.std_uri,
+            .error_completions = self.error_completions,
+            .enum_completions = self.enum_completions,
         };
     }
 };
@@ -613,7 +666,7 @@ pub fn analysisContext(
     zig_lib_path: ?[]const u8,
 ) !AnalysisContext {
     var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
-    const in_container = try analysis.declsFromIndex(arena, &scope_nodes, handle.tree, position);
+    try analysis.declsFromIndex(arena, &scope_nodes, handle.tree, position);
 
     const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
     return AnalysisContext{
@@ -621,8 +674,10 @@ pub fn analysisContext(
         .handle = handle,
         .arena = arena,
         .scope_nodes = scope_nodes.items,
-        .in_container = in_container,
+        .in_container = &handle.tree.root_node.base,
         .std_uri = std_uri,
+        .error_completions = &self.error_completions,
+        .enum_completions = &self.enum_completions,
     };
 }
 
@@ -652,4 +707,6 @@ pub fn deinit(self: *DocumentStore) void {
     }
 
     self.build_files.deinit(self.allocator);
+    self.error_completions.deinit();
+    self.enum_completions.deinit();
 }
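The new TagStore deduplicates error and enum completions by name before they are served to the client. A minimal, self-contained sketch of that pattern, with a hypothetical stand-in Completion type instead of zls's types.CompletionItem and no AST involved (std API names may drift between Zig versions):

```zig
const std = @import("std");

// Hypothetical stand-in for types.CompletionItem; only the label matters here.
const Completion = struct { label: []const u8 };

/// Same idea as the TagStore added above: remember which labels were seen,
/// keep exactly one completion per unique label.
const SimpleTagStore = struct {
    seen: std.StringHashMap(void),
    completions: std.ArrayListUnmanaged(Completion),

    fn init(allocator: std.mem.Allocator) SimpleTagStore {
        return .{
            .seen = std.StringHashMap(void).init(allocator),
            .completions = .{},
        };
    }

    fn deinit(self: *SimpleTagStore) void {
        self.completions.deinit(self.seen.allocator);
        self.seen.deinit();
    }

    fn add(self: *SimpleTagStore, label: []const u8) !void {
        if (self.seen.contains(label)) return; // duplicate tag, already stored
        try self.seen.putNoClobber(label, {});
        try self.completions.append(self.seen.allocator, .{ .label = label });
    }
};

test "duplicate tags are stored once" {
    var store = SimpleTagStore.init(std.testing.allocator);
    defer store.deinit();

    try store.add("OutOfMemory");
    try store.add("OutOfMemory"); // ignored
    try store.add("FileNotFound");

    try std.testing.expectEqual(@as(usize, 2), store.completions.items.len);
}
```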
src/main.zig (18 lines changed)
@@ -760,6 +760,24 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
                 }),
                 .var_access, .empty => try completeGlobal(id, pos_index, handle, this_config),
                 .field_access => |range| try completeFieldAccess(id, handle, pos, range, this_config),
+                .global_error_set => try send(types.Response{
+                    .id = .{ .Integer = id },
+                    .result = .{
+                        .CompletionList = .{
+                            .isIncomplete = false,
+                            .items = document_store.error_completions.completions.items,
+                        },
+                    },
+                }),
+                .enum_literal => try send(types.Response{
+                    .id = .{ .Integer = id },
+                    .result = .{
+                        .CompletionList = .{
+                            .isIncomplete = false,
+                            .items = document_store.enum_completions.completions.items,
+                        },
+                    },
+                }),
                 else => try respondGeneric(id, no_completions_response),
             }
         } else {