implement global error set completion

Vexu 2020-05-16 19:04:07 +03:00
parent 0720452217
commit 22a863134b
GPG Key ID: 59AEB8936E16A6AC
3 changed files with 76 additions and 4 deletions

@@ -405,6 +405,16 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                else => decl,
            };
        },
        .ErrorSetDecl => {
            const set = node.cast(ast.Node.ErrorSetDecl).?;
            var i: usize = 0;
            while (set.iterate(i)) |decl| : (i += 1) {
                const tag = decl.cast(ast.Node.ErrorTag).?;
                // TODO handle errors better?
                analysis_ctx.error_completions.add(analysis_ctx.tree(), tag) catch {};
            }
            return node;
        },
        .SuffixOp => {
            const suffix_op = node.cast(ast.Node.SuffixOp).?;
            switch (suffix_op.op) {
@@ -499,7 +509,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
            analysis_ctx.onContainer(node.cast(ast.Node.ContainerDecl).?) catch return null;
            return node;
        },
        .MultilineStringLiteral, .StringLiteral, .ErrorSetDecl, .FnProto => return node,
        .MultilineStringLiteral, .StringLiteral, .FnProto => return node,
        else => std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}),
    }
    return null;
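
For reference, this is the shape of node the new .ErrorSetDecl case walks (the names below are made up for illustration, not taken from the commit): each tag is handed to error_completions.add, and the declaration resolves to itself so it no longer needs the plain pass-through branch removed above.

// Illustrative source only; not part of the commit.
const FileError = error{
    /// Doc comments on a tag become the completion item's documentation.
    AccessDenied,
    OutOfMemory,
    FileNotFound,
};
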
@@ -916,6 +926,7 @@ pub const PositionContext = union(enum) {
    string_literal: SourceRange,
    field_access: SourceRange,
    var_access: SourceRange,
    global_error_set,
    enum_literal,
    other,
    empty,
@@ -930,6 +941,7 @@ pub const PositionContext = union(enum) {
            .enum_literal => null,
            .other => null,
            .empty => null,
            .global_error_set => null,
        };
    }
};
@@ -1002,6 +1014,7 @@ pub fn documentPositionContext(allocator: *std.mem.Allocator, document: types.Te
                .enum_literal => curr_ctx.ctx = .empty,
                .field_access => {},
                .other => {},
                .global_error_set => {},
                else => curr_ctx.ctx = .{
                    .field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok),
                },
@@ -1024,6 +1037,7 @@ pub fn documentPositionContext(allocator: *std.mem.Allocator, document: types.Te
                    (try peek(&stack)).ctx = .empty;
                }
            },
            .Keyword_error => curr_ctx.ctx = .global_error_set,
            else => curr_ctx.ctx = .empty,
        }
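
A sketch of the cursor position the new context is meant to catch (hypothetical buffer contents): a trailing error keyword switches the context to .global_error_set, and the period that follows leaves it untouched, so the request is answered from the global error list rather than treated as an enum literal or field access.

// Hypothetical editor state, with the cursor right after the dot:
//
//     fn fail() !void {
//         return error.
//     }
//                  ^ completion requested here
//
// The `error` keyword sets the context to .global_error_set and the period
// keeps it, so the server replies with the accumulated error names.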

@@ -30,12 +30,56 @@ pub const Handle = struct {
    }
};

pub const ErrorCompletion = struct {
    values: std.StringHashMap(void),
    completions: std.ArrayList(types.CompletionItem),

    pub fn init(allocator: *std.mem.Allocator) ErrorCompletion {
        return .{
            .values = std.StringHashMap(void).init(allocator),
            .completions = std.ArrayList(types.CompletionItem).init(allocator),
        };
    }

    pub fn deinit(self: *ErrorCompletion) void {
        const alloc = self.values.allocator;
        for (self.completions.items) |item| {
            alloc.free(item.label);
            if (item.documentation) |some| alloc.free(some.value);
        }
        self.values.deinit();
        self.completions.deinit();
    }

    pub fn add(self: *ErrorCompletion, tree: *std.zig.ast.Tree, tag: *std.zig.ast.Node.ErrorTag) !void {
        const name = tree.tokenSlice(tag.name_token);
        if (self.values.contains(name)) return;

        const alloc = self.values.allocator;
        const item = types.CompletionItem{
            .label = try std.mem.dupe(alloc, u8, name),
            .kind = .Constant,
            .documentation = if (try analysis.getDocComments(alloc, tree, &tag.base)) |docs|
                .{
                    .kind = .Markdown,
                    .value = docs,
                }
            else
                null,
        };

        try self.values.putNoClobber(item.label, {});
        try self.completions.append(item);
    }
};

allocator: *std.mem.Allocator,
handles: std.StringHashMap(*Handle),
has_zig: bool,
build_files: std.ArrayListUnmanaged(*BuildFile),
build_runner_path: []const u8,
error_completions: ErrorCompletion,

pub fn init(
    self: *DocumentStore,
    allocator: *std.mem.Allocator,
@@ -47,6 +91,7 @@ pub fn init(
    self.has_zig = has_zig;
    self.build_files = .{};
    self.build_runner_path = build_runner_path;
    self.error_completions = ErrorCompletion.init(allocator);
}

const LoadPackagesContext = struct {
@@ -478,6 +523,7 @@ pub const AnalysisContext = struct {
    scope_nodes: []*std.zig.ast.Node,
    in_container: *std.zig.ast.Node,
    std_uri: ?[]const u8,
    error_completions: *ErrorCompletion,

    pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
        return self.handle.tree;
@@ -584,6 +630,7 @@ pub const AnalysisContext = struct {
            .scope_nodes = try std.mem.dupe(&self.arena.allocator, *std.zig.ast.Node, self.scope_nodes),
            .in_container = self.in_container,
            .std_uri = self.std_uri,
            .error_completions = self.error_completions,
        };
    }
};
@@ -623,6 +670,7 @@ pub fn analysisContext(
        .scope_nodes = scope_nodes.items,
        .in_container = in_container,
        .std_uri = std_uri,
        .error_completions = &self.error_completions,
    };
}
@@ -652,4 +700,5 @@ pub fn deinit(self: *DocumentStore) void {
    }
    self.build_files.deinit(self.allocator);
    self.error_completions.deinit();
}
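
A rough usage sketch of the new ErrorCompletion container, assuming exactly the API added above; collectErrorTags is a hypothetical helper (not part of this change) that mirrors the loop resolveTypeOfNode now runs for every error set declaration it resolves. Labels and doc strings are duped by add, and the store's deinit frees them along with the backing map and list, as in the hunk above.

const std = @import("std");

// Hypothetical helper, for illustration only: registers every tag of one
// error set declaration, letting add() deduplicate by error name.
fn collectErrorTags(
    completions: *ErrorCompletion,
    tree: *std.zig.ast.Tree,
    set: *std.zig.ast.Node.ErrorSetDecl,
) void {
    var i: usize = 0;
    while (set.iterate(i)) |decl| : (i += 1) {
        const tag = decl.cast(std.zig.ast.Node.ErrorTag).?;
        // Same policy as the analysis code: swallow allocation failures for now.
        completions.add(tree, tag) catch {};
    }
}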

@@ -760,6 +760,15 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
            }),
            .var_access, .empty => try completeGlobal(id, pos_index, handle, this_config),
            .field_access => |range| try completeFieldAccess(id, handle, pos, range, this_config),
            .global_error_set => try send(types.Response{
                .id = .{ .Integer = id },
                .result = .{
                    .CompletionList = .{
                        .isIncomplete = false,
                        .items = document_store.error_completions.completions.items,
                    },
                },
            }),
            else => try respondGeneric(id, no_completions_response),
        }
    } else {
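
To make the end result concrete, here is roughly what one stored item looks like once an error tag has been registered (the values are made up; types refers to the project's LSP type definitions as used above). The .global_error_set branch sends every such item back with isIncomplete = false, since the list already holds all error names the server has resolved so far.

// Hypothetical example of a single accumulated completion item.
const example_item = types.CompletionItem{
    .label = "FileNotFound",
    .kind = .Constant,
    .documentation = .{
        .kind = .Markdown,
        .value = "Doc comment text from the error tag, if any.",
    },
};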