implement completion for enum literals

parent 22a863134b
commit c1ba26e0a2
@@ -408,10 +408,9 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
         .ErrorSetDecl => {
             const set = node.cast(ast.Node.ErrorSetDecl).?;
             var i: usize = 0;
-            while (set.iterate(i)) |decl| : (i+=1) {
-                const tag = decl.cast(ast.Node.ErrorTag).?;
+            while (set.iterate(i)) |decl| : (i += 1) {
                 // TODO handle errors better?
-                analysis_ctx.error_completions.add(analysis_ctx.tree(), tag) catch {};
+                analysis_ctx.error_completions.add(analysis_ctx.tree(), decl) catch {};
             }
             return node;
         },
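For orientation, a made-up error set of the shape this branch iterates over (illustrative only, not from the commit): each tag becomes one error_completions entry, and with this change it is handed over as a plain *ast.Node rather than a pre-cast ErrorTag, matching the generalized TagStore.add further down.

```zig
// Hypothetical user code, not from this commit: an error set declaration of
// the kind the ErrorSetDecl branch walks. Each tag (AccessDenied, OutOfMemory,
// FileNotFound) is passed to error_completions.add as a plain *ast.Node.
const FileError = error{
    AccessDenied,
    OutOfMemory,
    FileNotFound,
};
```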
@@ -507,6 +506,20 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
         },
         .ContainerDecl => {
             analysis_ctx.onContainer(node.cast(ast.Node.ContainerDecl).?) catch return null;
+
+            const container = node.cast(ast.Node.ContainerDecl).?;
+            const kind = analysis_ctx.tree().token_ids[container.kind_token];
+
+            if (kind == .Keyword_struct or (kind == .Keyword_union and container.init_arg_expr == .None)) {
+                return node;
+            }
+
+            var i: usize = 0;
+            while (container.iterate(i)) |decl| : (i += 1) {
+                if (decl.id != .ContainerField) continue;
+                // TODO handle errors better?
+                analysis_ctx.enum_completions.add(analysis_ctx.tree(), decl) catch {};
+            }
             return node;
         },
         .MultilineStringLiteral, .StringLiteral, .FnProto => return node,
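For context, a sketch of the user-side code this branch serves; the Color enum and main function below are made up for illustration. Plain structs and bare unions are skipped, while for enum-like containers every ContainerField is added to enum_completions, which is what later backs enum-literal completion.

```zig
const std = @import("std");

// Hypothetical example container: its ContainerFields (red, green, blue) are
// what the ContainerDecl branch above collects into enum_completions.
const Color = enum {
    red,
    green,
    blue,
};

pub fn main() void {
    // Enum literal syntax: the position after the `.` is where the new
    // completion kind kicks in and can now offer .red/.green/.blue.
    const c: Color = .green;
    std.debug.warn("{}\n", .{c});
}
```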
@@ -1080,7 +1093,7 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, children: *std.ArrayList(types
             // _ = try children.append(try getDocumentSymbolsInternal(allocator, tree, cchild));
             return;
         },
-        else => {}
+        else => {},
     }
     std.debug.warn("{}\n", .{child.id});
     _ = try children.append(try getDocumentSymbolsInternal(allocator, tree, child));
@@ -1088,7 +1101,6 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, children: *std.ArrayList(types

fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) anyerror!types.DocumentSymbol {
    // const symbols = std.ArrayList(types.DocumentSymbol).init(allocator);
-
    const start_loc = tree.tokenLocation(0, node.firstToken());
    const end_loc = tree.tokenLocation(0, node.lastToken());
    const range = types.Range{
@@ -1099,14 +1111,14 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
        .end = .{
            .line = @intCast(i64, end_loc.line),
            .character = @intCast(i64, end_loc.column),
-        }
+        },
    };

    if (getDeclName(tree, node) == null) {
        std.debug.warn("NULL NAME: {}\n", .{node.id});
    }

    // TODO: Get my lazy bum to fix detail newlines
    return types.DocumentSymbol{
        .name = getDeclName(tree, node) orelse "no_name",
        // .detail = (try getDocComments(allocator, tree, node)) orelse "",
@@ -1115,7 +1127,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no
            .FnProto => .Function,
            .VarDecl => .Variable,
            .ContainerField => .Field,
-            else => .Variable
+            else => .Variable,
        },
        .range = range,
        .selectionRange = range,
@@ -30,35 +30,35 @@ pub const Handle = struct {
    }
};

-pub const ErrorCompletion = struct {
+pub const TagStore = struct {
    values: std.StringHashMap(void),
-    completions: std.ArrayList(types.CompletionItem),
+    completions: std.ArrayListUnmanaged(types.CompletionItem),

-    pub fn init(allocator: *std.mem.Allocator) ErrorCompletion {
+    pub fn init(allocator: *std.mem.Allocator) TagStore {
        return .{
            .values = std.StringHashMap(void).init(allocator),
-            .completions = std.ArrayList(types.CompletionItem).init(allocator),
+            .completions = .{},
        };
    }

-    pub fn deinit(self: *ErrorCompletion) void {
+    pub fn deinit(self: *TagStore) void {
        const alloc = self.values.allocator;
        for (self.completions.items) |item| {
            alloc.free(item.label);
            if (item.documentation) |some| alloc.free(some.value);
        }
        self.values.deinit();
-        self.completions.deinit();
+        self.completions.deinit(self.values.allocator);
    }

-    pub fn add(self: *ErrorCompletion, tree: *std.zig.ast.Tree, tag: *std.zig.ast.Node.ErrorTag) !void {
-        const name = tree.tokenSlice(tag.name_token);
+    pub fn add(self: *TagStore, tree: *std.zig.ast.Tree, tag: *std.zig.ast.Node) !void {
+        const name = analysis.nodeToString(tree, tag).?;
        if (self.values.contains(name)) return;
        const alloc = self.values.allocator;
        const item = types.CompletionItem{
            .label = try std.mem.dupe(alloc, u8, name),
            .kind = .Constant,
-            .documentation = if (try analysis.getDocComments(alloc, tree, &tag.base)) |docs|
+            .documentation = if (try analysis.getDocComments(alloc, tree, tag)) |docs|
                .{
                    .kind = .Markdown,
                    .value = docs,
@@ -68,7 +68,7 @@ pub const ErrorCompletion = struct {
        };

        try self.values.putNoClobber(item.label, {});
-        try self.completions.append(item);
+        try self.completions.append(self.values.allocator, item);
    }
};

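The struct renamed to TagStore above now backs both error and enum completions. Below is a minimal standalone sketch of just its dedupe idea, written against the same era of std APIs the commit itself uses (pointer allocators, managed StringHashMap); LabelStore and its label-only storage are simplifications, not the zls type.

```zig
const std = @import("std");

// Standalone simplification of TagStore's dedupe idea: a StringHashMap(void)
// remembers which labels were already stored, so the same tag seen in many
// files yields a single completion entry.
const LabelStore = struct {
    values: std.StringHashMap(void),
    labels: std.ArrayList([]const u8),

    fn init(allocator: *std.mem.Allocator) LabelStore {
        return .{
            .values = std.StringHashMap(void).init(allocator),
            .labels = std.ArrayList([]const u8).init(allocator),
        };
    }

    fn deinit(self: *LabelStore) void {
        self.values.deinit();
        self.labels.deinit();
    }

    fn add(self: *LabelStore, label: []const u8) !void {
        if (self.values.contains(label)) return; // seen before, skip duplicate
        try self.values.putNoClobber(label, {});
        try self.labels.append(label);
    }
};
```

In the actual TagStore, each entry additionally carries a duplicated label and an optional Markdown doc comment on a types.CompletionItem, both freed in deinit, and the completions list is an ArrayListUnmanaged appended to with the values map's allocator.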
@@ -78,7 +78,8 @@ has_zig: bool,
build_files: std.ArrayListUnmanaged(*BuildFile),
build_runner_path: []const u8,

-error_completions: ErrorCompletion,
+error_completions: TagStore,
+enum_completions: TagStore,

pub fn init(
    self: *DocumentStore,
@@ -91,7 +92,8 @@ pub fn init(
    self.has_zig = has_zig;
    self.build_files = .{};
    self.build_runner_path = build_runner_path;
-    self.error_completions = ErrorCompletion.init(allocator);
+    self.error_completions = TagStore.init(allocator);
+    self.enum_completions = TagStore.init(allocator);
}

const LoadPackagesContext = struct {
@@ -523,7 +525,8 @@ pub const AnalysisContext = struct {
    scope_nodes: []*std.zig.ast.Node,
    in_container: *std.zig.ast.Node,
    std_uri: ?[]const u8,
-    error_completions: *ErrorCompletion,
+    error_completions: *TagStore,
+    enum_completions: *TagStore,

    pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
        return self.handle.tree;
@@ -631,6 +634,7 @@ pub const AnalysisContext = struct {
            .in_container = self.in_container,
            .std_uri = self.std_uri,
            .error_completions = self.error_completions,
+            .enum_completions = self.enum_completions,
        };
    }
};
@@ -671,6 +675,7 @@ pub fn analysisContext(
        .in_container = in_container,
        .std_uri = std_uri,
        .error_completions = &self.error_completions,
+        .enum_completions = &self.enum_completions,
    };
}

@@ -701,4 +706,5 @@ pub fn deinit(self: *DocumentStore) void {

    self.build_files.deinit(self.allocator);
    self.error_completions.deinit();
+    self.enum_completions.deinit();
}
@@ -769,6 +769,15 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
                    },
                },
            }),
+            .enum_literal => try send(types.Response{
+                .id = .{ .Integer = id },
+                .result = .{
+                    .CompletionList = .{
+                        .isIncomplete = false,
+                        .items = document_store.enum_completions.completions.items,
+                    },
+                },
+            }),
            else => try respondGeneric(id, no_completions_response),
        }
    } else {
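Finally, a self-contained sketch of the request-side dispatch the hunk above extends; PositionContext, CompletionItem, and itemsFor are stand-in names, not the zls API. The point is that an enum-literal completion request can be answered directly from the items already collected in document_store.enum_completions, with no extra tree walk at request time.

```zig
const std = @import("std");

// Simplified stand-in types, not the zls implementation.
const CompletionItem = struct {
    label: []const u8,
};

const PositionContext = enum {
    enum_literal,
    other,
};

fn itemsFor(ctx: PositionContext, enum_items: []const CompletionItem) []const CompletionItem {
    return switch (ctx) {
        // Serve everything gathered while resolving ContainerDecls.
        .enum_literal => enum_items,
        // Other positions fall back to their own handlers (elided here).
        .other => &[_]CompletionItem{},
    };
}

pub fn main() void {
    const collected = [_]CompletionItem{
        .{ .label = "red" },
        .{ .label = "green" },
    };
    std.debug.assert(itemsFor(.enum_literal, &collected).len == 2);
    std.debug.assert(itemsFor(.other, &collected).len == 0);
}
```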