Merge pull request #59 from Sergeeeek/master
Show function aliases as function declarations in completions
commit 3673245309
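For context (not part of this diff), a minimal sketch of the pattern the change targets: a `const` declaration whose initializer resolves to a function, i.e. a function alias. After this change, completing such an alias is presented like the function it points at (Function/Struct kind, signature detail, optional snippet) rather than as a plain constant. All names below are made up for illustration.

    const std = @import("std");

    fn parseDecimal(text: []const u8) !i64 {
        return std.fmt.parseInt(i64, text, 10);
    }

    // A function alias: a VarDecl whose init node resolves to a FnProto.
    // Completing `parse` now yields a function-style completion item.
    const parse = parseDecimal;

    pub fn main() !void {
        std.debug.warn("{}\n", .{try parse("42")});
    }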
@@ -234,6 +234,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
     switch (node.id) {
         .VarDecl => {
             const vari = node.cast(ast.Node.VarDecl).?;
+
             return resolveTypeOfNode(analysis_ctx, vari.type_node orelse vari.init_node.?) orelse null;
         },
         .ParamDecl => {
@@ -246,11 +247,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             }
         },
         .FnProto => {
-            const func = node.cast(ast.Node.FnProto).?;
-            switch (func.return_type) {
-                .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type),
-                .Invalid => {},
-            }
+            return node;
         },
         .Identifier => {
             if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, analysis_ctx.tree.getNodeSource(node))) |child| {
@@ -265,7 +262,16 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             const suffix_op = node.cast(ast.Node.SuffixOp).?;
             switch (suffix_op.op) {
                 .Call, .StructInitializer => {
-                    return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node);
+                    const func_or_struct_decl = resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node) orelse return null;
+
+                    if (func_or_struct_decl.id == .FnProto) {
+                        const func = func_or_struct_decl.cast(ast.Node.FnProto).?;
+                        switch (func.return_type) {
+                            .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type),
+                            .Invalid => {},
+                        }
+                    }
+                    return func_or_struct_decl;
                 },
                 else => {},
             }
@@ -21,17 +21,17 @@ pub const Handle = struct {
 };
 
 allocator: *std.mem.Allocator,
-handles: std.StringHashMap(Handle),
+handles: std.StringHashMap(*Handle),
 std_uri: ?[]const u8,
 
 pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !void {
     self.allocator = allocator;
-    self.handles = std.StringHashMap(Handle).init(allocator);
+    self.handles = std.StringHashMap(*Handle).init(allocator);
     errdefer self.handles.deinit();
 
     if (zig_lib_path) |zpath| {
-        const std_path = std.fs.path.resolve(allocator, &[_][]const u8 {
-            zpath, "./std/std.zig"
+        const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
+            zpath, "./std/std.zig",
         }) catch |err| block: {
             std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err});
             self.std_uri = null;
@@ -52,7 +52,10 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?
 fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
     std.debug.warn("Opened document: {}\n", .{uri});
 
-    var handle = Handle{
+    var handle = try self.allocator.create(Handle);
+    errdefer self.allocator.destroy(handle);
+
+    handle.* = Handle{
         .count = 1,
         .import_uris = std.ArrayList([]const u8).init(self.allocator),
         .document = .{
@@ -62,7 +65,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
         },
     };
     const kv = try self.handles.getOrPutValue(uri, handle);
-    return &kv.value;
+    return kv.value;
 }
 
 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
@@ -70,7 +73,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H
         std.debug.warn("Document already open: {}, incrementing count\n", .{uri});
         entry.value.count += 1;
         std.debug.warn("New count: {}\n", .{entry.value.count});
-        return &entry.value;
+        return entry.value;
     }
 
     const duped_text = try std.mem.dupe(self.allocator, u8, text);
@@ -100,6 +103,7 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
         const uri_key = entry.key;
         self.handles.removeAssertDiscard(uri);
         self.allocator.free(uri_key);
+        self.allocator.destroy(entry.value);
     }
 }
 
@@ -109,7 +113,7 @@ pub fn closeDocument(self: *DocumentStore, uri: []const u8) void {
 
 pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
     if (self.handles.get(uri)) |entry| {
-        return &entry.value;
+        return entry.value;
    }
 
     return null;
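Aside (not part of the diff): a plausible reason for storing `*Handle` in the map and allocating each Handle with `create`/`destroy` is pointer stability; the old code returned `&entry.value`, a pointer into the hash map's own storage, which can be invalidated when the map grows, while a heap-allocated Handle stays put. A minimal sketch of the new ownership pattern, mirroring newDocument with a hypothetical helper name:

    const std = @import("std");

    const Handle = struct {
        count: usize,
    };

    // Hypothetical helper following the same pattern as newDocument above.
    // (In the real code, openDocument checks for an existing handle first.)
    fn putHandle(map: *std.StringHashMap(*Handle), allocator: *std.mem.Allocator, uri: []const u8) !*Handle {
        var handle = try allocator.create(Handle);
        errdefer allocator.destroy(handle);

        handle.* = Handle{ .count = 1 };
        const kv = try map.getOrPutValue(uri, handle);
        return kv.value;
    }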
@@ -175,11 +179,11 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
         if (change.Object.getValue("range")) |range| {
             const start_pos = types.Position{
                 .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer,
-                .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer
+                .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer,
             };
             const end_pos = types.Position{
                 .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer,
-                .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer
+                .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer,
             };
 
             const change_text = change.Object.getValue("text").?.String;
@@ -200,12 +204,12 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
             // The first part of the string, [0 .. start_index] need not be changed.
             // We then copy the last part of the string, [end_index ..] to its
             // new position, [start_index + change_len .. ]
-            std.mem.copy(u8, document.mem[start_index + change_text.len..][0 .. old_len - end_index], document.mem[end_index .. old_len]);
+            std.mem.copy(u8, document.mem[start_index + change_text.len ..][0 .. old_len - end_index], document.mem[end_index..old_len]);
             // Finally, we copy the changes over.
-            std.mem.copy(u8, document.mem[start_index..][0 .. change_text.len], change_text);
+            std.mem.copy(u8, document.mem[start_index..][0..change_text.len], change_text);
 
             // Reset the text substring.
-            document.text = document.mem[0 .. new_len];
+            document.text = document.mem[0..new_len];
         } else {
             const change_text = change.Object.getValue("text").?.String;
             const old_len = document.text.len;
@@ -216,8 +220,8 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
                 document.mem = try self.allocator.realloc(document.mem, realloc_len);
             }
 
-            std.mem.copy(u8, document.mem[0 .. change_text.len], change_text);
-            document.text = document.mem[0 .. change_text.len];
+            std.mem.copy(u8, document.mem[0..change_text.len], change_text);
+            document.text = document.mem[0..change_text.len];
         }
     }
 
@@ -226,8 +230,7 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
 
 fn uriFromImportStr(store: *DocumentStore, allocator: *std.mem.Allocator, handle: Handle, import_str: []const u8) !?[]const u8 {
     return if (std.mem.eql(u8, import_str, "std"))
-        if (store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri)
-        else {
+        if (store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) else {
             std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
             return null;
         }
@@ -237,8 +240,8 @@ fn uriFromImportStr(store: *DocumentStore, allocator: *std.mem.Allocator, handle
     defer allocator.free(path);
 
     const dir_path = std.fs.path.dirname(path) orelse "";
-    const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 {
-        dir_path, import_str
+    const import_path = try std.fs.path.resolve(allocator, &[_][]const u8{
+        dir_path, import_str,
     });
 
     defer allocator.free(import_path);
@@ -336,6 +339,18 @@ pub const AnalysisContext = struct {
         return &self.tree.root_node.base;
     }
 
+    pub fn clone(self: *AnalysisContext) !AnalysisContext {
+        // Create a new tree so it can be destroyed by the cloned AnalysisContext without affecting the original
+        const tree = try self.handle.tree(self.store.allocator);
+        return AnalysisContext{
+            .store = self.store,
+            .handle = self.handle,
+            .arena = self.arena,
+            .tree = tree,
+            .scope_nodes = self.scope_nodes,
+        };
+    }
+
     pub fn deinit(self: *AnalysisContext) void {
         self.tree.deinit();
     }
@@ -367,6 +382,7 @@ pub fn deinit(self: *DocumentStore) void {
 
         entry.value.import_uris.deinit();
         self.allocator.free(entry.key);
+        self.allocator.destroy(entry.value);
     }
 
     self.handles.deinit();

src/main.zig (62 changed lines)
@@ -170,17 +170,17 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
     });
 }
 
-fn containerToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, container: *std.zig.ast.Node, config: Config) !void {
+fn containerToCompletion(list: *std.ArrayList(types.CompletionItem), analysis_ctx: *DocumentStore.AnalysisContext, container: *std.zig.ast.Node, config: Config) !void {
     var index: usize = 0;
     while (container.iterate(index)) |child_node| : (index += 1) {
-        if (analysis.isNodePublic(tree, child_node)) {
-            try nodeToCompletion(list, tree, child_node, config);
+        if (analysis.isNodePublic(analysis_ctx.tree, child_node)) {
+            try nodeToCompletion(list, analysis_ctx, child_node, config);
         }
     }
 }
 
-fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, config: Config) error{OutOfMemory}!void {
-    var doc = if (try analysis.getDocComments(list.allocator, tree, node)) |doc_comments|
+fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), analysis_ctx: *DocumentStore.AnalysisContext, node: *std.zig.ast.Node, config: Config) error{OutOfMemory}!void {
+    var doc = if (try analysis.getDocComments(list.allocator, analysis_ctx.tree, node)) |doc_comments|
         types.MarkupContent{
             .kind = .Markdown,
             .value = doc_comments,
@@ -190,23 +190,23 @@ fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.a
 
     switch (node.id) {
         .ErrorSetDecl, .Root, .ContainerDecl => {
-            try containerToCompletion(list, tree, node, config);
+            try containerToCompletion(list, analysis_ctx, node, config);
         },
         .FnProto => {
             const func = node.cast(std.zig.ast.Node.FnProto).?;
             if (func.name_token) |name_token| {
                 const insert_text = if (config.enable_snippets)
-                    try analysis.getFunctionSnippet(list.allocator, tree, func)
+                    try analysis.getFunctionSnippet(list.allocator, analysis_ctx.tree, func)
                 else
                     null;
 
-                const is_type_function = analysis.isTypeFunction(tree, func);
+                const is_type_function = analysis.isTypeFunction(analysis_ctx.tree, func);
 
                 try list.append(.{
-                    .label = tree.tokenSlice(name_token),
+                    .label = analysis_ctx.tree.tokenSlice(name_token),
                     .kind = if (is_type_function) .Struct else .Function,
                     .documentation = doc,
-                    .detail = analysis.getFunctionSignature(tree, func),
+                    .detail = analysis.getFunctionSignature(analysis_ctx.tree, func),
                     .insertText = insert_text,
                     .insertTextFormat = if (config.enable_snippets) .Snippet else .PlainText,
                 });
@@ -214,22 +214,37 @@ fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.a
         },
         .VarDecl => {
             const var_decl = node.cast(std.zig.ast.Node.VarDecl).?;
-            const is_const = tree.tokens.at(var_decl.mut_token).id == .Keyword_const;
+            const is_const = analysis_ctx.tree.tokens.at(var_decl.mut_token).id == .Keyword_const;
+
+            var child_analysis_context = try analysis_ctx.clone();
+            defer child_analysis_context.deinit();
+
+            const child_node = var_decl.type_node orelse var_decl.init_node.?;
+            const maybe_resolved_node = analysis.resolveTypeOfNode(&child_analysis_context, child_node);
+
+            if (maybe_resolved_node) |resolved_node| {
+                // Special case for function aliases
+                // In the future it might be used to print types of values instead of their declarations
+                if (resolved_node.id == .FnProto) {
+                    try nodeToCompletion(list, &child_analysis_context, resolved_node, config);
+                    return;
+                }
+            }
             try list.append(.{
-                .label = tree.tokenSlice(var_decl.name_token),
+                .label = analysis_ctx.tree.tokenSlice(var_decl.name_token),
                 .kind = if (is_const) .Constant else .Variable,
                 .documentation = doc,
-                .detail = analysis.getVariableSignature(tree, var_decl),
+                .detail = analysis.getVariableSignature(analysis_ctx.tree, var_decl),
             });
         },
         .ParamDecl => {
             const param = node.cast(std.zig.ast.Node.ParamDecl).?;
             if (param.name_token) |name_token|
                 try list.append(.{
-                    .label = tree.tokenSlice(name_token),
+                    .label = analysis_ctx.tree.tokenSlice(name_token),
                     .kind = .Constant,
                     .documentation = doc,
-                    .detail = analysis.getParamSignature(tree, param),
+                    .detail = analysis.getParamSignature(analysis_ctx.tree, param),
                 });
         },
         .PrefixOp => {
@@ -248,7 +263,7 @@ fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.a
                 .kind = .Field,
             });
         },
-        else => if (analysis.nodeToString(tree, node)) |string| {
+        else => if (analysis.nodeToString(analysis_ctx.tree, node)) |string| {
             try list.append(.{
                 .label = string,
                 .kind = .Field,
@@ -330,7 +345,7 @@ fn gotoDefinitionFieldAccess(id: i64, handle: *DocumentStore.Handle, position: t
     try respondGeneric(id, null_result_response);
 }
 
-fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, config: Config) !void {
+fn completeGlobal(id: i64, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
     var tree = try handle.tree(allocator);
     defer tree.deinit();
 
@@ -340,11 +355,17 @@ fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, confi
     // Deallocate all temporary data.
     defer arena.deinit();
 
+    var analysis_ctx = try document_store.analysisContext(handle, &arena, types.Position{
+        .line = 0,
+        .character = 0,
+    });
+    defer analysis_ctx.deinit();
+
     var decl_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
     try analysis.declsFromIndex(&decl_nodes, tree, pos_index);
     for (decl_nodes.items) |decl_ptr| {
         var decl = decl_ptr.*;
-        try nodeToCompletion(&completions, tree, decl_ptr, config);
+        try nodeToCompletion(&completions, &analysis_ctx, decl_ptr, config);
     }
 
     try send(types.Response{
@@ -372,9 +393,8 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     const line_length = line.len - line_start_idx;
 
     if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer, line_length)) |node| {
-        try nodeToCompletion(&completions, analysis_ctx.tree, node, config);
+        try nodeToCompletion(&completions, &analysis_ctx, node, config);
     }
-
     try send(types.Response{
         .id = .{ .Integer = id },
         .result = .{
@@ -624,7 +644,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
                 },
             },
         }),
-        .var_access, .empty => try completeGlobal(id, pos_index, handle.*, config),
+        .var_access, .empty => try completeGlobal(id, pos_index, handle, config),
         .field_access => |start_idx| try completeFieldAccess(id, handle, pos, start_idx, config),
         else => try respondGeneric(id, no_completions_response),
     }