Merge pull request #111 from alexnask/master
Add usingnamespace support
commit e5656b9651
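For context: a usingnamespace declaration pulls the public declarations of another container into the surrounding one, so names can be reached through a namespace that only re-exports them. That is the lookup this PR teaches the language server to resolve. The snippet below is only an illustration of the language feature, not code from this commit; the names utils and tau are invented for the example.

    const std = @import("std");

    const utils = struct {
        // Pull std.math's declarations into `utils`.
        usingnamespace std.math;
    };

    // `utils.pi` resolves through the usingnamespace declaration above;
    // following that indirection is what "usingnamespace support" means here.
    const tau = utils.pi * 2.0;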
src/analysis.zig: 1278 changed lines (file diff suppressed because it is too large)
@@ -21,6 +21,7 @@ pub const Handle = struct {
 count: usize,
 import_uris: std.ArrayList([]const u8),
 tree: *std.zig.ast.Tree,
+document_scope: analysis.DocumentScope,

 associated_build_file: ?*BuildFile,
 is_build_file: ?*BuildFile,
@@ -77,6 +78,7 @@ handles: std.StringHashMap(*Handle),
 zig_exe_path: ?[]const u8,
 build_files: std.ArrayListUnmanaged(*BuildFile),
 build_runner_path: []const u8,
+std_uri: ?[]const u8,

 error_completions: TagStore,
 enum_completions: TagStore,
@@ -86,12 +88,14 @@ pub fn init(
 allocator: *std.mem.Allocator,
 zig_exe_path: ?[]const u8,
 build_runner_path: []const u8,
+zig_lib_path: ?[]const u8,
 ) !void {
 self.allocator = allocator;
 self.handles = std.StringHashMap(*Handle).init(allocator);
 self.zig_exe_path = zig_exe_path;
 self.build_files = .{};
 self.build_runner_path = build_runner_path;
+self.std_uri = try stdUriFromLibPath(allocator, zig_lib_path);
 self.error_completions = TagStore.init(allocator);
 self.enum_completions = TagStore.init(allocator);
 }
@@ -191,6 +195,12 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 var handle = try self.allocator.create(Handle);
 errdefer self.allocator.destroy(handle);

+const tree = try std.zig.parse(self.allocator, text);
+errdefer tree.deinit();
+
+const document_scope = try analysis.makeDocumentScope(self.allocator, tree);
+errdefer document_scope.deinit(self.allocator);
+
 handle.* = Handle{
 .count = 1,
 .import_uris = std.ArrayList([]const u8).init(self.allocator),
@@ -199,7 +209,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 .text = text,
 .mem = text,
 },
-.tree = try std.zig.parse(self.allocator, text),
+.tree = tree,
+.document_scope = document_scope,
 .associated_build_file = null,
 .is_build_file = null,
 };
@@ -362,6 +373,9 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
 handle.tree.deinit();
 handle.tree = try std.zig.parse(self.allocator, handle.document.text);

+handle.document_scope.deinit(self.allocator);
+handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
+
 // TODO: Better algorithm or data structure?
 // Removing the imports is costly since they live in an array list
 // Perhaps we should use an AutoHashMap([]const u8, {}) ?
@@ -382,7 +396,7 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const

 const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
 for (import_strs.items) |str| {
-const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str, std_uri)) orelse continue;
+const uri = (try self.uriFromImportStr(&arena.allocator, handle.*, str)) orelse continue;

 var idx: usize = 0;
 exists_loop: while (idx < still_exist.len) : (idx += 1) {
@@ -485,14 +499,13 @@ pub fn applyChanges(
 }

 pub fn uriFromImportStr(
-store: *DocumentStore,
+self: *DocumentStore,
 allocator: *std.mem.Allocator,
 handle: Handle,
 import_str: []const u8,
-std_uri: ?[]const u8,
 ) !?[]const u8 {
 if (std.mem.eql(u8, import_str, "std")) {
-if (std_uri) |uri| return try std.mem.dupe(allocator, u8, uri) else {
+if (self.std_uri) |uri| return try std.mem.dupe(allocator, u8, uri) else {
 std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
 return null;
 }
@@ -523,132 +536,72 @@ pub fn uriFromImportStr(
 }
 }

-pub const AnalysisContext = struct {
-store: *DocumentStore,
-handle: *Handle,
-// This arena is used for temporary allocations while analyzing,
-// not for the tree allocations.
-arena: *std.heap.ArenaAllocator,
-scope_nodes: []*std.zig.ast.Node,
-in_container: *std.zig.ast.Node,
-std_uri: ?[]const u8,
-error_completions: *TagStore,
-enum_completions: *TagStore,
+pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const u8) !?*Handle {
+const allocator = self.allocator;
+const final_uri = (try self.uriFromImportStr(
+self.allocator,
+handle.*,
+import_str,
+)) orelse return null;

-pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
-return self.handle.tree;
-}
+std.debug.warn("Import final URI: {}\n", .{final_uri});
+var consumed_final_uri = false;
+defer if (!consumed_final_uri) allocator.free(final_uri);

-fn refreshScopeNodes(self: *AnalysisContext) !void {
-var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&self.arena.allocator);
-try analysis.addChildrenNodes(&scope_nodes, self.tree(), &self.tree().root_node.base);
-self.scope_nodes = scope_nodes.items;
-self.in_container = &self.tree().root_node.base;
-}
+// Check if we already imported this.
+for (handle.import_uris.items) |uri| {
+// If we did, set our new handle and return the parsed tree root node.
+if (std.mem.eql(u8, uri, final_uri)) {
+return self.getHandle(final_uri);

-pub fn onContainer(self: *AnalysisContext, container: *std.zig.ast.Node) !void {
-std.debug.assert(container.id == .ContainerDecl or container.id == .Root);
-
-if (self.in_container != container) {
-self.in_container = container;
-
-var scope_nodes = std.ArrayList(*std.zig.ast.Node).fromOwnedSlice(&self.arena.allocator, self.scope_nodes);
-try analysis.addChildrenNodes(&scope_nodes, self.tree(), container);
-self.scope_nodes = scope_nodes.items;
 }
 }

-pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
-const allocator = self.store.allocator;
-const final_uri = (try uriFromImportStr(
-self.store,
-self.store.allocator,
-self.handle.*,
-import_str,
-self.std_uri,
-)) orelse return null;
+// New import.
+// Check if the import is already opened by others.
+if (self.getHandle(final_uri)) |new_handle| {
+// If it is, append it to our imports, increment the count, set our new handle
+// and return the parsed tree root node.
+try handle.import_uris.append(final_uri);
+consumed_final_uri = true;

-std.debug.warn("Import final URI: {}\n", .{final_uri});
-var consumed_final_uri = false;
-defer if (!consumed_final_uri) allocator.free(final_uri);
+new_handle.count += 1;
+return new_handle;
+}

-// Check if we already imported this.
-for (self.handle.import_uris.items) |uri| {
-// If we did, set our new handle and return the parsed tree root node.
-if (std.mem.eql(u8, uri, final_uri)) {
-self.handle = self.store.getHandle(final_uri) orelse return null;
-try self.refreshScopeNodes();
-return &self.tree().root_node.base;
-}
-}
+// New document, read the file then call into openDocument.
+const file_path = try URI.parse(allocator, final_uri);
+defer allocator.free(file_path);

-// New import.
-// Check if the import is already opened by others.
-if (self.store.getHandle(final_uri)) |new_handle| {
-// If it is, append it to our imports, increment the count, set our new handle
-// and return the parsed tree root node.
-try self.handle.import_uris.append(final_uri);
-consumed_final_uri = true;
+var file = std.fs.cwd().openFile(file_path, .{}) catch {
+std.debug.warn("Cannot open import file {}\n", .{file_path});
+return null;
+};

-new_handle.count += 1;
-self.handle = new_handle;
-try self.refreshScopeNodes();
-return &self.tree().root_node.base;
-}
+defer file.close();
+const size = std.math.cast(usize, try file.getEndPos()) catch std.math.maxInt(usize);

-// New document, read the file then call into openDocument.
-const file_path = try URI.parse(allocator, final_uri);
-defer allocator.free(file_path);
+{
+const file_contents = try allocator.alloc(u8, size);
+errdefer allocator.free(file_contents);

-var file = std.fs.cwd().openFile(file_path, .{}) catch {
-std.debug.warn("Cannot open import file {}\n", .{file_path});
+file.inStream().readNoEof(file_contents) catch {
+std.debug.warn("Could not read from file {}\n", .{file_path});
 return null;
 };

-defer file.close();
-const size = std.math.cast(usize, try file.getEndPos()) catch std.math.maxInt(usize);
+// Add to import table of current handle.
+try handle.import_uris.append(final_uri);
+consumed_final_uri = true;

-{
-const file_contents = try allocator.alloc(u8, size);
-errdefer allocator.free(file_contents);
-file.inStream().readNoEof(file_contents) catch {
-std.debug.warn("Could not read from file {}\n", .{file_path});
-return null;
-};
+// Swap handles.
+// This takes ownership of the passed uri and text.
+const duped_final_uri = try std.mem.dupe(allocator, u8, final_uri);
+errdefer allocator.free(duped_final_uri);
+return try self.newDocument(duped_final_uri, file_contents);
-
-// Add to import table of current handle.
-try self.handle.import_uris.append(final_uri);
-consumed_final_uri = true;
-
-// Swap handles.
-// This takes ownership of the passed uri and text.
-const duped_final_uri = try std.mem.dupe(allocator, u8, final_uri);
-errdefer allocator.free(duped_final_uri);
-self.handle = try newDocument(self.store, duped_final_uri, file_contents);
-}
-
-try self.refreshScopeNodes();
-return &self.tree().root_node.base;
 }
+}

-pub fn clone(self: *AnalysisContext) !AnalysisContext {
-// Copy the cope nodes, the rest are references
-// that are not owned by the context.
-return AnalysisContext{
-.store = self.store,
-.handle = self.handle,
-.arena = self.arena,
-.scope_nodes = try std.mem.dupe(&self.arena.allocator, *std.zig.ast.Node, self.scope_nodes),
-.in_container = self.in_container,
-.std_uri = self.std_uri,
-.error_completions = self.error_completions,
-.enum_completions = self.enum_completions,
-};
-}
-};
+fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {

-pub fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
 if (zig_lib_path) |zpath| {
 const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
 zpath, "./std/std.zig",
@@ -665,29 +618,6 @@ pub fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u
 return null;
 }

-pub fn analysisContext(
-self: *DocumentStore,
-handle: *Handle,
-arena: *std.heap.ArenaAllocator,
-position: usize,
-zig_lib_path: ?[]const u8,
-) !AnalysisContext {
-var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
-const in_container = try analysis.declsFromIndex(arena, &scope_nodes, handle.tree, position);
-
-const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
-return AnalysisContext{
-.store = self,
-.handle = handle,
-.arena = arena,
-.scope_nodes = scope_nodes.items,
-.in_container = in_container,
-.std_uri = std_uri,
-.error_completions = &self.error_completions,
-.enum_completions = &self.enum_completions,
-};
-}
-
 pub fn deinit(self: *DocumentStore) void {
 var entry_iterator = self.handles.iterator();
 while (entry_iterator.next()) |entry| {
@@ -700,6 +630,8 @@ pub fn deinit(self: *DocumentStore) void {
 entry.value.import_uris.deinit();
 self.allocator.free(entry.key);
 self.allocator.destroy(entry.value);
+
+entry.value.document_scope.deinit(self.allocator);
 }

 self.handles.deinit();
@@ -713,6 +645,10 @@ pub fn deinit(self: *DocumentStore) void {
 self.allocator.destroy(build_file);
 }

+if (self.std_uri) |std_uri| {
+self.allocator.free(std_uri);
+}
+
 self.build_files.deinit(self.allocator);
 self.error_completions.deinit();
 self.enum_completions.deinit();
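Taken together, the hunks above move import resolution from the old AnalysisContext onto DocumentStore itself: uriFromImportStr becomes a method that reads self.std_uri, onImport is replaced by resolveImport, and every Handle now carries a precomputed document_scope that newDocument builds and refreshDocument rebuilds. A rough caller-side sketch of the reworked API, assuming allocator, zig_exe_path, build_runner_path, zig_lib_path and handle are provided by the surrounding server code as they are in src/main.zig:

    // Sketch only; mirrors the signatures introduced above.
    var store: DocumentStore = undefined;
    try store.init(allocator, zig_exe_path, build_runner_path, zig_lib_path);
    defer store.deinit();

    // Imports are resolved through the store instead of an AnalysisContext;
    // a successful resolution returns the (possibly newly opened) target Handle.
    if (try store.resolveImport(handle, "std")) |imported| {
        // Scope information is precomputed per Handle, so callers can read it
        // without re-walking the tree on every request.
        _ = imported.document_scope;
    }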
src/main.zig: 352 changed lines
@@ -81,7 +81,7 @@ fn respondGeneric(id: types.RequestId, response: []const u8) !void {
 const json_fmt = "{{\"jsonrpc\":\"2.0\",\"id\":";

 const stdout_stream = stdout.outStream();
-try stdout_stream.print("Content-Length: {}\r\n\r\n" ++ json_fmt, .{ response.len + id_len + json_fmt.len - 1 });
+try stdout_stream.print("Content-Length: {}\r\n\r\n" ++ json_fmt, .{response.len + id_len + json_fmt.len - 1});
 switch (id) {
 .Integer => |int| try stdout_stream.print("{}", .{int}),
 .String => |str| try stdout_stream.print("\"{}\"", .{str}),
@@ -98,7 +98,7 @@ fn showMessage(@"type": types.MessageType, message: []const u8) !void {
 .params = .{
 .ShowMessageParams = .{
 .@"type" = @"type",
-.message = message
+.message = message,
 },
 },
 });
@@ -196,33 +196,14 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
 });
 }

-fn containerToCompletion(
-list: *std.ArrayList(types.CompletionItem),
-analysis_ctx: *DocumentStore.AnalysisContext,
-orig_handle: *DocumentStore.Handle,
-container: *std.zig.ast.Node,
-config: Config,
-) !void {
-var child_idx: usize = 0;
-while (container.iterate(child_idx)) |child_node| : (child_idx += 1) {
-// Declarations in the same file do not need to be public.
-if (orig_handle == analysis_ctx.handle or analysis.isNodePublic(analysis_ctx.tree(), child_node)) {
-try nodeToCompletion(list, analysis_ctx, orig_handle, child_node, config);
-}
-}
-}
-
-const ResolveVarDeclFnAliasRewsult = struct {
-decl: *std.zig.ast.Node,
-analysis_ctx: DocumentStore.AnalysisContext,
-};
-
-fn resolveVarDeclFnAlias(analysis_ctx: *DocumentStore.AnalysisContext, decl: *std.zig.ast.Node) !ResolveVarDeclFnAliasRewsult {
-var child_analysis_context = try analysis_ctx.clone();
+fn resolveVarDeclFnAlias(arena: *std.heap.ArenaAllocator, decl_handle: analysis.NodeWithHandle) !analysis.NodeWithHandle {
+const decl = decl_handle.node;
+const handle = decl_handle.handle;
+
 if (decl.cast(std.zig.ast.Node.VarDecl)) |var_decl| {
 const child_node = block: {
 if (var_decl.type_node) |type_node| {
-if (std.mem.eql(u8, "type", analysis_ctx.tree().tokenSlice(type_node.firstToken()))) {
+if (std.mem.eql(u8, "type", handle.tree.tokenSlice(type_node.firstToken()))) {
 break :block var_decl.init_node orelse type_node;
 }
 break :block type_node;
@@ -230,29 +211,29 @@ fn resolveVarDeclFnAlias(analysis_ctx: *DocumentStore.AnalysisContext, decl: *st
 break :block var_decl.init_node.?;
 };

-if (analysis.resolveTypeOfNode(&child_analysis_context, child_node)) |resolved_node| {
-if (resolved_node.id == .FnProto) {
-return ResolveVarDeclFnAliasRewsult{
-.decl = resolved_node,
-.analysis_ctx = child_analysis_context,
-};
+if (try analysis.resolveTypeOfNode(&document_store, arena, .{ .node = child_node, .handle = handle })) |resolved_node| {
+// TODO Just return it anyway?
+// This would allow deep goto definition etc.
+// Try it out.
+if (resolved_node.node.id == .FnProto) {
+return resolved_node;
 }
 }
 }
-return ResolveVarDeclFnAliasRewsult{
-.decl = decl,
-.analysis_ctx = analysis_ctx.*,
-};
+return decl_handle;
 }

 fn nodeToCompletion(
+arena: *std.heap.ArenaAllocator,
 list: *std.ArrayList(types.CompletionItem),
-analysis_ctx: *DocumentStore.AnalysisContext,
+node_handle: analysis.NodeWithHandle,
 orig_handle: *DocumentStore.Handle,
-node: *std.zig.ast.Node,
 config: Config,
 ) error{OutOfMemory}!void {
-const doc = if (try analysis.getDocComments(list.allocator, analysis_ctx.tree(), node)) |doc_comments|
+const node = node_handle.node;
+const handle = node_handle.handle;
+
+const doc = if (try analysis.getDocComments(list.allocator, handle.tree, node)) |doc_comments|
 types.MarkupContent{
 .kind = .Markdown,
 .value = doc_comments,
@@ -262,7 +243,13 @@ fn nodeToCompletion(

 switch (node.id) {
 .ErrorSetDecl, .Root, .ContainerDecl => {
-try containerToCompletion(list, analysis_ctx, orig_handle, node, config);
+const context = DeclToCompletionContext{
+.completions = list,
+.config = &config,
+.arena = arena,
+.orig_handle = orig_handle,
+};
+try analysis.iterateSymbolsContainer(&document_store, arena, node_handle, orig_handle, declToCompletion, context);
 },
 .FnProto => {
 const func = node.cast(std.zig.ast.Node.FnProto).?;
@@ -271,30 +258,47 @@ fn nodeToCompletion(

 const insert_text = if (use_snippets) blk: {
 const skip_self_param = if (func.params_len > 0) param_check: {
-var child_analysis_ctx = try analysis_ctx.clone();
-break :param_check switch (func.paramsConst()[0].param_type) {
-.type_expr => |type_node| if (analysis_ctx.in_container == analysis.resolveTypeOfNode(&child_analysis_ctx, type_node))
-true
-else if (type_node.cast(std.zig.ast.Node.PrefixOp)) |prefix_op|
-prefix_op.op == .PtrType and analysis_ctx.in_container == analysis.resolveTypeOfNode(&child_analysis_ctx, prefix_op.rhs)
-else
-false,
-else => false,
-};
+const in_container = analysis.innermostContainer(handle, handle.tree.token_locs[func.firstToken()].start);
+switch (func.paramsConst()[0].param_type) {
+.type_expr => |type_node| {
+if (try analysis.resolveTypeOfNode(&document_store, arena, .{
+.node = type_node,
+.handle = handle,
+})) |resolved_type| {
+if (in_container.node == resolved_type.node)
+break :param_check true;
+}
+
+if (type_node.cast(std.zig.ast.Node.PrefixOp)) |prefix_op| {
+if (prefix_op.op == .PtrType) {
+if (try analysis.resolveTypeOfNode(&document_store, arena, .{
+.node = prefix_op.rhs,
+.handle = handle,
+})) |resolved_prefix_op| {
+if (in_container.node == resolved_prefix_op.node)
+break :param_check true;
+}
+}
+}
+
+break :param_check false;
+},
+else => break :param_check false,
+}
 } else
 false;

-break :blk try analysis.getFunctionSnippet(list.allocator, analysis_ctx.tree(), func, skip_self_param);
+break :blk try analysis.getFunctionSnippet(&arena.allocator, handle.tree, func, skip_self_param);
 } else
 null;

-const is_type_function = analysis.isTypeFunction(analysis_ctx.tree(), func);
+const is_type_function = analysis.isTypeFunction(handle.tree, func);

 try list.append(.{
-.label = analysis_ctx.tree().tokenSlice(name_token),
+.label = handle.tree.tokenSlice(name_token),
 .kind = if (is_type_function) .Struct else .Function,
 .documentation = doc,
-.detail = analysis.getFunctionSignature(analysis_ctx.tree(), func),
+.detail = analysis.getFunctionSignature(handle.tree, func),
 .insertText = insert_text,
 .insertTextFormat = if (use_snippets) .Snippet else .PlainText,
 });
@@ -302,18 +306,27 @@ fn nodeToCompletion(
 },
 .VarDecl => {
 const var_decl = node.cast(std.zig.ast.Node.VarDecl).?;
-const is_const = analysis_ctx.tree().token_ids[var_decl.mut_token] == .Keyword_const;
+const is_const = handle.tree.token_ids[var_decl.mut_token] == .Keyword_const;

-var result = try resolveVarDeclFnAlias(analysis_ctx, node);
-if (result.decl != node) {
-return try nodeToCompletion(list, &result.analysis_ctx, orig_handle, result.decl, config);
+const result = try resolveVarDeclFnAlias(arena, node_handle);
+if (result.node != node) {
+return try nodeToCompletion(arena, list, result, orig_handle, config);
 }

 try list.append(.{
-.label = analysis_ctx.tree().tokenSlice(var_decl.name_token),
+.label = handle.tree.tokenSlice(var_decl.name_token),
 .kind = if (is_const) .Constant else .Variable,
 .documentation = doc,
-.detail = analysis.getVariableSignature(analysis_ctx.tree(), var_decl),
+.detail = analysis.getVariableSignature(handle.tree, var_decl),
+});
+},
+.ContainerField => {
+const field = node.cast(std.zig.ast.Node.ContainerField).?;
+try list.append(.{
+.label = handle.tree.tokenSlice(field.name_token),
+.kind = .Field,
+.documentation = doc,
+.detail = analysis.getContainerFieldSignature(handle.tree, field),
 });
 },
 .PrefixOp => {
@@ -346,12 +359,12 @@ fn nodeToCompletion(
 .kind = .Field,
 });
 },
-else => if (analysis.nodeToString(analysis_ctx.tree(), node)) |string| {
+else => if (analysis.nodeToString(handle.tree, node)) |string| {
 try list.append(.{
 .label = string,
 .kind = .Field,
 .documentation = doc,
-.detail = analysis_ctx.tree().getNodeSource(node)
+.detail = handle.tree.getNodeSource(node),
 });
 },
 }
@@ -376,44 +389,94 @@ fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []cons
 return text[start_idx + 1 .. end_idx];
 }

-fn gotoDefinitionSymbol(id: types.RequestId, analysis_ctx: *DocumentStore.AnalysisContext, decl: *std.zig.ast.Node) !void {
-const result = try resolveVarDeclFnAlias(analysis_ctx, decl);
+fn gotoDefinitionSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle: analysis.DeclWithHandle) !void {
+var handle = decl_handle.handle;

-const name_token = analysis.getDeclNameToken(result.analysis_ctx.tree(), result.decl) orelse
-return try respondGeneric(id, null_result_response);
+const location = switch (decl_handle.decl.*) {
+.ast_node => |node| block: {
+const result = try resolveVarDeclFnAlias(arena, .{ .node = node, .handle = handle });
+handle = result.handle;
+
+const name_token = analysis.getDeclNameToken(result.handle.tree, result.node) orelse
+return try respondGeneric(id, null_result_response);
+break :block result.handle.tree.tokenLocation(0, name_token);
+},
+else => decl_handle.location(),
+};

 try send(types.Response{
 .id = id,
 .result = .{
 .Location = .{
-.uri = result.analysis_ctx.handle.document.uri,
-.range = astLocationToRange(result.analysis_ctx.tree().tokenLocation(0, name_token)),
+.uri = handle.document.uri,
+.range = astLocationToRange(location),
 },
 },
 });
 }

-fn hoverSymbol(id: types.RequestId, analysis_ctx: *DocumentStore.AnalysisContext, decl: *std.zig.ast.Node) !void {
-const result = try resolveVarDeclFnAlias(analysis_ctx, decl);
+fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle: analysis.DeclWithHandle) !void {
+const handle = decl_handle.handle;

-const doc_str = if (try analysis.getDocComments(&analysis_ctx.arena.allocator, result.analysis_ctx.tree(), result.decl)) |str|
-str
-else
-"";
+const md_string = switch (decl_handle.decl.*) {
+.ast_node => |node| ast_node: {
+const result = try resolveVarDeclFnAlias(arena, .{ .node = node, .handle = handle });

-const signature_str = switch (result.decl.id) {
-.VarDecl => blk: {
-const var_decl = result.decl.cast(std.zig.ast.Node.VarDecl).?;
-break :blk analysis.getVariableSignature(result.analysis_ctx.tree(), var_decl);
+const doc_str = if (try analysis.getDocComments(&arena.allocator, result.handle.tree, result.node)) |str|
+str
+else
+"";
+
+const signature_str = switch (result.node.id) {
+.VarDecl => blk: {
+const var_decl = result.node.cast(std.zig.ast.Node.VarDecl).?;
+break :blk analysis.getVariableSignature(result.handle.tree, var_decl);
+},
+.FnProto => blk: {
+const fn_decl = result.node.cast(std.zig.ast.Node.FnProto).?;
+break :blk analysis.getFunctionSignature(result.handle.tree, fn_decl);
+},
+.ContainerField => blk: {
+const field = node.cast(std.zig.ast.Node.ContainerField).?;
+break :blk analysis.getContainerFieldSignature(result.handle.tree, field);
+},
+else => analysis.nodeToString(result.handle.tree, result.node) orelse return try respondGeneric(id, null_result_response),
+};
+
+break :ast_node try std.fmt.allocPrint(&arena.allocator, "```zig\n{}\n```\n{}", .{ signature_str, doc_str });
 },
-.FnProto => blk: {
-const fn_decl = result.decl.cast(std.zig.ast.Node.FnProto).?;
-break :blk analysis.getFunctionSignature(result.analysis_ctx.tree(), fn_decl);
+.param_decl => |param| param_decl: {
+const doc_str = if (param.doc_comments) |doc_comments|
+try analysis.collectDocComments(&arena.allocator, handle.tree, doc_comments)
+else
+"";
+
+break :param_decl try std.fmt.allocPrint(
+&arena.allocator,
+"```zig\n{}\n```\n{}",
+.{
+handle.tree.source[handle.tree.token_locs[param.firstToken()].start..handle.tree.token_locs[param.lastToken()].end],
+doc_str,
+},
+);
 },
-else => analysis.nodeToString(result.analysis_ctx.tree(), result.decl) orelse return try respondGeneric(id, null_result_response),
+.pointer_payload => |payload| try std.fmt.allocPrint(
+&arena.allocator,
+"```zig\n{}\n```",
+.{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())},
+),
+.array_payload => |payload| try std.fmt.allocPrint(
+&arena.allocator,
+"```zig\n{}\n```",
+.{handle.tree.tokenSlice(payload.identifier.firstToken())},
+),
+.switch_payload => |payload| try std.fmt.allocPrint(
+&arena.allocator,
+"```zig\n{}\n```",
+.{handle.tree.tokenSlice(payload.node.value_symbol.firstToken())},
+),
 };

-const md_string = try std.fmt.allocPrint(&analysis_ctx.arena.allocator, "```zig\n{}\n```\n{}", .{ signature_str, doc_str });
 try send(types.Response{
 .id = id,
 .result = .{
@@ -424,50 +487,45 @@ fn hoverSymbol(id: types.RequestId, analysis_ctx: *DocumentStore.AnalysisContext
 });
 }

-fn getSymbolGlobal(arena: *std.heap.ArenaAllocator, pos_index: usize, handle: DocumentStore.Handle) !?*std.zig.ast.Node {
-const name = identifierFromPosition(pos_index, handle);
+fn getSymbolGlobal(arena: *std.heap.ArenaAllocator, pos_index: usize, handle: *DocumentStore.Handle) !?analysis.DeclWithHandle {
+const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;

-var decl_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
-_ = try analysis.declsFromIndex(arena, &decl_nodes, handle.tree, pos_index);
-
-return analysis.getChildOfSlice(handle.tree, decl_nodes.items, name);
+return try analysis.lookupSymbolGlobal(&document_store, arena, handle, name, pos_index);
 }

 fn gotoDefinitionGlobal(id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
 var arena = std.heap.ArenaAllocator.init(allocator);
 defer arena.deinit();

-const decl = (try getSymbolGlobal(&arena, pos_index, handle.*)) orelse return try respondGeneric(id, null_result_response);
-var analysis_ctx = try document_store.analysisContext(handle, &arena, pos_index, config.zig_lib_path);
-return try gotoDefinitionSymbol(id, &analysis_ctx, decl);
+const decl = (try getSymbolGlobal(&arena, pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
+return try gotoDefinitionSymbol(id, &arena, decl);
 }

 fn hoverDefinitionGlobal(id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
 var arena = std.heap.ArenaAllocator.init(allocator);
 defer arena.deinit();

-const decl = (try getSymbolGlobal(&arena, pos_index, handle.*)) orelse return try respondGeneric(id, null_result_response);
-var analysis_ctx = try document_store.analysisContext(handle, &arena, pos_index, config.zig_lib_path);
-return try hoverSymbol(id, &analysis_ctx, decl);
+const decl = (try getSymbolGlobal(&arena, pos_index, handle)) orelse return try respondGeneric(id, null_result_response);
+return try hoverSymbol(id, &arena, decl);
 }

 fn getSymbolFieldAccess(
-analysis_ctx: *DocumentStore.AnalysisContext,
+handle: *DocumentStore.Handle,
+arena: *std.heap.ArenaAllocator,
 position: types.Position,
 range: analysis.SourceRange,
 config: Config,
-) !?*std.zig.ast.Node {
-const pos_index = try analysis_ctx.handle.document.positionToIndex(position);
-var name = identifierFromPosition(pos_index, analysis_ctx.handle.*);
+) !?analysis.DeclWithHandle {
+const pos_index = try handle.document.positionToIndex(position);
+const name = identifierFromPosition(pos_index, handle.*);
 if (name.len == 0) return null;

-const line = try analysis_ctx.handle.document.getLine(@intCast(usize, position.line));
+const line = try handle.document.getLine(@intCast(usize, position.line));
 var tokenizer = std.zig.Tokenizer.init(line[range.start..range.end]);

-name = try std.mem.dupe(&analysis_ctx.arena.allocator, u8, name);
-if (analysis.getFieldAccessTypeNode(analysis_ctx, &tokenizer)) |container| {
-return analysis.getChild(analysis_ctx.tree(), container, name);
+if (try analysis.getFieldAccessTypeNode(&document_store, arena, handle, pos_index, &tokenizer)) |container_handle| {
+return try analysis.lookupSymbolContainer(&document_store, arena, container_handle, name, true);
 }
 return null;
 }
@@ -482,9 +540,8 @@ fn gotoDefinitionFieldAccess(
 var arena = std.heap.ArenaAllocator.init(allocator);
 defer arena.deinit();

-var analysis_ctx = try document_store.analysisContext(handle, &arena, try handle.document.positionToIndex(position), config.zig_lib_path);
-const decl = (try getSymbolFieldAccess(&analysis_ctx, position, range, config)) orelse return try respondGeneric(id, null_result_response);
-return try gotoDefinitionSymbol(id, &analysis_ctx, decl);
+const decl = (try getSymbolFieldAccess(handle, &arena, position, range, config)) orelse return try respondGeneric(id, null_result_response);
+return try gotoDefinitionSymbol(id, &arena, decl);
 }

 fn hoverDefinitionFieldAccess(
@@ -497,9 +554,8 @@ fn hoverDefinitionFieldAccess(
 var arena = std.heap.ArenaAllocator.init(allocator);
 defer arena.deinit();

-var analysis_ctx = try document_store.analysisContext(handle, &arena, try handle.document.positionToIndex(position), config.zig_lib_path);
-const decl = (try getSymbolFieldAccess(&analysis_ctx, position, range, config)) orelse return try respondGeneric(id, null_result_response);
-return try hoverSymbol(id, &analysis_ctx, decl);
+const decl = (try getSymbolFieldAccess(handle, &arena, position, range, config)) orelse return try respondGeneric(id, null_result_response);
+return try hoverSymbol(id, &arena, decl);
 }

 fn gotoDefinitionString(id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
@@ -513,7 +569,6 @@ fn gotoDefinitionString(id: types.RequestId, pos_index: usize, handle: *Document
 &arena.allocator,
 handle.*,
 import_str,
-try DocumentStore.stdUriFromLibPath(&arena.allocator, config.zig_lib_path),
 )) orelse return try respondGeneric(id, null_result_response);

 try send(types.Response{
@@ -530,6 +585,55 @@ fn gotoDefinitionString(id: types.RequestId, pos_index: usize, handle: *Document
 });
 }

+const DeclToCompletionContext = struct {
+completions: *std.ArrayList(types.CompletionItem),
+config: *const Config,
+arena: *std.heap.ArenaAllocator,
+orig_handle: *DocumentStore.Handle,
+};
+
+fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) !void {
+const tree = decl_handle.handle.tree;
+
+switch (decl_handle.decl.*) {
+.ast_node => |node| try nodeToCompletion(context.arena, context.completions, .{ .node = node, .handle = decl_handle.handle }, context.orig_handle, context.config.*),
+.param_decl => |param| {
+const doc = if (param.doc_comments) |doc_comments|
+types.MarkupContent{
+.kind = .Markdown,
+.value = try analysis.collectDocComments(&context.arena.allocator, tree, doc_comments),
+}
+else
+null;
+
+try context.completions.append(.{
+.label = tree.tokenSlice(param.name_token.?),
+.kind = .Constant,
+.documentation = doc,
+.detail = tree.source[tree.token_locs[param.firstToken()].start..tree.token_locs[param.lastToken()].end],
+});
+},
+.pointer_payload => |payload| {
+try context.completions.append(.{
+.label = tree.tokenSlice(payload.node.value_symbol.firstToken()),
+.kind = .Variable,
+});
+},
+.array_payload => |payload| {
+try context.completions.append(.{
+.label = tree.tokenSlice(payload.identifier.firstToken()),
+.kind = .Variable,
+});
+},
+.switch_payload => |payload| {
+try context.completions.append(.{
+.label = tree.tokenSlice(payload.node.value_symbol.firstToken()),
+.kind = .Variable,
+});
+},
+}
+}
+
 fn completeGlobal(id: types.RequestId, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
 // We use a local arena allocator to deallocate all temporary data without iterating
 var arena = std.heap.ArenaAllocator.init(allocator);
@@ -537,16 +641,13 @@ fn completeGlobal(id: types.RequestId, pos_index: usize, handle: *DocumentStore.
 // Deallocate all temporary data.
 defer arena.deinit();

-var analysis_ctx = try document_store.analysisContext(handle, &arena, pos_index, config.zig_lib_path);
-for (analysis_ctx.scope_nodes) |decl_ptr| {
-var decl = decl_ptr.*;
-if (decl.id == .Use) {
-std.debug.warn("Found use!", .{});
-continue;
-}
-
-try nodeToCompletion(&completions, &analysis_ctx, handle, decl_ptr, config);
-}
+const context = DeclToCompletionContext{
+.completions = &completions,
+.config = &config,
+.arena = &arena,
+.orig_handle = handle,
+};
+try analysis.iterateSymbolsGlobal(&document_store, &arena, handle, pos_index, declToCompletion, context);

 try send(types.Response{
 .id = id,
@@ -563,15 +664,16 @@ fn completeFieldAccess(id: types.RequestId, handle: *DocumentStore.Handle, posit
 var arena = std.heap.ArenaAllocator.init(allocator);
 defer arena.deinit();

-var analysis_ctx = try document_store.analysisContext(handle, &arena, try handle.document.positionToIndex(position), config.zig_lib_path);
 var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);

 const line = try handle.document.getLine(@intCast(usize, position.line));
 var tokenizer = std.zig.Tokenizer.init(line[range.start..range.end]);

-if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| {
-try nodeToCompletion(&completions, &analysis_ctx, handle, node, config);
+const pos_index = try handle.document.positionToIndex(position);
+if (try analysis.getFieldAccessTypeNode(&document_store, &arena, handle, pos_index, &tokenizer)) |node| {
+try nodeToCompletion(&arena, &completions, node, handle, config);
 }

 try send(types.Response{
 .id = id,
 .result = .{
@@ -808,7 +910,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
 }
 // Semantic highlighting
 else if (std.mem.eql(u8, method, "textDocument/semanticTokens")) {
-// @TODO Implement this (we dont get here from vscode atm even when we get the client capab.)
+// TODO Implement this (we dont get here from vscode atm even when we get the client capab.)
 return try respondGeneric(id, empty_array_response);
 }
 // Autocomplete / Signatures
@@ -1085,13 +1187,13 @@ pub fn main() anyerror!void {
 }

 if (config.build_runner_path) |build_runner_path| {
-try document_store.init(allocator, zig_exe_path, try std.mem.dupe(allocator, u8, build_runner_path));
+try document_store.init(allocator, zig_exe_path, try std.mem.dupe(allocator, u8, build_runner_path), config.zig_lib_path);
 } else {
 var exe_dir_bytes: [std.fs.MAX_PATH_BYTES]u8 = undefined;
 const exe_dir_path = try std.fs.selfExeDirPath(&exe_dir_bytes);

 const build_runner_path = try std.fs.path.resolve(allocator, &[_][]const u8{ exe_dir_path, "build_runner.zig" });
-try document_store.init(allocator, zig_exe_path, build_runner_path);
+try document_store.init(allocator, zig_exe_path, build_runner_path, config.zig_lib_path);
 }

 defer document_store.deinit();
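On the src/main.zig side the per-request AnalysisContext disappears as well: completion now walks symbols through analysis.iterateSymbolsGlobal and analysis.iterateSymbolsContainer, which call declToCompletion with a small context struct for every visible declaration (AST nodes, parameters and payload captures alike). A condensed sketch of that pattern, assuming document_store, handle, pos_index, config and allocator are in scope exactly as they are in completeGlobal above:

    // Sketch only; condensed from the new completeGlobal body.
    var arena = std.heap.ArenaAllocator.init(allocator);
    defer arena.deinit();

    var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
    const context = DeclToCompletionContext{
        .completions = &completions,
        .config = &config,
        .arena = &arena,
        .orig_handle = handle,
    };
    // Every declaration visible at pos_index is forwarded to declToCompletion,
    // which appends an appropriate types.CompletionItem for it.
    try analysis.iterateSymbolsGlobal(&document_store, &arena, handle, pos_index, declToCompletion, context);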