Merge pull request #79 from alexnask/new_parser
Store trees in handles, reparse on text changes.
commit 075c364767
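At a glance, the change moves AST ownership out of AnalysisContext and into each document Handle: the store parses once when a document is opened, reparses whenever the text changes, and consumers borrow the stored tree instead of parsing throwaway copies. Below is a minimal sketch of that shape, not the exact zls code; the Document stand-in and the refresh name are illustrative, while the std.zig.parse / Tree.deinit calls mirror the ones used in the diff.

    const std = @import("std");

    // Simplified stand-in for the real zls document type; illustrative only.
    const Document = struct { text: []const u8 };

    const Handle = struct {
        document: Document,
        tree: *std.zig.ast.Tree, // owned, parsed copy of `document.text`

        /// Reparse after the text changed, releasing the old tree first.
        fn refresh(self: *Handle, allocator: *std.mem.Allocator) !void {
            self.tree.deinit();
            self.tree = try std.zig.parse(allocator, self.document.text);
        }
    };

    const AnalysisContext = struct {
        handle: *Handle,

        /// The context no longer owns a tree; it always reads the handle's.
        pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
            return self.handle.tree;
        }
    };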
@@ -262,10 +262,10 @@ fn findReturnStatement(tree: *ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.C
 /// Resolves the return type of a function
 fn resolveReturnType(analysis_ctx: *AnalysisContext, fn_decl: *ast.Node.FnProto) ?*ast.Node {
-    if (isTypeFunction(analysis_ctx.tree, fn_decl) and fn_decl.body_node != null) {
+    if (isTypeFunction(analysis_ctx.tree(), fn_decl) and fn_decl.body_node != null) {
         // If this is a type function and it only contains a single return statement that returns
         // a container declaration, we will return that declaration.
-        const ret = findReturnStatement(analysis_ctx.tree, fn_decl) orelse return null;
+        const ret = findReturnStatement(analysis_ctx.tree(), fn_decl) orelse return null;
         if (ret.rhs) |rhs|
             if (resolveTypeOfNode(analysis_ctx, rhs)) |res_rhs| switch (res_rhs.id) {
                 .ContainerDecl => {
@@ -293,7 +293,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             return resolveTypeOfNode(analysis_ctx, vari.type_node orelse vari.init_node.?) orelse null;
         },
         .Identifier => {
-            if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, analysis_ctx.tree.getNodeSource(node))) |child| {
+            if (getChildOfSlice(analysis_ctx.tree(), analysis_ctx.scope_nodes, analysis_ctx.tree().getNodeSource(node))) |child| {
                 return resolveTypeOfNode(analysis_ctx, child);
             } else return null;
         },
@@ -323,11 +323,11 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                 .Period => {
                     // Save the child string from this tree since the tree may switch when processing
                     // an import lhs.
-                    var rhs_str = nodeToString(analysis_ctx.tree, infix_op.rhs) orelse return null;
+                    var rhs_str = nodeToString(analysis_ctx.tree(), infix_op.rhs) orelse return null;
                     // Use the analysis context temporary arena to store the rhs string.
                     rhs_str = std.mem.dupe(&analysis_ctx.arena.allocator, u8, rhs_str) catch return null;
                     const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null;
-                    const child = getChild(analysis_ctx.tree, left, rhs_str) orelse return null;
+                    const child = getChild(analysis_ctx.tree(), left, rhs_str) orelse return null;
                     return resolveTypeOfNode(analysis_ctx, child);
                 },
                 else => {},
@@ -338,7 +338,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             switch (prefix_op.op) {
                 .SliceType, .ArrayType => return node,
                 .PtrType => {
-                    const op_token_id = analysis_ctx.tree.token_ids[prefix_op.op_token];
+                    const op_token_id = analysis_ctx.tree().token_ids[prefix_op.op_token];
                     switch (op_token_id) {
                         .Asterisk => return resolveTypeOfNode(analysis_ctx, prefix_op.rhs),
                         .LBracket, .AsteriskAsterisk => return null,
@@ -361,7 +361,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
         },
         .BuiltinCall => {
             const builtin_call = node.cast(ast.Node.BuiltinCall).?;
-            const call_name = analysis_ctx.tree.tokenSlice(builtin_call.builtin_token);
+            const call_name = analysis_ctx.tree().tokenSlice(builtin_call.builtin_token);
             if (std.mem.eql(u8, call_name, "@This")) {
                 if (builtin_call.params_len != 0) return null;
                 return analysis_ctx.in_container;
@@ -373,7 +373,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             const import_param = builtin_call.paramsConst()[0];
             if (import_param.id != .StringLiteral) return null;

-            const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
+            const import_str = analysis_ctx.tree().tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
             return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: {
                 std.debug.warn("Error {} while processing import {}\n", .{ err, import_str });
                 break :block null;
@@ -442,7 +442,7 @@ pub fn getFieldAccessTypeNode(
         switch (next.id) {
             .Eof => return current_node,
             .Identifier => {
-                if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, tokenizer.buffer[next.loc.start..next.loc.end])) |child| {
+                if (getChildOfSlice(analysis_ctx.tree(), analysis_ctx.scope_nodes, tokenizer.buffer[next.loc.start..next.loc.end])) |child| {
                     if (resolveTypeOfNode(analysis_ctx, child)) |node_type| {
                         current_node = node_type;
                     } else return null;
@@ -456,7 +456,7 @@ pub fn getFieldAccessTypeNode(
                 // TODO: This works for now, maybe we should filter based on the partial identifier ourselves?
                 if (after_period.loc.end == line_length) return current_node;

-                if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end])) |child| {
+                if (getChild(analysis_ctx.tree(), current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end])) |child| {
                     if (resolveTypeOfNode(analysis_ctx, child)) |child_type| {
                         current_node = child_type;
                     } else return null;
@@ -9,15 +9,11 @@ pub const Handle = struct {
     document: types.TextDocument,
     count: usize,
     import_uris: std.ArrayList([]const u8),
+    tree: *std.zig.ast.Tree,

     pub fn uri(handle: Handle) []const u8 {
         return handle.document.uri;
     }

-    /// Returns a zig AST, with all its errors.
-    pub fn tree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree {
-        return try std.zig.parse(allocator, handle.document.text);
-    }
 };

 allocator: *std.mem.Allocator,
@@ -45,9 +41,11 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
             .text = text,
             .mem = text,
         },
+        .tree = try std.zig.parse(self.allocator, text),
     };
-    const kv = try self.handles.getOrPutValue(uri, handle);
-    return kv.value;
+    try self.handles.putNoClobber(uri, handle);
+    return handle;
 }

 pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle {
@@ -73,6 +71,8 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
             return;

         std.debug.warn("Freeing document: {}\n", .{uri});

+        entry.value.tree.deinit();
         self.allocator.free(entry.value.document.mem);

         for (entry.value.import_uris.items) |import_uri| {
@@ -102,8 +102,11 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 }

 // Check if the document text is now sane, move it to sane_text if so.
-fn removeOldImports(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void {
+fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void {
     std.debug.warn("New text for document {}\n", .{handle.uri()});
+    handle.tree.deinit();
+    handle.tree = try std.zig.parse(self.allocator, handle.document.text);

     // TODO: Better algorithm or data structure?
     // Removing the imports is costly since they live in an array list
     // Perhaps we should use an AutoHashMap([]const u8, {}) ?
@@ -111,14 +114,11 @@ fn removeOldImports(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]cons
     // Try to detect removed imports and decrement their counts.
     if (handle.import_uris.items.len == 0) return;

-    const tree = try handle.tree(self.allocator);
-    defer tree.deinit();
-
     var arena = std.heap.ArenaAllocator.init(self.allocator);
     defer arena.deinit();

     var import_strs = std.ArrayList([]const u8).init(&arena.allocator);
-    try analysis.collectImports(&import_strs, tree);
+    try analysis.collectImports(&import_strs, handle.tree);

     const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len);
     for (still_exist) |*ex| {
@@ -150,7 +150,7 @@ fn removeOldImports(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]cons
         const uri = handle.import_uris.orderedRemove(idx - offset);
         offset += 1;

-        self.closeDocument(uri);
+        self.decrementCount(uri);
         self.allocator.free(uri);
     }
 }
@@ -213,7 +213,7 @@ pub fn applyChanges(
         }
     }

-    try self.removeOldImports(handle, zig_lib_path);
+    try self.refreshDocument(handle, zig_lib_path);
 }

 pub fn uriFromImportStr(
@@ -254,16 +254,19 @@ pub const AnalysisContext = struct {
     // This arena is used for temporary allocations while analyzing,
     // not for the tree allocations.
     arena: *std.heap.ArenaAllocator,
-    tree: *std.zig.ast.Tree,
     scope_nodes: []*std.zig.ast.Node,
     in_container: *std.zig.ast.Node,
     std_uri: ?[]const u8,

+    pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
+        return self.handle.tree;
+    }

     fn refreshScopeNodes(self: *AnalysisContext) !void {
         var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&self.arena.allocator);
-        try analysis.addChildrenNodes(&scope_nodes, self.tree, &self.tree.root_node.base);
+        try analysis.addChildrenNodes(&scope_nodes, self.tree(), &self.tree().root_node.base);
         self.scope_nodes = scope_nodes.items;
-        self.in_container = &self.tree.root_node.base;
+        self.in_container = &self.tree().root_node.base;
     }

     pub fn onContainer(self: *AnalysisContext, container: *std.zig.ast.Node.ContainerDecl) !void {
@@ -271,7 +274,7 @@ pub const AnalysisContext = struct {
             self.in_container = &container.base;

             var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&self.arena.allocator);
-            try analysis.addChildrenNodes(&scope_nodes, self.tree, &container.base);
+            try analysis.addChildrenNodes(&scope_nodes, self.tree(), &container.base);
             self.scope_nodes = scope_nodes.items;
         }
     }
@@ -295,11 +298,8 @@ pub const AnalysisContext = struct {
             // If we did, set our new handle and return the parsed tree root node.
             if (std.mem.eql(u8, uri, final_uri)) {
                 self.handle = self.store.getHandle(final_uri) orelse return null;
-
-                self.tree.deinit();
-                self.tree = try self.handle.tree(allocator);
                 try self.refreshScopeNodes();
-                return &self.tree.root_node.base;
+                return &self.tree().root_node.base;
             }
         }

@@ -309,11 +309,8 @@ pub const AnalysisContext = struct {
             // If it is, increment the count, set our new handle and return the parsed tree root node.
             new_handle.count += 1;
             self.handle = new_handle;
-
-            self.tree.deinit();
-            self.tree = try self.handle.tree(allocator);
             try self.refreshScopeNodes();
-            return &self.tree.root_node.base;
+            return &self.tree().root_node.base;
         }

         // New document, read the file then call into openDocument.
@@ -341,38 +338,27 @@ pub const AnalysisContext = struct {
                 try self.handle.import_uris.append(final_uri);
                 consumed_final_uri = true;

-                // Swap handles and get new tree.
+                // Swap handles.
                 // This takes ownership of the passed uri and text.
                 const duped_final_uri = try std.mem.dupe(allocator, u8, final_uri);
                 errdefer allocator.free(duped_final_uri);
                 self.handle = try newDocument(self.store, duped_final_uri, file_contents);
             }

-        // Free old tree, add new one if it exists.
-        // If we return null, no one should access the tree.
-        self.tree.deinit();
-        self.tree = try self.handle.tree(allocator);
         try self.refreshScopeNodes();
-        return &self.tree.root_node.base;
+        return &self.tree().root_node.base;
     }

-    pub fn clone(self: *AnalysisContext) !AnalysisContext {
-        // Create a new tree so it can be destroyed by the cloned AnalysisContext without affecting the original
-        const tree = try self.handle.tree(self.store.allocator);
+    pub fn clone(self: *AnalysisContext) AnalysisContext {
         return AnalysisContext{
             .store = self.store,
             .handle = self.handle,
             .arena = self.arena,
-            .tree = tree,
             .scope_nodes = self.scope_nodes,
             .in_container = self.in_container,
             .std_uri = self.std_uri,
         };
     }
-
-    pub fn deinit(self: *AnalysisContext) void {
-        self.tree.deinit();
-    }
 };

 pub fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
@@ -399,17 +385,14 @@ pub fn analysisContext(
     position: usize,
     zig_lib_path: ?[]const u8,
 ) !AnalysisContext {
-    const tree = try handle.tree(self.allocator);
-
     var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
-    const in_container = try analysis.declsFromIndex(arena, &scope_nodes, tree, position);
+    const in_container = try analysis.declsFromIndex(arena, &scope_nodes, handle.tree, position);

     const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
     return AnalysisContext{
         .store = self,
         .handle = handle,
         .arena = arena,
-        .tree = tree,
         .scope_nodes = scope_nodes.items,
         .in_container = in_container,
         .std_uri = std_uri,
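The src/main.zig hunks below apply the same mechanical rewrite at every call site: code that used to parse a throwaway tree from a handle (with a matching defer tree.deinit()) or read the context's tree field now borrows handle.tree or calls analysis_ctx.tree(), with no per-call cleanup. A hedged sketch of the call-site pattern follows; the helper name and name_token parameter are illustrative, not taken from the repository.

    const std = @import("std");

    // After the change, a call site borrows the stored tree instead of parsing its own.
    // At the real call sites, `tree` is `analysis_ctx.tree()` or `handle.tree`.
    fn labelFor(tree: *std.zig.ast.Tree, name_token: std.zig.ast.TokenIndex) []const u8 {
        return tree.tokenSlice(name_token);
    }

    // Before the change, the equivalent code had to own and free a fresh parse:
    //     const tree = try handle.tree(allocator);
    //     defer tree.deinit();
    //     const label = tree.tokenSlice(name_token);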
src/main.zig (45 changed lines)
@@ -91,8 +91,7 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range {
 }

 fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
-    const tree = try handle.tree(allocator);
-    defer tree.deinit();
+    const tree = handle.tree;

     // Use an arena for our local memory allocations.
     var arena = std.heap.ArenaAllocator.init(allocator);
@@ -179,7 +178,7 @@ fn containerToCompletion(
     var child_idx: usize = 0;
     while (container.iterate(child_idx)) |child_node| : (child_idx += 1) {
         // Declarations in the same file do not need to be public.
-        if (orig_handle == analysis_ctx.handle or analysis.isNodePublic(analysis_ctx.tree, child_node)) {
+        if (orig_handle == analysis_ctx.handle or analysis.isNodePublic(analysis_ctx.tree(), child_node)) {
             try nodeToCompletion(list, analysis_ctx, orig_handle, child_node, config);
         }
     }
@@ -192,7 +191,7 @@ fn nodeToCompletion(
     node: *std.zig.ast.Node,
     config: Config,
 ) error{OutOfMemory}!void {
-    var doc = if (try analysis.getDocComments(list.allocator, analysis_ctx.tree, node)) |doc_comments|
+    var doc = if (try analysis.getDocComments(list.allocator, analysis_ctx.tree(), node)) |doc_comments|
         types.MarkupContent{
             .kind = .Markdown,
             .value = doc_comments,
@@ -208,17 +207,17 @@ fn nodeToCompletion(
             const func = node.cast(std.zig.ast.Node.FnProto).?;
             if (func.name_token) |name_token| {
                 const insert_text = if (config.enable_snippets)
-                    try analysis.getFunctionSnippet(list.allocator, analysis_ctx.tree, func)
+                    try analysis.getFunctionSnippet(list.allocator, analysis_ctx.tree(), func)
                 else
                     null;

-                const is_type_function = analysis.isTypeFunction(analysis_ctx.tree, func);
+                const is_type_function = analysis.isTypeFunction(analysis_ctx.tree(), func);

                 try list.append(.{
-                    .label = analysis_ctx.tree.tokenSlice(name_token),
+                    .label = analysis_ctx.tree().tokenSlice(name_token),
                     .kind = if (is_type_function) .Struct else .Function,
                     .documentation = doc,
-                    .detail = analysis.getFunctionSignature(analysis_ctx.tree, func),
+                    .detail = analysis.getFunctionSignature(analysis_ctx.tree(), func),
                     .insertText = insert_text,
                     .insertTextFormat = if (config.enable_snippets) .Snippet else .PlainText,
                 });
@@ -226,14 +225,13 @@ fn nodeToCompletion(
         },
         .VarDecl => {
             const var_decl = node.cast(std.zig.ast.Node.VarDecl).?;
-            const is_const = analysis_ctx.tree.token_ids[var_decl.mut_token] == .Keyword_const;
+            const is_const = analysis_ctx.tree().token_ids[var_decl.mut_token] == .Keyword_const;

-            var child_analysis_context = try analysis_ctx.clone();
-            defer child_analysis_context.deinit();
+            var child_analysis_context = analysis_ctx.clone();

             const child_node = block: {
                 if (var_decl.type_node) |type_node| {
-                    if (std.mem.eql(u8, "type", analysis_ctx.tree.tokenSlice(type_node.firstToken()))) {
+                    if (std.mem.eql(u8, "type", analysis_ctx.tree().tokenSlice(type_node.firstToken()))) {
                         break :block var_decl.init_node orelse type_node;
                     }
                     break :block type_node;
@@ -250,10 +248,10 @@ fn nodeToCompletion(
                 }
             }
             try list.append(.{
-                .label = analysis_ctx.tree.tokenSlice(var_decl.name_token),
+                .label = analysis_ctx.tree().tokenSlice(var_decl.name_token),
                 .kind = if (is_const) .Constant else .Variable,
                 .documentation = doc,
-                .detail = analysis.getVariableSignature(analysis_ctx.tree, var_decl),
+                .detail = analysis.getVariableSignature(analysis_ctx.tree(), var_decl),
             });
         },
         .PrefixOp => {
@@ -272,7 +270,7 @@ fn nodeToCompletion(
                 .kind = .Field,
             });
         },
-        else => if (analysis.nodeToString(analysis_ctx.tree, node)) |string| {
+        else => if (analysis.nodeToString(analysis_ctx.tree(), node)) |string| {
             try list.append(.{
                 .label = string,
                 .kind = .Field,
@@ -302,8 +300,7 @@ fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []cons
 }

 fn gotoDefinitionGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle) !void {
-    var tree = try handle.tree(allocator);
-    defer tree.deinit();
+    const tree = handle.tree;

     const name = identifierFromPosition(pos_index, handle);
     if (name.len == 0) return try respondGeneric(id, null_result_response);
@@ -343,7 +340,6 @@ fn gotoDefinitionFieldAccess(
     defer arena.deinit();

     var analysis_ctx = try document_store.analysisContext(handle, &arena, try handle.document.positionToIndex(position), config.zig_lib_path);
-    defer analysis_ctx.deinit();

     const line = try handle.document.getLine(@intCast(usize, position.line));
     var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]);
@@ -352,14 +348,14 @@ fn gotoDefinitionFieldAccess(
     name = try std.mem.dupe(&arena.allocator, u8, name);

     if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer, line_length)) |container| {
-        const decl = analysis.getChild(analysis_ctx.tree, container, name) orelse return try respondGeneric(id, null_result_response);
-        const name_token = analysis.getDeclNameToken(analysis_ctx.tree, decl) orelse unreachable;
+        const decl = analysis.getChild(analysis_ctx.tree(), container, name) orelse return try respondGeneric(id, null_result_response);
+        const name_token = analysis.getDeclNameToken(analysis_ctx.tree(), decl) orelse unreachable;
         return try send(types.Response{
             .id = .{ .Integer = id },
             .result = .{
                 .Location = .{
                     .uri = analysis_ctx.handle.document.uri,
-                    .range = astLocationToRange(analysis_ctx.tree.tokenLocation(0, name_token)),
+                    .range = astLocationToRange(analysis_ctx.tree().tokenLocation(0, name_token)),
                 },
             },
         });
@@ -369,8 +365,7 @@ fn gotoDefinitionFieldAccess(
 }

 fn gotoDefinitionString(id: i64, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void {
-    var tree = try handle.tree(allocator);
-    defer tree.deinit();
+    const tree = handle.tree;

     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
@@ -405,8 +400,6 @@ fn completeGlobal(id: i64, pos_index: usize, handle: *DocumentStore.Handle, conf
     defer arena.deinit();

     var analysis_ctx = try document_store.analysisContext(handle, &arena, pos_index, config.zig_lib_path);
-    defer analysis_ctx.deinit();
-
     for (analysis_ctx.scope_nodes) |decl_ptr| {
         var decl = decl_ptr.*;
         try nodeToCompletion(&completions, &analysis_ctx, handle, decl_ptr, config);
@@ -428,8 +421,6 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     defer arena.deinit();

     var analysis_ctx = try document_store.analysisContext(handle, &arena, try handle.document.positionToIndex(position), config.zig_lib_path);
-    defer analysis_ctx.deinit();
-
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);

     const line = try handle.document.getLine(@intCast(usize, position.line));