More progress

Alexandros Naskos 2020-06-10 19:54:01 +03:00
parent 9b08d9e88a
commit d6609d918b
3 changed files with 157 additions and 188 deletions

View File

@@ -1,5 +1,5 @@
 const std = @import("std");
-const AnalysisContext = @import("document_store.zig").AnalysisContext;
+const DocumentStore = @import("document_store.zig");
 const ast = std.zig.ast;
 const types = @import("types.zig");
@@ -244,15 +244,14 @@ fn findReturnStatement(tree: *ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.C
 }

 /// Resolves the return type of a function
-fn resolveReturnType(analysis_ctx: *AnalysisContext, fn_decl: *ast.Node.FnProto) ?*ast.Node {
-    if (isTypeFunction(analysis_ctx.tree(), fn_decl) and fn_decl.body_node != null) {
+fn resolveReturnType(store: *DocumentStore, fn_decl: *ast.Node.FnProto, handle: *DocumentStore.Handle) !?NodeWithHandle {
+    if (isTypeFunction(handle.tree, fn_decl) and fn_decl.body_node != null) {
         // If this is a type function and it only contains a single return statement that returns
         // a container declaration, we will return that declaration.
-        const ret = findReturnStatement(analysis_ctx.tree(), fn_decl) orelse return null;
+        const ret = findReturnStatement(handle.tree, fn_decl) orelse return null;
         if (ret.rhs) |rhs|
-            if (resolveTypeOfNode(analysis_ctx, rhs)) |res_rhs| switch (res_rhs.id) {
+            if (try resolveTypeOfNode(store, .{ .node = rhs, .handle = handle })) |res_rhs| switch (res_rhs.node.id) {
                 .ContainerDecl => {
-                    analysis_ctx.onContainer(res_rhs) catch return null;
                     return res_rhs;
                 },
                 else => return null,
@@ -262,16 +261,16 @@ fn resolveReturnType(analysis_ctx: *AnalysisContext, fn_decl: *ast.Node.FnProto)
     }

     return switch (fn_decl.return_type) {
-        .Explicit, .InferErrorSet => |return_type| resolveTypeOfNode(analysis_ctx, return_type),
+        .Explicit, .InferErrorSet => |return_type| try resolveTypeOfNode(store, .{ .node = return_type, .handle = handle }),
         .Invalid => null,
     };
 }

 /// Resolves the child type of an optional type
-fn resolveUnwrapOptionalType(analysis_ctx: *AnalysisContext, opt: *ast.Node) ?*ast.Node {
-    if (opt.cast(ast.Node.PrefixOp)) |prefix_op| {
+fn resolveUnwrapOptionalType(store: *DocumentStore, opt: NodeWithHandle) !?NodeWithHandle {
+    if (opt.node.cast(ast.Node.PrefixOp)) |prefix_op| {
         if (prefix_op.op == .OptionalType) {
-            return resolveTypeOfNode(analysis_ctx, prefix_op.rhs);
+            return try resolveTypeOfNode(store, .{ .node = prefix_op.rhs, .handle = opt.handle });
         }
     }
@@ -279,12 +278,12 @@ fn resolveUnwrapOptionalType(analysis_ctx: *AnalysisContext, opt: *ast.Node) ?*a
 }

 /// Resolves the child type of a defer type
-fn resolveDerefType(analysis_ctx: *AnalysisContext, deref: *ast.Node) ?*ast.Node {
-    if (deref.cast(ast.Node.PrefixOp)) |pop| {
+fn resolveDerefType(store: *DocumentStore, deref: NodeWithHandle) !?NodeWithHandle {
+    if (deref.node.cast(ast.Node.PrefixOp)) |pop| {
         if (pop.op == .PtrType) {
-            const op_token_id = analysis_ctx.tree().token_ids[pop.op_token];
+            const op_token_id = deref.handle.tree.token_ids[pop.op_token];
             switch (op_token_id) {
-                .Asterisk => return resolveTypeOfNode(analysis_ctx, pop.rhs),
+                .Asterisk => return try resolveTypeOfNode(store, .{ .node = pop.rhs, .handle = deref.handle }),
                 .LBracket, .AsteriskAsterisk => return null,
                 else => unreachable,
             }
@@ -293,9 +292,9 @@ fn resolveDerefType(analysis_ctx: *AnalysisContext, deref: *ast.Node) ?*ast.Node
     return null;
 }

-fn makeSliceType(analysis_ctx: *AnalysisContext, child_type: *ast.Node) ?*ast.Node {
+fn makeSliceType(arena: *std.heap.ArenaAllocator, child_type: *ast.Node) ?*ast.Node {
     // TODO: Better values for fields, better way to do this?
-    var slice_type = analysis_ctx.arena.allocator.create(ast.Node.PrefixOp) catch return null;
+    var slice_type = arena.allocator.create(ast.Node.PrefixOp) catch return null;
     slice_type.* = .{
         .op_token = child_type.firstToken(),
         .op = .{
@@ -315,26 +314,27 @@ fn makeSliceType(analysis_ctx: *AnalysisContext, child_type: *ast.Node) ?*ast.No
 /// Resolves bracket access type (both slicing and array access)
 fn resolveBracketAccessType(
-    analysis_ctx: *AnalysisContext,
-    lhs: *ast.Node,
+    store: *DocumentStore,
+    lhs: NodeWithHandle,
+    arena: *std.heap.ArenaAllocator,
     rhs: enum { Single, Range },
-) ?*ast.Node {
-    if (lhs.cast(ast.Node.PrefixOp)) |pop| {
+) !?NodeWithHandle {
+    if (lhs.node.cast(ast.Node.PrefixOp)) |pop| {
         switch (pop.op) {
             .SliceType => {
-                if (rhs == .Single) return resolveTypeOfNode(analysis_ctx, pop.rhs);
+                if (rhs == .Single) return resolveTypeOfNode(store, .{ .node = pop.rhs, .handle = lhs.handle });
                 return lhs;
             },
             .ArrayType => {
-                if (rhs == .Single) return resolveTypeOfNode(analysis_ctx, pop.rhs);
-                return makeSliceType(analysis_ctx, pop.rhs);
+                if (rhs == .Single) return resolveTypeOfNode(store, .{ .node = pop.rhs, .handle = lhs.handle });
+                return NodeWithHandle{ .node = makeSliceType(arena, pop.rhs), .handle = lhs.handle };
             },
             .PtrType => {
                 if (pop.rhs.cast(std.zig.ast.Node.PrefixOp)) |child_pop| {
                     switch (child_pop.op) {
                         .ArrayType => {
                             if (rhs == .Single) {
-                                return resolveTypeOfNode(analysis_ctx, child_pop.rhs);
+                                return resolveTypeOfNode(store, .{ .node = child_pop.rhs, .handle = lhs.handle });
                             }
                             return lhs;
                         },
@@ -349,12 +349,13 @@ fn resolveBracketAccessType(
 }

 /// Called to remove one level of pointerness before a field access
-fn resolveFieldAccessLhsType(analysis_ctx: *AnalysisContext, lhs: *ast.Node) *ast.Node {
-    return resolveDerefType(analysis_ctx, lhs) orelse lhs;
+fn resolveFieldAccessLhsType(store: *DocumentStore, lhs: NodeWithHandle) !NodeWithHandle {
+    return resolveDerefType(store, lhs) orelse lhs;
 }

+// @TODO try errors
 /// Resolves the type of a node
-pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node {
+pub fn resolveTypeOfNode(store: *DocumentStore, node_handle: NodeWithHandle) !?NodeWithHandle {
     switch (node.id) {
         .VarDecl => {
             const vari = node.cast(ast.Node.VarDecl).?;
@@ -609,52 +610,50 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !
     }
 }

-fn checkForContainerAndResolveFieldAccessLhsType(analysis_ctx: *AnalysisContext, node: *ast.Node) *ast.Node {
-    const current_node = resolveFieldAccessLhsType(analysis_ctx, node);
-    if (current_node.id == .ContainerDecl or current_node.id == .Root) {
-        // TODO: Handle errors
-        analysis_ctx.onContainer(current_node) catch {};
-    }
-    return current_node;
-}
+pub const NodeWithHandle = struct {
+    node: *ast.Node,
+    handle: *DocumentStore.Handle,
+};

 pub fn getFieldAccessTypeNode(
-    analysis_ctx: *AnalysisContext,
+    store: *DocumentStore,
+    arena: *std.heap.ArenaAllocator,
+    handle: *DocumentStore.Handle,
     tokenizer: *std.zig.Tokenizer,
-) ?*ast.Node {
-    var current_node = analysis_ctx.in_container;
+) !?NodeWithHandle {
+    var current_node = NodeWithHandle{
+        .node = &handle.tree.root_node.base,
+        .handle = handle,
+    };

     while (true) {
         const tok = tokenizer.next();
         switch (tok.id) {
-            .Eof => return resolveFieldAccessLhsType(analysis_ctx, current_node),
+            .Eof => return try resolveFieldAccessLhsType(store, current_node),
             .Identifier => {
-                if (getChildOfSlice(analysis_ctx.tree(), analysis_ctx.scope_nodes, tokenizer.buffer[tok.loc.start..tok.loc.end])) |child| {
-                    if (resolveTypeOfNode(analysis_ctx, child)) |child_type| {
-                        current_node = child_type;
-                    } else return null;
+                if (try lookupSymbolGlobal(store, current_node.handle, tokenizer.buffer[tok.loc.start..tok.loc.end], tok.loc.start)) |child| {
+                    current_node = (try child.resolveType(store, arena)) orelse return null;
                 } else return null;
             },
             .Period => {
                 const after_period = tokenizer.next();
                 switch (after_period.id) {
-                    .Eof => return resolveFieldAccessLhsType(analysis_ctx, current_node),
+                    .Eof => return try resolveFieldAccessLhsType(store, current_node),
                     .Identifier => {
-                        if (after_period.loc.end == tokenizer.buffer.len) return resolveFieldAccessLhsType(analysis_ctx, current_node);
-                        current_node = checkForContainerAndResolveFieldAccessLhsType(analysis_ctx, current_node);
-                        if (getChild(analysis_ctx.tree(), current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end])) |child| {
-                            if (resolveTypeOfNode(analysis_ctx, child)) |child_type| {
-                                current_node = child_type;
-                            } else return null;
+                        if (after_period.loc.end == tokenizer.buffer.len) return try resolveFieldAccessLhsType(store, current_node);
+                        current_node = resolveFieldAccessLhsType(store, current_node);
+                        if (current_node.node.id != .ContainerDecl and current_node.node.id != .Root) {
+                            // @TODO Is this ok?
+                            return null;
+                        }
+                        if (lookupSymbolContainer(store, current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end], true)) |child| {
+                            current_node = (try child.resolveType(store, arena)) orelse return null;
                         } else return null;
                     },
                     .QuestionMark => {
-                        if (resolveUnwrapOptionalType(analysis_ctx, current_node)) |child_type| {
-                            current_node = child_type;
-                        } else return null;
+                        current_node = (try resolveUnwrapOptionalType(store, current_node)) orelse return null;
                     },
                     else => {
                         std.debug.warn("Unrecognized token {} after period.\n", .{after_period.id});
@@ -663,15 +662,13 @@ pub fn getFieldAccessTypeNode(
                 }
             },
             .PeriodAsterisk => {
-                if (resolveDerefType(analysis_ctx, current_node)) |child_type| {
-                    current_node = child_type;
-                } else return null;
+                current_node = (try resolveDerefType(store, current_node)) orelse return null;
             },
             .LParen => {
-                switch (current_node.id) {
+                switch (current_node.node.id) {
                     .FnProto => {
-                        const func = current_node.cast(ast.Node.FnProto).?;
-                        if (resolveReturnType(analysis_ctx, func)) |ret| {
+                        const func = current_node.node.cast(ast.Node.FnProto).?;
+                        if (try resolveReturnType(store, func, current_node.handle)) |ret| {
                             current_node = ret;
                             // Skip to the right paren
                             var paren_count: usize = 1;
@@ -704,26 +701,16 @@ pub fn getFieldAccessTypeNode(
                     }
                 } else return null;

-                if (resolveBracketAccessType(
-                    analysis_ctx,
-                    current_node,
-                    if (is_range) .Range else .Single,
-                )) |child_type| {
-                    current_node = child_type;
-                } else return null;
+                current_node = (try resolveBracketAccessType(store, current_node, arena, if (is_range) .Range else .Single)) orelse return null;
             },
             else => {
                 std.debug.warn("Unimplemented token: {}\n", .{tok.id});
                 return null;
             },
         }
-
-        if (current_node.id == .ContainerDecl or current_node.id == .Root) {
-            analysis_ctx.onContainer(current_node) catch return null;
-        }
     }

-    return resolveFieldAccessLhsType(analysis_ctx, current_node);
+    return try resolveFieldAccessLhsType(store, current_node);
 }

 pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool {
@@ -1069,8 +1056,6 @@ pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: *ast.Tree) ![]typ
     return symbols.items;
 }

-const DocumentStore = @import("document_store.zig");
-
 pub const Declaration = union(enum) {
     ast_node: *ast.Node,
     param_decl: *ast.Node.FnProto.ParamDecl,
@@ -1105,6 +1090,35 @@ pub const DeclWithHandle = struct {
             .switch_payload => |sp| tree.tokenLocation(0, sp.node.value_symbol.firstToken()),
         };
     }
+
+    fn resolveType(self: DeclWithHandle, store: *DocumentStore, arena: *std.heap.ArenaAllocator) !?NodeWithHandle {
+        // resolveTypeOfNode(store: *DocumentStore, node_handle: NodeWithHandle)
+        return switch (self.decl) {
+            .ast_node => |node| try resolveTypeOfNode(store, .{ .node = node, .handle = self.handle }),
+            .param_decl => |param_decl| switch (param_decl.param_type) {
+                .type_expr => |type_node| try resolveTypeOfNode(store, .{ .node = type_node, .handle = self.handle }),
+                else => null,
+            },
+            .pointer_payload => |pay| try resolveUnwrapOptionalType(
+                store,
+                try resolveTypeOfNode(store, .{
+                    .node = pay.condition,
+                    .handle = self.handle,
+                }) orelse return null,
+            ),
+            .array_payload => |pay| try resolveBracketAccessType(
+                store,
+                .{
+                    .node = pay.array_expr,
+                    .handle = self.handle,
+                },
+                arena,
+                .Single,
+            ),
+            // TODO Resolve switch payload types
+            .switch_payload => |pay| return null,
+        };
+    }
 };

 pub fn iterateSymbolsGlobal(
@@ -1157,7 +1171,9 @@ pub fn lookupSymbolGlobal(store: *DocumentStore, handle: *DocumentStore.Handle,
     return null;
 }

-pub fn lookupSymbolContainer(store: *DocumentScope, handle: *DocumentStore.Handle, container: *ast.Node, symbol: []const u8, accept_fields: bool) !?DeclWithHandle {
+pub fn lookupSymbolContainer(store: *DocumentScope, container_handle: NodeWithHandle, symbol: []const u8, accept_fields: bool) !?DeclWithHandle {
+    const container = container_handle.node;
+    const handle = container_handle.handle;
     std.debug.assert(container.id == .ContainerDecl or container.id == .Root);

     // Find the container scope.
     var maybe_container_scope: ?*Scope = null;
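
Note: the changes above replace the mutable AnalysisContext with NodeWithHandle, an AST node paired with the handle of the document that owns it, and thread that pair through the resolution helpers. A minimal caller-side sketch under that assumption; exampleResolve, its imports, and the logging are illustrative and not part of this commit:

    const std = @import("std");
    const analysis = @import("analysis.zig");
    const DocumentStore = @import("document_store.zig");

    // Hypothetical helper: resolve the type of a node that lives in `handle`,
    // using the new NodeWithHandle-based API from this diff.
    fn exampleResolve(store: *DocumentStore, handle: *DocumentStore.Handle, node: *std.zig.ast.Node) !void {
        const node_handle = analysis.NodeWithHandle{ .node = node, .handle = handle };
        if (try analysis.resolveTypeOfNode(store, node_handle)) |resolved| {
            // The result carries its own handle, so a type resolved in another
            // file stays tied to the document it was found in.
            std.debug.warn("resolved to a {} node\n", .{@tagName(resolved.node.id)});
        }
    }

The design point is that resolution no longer mutates a shared context (the old onContainer/in_container state); each intermediate result explicitly names the document it came from.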

View File

@@ -78,6 +78,7 @@ handles: std.StringHashMap(*Handle),
 zig_exe_path: ?[]const u8,
 build_files: std.ArrayListUnmanaged(*BuildFile),
 build_runner_path: []const u8,
+std_uri: ?[]const u8,
 error_completions: TagStore,
 enum_completions: TagStore,
@@ -87,12 +88,14 @@ pub fn init(
     allocator: *std.mem.Allocator,
     zig_exe_path: ?[]const u8,
     build_runner_path: []const u8,
+    zig_lib_path: ?[]const u8,
 ) !void {
     self.allocator = allocator;
     self.handles = std.StringHashMap(*Handle).init(allocator);
     self.zig_exe_path = zig_exe_path;
     self.build_files = .{};
     self.build_runner_path = build_runner_path;
+    self.std_uri = try stdUriFromLibPath(allocator, zig_lib_path);
     self.error_completions = TagStore.init(allocator);
     self.enum_completions = TagStore.init(allocator);
 }
@@ -393,7 +396,7 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
     const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
     for (import_strs.items) |str| {
-        const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str, std_uri)) orelse continue;
+        const uri = (try self.uriFromImportStr(&arena.allocator, handle.*, str)) orelse continue;

         var idx: usize = 0;
         exists_loop: while (idx < still_exist.len) : (idx += 1) {
@@ -496,14 +499,13 @@ pub fn applyChanges(
 }

 pub fn uriFromImportStr(
-    store: *DocumentStore,
+    self: *DocumentStore,
     allocator: *std.mem.Allocator,
     handle: Handle,
     import_str: []const u8,
-    std_uri: ?[]const u8,
 ) !?[]const u8 {
     if (std.mem.eql(u8, import_str, "std")) {
-        if (std_uri) |uri| return try std.mem.dupe(allocator, u8, uri) else {
+        if (self.std_uri) |uri| return try std.mem.dupe(allocator, u8, uri) else {
             std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
             return null;
         }
@@ -534,28 +536,12 @@ pub fn uriFromImportStr(
     }
 }

-pub const AnalysisContext = struct {
-    store: *DocumentStore,
-    handle: *Handle,
-    // This arena is used for temporary allocations while analyzing,
-    // not for the tree allocations.
-    arena: *std.heap.ArenaAllocator,
-    std_uri: ?[]const u8,
-    error_completions: *TagStore,
-    enum_completions: *TagStore,
-
-    pub fn tree(self: AnalysisContext) *std.zig.ast.Tree {
-        return self.handle.tree;
-    }
-
-    pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
-        const allocator = self.store.allocator;
-        const final_uri = (try uriFromImportStr(
-            self.store,
-            self.store.allocator,
-            self.handle.*,
+pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const u8) !?*Handle {
+    const allocator = self.allocator;
+    const final_uri = (try self.uriFromImportStr(
+        self.allocator,
+        handle.*,
         import_str,
-        self.std_uri,
     )) orelse return null;

     std.debug.warn("Import final URI: {}\n", .{final_uri});
@@ -563,25 +549,23 @@ pub const AnalysisContext = struct {
     defer if (!consumed_final_uri) allocator.free(final_uri);

     // Check if we already imported this.
-    for (self.handle.import_uris.items) |uri| {
+    for (handle.import_uris.items) |uri| {
         // If we did, set our new handle and return the parsed tree root node.
         if (std.mem.eql(u8, uri, final_uri)) {
-            self.handle = self.store.getHandle(final_uri) orelse return null;
-            return &self.tree().root_node.base;
+            return self.getHandle(final_uri);
         }
     }

     // New import.
     // Check if the import is already opened by others.
-    if (self.store.getHandle(final_uri)) |new_handle| {
+    if (self.getHandle(final_uri)) |new_handle| {
         // If it is, append it to our imports, increment the count, set our new handle
         // and return the parsed tree root node.
-        try self.handle.import_uris.append(final_uri);
+        try handle.import_uris.append(final_uri);
         consumed_final_uri = true;

         new_handle.count += 1;
-        self.handle = new_handle;
-        return &self.tree().root_node.base;
+        return new_handle;
     }

     // New document, read the file then call into openDocument.
@@ -606,21 +590,18 @@ pub const AnalysisContext = struct {
     };

     // Add to import table of current handle.
-    try self.handle.import_uris.append(final_uri);
+    try handle.import_uris.append(final_uri);
     consumed_final_uri = true;

     // Swap handles.
     // This takes ownership of the passed uri and text.
     const duped_final_uri = try std.mem.dupe(allocator, u8, final_uri);
     errdefer allocator.free(duped_final_uri);

-        self.handle = try newDocument(self.store, duped_final_uri, file_contents);
+    return try self.newDocument(duped_final_uri, file_contents);
 }
-    }
-
-    return &self.tree().root_node.base;
-    }
-};

-pub fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
+fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
     if (zig_lib_path) |zpath| {
         const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
             zpath, "./std/std.zig",
@@ -637,23 +618,6 @@ pub fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u
     return null;
 }

-pub fn analysisContext(
-    self: *DocumentStore,
-    handle: *Handle,
-    arena: *std.heap.ArenaAllocator,
-    zig_lib_path: ?[]const u8,
-) !AnalysisContext {
-    const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
-
-    return AnalysisContext{
-        .store = self,
-        .handle = handle,
-        .arena = arena,
-        .std_uri = std_uri,
-        .error_completions = &self.error_completions,
-        .enum_completions = &self.enum_completions,
-    };
-}
-
 pub fn deinit(self: *DocumentStore) void {
     var entry_iterator = self.handles.iterator();
     while (entry_iterator.next()) |entry| {
@@ -681,6 +645,10 @@ pub fn deinit(self: *DocumentStore) void {
         self.allocator.destroy(build_file);
     }

+    if (self.std_uri) |std_uri| {
+        self.allocator.free(std_uri);
+    }
+
     self.build_files.deinit(self.allocator);
     self.error_completions.deinit();
     self.enum_completions.deinit();
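
Note: DocumentStore now owns std_uri (computed once in init from the new zig_lib_path parameter and freed in deinit), and resolveImport replaces AnalysisContext.onImport by returning the imported document's handle instead of mutating a context. A rough caller-side sketch under those assumptions; exampleImport and the logging are illustrative, not part of the commit:

    const std = @import("std");
    const DocumentStore = @import("document_store.zig");

    // Hypothetical helper: resolve "std" from some already-open document.
    // init would have been called earlier with the zig lib path, e.g.
    // try store.init(allocator, zig_exe_path, build_runner_path, config.zig_lib_path);
    fn exampleImport(store: *DocumentStore, handle: *DocumentStore.Handle) !void {
        if (try store.resolveImport(handle, "std")) |std_handle| {
            // `count` is the handle refcount that resolveImport bumps in the diff above.
            std.debug.warn("std import resolved, refcount is now {}\n", .{std_handle.count});
        }
    }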

View File

@@ -247,9 +247,7 @@ fn resolveVarDeclFnAlias(analysis_ctx: *DocumentStore.AnalysisContext, decl: *st
 fn nodeToCompletion(
     list: *std.ArrayList(types.CompletionItem),
-    analysis_ctx: *DocumentStore.AnalysisContext,
-    orig_handle: *DocumentStore.Handle,
-    node: *std.zig.ast.Node,
+    node_handle: analysis.NodeWithHandle,
     config: Config,
 ) error{OutOfMemory}!void {
     const doc = if (try analysis.getDocComments(list.allocator, analysis_ctx.tree(), node)) |doc_comments|
@@ -519,7 +517,6 @@ fn gotoDefinitionString(id: types.RequestId, pos_index: usize, handle: *Document
         &arena.allocator,
         handle.*,
         import_str,
-        try DocumentStore.stdUriFromLibPath(&arena.allocator, config.zig_lib_path),
     )) orelse return try respondGeneric(id, null_result_response);

     try send(types.Response{
@@ -545,9 +542,7 @@ const DeclToCompletionContext = struct {
 fn decltoCompletion(context: DeclToCompletionContext, decl_handle: analysis.DeclWithHandle) !void {
     switch (decl_handle.decl.*) {
         .ast_node => |node| {
-            // @TODO Remove analysis context
-            var analysis_ctx = try document_store.analysisContext(decl_handle.handle, context.arena, context.config.zig_lib_path);
-            try nodeToCompletion(context.completions, &analysis_ctx, decl_handle.handle, node, context.config.*);
+            try nodeToCompletion(context.completions, .{ .node = node, .handle = decl_handle.handle }, context.config.*);
         },
         else => {},
         // @TODO The rest
@@ -568,16 +563,6 @@ fn completeGlobal(id: types.RequestId, pos_index: usize, handle: *DocumentStore.
     };
     try analysis.iterateSymbolsGlobal(&document_store, handle, pos_index, decltoCompletion, context);

-    // for (analysis_ctx.scope_nodes) |decl_ptr| {
-    //     var decl = decl_ptr.*;
-    //     if (decl.id == .Use) {
-    //         std.debug.warn("Found use!", .{});
-    //         continue;
-    //     }
-
-    //     try nodeToCompletion(&completions, &analysis_ctx, handle, decl_ptr, config);
-    // }
-
     try send(types.Response{
         .id = id,
         .result = .{
@@ -1115,13 +1100,13 @@ pub fn main() anyerror!void {
     }

     if (config.build_runner_path) |build_runner_path| {
-        try document_store.init(allocator, zig_exe_path, try std.mem.dupe(allocator, u8, build_runner_path));
+        try document_store.init(allocator, zig_exe_path, try std.mem.dupe(allocator, u8, build_runner_path), config.zig_lib_path);
     } else {
         var exe_dir_bytes: [std.fs.MAX_PATH_BYTES]u8 = undefined;
         const exe_dir_path = try std.fs.selfExeDirPath(&exe_dir_bytes);
         const build_runner_path = try std.fs.path.resolve(allocator, &[_][]const u8{ exe_dir_path, "build_runner.zig" });

-        try document_store.init(allocator, zig_exe_path, build_runner_path);
+        try document_store.init(allocator, zig_exe_path, build_runner_path, config.zig_lib_path);
     }
     defer document_store.deinit();