diff --git a/src/analysis.zig b/src/analysis.zig
index b3dcb8f..a9db2e0 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -344,10 +344,8 @@ fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr:
 
 /// Collects all imports we can find into a slice of import paths (without quotes).
 /// The import paths are valid as long as the tree is.
-pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const u8 {
+pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !void {
     // TODO: Currently only detects `const smth = @import("string literal")<.SometThing>;`
-    var arr = std.ArrayList([]const u8).init(allocator);
-
     var idx: usize = 0;
     while (tree.root_node.iterate(idx)) |decl| : (idx += 1) {
         if (decl.id != .VarDecl) continue;
@@ -357,7 +355,7 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const
         switch (var_decl.init_node.?.id) {
             .BuiltinCall => {
                 const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?;
-                try maybeCollectImport(tree, builtin_call, &arr);
+                try maybeCollectImport(tree, builtin_call, import_arr);
             },
             .InfixOp => {
                 const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?;
@@ -367,13 +365,11 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const
                     else => continue,
                 }
                 if (infix_op.lhs.id != .BuiltinCall) continue;
-                try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, &arr);
+                try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, import_arr);
             },
             else => {},
         }
     }
-
-    return arr.toOwnedSlice();
 }
 
 pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node {
diff --git a/src/document_store.zig b/src/document_store.zig
index c745e82..02dd85f 100644
--- a/src/document_store.zig
+++ b/src/document_store.zig
@@ -61,7 +61,6 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
             .mem = text,
         },
     };
-    try self.checkSanity(&handle);
     const kv = try self.handles.getOrPutValue(uri, handle);
     return &kv.value;
 }
@@ -117,10 +116,7 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 }
 
 // Check if the document text is now sane, move it to sane_text if so.
-fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
-    const tree = try handle.tree(self.allocator);
-    defer tree.deinit();
-
+fn removeOldImports(self: *DocumentStore, handle: *Handle) !void {
     std.debug.warn("New text for document {}\n", .{handle.uri()});
     // TODO: Better algorithm or data structure?
     // Removing the imports is costly since they live in an array list
@@ -129,19 +125,22 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
     // Try to detect removed imports and decrement their counts.
     if (handle.import_uris.items.len == 0) return;
 
-    const import_strs = try analysis.collectImports(self.allocator, tree);
-    defer self.allocator.free(import_strs);
+    const tree = try handle.tree(self.allocator);
+    defer tree.deinit();
 
-    const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len);
-    defer self.allocator.free(still_exist);
+    var arena = std.heap.ArenaAllocator.init(self.allocator);
+    defer arena.deinit();
+    var import_strs = std.ArrayList([]const u8).init(&arena.allocator);
+    try analysis.collectImports(&import_strs, tree);
+
+    const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len);
 
     for (still_exist) |*ex| {
         ex.* = false;
     }
 
-    for (import_strs) |str| {
-        const uri = (try uriFromImportStr(self, handle.*, str)) orelse continue;
-        defer self.allocator.free(uri);
+    for (import_strs.items) |str| {
+        const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str)) orelse continue;
 
         var idx: usize = 0;
         exists_loop: while (idx < still_exist.len) : (idx += 1) {
@@ -222,29 +221,29 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
         }
     }
 
-    try self.checkSanity(handle);
+    try self.removeOldImports(handle);
 }
 
-fn uriFromImportStr(store: *DocumentStore, handle: Handle, import_str: []const u8) !?[]const u8 {
+fn uriFromImportStr(store: *DocumentStore, allocator: *std.mem.Allocator, handle: Handle, import_str: []const u8) !?[]const u8 {
     return if (std.mem.eql(u8, import_str, "std"))
-        if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri)
+        if (store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri)
         else {
             std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
             return null;
         }
     else b: {
         // Find relative uri
-        const path = try URI.parse(store.allocator, handle.uri());
-        defer store.allocator.free(path);
+        const path = try URI.parse(allocator, handle.uri());
+        defer allocator.free(path);
 
         const dir_path = std.fs.path.dirname(path) orelse "";
-        const import_path = try std.fs.path.resolve(store.allocator, &[_][]const u8 {
+        const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 {
             dir_path, import_str
         });
-        defer store.allocator.free(import_path);
+        defer allocator.free(import_path);
 
-        break :b (try URI.fromPath(store.allocator, import_path));
+        break :b (try URI.fromPath(allocator, import_path));
     };
 }
@@ -265,7 +264,7 @@ pub const AnalysisContext = struct {
 
     pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
         const allocator = self.store.allocator;
-        const final_uri = (try uriFromImportStr(self.store, self.handle.*, import_str)) orelse return null;
+        const final_uri = (try uriFromImportStr(self.store, self.store.allocator, self.handle.*, import_str)) orelse return null;
 
         std.debug.warn("Import final URI: {}\n", .{final_uri});
         var consumed_final_uri = false;
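
Usage sketch (not part of the diff; added for review context). After this change the caller owns the import list and its allocator, and collectImports only appends into the list it is given, so a call site can back the list with an arena and release everything with a single deinit. The helper name logImports and its parameters below are illustrative, not from the codebase:

    const std = @import("std");
    const analysis = @import("analysis.zig");

    // Illustrative caller: collect the imports of an already-parsed tree and log them.
    fn logImports(allocator: *std.mem.Allocator, tree: *std.zig.ast.Tree) !void {
        // The caller picks the lifetime; an arena keeps cleanup to one deinit call.
        var arena = std.heap.ArenaAllocator.init(allocator);
        defer arena.deinit();

        var import_strs = std.ArrayList([]const u8).init(&arena.allocator);
        // collectImports now appends into the caller's list instead of returning an owned slice.
        try analysis.collectImports(&import_strs, tree);

        // The import strings are slices into the tree's source, valid as long as the tree is.
        for (import_strs.items) |import_str| {
            std.debug.warn("import: {}\n", .{import_str});
        }
    }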