From f5e4586c760093445888f11e51c36bf3861666ba Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 14:51:07 +0300 Subject: [PATCH] Detect import removal and decrement document reference count --- src/analysis.zig | 45 ++++++++++++++++++++ src/document_store.zig | 97 +++++++++++++++++++++++++++++++----------- 2 files changed, 116 insertions(+), 26 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index e1c39aa..ff58b41 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -257,6 +257,51 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. return null; } +fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: *std.ArrayList([]const u8)) !void { + if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return; + if (builtin_call.params.len > 1) return; + + const import_param = builtin_call.params.at(0).*; + if (import_param.id != .StringLiteral) return; + + const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + try arr.append(import_str[1 .. import_str.len - 1]); +} + +/// Collects all imports we can find into a slice of import paths (without quotes). +/// The import paths are valid as long as the tree is. 
+pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const u8 { + // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;` + var arr = std.ArrayList([]const u8).init(allocator); + + var idx: usize = 0; + while (tree.root_node.iterate(idx)) |decl| : (idx += 1) { + if (decl.id != .VarDecl) continue; + const var_decl = decl.cast(ast.Node.VarDecl).?; + if (var_decl.init_node == null) continue; + + switch(var_decl.init_node.?.id) { + .BuiltinCall => { + const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?; + try maybeCollectImport(tree, builtin_call, &arr); + }, + .InfixOp => { + const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?; + + switch(infix_op.op) { + .Period => {}, + else => continue, + } + if (infix_op.lhs.id != .BuiltinCall) continue; + try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, &arr); + }, + else => {}, + } + } + + return arr.toOwnedSlice(); +} + pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { var current_node = &analysis_ctx.tree.root_node.base; diff --git a/src/document_store.zig b/src/document_store.zig index 8eb9da9..af73839 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -1,6 +1,7 @@ const std = @import("std"); const types = @import("types.zig"); const URI = @import("uri.zig"); +const analysis = @import("analysis.zig"); const DocumentStore = @This(); @@ -151,6 +152,52 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { } handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text); + + // TODO: Better algorithm or data structure? + // Removing the imports is costly since they live in an array list + // Perhaps we should use an AutoHashMap([]const u8, {}) ? + + // Try to detect removed imports and decrement their counts. 
+ if (handle.import_uris.items.len == 0) return; + + const import_strs = try analysis.collectImports(self.allocator, dirty_tree); + defer self.allocator.free(import_strs); + + const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len); + defer self.allocator.free(still_exist); + + for (still_exist) |*ex| { + ex.* = false; + } + + for (import_strs) |str| { + const uri = (try uriFromImportStr(self, handle, str)) orelse continue; + defer self.allocator.free(uri); + + var idx: usize = 0; + exists_loop: while (idx < still_exist.len) : (idx += 1) { + if (still_exist[idx]) continue; + + if (std.mem.eql(u8, handle.import_uris.items[idx], uri)) { + still_exist[idx] = true; + break :exists_loop; + } + } + } + + // Go through still_exist, remove the items that are false and decrement their handle counts. + var offset: usize = 0; + var idx: usize = 0; + while (idx < still_exist.len) : (idx += 1) { + if (still_exist[idx]) continue; + + std.debug.warn("Import removed: {}\n", .{handle.import_uris.items[idx - offset]}); + const uri = handle.import_uris.orderedRemove(idx - offset); + offset += 1; + + self.closeDocument(uri); + self.allocator.free(uri); + } } pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void { @@ -209,11 +256,29 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. try self.checkSanity(handle); } -// @TODO: We only reduce the count upon closing, -// find a way to reduce it when removing imports. -// Perhaps on new sane text we can go through imports -// and remove those that are in the import_uris table -// but not in the file anymore. 
+fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 { + return if (std.mem.eql(u8, import_str, "std")) + if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri) + else { + std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); + return null; + } + else b: { + // Find relative uri + const path = try URI.parse(store.allocator, handle.uri()); + defer store.allocator.free(path); + + const dir_path = std.fs.path.dirname(path) orelse ""; + const import_path = try std.fs.path.resolve(store.allocator, &[_][]const u8 { + dir_path, import_str + }); + + defer store.allocator.free(import_path); + + break :b (try URI.fromPath(store.allocator, import_path)); + }; +} + pub const AnalysisContext = struct { store: *DocumentStore, handle: *Handle, @@ -224,27 +289,7 @@ pub const AnalysisContext = struct { pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; - - const final_uri = if (std.mem.eql(u8, import_str, "std")) - if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) - else { - std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); - return null; - } - else b: { - // Find relative uri - const path = try URI.parse(allocator, self.handle.uri()); - defer allocator.free(path); - - const dir_path = std.fs.path.dirname(path) orelse ""; - const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 { - dir_path, import_str - }); - - defer allocator.free(import_path); - - break :b (try URI.fromPath(allocator, import_path)); - }; + const final_uri = (try uriFromImportStr(self.store, self.handle, import_str)) orelse return null; std.debug.warn("Import final URI: {}\n", .{final_uri}); var consumed_final_uri = false;