From 9a2695ecdb89de21e4ddf34e24a5bc440285edef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonathan=20H=C3=A4hne?=
Date: Mon, 29 Mar 2021 11:21:39 +0200
Subject: [PATCH] Improve refreshDocument algorithm

Do not use an arena, orderedRemove or bool array. Also, rudimentary
tests suggest the config parser does not account for a substantial
amount of the compile time.
---
 src/document_store.zig | 67 +++++++++++++++++------------------------
 src/main.zig           |  2 --
 2 files changed, 27 insertions(+), 42 deletions(-)

diff --git a/src/document_store.zig b/src/document_store.zig
index f563bb1..756a732 100644
--- a/src/document_store.zig
+++ b/src/document_store.zig
@@ -359,53 +359,40 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
 
     handle.document_scope.deinit(self.allocator);
     handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
+
+    var new_imports = std.ArrayList([]const u8).init(self.allocator);
+    errdefer new_imports.deinit();
+    try analysis.collectImports(&new_imports, handle.tree);
 
-    // TODO: Better algorithm or data structure?
-    // Removing the imports is costly since they live in an array list
-    // Perhaps we should use an AutoHashMap([]const u8, {}) ?
-
-    // Try to detect removed imports and decrement their counts.
-    if (handle.import_uris.items.len == 0) return;
-
-    var arena = std.heap.ArenaAllocator.init(self.allocator);
-    defer arena.deinit();
-
-    var import_strs = std.ArrayList([]const u8).init(&arena.allocator);
-    try analysis.collectImports(&import_strs, handle.tree);
-
-    const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len);
-    for (still_exist) |*ex| {
-        ex.* = false;
+    // Convert the import strings to URIs, dropping any import that does not resolve.
+    var i: usize = 0;
+    while (i < new_imports.items.len) {
+        if (try self.uriFromImportStr(self.allocator, handle.*, new_imports.items[i])) |uri| {
+            // The raw import strings are owned by the document and do not need to be freed here.
+            new_imports.items[i] = uri;
+            i += 1;
+        } else {
+            _ = new_imports.swapRemove(i);
+        }
     }
 
-    const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
-    for (import_strs.items) |str| {
-        const uri = (try self.uriFromImportStr(&arena.allocator, handle.*, str)) orelse continue;
+    const old_imports = handle.import_uris;
+    handle.import_uris = new_imports;
+    defer old_imports.deinit();
 
-        exists_loop: for (still_exist) |*does_still_exist, i| {
-            if (does_still_exist.*) continue;
-
-            if (std.mem.eql(u8, handle.import_uris.items[i], uri)) {
-                does_still_exist.* = true;
-                break :exists_loop;
+    // Decrement the count of imports that no longer exist, then free all old URIs.
+    for (old_imports.items) |old| {
+        still_exists: {
+            for (new_imports.items) |new| {
+                if (std.mem.eql(u8, new, old)) {
+                    break :still_exists;
+                }
             }
+            log.debug("Import removed: {s}", .{old});
+            self.decrementCount(old);
         }
+        self.allocator.free(old);
     }
-
-    // Go through still_exist, remove the items that are false and decrement their handle counts.
-    var idx: usize = 0;
-    for (still_exist) |does_still_exist| {
-        if (does_still_exist) {
-            idx += 1;
-            continue;
-        }
-
-        log.debug("Import removed: {s}", .{handle.import_uris.items[idx]});
-        const uri = handle.import_uris.orderedRemove(idx);
-
-        self.decrementCount(uri);
-        self.allocator.free(uri);
-    }
 }
 
 pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
diff --git a/src/main.zig b/src/main.zig
index 99cdcdd..c0e8df0 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1213,8 +1213,6 @@ fn loadConfig(folder_path: []const u8) ?Config {
     };
     defer allocator.free(file_buf);
 
-    // TODO: Uh oh. Profile the actual build time impact
-    // of adding config options and consider alternatives (TOML?)
     @setEvalBranchQuota(2000);
     // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
     var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| {
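
Note for reviewers, not part of the commit: a minimal self-contained sketch of
the comparison step the new refreshDocument relies on. The names here
(old_uris, new_uris, contains) are made up for the example and are not part of
the DocumentStore API.

const std = @import("std");

// Linear membership check, the same inner loop the patch uses.
fn contains(haystack: []const []const u8, needle: []const u8) bool {
    for (haystack) |item| {
        if (std.mem.eql(u8, item, needle)) return true;
    }
    return false;
}

pub fn main() void {
    const old_uris = [_][]const u8{ "file:///a.zig", "file:///b.zig" };
    const new_uris = [_][]const u8{ "file:///a.zig", "file:///c.zig" };

    // Equivalent of the still_exists loop: any URI from the old list that is
    // missing from the new one is reported as removed; in the patch it gets
    // decrementCount and its string is freed.
    for (old_uris) |old| {
        if (!contains(&new_uris, old)) {
            std.debug.print("Import removed: {s}\n", .{old});
        }
    }
}

Membership is still a linear scan, so a refresh is O(old * new) comparisons;
the hash-map idea from the removed TODO would presumably only matter for files
with a very large number of imports.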