Improve refreshDocument algorithm

Do not use an arena, orderedRemove, or a bool array.

Also, rudimentary tests suggest the config parser does not account for
a substantial portion of the compile time.
This commit is contained in:
Jonathan Hähne 2021-03-29 11:21:39 +02:00
parent 962327425d
commit 9a2695ecdb
2 changed files with 27 additions and 42 deletions

View File

@@ -360,52 +360,39 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
handle.document_scope.deinit(self.allocator); handle.document_scope.deinit(self.allocator);
handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree); handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
// TODO: Better algorithm or data structure? var new_imports = std.ArrayList([]const u8).init(self.allocator);
// Removing the imports is costly since they live in an array list errdefer new_imports.deinit();
// Perhaps we should use an AutoHashMap([]const u8, {}) ? try analysis.collectImports(&new_imports, handle.tree);
// Try to detect removed imports and decrement their counts. // Convert to URIs
if (handle.import_uris.items.len == 0) return; var i: usize = 0;
while (i < new_imports.items.len) {
var arena = std.heap.ArenaAllocator.init(self.allocator); if (try self.uriFromImportStr(self.allocator, handle.*, new_imports.items[i])) |uri| {
defer arena.deinit(); // The raw import strings are owned by the document and do not need to be freed here.
new_imports.items[i] = uri;
var import_strs = std.ArrayList([]const u8).init(&arena.allocator); i += 1;
try analysis.collectImports(&import_strs, handle.tree); } else {
_ = new_imports.swapRemove(i);
const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len); }
for (still_exist) |*ex| {
ex.* = false;
} }
const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path); const old_imports = handle.import_uris;
for (import_strs.items) |str| { handle.import_uris = new_imports;
const uri = (try self.uriFromImportStr(&arena.allocator, handle.*, str)) orelse continue; defer old_imports.deinit();
exists_loop: for (still_exist) |*does_still_exist, i| { // Remove all old_imports that do not exist anymore
if (does_still_exist.*) continue; for (old_imports.items) |old| {
still_exists: {
if (std.mem.eql(u8, handle.import_uris.items[i], uri)) { for (new_imports) |new| {
does_still_exist.* = true; if (std.mem.eql(u8, new, old)) {
break :exists_loop; break :still_exists;
}
} }
log.debug("Import removed: {s}", .{old});
self.decrementCount(uri);
self.allocator.free(uri);
} }
} }
// Go through still_exist, remove the items that are false and decrement their handle counts.
var idx: usize = 0;
for (still_exist) |does_still_exist| {
if (does_still_exist) {
idx += 1;
continue;
}
log.debug("Import removed: {s}", .{handle.import_uris.items[idx]});
const uri = handle.import_uris.orderedRemove(idx);
self.decrementCount(uri);
self.allocator.free(uri);
}
} }
pub fn applySave(self: *DocumentStore, handle: *Handle) !void { pub fn applySave(self: *DocumentStore, handle: *Handle) !void {

View File

@@ -1213,8 +1213,6 @@ fn loadConfig(folder_path: []const u8) ?Config {
}; };
defer allocator.free(file_buf); defer allocator.free(file_buf);
// TODO: Uh oh. Profile the actual build time impact
// of adding config options and consider alternatives (TOML?)
@setEvalBranchQuota(2000); @setEvalBranchQuota(2000);
// TODO: Better errors? Doesn't seem like std.json can provide us positions or context. // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| { var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| {