finish document store garbage collection

Techatrix 2022-10-09 01:50:03 +02:00
parent ce7afe03f2
commit e024a9ec38

@@ -152,7 +152,9 @@ pub fn closeDocument(self: *DocumentStore, uri: Uri) void {
         log.warn("Document already closed: {s}", .{uri});
     }
 
-    self.garbageCollection() catch {};
+    self.garbageCollectionImports() catch {};
+    self.garbageCollectionCImports() catch {};
+    self.garbageCollectionBuildFiles() catch {};
 }
 
 pub fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
@@ -178,9 +180,11 @@ pub fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
     handle.cimports.deinit(self.allocator);
     handle.cimports = new_cimports;
 
-    // TODO detect changes
-    // try self.ensureImportsProcessed(handle.*);
-    try self.ensureCImportsProcessed(handle.*);
+    try self.ensureDependenciesProcessed(handle.*);
+
+    // a include could have been removed but it would increase latency
+    // try self.garbageCollectionImports();
+    // try self.garbageCollectionCImports();
 }
 
 pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
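refreshDocument now routes re-analysis through a single `ensureDependenciesProcessed` call instead of invoking `ensureCImportsProcessed` directly, and deliberately skips garbage collection to keep refresh latency low. The body of `ensureDependenciesProcessed` is not part of the shown hunks; a plausible outline, assuming it simply composes the two ensure-passes named elsewhere in this commit (hypothetical, not the author's code):

fn ensureDependenciesProcessed(self: *DocumentStore, handle: Handle) error{OutOfMemory}!void {
    // Hypothetical composition: process import dependencies first,
    // then translate-c results; neither body is shown in this diff.
    try self.ensureImportsProcessed(handle);
    try self.ensureCImportsProcessed(handle);
}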
@@ -205,7 +209,7 @@ pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
 /// a directed edge.
 /// We can remove every document which cannot be reached from
 /// another document that is `open` (see `Handle.open`)
-fn garbageCollection(self: *DocumentStore) error{OutOfMemory}!void {
+fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
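The doc comment above describes the strategy: imports form the edges of a directed graph over documents, and any document not reachable from an `open` one can be freed. A minimal sketch of that mark phase, assuming `Uri` is an alias for `[]const u8` and borrowing the `queue`/`collectDependencies` names from the next hunk (illustrative, not the literal implementation):

// Flood-fill the import graph starting from every open document.
var reachable_handles = std.StringHashMapUnmanaged(void){};
defer reachable_handles.deinit(allocator);

var queue = std.ArrayListUnmanaged(Uri){};
defer queue.deinit(allocator);

// Seed the work queue with the roots: all open documents.
for (store.handles.values()) |handle| {
    if (!handle.open) continue;
    try queue.append(allocator, handle.uri);
}

// Pop until no new document is discovered.
while (queue.popOrNull()) |uri| {
    const gop = try reachable_handles.getOrPut(allocator, uri);
    if (gop.found_existing) continue; // already marked reachable
    const handle = store.handles.get(uri) orelse continue;
    // collectDependencies appends this handle's import URIs to `queue`.
    try collectDependencies(allocator, store, handle, &queue);
}

The sweep in the next hunk then deletes every handle missing from `reachable_handles`.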
@@ -238,24 +242,20 @@ fn garbageCollection(self: *DocumentStore) error{OutOfMemory}!void {
         try collectDependencies(self.allocator, self, handle, &queue);
     }
 
-    var unreachable_handles = std.ArrayListUnmanaged(Uri){};
-    defer unreachable_handles.deinit(self.allocator);
-
-    for (self.handles.values()) |handle| {
-        if (reachable_handles.contains(handle.uri)) continue;
-        try unreachable_handles.append(self.allocator, handle.uri);
-    }
-
-    for (unreachable_handles.items) |uri| {
-        std.log.debug("Closing document {s}", .{uri});
-        var kv = self.handles.fetchSwapRemove(uri) orelse continue;
-        kv.value.deinit(self.allocator);
-    }
-
-    try self.garbageCollectionCImportResults();
+    var i: usize = 0;
+    while (i < self.handles.count()) {
+        const handle = self.handles.values()[i];
+        if (reachable_handles.contains(handle.uri)) {
+            i += 1;
+            continue;
+        }
+        std.log.debug("Closing document {s}", .{handle.uri});
+        var kv = self.handles.fetchSwapRemove(handle.uri).?;
+        kv.value.deinit(self.allocator);
+    }
 }
 
-fn garbageCollectionCImportResults(self: *DocumentStore) error{OutOfMemory}!void {
+fn garbageCollectionCImports(self: *DocumentStore) error{OutOfMemory}!void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
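Both new sweep loops use the same in-place pattern, replacing the old two-pass collect-then-remove code and its temporary `unreachable_*` lists. The key detail is `fetchSwapRemove`: deleting the entry at slot `i` swaps the map's last entry into that slot, so the index must only advance when an entry is kept; the swapped-in entry is then examined on the next iteration. The `.?` unwrap is safe because the key was read out of the map one line earlier. A condensed sketch of the pattern (placeholder `map`, `isReachable`, and `allocator` names):

// In-place sweep over a std.ArrayHashMap-style container.
var i: usize = 0;
while (i < map.count()) {
    const key = map.keys()[i];
    if (isReachable(key)) {
        i += 1; // keep: advance past this slot
        continue;
    }
    var kv = map.fetchSwapRemove(key).?;
    kv.value.deinit(allocator);
    // no `i += 1`: slot `i` now holds the former last entry
}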
@@ -270,17 +270,38 @@ fn garbageCollectionCImportResults(self: *DocumentStore) error{OutOfMemory}!void
         }
     }
 
-    var unreachable_hashes = std.ArrayListUnmanaged(Hash){};
-    defer unreachable_hashes.deinit(self.allocator);
-
-    for (self.cimports.keys()) |hash| {
-        if (reachable_hashes.contains(hash)) continue;
-        try unreachable_hashes.append(self.allocator, hash);
-    }
-
-    for (unreachable_hashes.items) |hash| {
-        var kv = self.cimports.fetchSwapRemove(hash) orelse continue;
-        std.log.debug("Destroying cimport {s}", .{kv.value.success});
-        kv.value.deinit(self.allocator);
-    }
-}
+    var i: usize = 0;
+    while (i < self.cimports.count()) {
+        const hash = self.cimports.keys()[i];
+        if (reachable_hashes.contains(hash)) {
+            i += 1;
+            continue;
+        }
+        var kv = self.cimports.fetchSwapRemove(hash).?;
+        std.log.debug("Destroying cimport {s}", .{kv.value.success});
+        kv.value.deinit(self.allocator);
+    }
+}
+
+fn garbageCollectionBuildFiles(self: *DocumentStore) error{OutOfMemory}!void {
+    var reachable_build_files = std.StringHashMapUnmanaged(void){};
+    defer reachable_build_files.deinit(self.allocator);
+
+    for (self.handles.values()) |handle| {
+        const build_file_uri = handle.associated_build_file orelse continue;
+
+        try reachable_build_files.put(self.allocator, build_file_uri, {});
+    }
+
+    var i: usize = 0;
+    while (i < self.build_files.count()) {
+        const hash = self.build_files.keys()[i];
+        if (reachable_build_files.contains(hash)) {
+            i += 1;
+            continue;
+        }
+        var kv = self.build_files.fetchSwapRemove(hash).?;
+        std.log.debug("Destroying build file {s}", .{kv.value.uri});
+        kv.value.deinit(self.allocator);
+    }
+}
@@ -319,6 +340,9 @@ fn ensureDependenciesProcessed(self: *DocumentStore, handle: Handle) error{OutOfMemory}!void {
 }
 
 fn ensureCImportsProcessed(self: *DocumentStore, handle: Handle) error{OutOfMemory}!void {
+    const tracy_zone = tracy.trace(@src());
+    defer tracy_zone.end();
+
     for (handle.cimports.items(.hash)) |hash, index| {
         if (self.cimports.contains(hash)) continue;