Detect import removal and decrement document reference count

Alexandros Naskos 2020-05-14 14:51:07 +03:00
parent 112d38e7fa
commit f5e4586c76
2 changed files with 116 additions and 26 deletions


@@ -257,6 +257,51 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node {
    return null;
}

fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: *std.ArrayList([]const u8)) !void {
    if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return;
    if (builtin_call.params.len > 1) return;
    const import_param = builtin_call.params.at(0).*;
    if (import_param.id != .StringLiteral) return;
    const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
    try arr.append(import_str[1 .. import_str.len - 1]);
}
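To make the quote handling concrete: tokenSlice returns the string literal token including its surrounding quotes, which the final slice strips. An illustrative comment, not part of the commit:

// Given a declaration like:  const foo = @import("foo.zig");
// tokenSlice on the literal yields the quoted text "foo.zig" (quotes included),
// so import_str[1 .. import_str.len - 1] appends just: foo.zig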
/// Collects all imports we can find into a slice of import paths (without quotes).
/// The import paths are valid as long as the tree is.
pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const u8 {
    // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;`
    var arr = std.ArrayList([]const u8).init(allocator);
    var idx: usize = 0;
    while (tree.root_node.iterate(idx)) |decl| : (idx += 1) {
        if (decl.id != .VarDecl) continue;
        const var_decl = decl.cast(ast.Node.VarDecl).?;
        if (var_decl.init_node == null) continue;
        switch (var_decl.init_node.?.id) {
            .BuiltinCall => {
                const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?;
                try maybeCollectImport(tree, builtin_call, &arr);
            },
            .InfixOp => {
                const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?;
                switch (infix_op.op) {
                    .Period => {},
                    else => continue,
                }
                if (infix_op.lhs.id != .BuiltinCall) continue;
                try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, &arr);
            },
            else => {},
        }
    }
    return arr.toOwnedSlice();
}
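A minimal usage sketch for collectImports (illustrative only, not part of this commit; assumes an `allocator` and a parsed `tree` are in scope, using the same Zig-0.6-era std APIs as the rest of the file):

// Hypothetical caller: collect and print every detected import path.
const imports = try collectImports(allocator, tree);
// Only the outer slice is owned by the caller; the paths point into the
// tree's token storage, so they stay valid only as long as the tree does.
defer allocator.free(imports);
for (imports) |import_path| {
    std.debug.warn("import: {}\n", .{import_path}); // e.g. "std", "types.zig"
}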
pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node {
    var current_node = &analysis_ctx.tree.root_node.base;


@@ -1,6 +1,7 @@
const std = @import("std");
const types = @import("types.zig");
const URI = @import("uri.zig");
const analysis = @import("analysis.zig");

const DocumentStore = @This();
@@ -151,6 +152,52 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
    }

    handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text);
    // TODO: Better algorithm or data structure?
    // Removing the imports is costly since they live in an array list.
    // Perhaps we should use an AutoHashMap([]const u8, {})?
    // Try to detect removed imports and decrement their counts.
    if (handle.import_uris.items.len == 0) return;
    const import_strs = try analysis.collectImports(self.allocator, dirty_tree);
    defer self.allocator.free(import_strs);
    const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len);
    defer self.allocator.free(still_exist);
    for (still_exist) |*ex| {
        ex.* = false;
    }
    for (import_strs) |str| {
        const uri = (try uriFromImportStr(self, handle, str)) orelse continue;
        defer self.allocator.free(uri);
        var idx: usize = 0;
        exists_loop: while (idx < still_exist.len) : (idx += 1) {
            if (still_exist[idx]) continue;
            if (std.mem.eql(u8, handle.import_uris.items[idx], uri)) {
                still_exist[idx] = true;
                break :exists_loop;
            }
        }
    }
    // Go through still_exist, remove the items that are false and decrement their handle counts.
    var offset: usize = 0;
    var idx: usize = 0;
    while (idx < still_exist.len) : (idx += 1) {
        if (still_exist[idx]) continue;
        std.debug.warn("Import removed: {}\n", .{handle.import_uris.items[idx - offset]});
        const uri = handle.import_uris.orderedRemove(idx - offset);
        offset += 1;
        self.closeDocument(uri);
        self.allocator.free(uri);
    }
}
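The TODO above hints at replacing the linear still_exist scan with a hash set keyed by URI. A rough sketch of that direction (an assumption, not the committed code; std.StringHashMap(void) stands in for the suggested AutoHashMap([]const u8, {}), and exact std hash-map signatures vary across Zig versions):

// Sketch: build a set of URIs present in the new text; any entry of
// handle.import_uris missing from the set was removed from the file.
var current = std.StringHashMap(void).init(self.allocator);
defer current.deinit();
for (import_strs) |str| {
    const uri = (try uriFromImportStr(self, handle, str)) orelse continue;
    // Ownership/freeing of `uri` is elided in this sketch.
    _ = try current.put(uri, {});
}
// A membership test then replaces the O(n) scan per old import:
// if (!current.contains(handle.import_uris.items[idx])) -> remove + decrement.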
pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
@@ -209,11 +256,29 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
    try self.checkSanity(handle);
}
// @TODO: We only reduce the count upon closing,
// find a way to reduce it when removing imports.
// Perhaps on new sane text we can go through imports
// and remove those that are in the import_uris table
// but not in the file anymore.

fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 {
    return if (std.mem.eql(u8, import_str, "std"))
        if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri)
        else {
            std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
            return null;
        }
    else b: {
        // Find relative uri
        const path = try URI.parse(store.allocator, handle.uri());
        defer store.allocator.free(path);
        const dir_path = std.fs.path.dirname(path) orelse "";
        const import_path = try std.fs.path.resolve(store.allocator, &[_][]const u8{
            dir_path, import_str,
        });
        defer store.allocator.free(import_path);
        break :b (try URI.fromPath(store.allocator, import_path));
    };
}
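A hedged call-site sketch for the new helper ("types.zig" is a placeholder import string; `store` and `handle` are as in the surrounding code, and the caller owns the returned URI):

// Resolve an import string to a file URI; null means it could not be
// resolved (e.g. "std" when store.std_uri is unset).
if (try uriFromImportStr(store, handle, "types.zig")) |uri| {
    defer store.allocator.free(uri);
    std.debug.warn("resolved import to: {}\n", .{uri});
}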
pub const AnalysisContext = struct {
    store: *DocumentStore,
    handle: *Handle,
@@ -224,27 +289,7 @@ pub const AnalysisContext = struct {
    pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
        const allocator = self.store.allocator;
        const final_uri = (try uriFromImportStr(self.store, self.handle, import_str)) orelse return null;
        const final_uri = if (std.mem.eql(u8, import_str, "std"))
            if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri)
            else {
                std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
                return null;
            }
        else b: {
            // Find relative uri
            const path = try URI.parse(allocator, self.handle.uri());
            defer allocator.free(path);
            const dir_path = std.fs.path.dirname(path) orelse "";
            const import_path = try std.fs.path.resolve(allocator, &[_][]const u8{
                dir_path, import_str,
            });
            defer allocator.free(import_path);
            break :b (try URI.fromPath(allocator, import_path));
        };
std.debug.warn("Import final URI: {}\n", .{final_uri}); std.debug.warn("Import final URI: {}\n", .{final_uri});
var consumed_final_uri = false; var consumed_final_uri = false;