Better import handling
parent afc6d1fd1f
commit f382a1b22d
@@ -1202,9 +1202,8 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !v
             if (tags[i + 3] != .r_paren)
                 continue;
 
-
             const str = tree.tokenSlice(@intCast(u32, i + 2));
-            try import_arr.append(str[1..str.len-1]);
+            try import_arr.append(str[1 .. str.len - 1]);
         }
     }
 }
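For context: `tree.tokenSlice` returns a slice of the document's source text, so the string appended here is the import name with its surrounding quotes stripped, borrowed from the token rather than copied. A standalone sketch of that slicing (hypothetical helper, not part of this commit):

const std = @import("std");

// Hypothetical helper illustrating the `str[1 .. str.len - 1]` slicing above:
// strip the quotes from a string-literal token. The result is a sub-slice of
// the input, not an allocated copy.
fn stripQuotes(str: []const u8) []const u8 {
    std.debug.assert(str.len >= 2 and str[0] == '"' and str[str.len - 1] == '"');
    return str[1 .. str.len - 1];
}

pub fn main() void {
    std.debug.print("{s}\n", .{stripQuotes("\"std\"")}); // prints: std
}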
@@ -21,7 +21,10 @@ const BuildFile = struct {
 pub const Handle = struct {
     document: types.TextDocument,
     count: usize,
-    import_uris: std.ArrayList([]const u8),
+    /// Contains one entry for every import in the document
+    import_uris: []const []const u8,
+    /// Items in this array list come from `import_uris`
+    imports_used: std.ArrayListUnmanaged([]const u8),
     tree: std.zig.ast.Tree,
     document_scope: analysis.DocumentScope,
 
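The Handle now tracks imports in two places: `import_uris` holds one URI per `@import` found in the document (an owned slice, replaced wholesale on refresh), while `imports_used` records which of those imports were actually resolved to loaded documents. A minimal sketch of that relationship, using simplified stand-in types rather than the real zls ones:

const std = @import("std");

// Simplified stand-in for the two Handle fields above (not the real zls types).
// `import_uris` owns its URI strings; per the doc comment above, `imports_used`
// is only meant to reference entries that also appear in `import_uris`.
const ImportState = struct {
    import_uris: []const []const u8,
    imports_used: std.ArrayListUnmanaged([]const u8) = .{},

    fn isUsed(self: ImportState, uri: []const u8) bool {
        for (self.imports_used.items) |used| {
            if (std.mem.eql(u8, used, uri)) return true;
        }
        return false;
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = &gpa.allocator; // 0.8-era *Allocator, matching the code in this diff

    var state = ImportState{ .import_uris = &[_][]const u8{ "file:///a.zig", "file:///b.zig" } };
    defer state.imports_used.deinit(allocator);

    // Mark the first import as actually resolved.
    try state.imports_used.append(allocator, state.import_uris[0]);
    std.debug.print("a used: {}, b used: {}\n", .{ state.isUsed("file:///a.zig"), state.isUsed("file:///b.zig") });
}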
@@ -151,7 +154,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) anyerror!*Hand
 
     handle.* = Handle{
         .count = 1,
-        .import_uris = std.ArrayList([]const u8).init(self.allocator),
+        .import_uris = &.{},
+        .imports_used = .{},
         .document = .{
             .uri = uri,
             .text = text,
@@ -329,13 +333,17 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
         entry.value.tree.deinit(self.allocator);
         self.allocator.free(entry.value.document.mem);
 
-        for (entry.value.import_uris.items) |import_uri| {
+        for (entry.value.imports_used.items) |import_uri| {
             self.decrementCount(import_uri);
+        }
+
+        for (entry.value.import_uris) |import_uri| {
             self.allocator.free(import_uri);
         }
 
         entry.value.document_scope.deinit(self.allocator);
-        entry.value.import_uris.deinit();
+        entry.value.imports_used.deinit(self.allocator);
+        self.allocator.free(entry.value.import_uris);
         self.allocator.destroy(entry.value);
         const uri_key = entry.key;
         self.handles.removeAssertDiscard(uri);
@@ -351,7 +359,6 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
     return self.handles.get(uri);
 }
 
-// Check if the document text is now sane, move it to sane_text if so.
 fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void {
     log.debug("New text for document {s}", .{handle.uri()});
     handle.tree.deinit(self.allocator);
@@ -377,11 +384,17 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
     }
 
     const old_imports = handle.import_uris;
-    handle.import_uris = new_imports;
-    defer old_imports.deinit();
+    handle.import_uris = new_imports.toOwnedSlice();
+    defer {
+        for (old_imports) |uri| {
+            self.allocator.free(uri);
+        }
+        self.allocator.free(old_imports);
+    }
 
-    // Remove all old_imports that do not exist anymore
-    for (old_imports.items) |old| {
+    i = 0;
+    while (i < handle.imports_used.items.len) {
+        const old = handle.imports_used.items[i];
         still_exists: {
             for (new_imports.items) |new| {
                 if (std.mem.eql(u8, new, old)) {
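Since `import_uris` is now a plain owned slice, replacing it means transferring ownership out of the temporary list with `toOwnedSlice` and then freeing every string of the old slice plus the slice itself, which the `defer` block above does. A generic sketch of that pattern (hypothetical names, not zls code):

const std = @import("std");

// Free a slice of allocator-owned strings: each string first, then the slice.
fn freeUris(allocator: *std.mem.Allocator, uris: []const []const u8) void {
    for (uris) |uri| allocator.free(uri);
    allocator.free(uris);
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = &gpa.allocator; // 0.8-era *Allocator

    var list = std.ArrayList([]const u8).init(allocator);
    try list.append(try std.mem.dupe(allocator, u8, "file:///a.zig"));
    try list.append(try std.mem.dupe(allocator, u8, "file:///b.zig"));

    // Ownership moves out of the list, mirroring `new_imports.toOwnedSlice()` above;
    // the list itself then needs no deinit, only the returned slice must be freed.
    const uris = list.toOwnedSlice();
    defer freeUris(allocator, uris);

    std.debug.print("{} uris\n", .{uris.len});
}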
@@ -390,8 +403,10 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
             }
             log.debug("Import removed: {s}", .{old});
             self.decrementCount(old);
+            _ = handle.imports_used.swapRemove(i);
+            continue;
         }
-        self.allocator.free(old);
+        i += 1;
     }
 }
 
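The refresh loop above walks `imports_used` by index and calls `swapRemove` when an import has disappeared; because `swapRemove` moves the last element into the vacated slot, the index is only advanced when the current element is kept (hence the `continue`). A standalone sketch of that pattern on a plain list (generic example, not zls code):

const std = @import("std");

// Remove elements in place while iterating: swapRemove(i) fills slot i with the
// last element, so only advance i when nothing was removed.
fn removeOdd(list: *std.ArrayListUnmanaged(u32)) void {
    var i: usize = 0;
    while (i < list.items.len) {
        if (list.items[i] % 2 == 1) {
            _ = list.swapRemove(i);
            continue; // re-check the element that was swapped into slot i
        }
        i += 1;
    }
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = &gpa.allocator; // 0.8-era *Allocator

    var list = std.ArrayListUnmanaged(u32){};
    defer list.deinit(allocator);
    try list.appendSlice(allocator, &[_]u32{ 1, 2, 3, 4, 5 });

    removeOdd(&list);
    for (list.items) |v| std.debug.print("{} ", .{v}); // order is not preserved, e.g. 4 2
    std.debug.print("\n", .{});
}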
@@ -505,7 +520,9 @@ pub fn uriFromImportStr(
         } else {
             const base = handle.uri();
             var base_len = base.len;
-            while (base[base_len - 1] != '/' and base_len > 0) { base_len -= 1; }
+            while (base[base_len - 1] != '/' and base_len > 0) {
+                base_len -= 1;
+            }
             base_len -= 1;
             if (base_len <= 0) {
                 return error.UriBadScheme;
@@ -515,21 +532,19 @@ pub fn uriFromImportStr(
 }
 
 pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const u8) !?*Handle {
+    std.debug.print("RESOLVING IMPORT STR: {s}\n", .{import_str});
     const allocator = self.allocator;
     const final_uri = (try self.uriFromImportStr(
         self.allocator,
         handle.*,
         import_str,
     )) orelse return null;
 
     var consumed_final_uri = false;
     defer if (!consumed_final_uri) allocator.free(final_uri);
 
-    // Check if we already imported this.
-    for (handle.import_uris.items) |uri| {
-        // If we did, set our new handle and return the parsed tree root node.
+    for (handle.imports_used.items) |uri| {
         if (std.mem.eql(u8, uri, final_uri)) {
-            return self.getHandle(final_uri);
+            return self.getHandle(final_uri).?;
         }
     }
 
@@ -538,7 +553,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
     if (self.getHandle(final_uri)) |new_handle| {
         // If it is, append it to our imports, increment the count, set our new handle
         // and return the parsed tree root node.
-        try handle.import_uris.append(final_uri);
+        try handle.imports_used.append(self.allocator, final_uri);
        consumed_final_uri = true;
 
         new_handle.count += 1;
@@ -567,7 +582,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
     };
 
     // Add to import table of current handle.
-    try handle.import_uris.append(final_uri);
+    try handle.imports_used.append(self.allocator, final_uri);
     consumed_final_uri = true;
 
     // Swap handles.
@@ -601,12 +616,11 @@ pub fn deinit(self: *DocumentStore) void {
         entry.value.document_scope.deinit(self.allocator);
         entry.value.tree.deinit(self.allocator);
         self.allocator.free(entry.value.document.mem);
-        for (entry.value.import_uris.items) |uri| {
+        for (entry.value.import_uris) |uri| {
            self.allocator.free(uri);
         }
-        entry.value.import_uris.deinit();
+        self.allocator.free(entry.value.import_uris);
+        entry.value.imports_used.deinit(self.allocator);
         self.allocator.free(entry.key);
         self.allocator.destroy(entry.value);
     }
@@ -637,7 +651,7 @@ fn tagStoreCompletionItems(
 ) ![]types.CompletionItem {
     // TODO Better solution for deciding what tags to include
     var max_len: usize = @field(base.document_scope, name).count();
-    for (base.import_uris.items) |uri| {
+    for (base.imports_used.items) |uri| {
         max_len += @field(self.handles.get(uri).?.document_scope, name).count();
     }
 
@@ -646,7 +660,7 @@ fn tagStoreCompletionItems(
     result_set.entries.appendSliceAssumeCapacity(@field(base.document_scope, name).entries.items);
     try result_set.reIndex(&arena.allocator);
 
-    for (base.import_uris.items) |uri| {
+    for (base.imports_used.items) |uri| {
         const curr_set = &@field(self.handles.get(uri).?.document_scope, name);
         for (curr_set.entries.items) |entry| {
             result_set.putAssumeCapacity(entry.key, {});
@@ -1672,7 +1672,10 @@ fn processJsonRpc(arena: *std.heap.ArenaAllocator, parser: *std.json.Parser, jso
         logger.debug("Method without return value not implemented: {s}", .{method});
 }
 
-const stack_frames = switch (std.builtin.mode) { .Debug => 10, else => 0 };
+const stack_frames = switch (std.builtin.mode) {
+    .Debug => 10,
+    else => 0,
+};
 var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_frames }){};
 
 pub fn main() anyerror!void {
|
@ -553,7 +553,7 @@ pub fn symbolReferences(
|
|||||||
var i: usize = 0;
|
var i: usize = 0;
|
||||||
blk: while (i < imports.items.len) : (i += 1) {
|
blk: while (i < imports.items.len) : (i += 1) {
|
||||||
const import = imports.items[i];
|
const import = imports.items[i];
|
||||||
for (import.import_uris.items) |uri| {
|
for (import.imports_used.items) |uri| {
|
||||||
const h = store.getHandle(uri) orelse break;
|
const h = store.getHandle(uri) orelse break;
|
||||||
|
|
||||||
if (h == curr_handle) {
|
if (h == curr_handle) {
|
||||||
|