Merge pull request #267 from InterplanetaryEngineer/master

Accelerate symbolReferencesInternal further and improve refreshDocument algorithm
This commit is contained in:
Alexandros Naskos 2021-03-29 05:38:46 -07:00 committed by GitHub
commit bd3f0460ba
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 116 additions and 72 deletions

View File

@@ -237,14 +237,12 @@ pub fn build(b: *std.build.Builder) !void {
const test_step = b.step("test", "Run all the tests"); const test_step = b.step("test", "Run all the tests");
test_step.dependOn(builder.getInstallStep()); test_step.dependOn(builder.getInstallStep());
var unit_tests = b.addTest("tests/unit_tests.zig"); var unit_tests = b.addTest("src/unit_tests.zig");
unit_tests.addPackage(.{ .name = "analysis", .path = "src/analysis.zig" });
unit_tests.addPackage(.{ .name = "types", .path = "src/types.zig" });
unit_tests.setBuildMode(.Debug); unit_tests.setBuildMode(.Debug);
test_step.dependOn(&unit_tests.step);
var session_tests = b.addTest("tests/sessions.zig"); var session_tests = b.addTest("tests/sessions.zig");
session_tests.addPackage(.{ .name = "header", .path = "src/header.zig" }); session_tests.addPackage(.{ .name = "header", .path = "src/header.zig" });
session_tests.setBuildMode(.Debug); session_tests.setBuildMode(.Debug);
test_step.dependOn(&session_tests.step); test_step.dependOn(&session_tests.step);
} }

View File

@@ -359,53 +359,40 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const
handle.document_scope.deinit(self.allocator); handle.document_scope.deinit(self.allocator);
handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree); handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
var new_imports = std.ArrayList([]const u8).init(self.allocator);
errdefer new_imports.deinit();
try analysis.collectImports(&new_imports, handle.tree);
// TODO: Better algorithm or data structure? // Convert to URIs
// Removing the imports is costly since they live in an array list var i: usize = 0;
// Perhaps we should use an AutoHashMap([]const u8, {}) ? while (i < new_imports.items.len) {
if (try self.uriFromImportStr(self.allocator, handle.*, new_imports.items[i])) |uri| {
// Try to detect removed imports and decrement their counts. // The raw import strings are owned by the document and do not need to be freed here.
if (handle.import_uris.items.len == 0) return; new_imports.items[i] = uri;
i += 1;
var arena = std.heap.ArenaAllocator.init(self.allocator); } else {
defer arena.deinit(); _ = new_imports.swapRemove(i);
}
var import_strs = std.ArrayList([]const u8).init(&arena.allocator);
try analysis.collectImports(&import_strs, handle.tree);
const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len);
for (still_exist) |*ex| {
ex.* = false;
} }
const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path); const old_imports = handle.import_uris;
for (import_strs.items) |str| { handle.import_uris = new_imports;
const uri = (try self.uriFromImportStr(&arena.allocator, handle.*, str)) orelse continue; defer old_imports.deinit();
exists_loop: for (still_exist) |*does_still_exist, i| { // Remove all old_imports that do not exist anymore
if (does_still_exist.*) continue; for (old_imports.items) |old| {
still_exists: {
if (std.mem.eql(u8, handle.import_uris.items[i], uri)) { for (new_imports.items) |new| {
does_still_exist.* = true; if (std.mem.eql(u8, new, old)) {
break :exists_loop; break :still_exists;
}
} }
log.debug("Import removed: {s}", .{old});
self.decrementCount(old);
self.allocator.free(old);
} }
} }
// Go through still_exist, remove the items that are false and decrement their handle counts.
var idx: usize = 0;
for (still_exist) |does_still_exist| {
if (does_still_exist) {
idx += 1;
continue;
}
log.debug("Import removed: {s}", .{handle.import_uris.items[idx]});
const uri = handle.import_uris.orderedRemove(idx);
self.decrementCount(uri);
self.allocator.free(uri);
}
} }
pub fn applySave(self: *DocumentStore, handle: *Handle) !void { pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
@@ -513,18 +500,14 @@ pub fn uriFromImportStr(
} }
return null; return null;
} else { } else {
// Find relative uri const base = handle.uri();
const path = try URI.parse(allocator, handle.uri()); var base_len = base.len;
defer allocator.free(path); while (base[base_len - 1] != '/' and base_len > 0) { base_len -= 1; }
base_len -= 1;
const dir_path = std.fs.path.dirname(path) orelse ""; if (base_len <= 0) {
const import_path = try std.fs.path.resolve(allocator, &[_][]const u8{ return error.UriBadScheme;
dir_path, import_str, }
}); return try URI.pathRelative(allocator, base[0..base_len], import_str);
defer allocator.free(import_path);
return try URI.fromPath(allocator, import_path);
} }
} }

View File

@@ -1213,8 +1213,6 @@ fn loadConfig(folder_path: []const u8) ?Config {
}; };
defer allocator.free(file_buf); defer allocator.free(file_buf);
// TODO: Uh oh. Profile the actual build time impact
// of adding config options and consider alternatives (TOML?)
@setEvalBranchQuota(2000); @setEvalBranchQuota(2000);
// TODO: Better errors? Doesn't seem like std.json can provide us positions or context. // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| { var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| {

View File

@@ -1,5 +1,7 @@
const analysis = @import("analysis"); const analysis = @import("analysis.zig");
const types = @import("types"); const types = @import("types.zig");
const offsets = @import("offsets.zig");
const URI = @import("uri.zig");
const std = @import("std"); const std = @import("std");
@@ -30,11 +32,11 @@ fn testContext(comptime line: []const u8, comptime tag: anytype, comptime range:
const doc = try makeUnnamedDocument(final_line); const doc = try makeUnnamedDocument(final_line);
defer freeDocument(doc); defer freeDocument(doc);
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
const ctx = try analysis.documentPositionContext(allocator, doc, types.Position{ const p = try offsets.documentPosition(doc, .{ .line = 0, .character = @intCast(i64, cursor_idx) }, .utf8);
.line = 0, const ctx = try analysis.documentPositionContext(&arena, doc, p);
.character = @intCast(i64, cursor_idx),
});
if (std.meta.activeTag(ctx) != tag) { if (std.meta.activeTag(ctx) != tag) {
std.debug.warn("Expected tag {}, got {}\n", .{ tag, std.meta.activeTag(ctx) }); std.debug.warn("Expected tag {}, got {}\n", .{ tag, std.meta.activeTag(ctx) });
@@ -100,3 +102,15 @@ test "documentPositionContext" {
"Se", "Se",
); );
} }
// Verifies pathRelative joins a base URI with a relative path and
// percent-escapes reserved characters with uppercase hex ('+' -> %2B,
// ']' -> %5D), and that a leading ".." pops one component off the base.
// NOTE(review): `URI` and `allocator` are file-level decls from this test
// file (see the imports hunk above) — not visible in this excerpt.
test "pathRelative and escapes" {
const join1 = try URI.pathRelative(allocator, "file://project/zig", "/src/main+.zig");
defer allocator.free(join1);
std.testing.expectEqualStrings("file://project/zig/src/main%2B.zig", join1);
const join2 = try URI.pathRelative(allocator, "file://project/zig/wow", "../]src]/]main.zig");
defer allocator.free(join2);
std.testing.expectEqualStrings("file://project/zig/%5Dsrc%5D/%5Dmain.zig", join2);
}

View File

@@ -1,13 +1,24 @@
const std = @import("std"); const std = @import("std");
const mem = std.mem;
const reserved_chars = &[_]u8{ const reserved_chars = &[_]u8{
'!', '#', '$', '%', '&', '\'', '!', '#', '$', '%', '&', '\'',
'(', ')', '*', '+', ',', ':', '(', ')', '*', '+', ',', ':',
';', '=', '?', '@', '[', ']', ';', '=', '?', '@', '[', ']',
}; };
/// Comptime-built table of 3-byte "%XX" (uppercase hex) escape sequences,
/// one per entry of `reserved_chars`: `reserved_escapes[i]` is the escape
/// for `reserved_chars[i]`. Every reserved char is >= 0x10, so "{X}" always
/// produces exactly two hex digits and fully fills bytes [1..3].
const reserved_escapes = comptime blk: {
var escapes: [reserved_chars.len][3]u8
= [_][3]u8{[_]u8{undefined} ** 3} ** reserved_chars.len;
for (reserved_chars) |c, i| {
escapes[i][0] = '%';
// bufPrint cannot fail here: the 2-byte slice is exactly large enough.
_ = std.fmt.bufPrint(escapes[i][1..], "{X}", .{c}) catch unreachable;
}
break :blk &escapes;
};
/// Returns a URI from a path, caller owns the memory allocated with `allocator` /// Returns a URI from a path, caller owns the memory allocated with `allocator`
pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 { pub fn fromPath(allocator: *mem.Allocator, path: []const u8) ![]const u8 {
if (path.len == 0) return ""; if (path.len == 0) return "";
const prefix = if (std.builtin.os.tag == .windows) "file:///" else "file://"; const prefix = if (std.builtin.os.tag == .windows) "file:///" else "file://";
@@ -19,10 +30,8 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
for (path) |char| { for (path) |char| {
if (char == std.fs.path.sep) { if (char == std.fs.path.sep) {
try buf.append('/'); try buf.append('/');
} else if (std.mem.indexOfScalar(u8, reserved_chars, char) != null) { } else if (mem.indexOfScalar(u8, reserved_chars, char)) |reserved| {
// Write '%' + hex with uppercase try buf.appendSlice(&reserved_escapes[reserved]);
try buf.append('%');
try std.fmt.format(out_stream, "{X}", .{char});
} else { } else {
try buf.append(char); try buf.append(char);
} }
@@ -32,7 +41,7 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
if (std.builtin.os.tag == .windows) { if (std.builtin.os.tag == .windows) {
if (buf.items.len > prefix.len + 1 and if (buf.items.len > prefix.len + 1 and
std.ascii.isAlpha(buf.items[prefix.len]) and std.ascii.isAlpha(buf.items[prefix.len]) and
std.mem.startsWith(u8, buf.items[prefix.len + 1 ..], "%3A")) mem.startsWith(u8, buf.items[prefix.len + 1 ..], "%3A"))
{ {
buf.items[prefix.len] = std.ascii.toLower(buf.items[prefix.len]); buf.items[prefix.len] = std.ascii.toLower(buf.items[prefix.len]);
} }
@@ -41,6 +50,47 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
return buf.toOwnedSlice(); return buf.toOwnedSlice();
} }
/// Move along `rel` from `base` with a single allocation.
/// `base` is a URI of a folder, `rel` is a raw relative path.
/// Resolves "." and ".." components, percent-escapes reserved characters
/// in each remaining component (uppercase hex, via `reserved_escapes`),
/// and returns the joined URI. Caller owns the returned slice.
/// Returns error.UriBadScheme when ".." walks past the start of `base`.
/// NOTE(review): assumes `base` has no trailing '/' — the caller in
/// uriFromImportStr trims it; each component below prepends its own '/'.
pub fn pathRelative(allocator: *mem.Allocator, base: []const u8, rel: []const u8) ![]const u8 {
// Worst case: every byte of `rel` becomes a 3-byte "%XX" escape; the
// '/' separators consumed by tokenize are re-emitted one per component,
// so base.len + rel.len * 3 + 1 always bounds the result.
const max_size = base.len + rel.len * 3 + 1;
var result = try allocator.alloc(u8, max_size);
errdefer allocator.free(result);
mem.copy(u8, result, base);
// Write cursor: everything before `result_index` is final output.
var result_index: usize = base.len;
var it = mem.tokenize(rel, "/");
while (it.next()) |component| {
if (mem.eql(u8, component, ".")) {
// "." is a no-op component.
continue;
} else if (mem.eql(u8, component, "..")) {
// Pop the last path component: back up to (and over) the
// previous '/'; the next component rewrites the '/' there.
// NOTE(review): this can also pop into the "//" of the scheme
// if callers pass enough ".." — TODO confirm callers prevent it.
while (true) {
if (result_index == 0)
return error.UriBadScheme;
result_index -= 1;
if (result[result_index] == '/')
break;
}
} else {
// Emit a separator, then the component with reserved chars
// replaced by their precomputed "%XX" escapes.
result[result_index] = '/';
result_index += 1;
for (component) |char| {
if (mem.indexOfScalar(u8, reserved_chars, char)) |reserved| {
const escape = &reserved_escapes[reserved];
mem.copy(u8, result[result_index..], escape);
result_index += escape.len;
} else {
result[result_index] = char;
result_index += 1;
}
}
}
}
// Shrink the over-allocated buffer to the bytes actually written
// (pre-0.9 allocator API: resize returns the resized slice).
return allocator.resize(result, result_index);
}
pub const UriParseError = error{ pub const UriParseError = error{
UriBadScheme, UriBadScheme,
UriBadHexChar, UriBadHexChar,
@@ -59,8 +109,8 @@ fn parseHex(c: u8) !u8 {
} }
/// Caller should free memory /// Caller should free memory
pub fn parse(allocator: *std.mem.Allocator, str: []const u8) ![]u8 { pub fn parse(allocator: *mem.Allocator, str: []const u8) ![]u8 {
if (str.len < 7 or !std.mem.eql(u8, "file://", str[0..7])) return error.UriBadScheme; if (str.len < 7 or !mem.eql(u8, "file://", str[0..7])) return error.UriBadScheme;
const uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7)); const uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7));
errdefer allocator.free(uri); errdefer allocator.free(uri);
@@ -89,3 +139,4 @@ pub fn parse(allocator: *std.mem.Allocator, str: []const u8) ![]u8 {
return allocator.shrink(uri, i); return allocator.shrink(uri, i);
} }