From 31116279233f6f85a831accde3732f08227073ed Mon Sep 17 00:00:00 2001
From: Vexu
Date: Thu, 14 May 2020 17:22:15 +0300
Subject: [PATCH] parsing now always results in an AST

---
 src/analysis.zig       | 16 +++++-----
 src/document_store.zig | 70 +++++++++---------------------------------
 src/main.zig           | 25 +++++----------
 src/types.zig          |  1 -
 4 files changed, 29 insertions(+), 83 deletions(-)

diff --git a/src/analysis.zig b/src/analysis.zig
index 637bd89..a02af92 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -78,7 +78,8 @@ fn collectDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, doc_commen
 pub fn getFunctionSignature(tree: *ast.Tree, func: *ast.Node.FnProto) []const u8 {
     const start = tree.tokens.at(func.firstToken()).start;
     const end = tree.tokens.at(switch (func.return_type) {
-        .Explicit, .InferErrorSet => |node| node.lastToken()
+        .Explicit, .InferErrorSet => |node| node.lastToken(),
+        .Invalid => |r_paren| r_paren,
     }).end;
     return tree.source[start..end];
 }
@@ -200,6 +201,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
             const func = node.cast(ast.Node.FnProto).?;
             switch (func.return_type) {
                 .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type),
+                .Invalid => {},
             }
         },
         .Identifier => {
@@ -260,7 +262,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
                     const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
 
                     return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: {
-                        std.debug.warn("Error {} while proessing import {}\n", .{err, import_str});
+                        std.debug.warn("Error {} while processing import {}\n", .{err, import_str});
                         break :block null;
                     };
                 },
@@ -359,17 +361,13 @@ pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool {
     switch (node.id) {
         .VarDecl => {
             const var_decl = node.cast(ast.Node.VarDecl).?;
-            if (var_decl.visib_token) |visib_token| {
-                return std.mem.eql(u8, tree.tokenSlice(visib_token), "pub");
-            } else return false;
+            return var_decl.visib_token != null;
         },
         .FnProto => {
             const func = node.cast(ast.Node.FnProto).?;
-            if (func.visib_token) |visib_token| {
-                return std.mem.eql(u8, tree.tokenSlice(visib_token), "pub");
-            } else return false;
+            return func.visib_token != null;
         },
-        .ContainerField => {
+        .ContainerField, .ErrorTag => {
             return true;
         },
         else => {
diff --git a/src/document_store.zig b/src/document_store.zig
index 2ad2256..99178f1 100644
--- a/src/document_store.zig
+++ b/src/document_store.zig
@@ -14,24 +14,10 @@ pub const Handle = struct {
         return handle.document.uri;
     }
 
-    /// Returns the zig AST resulting from parsing the document's text, even
-    /// if it contains errors.
-    pub fn dirtyTree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree {
+    /// Returns a zig AST, with all its errors.
+    pub fn tree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree {
         return try std.zig.parse(allocator, handle.document.text);
     }
-
-    /// Returns a zig AST with no errors, either from the current text or
-    /// the stored sane text, null if no such ast exists.
-    pub fn saneTree(handle: Handle, allocator: *std.mem.Allocator) !?*std.zig.ast.Tree {
-        var tree = try std.zig.parse(allocator, handle.document.text);
-        if (tree.errors.len == 0) return tree;
-
-        tree.deinit();
-        if (handle.document.sane_text) |sane| {
-            return try std.zig.parse(allocator, sane);
-        }
-        return null;
-    }
 };
 
 allocator: *std.mem.Allocator,
@@ -61,7 +47,7 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?
     }
 }
 
-/// This function assersts the document is not open yet and takes owneship
+/// This function asserts the document is not open yet and takes ownership
 /// of the uri and text passed in.
 fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
     std.debug.warn("Opened document: {}\n", .{uri});
@@ -78,7 +64,6 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle {
             .uri = uri,
             .text = text,
             .mem = text,
-            .sane_text = null,
         },
     };
     try self.checkSanity(&handle);
@@ -110,9 +95,6 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
         std.debug.warn("Freeing document: {}\n", .{uri});
 
         self.allocator.free(entry.value.document.mem);
-        if (entry.value.document.sane_text) |sane| {
-            self.allocator.free(sane);
-        }
 
         for (entry.value.import_uris.items) |import_uri| {
             self.decrementCount(import_uri);
@@ -141,18 +123,10 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 // Check if the document text is now sane, move it to sane_text if so.
 fn checkSanity(self: *DocumentStore, handle: *Handle) !void {
-    const dirty_tree = try handle.dirtyTree(self.allocator);
-    defer dirty_tree.deinit();
-
-    if (dirty_tree.errors.len > 0) return;
-
-    std.debug.warn("New sane text for document {}\n", .{handle.uri()});
-    if (handle.document.sane_text) |sane| {
-        self.allocator.free(sane);
-    }
-
-    handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text);
+    const tree = try handle.tree(self.allocator);
+    defer tree.deinit();
 
+    std.debug.warn("New text for document {}\n", .{handle.uri()});
     // TODO: Better algorithm or data structure?
     // Removing the imports is costly since they live in an array list
     // Perhaps we should use an AutoHashMap([]const u8, {}) ?
 
     // Try to detect removed imports and decrement their counts.
     if (handle.import_uris.items.len == 0) return;
 
-    const import_strs = try analysis.collectImports(self.allocator, dirty_tree);
+    const import_strs = try analysis.collectImports(self.allocator, tree);
     defer self.allocator.free(import_strs);
 
     const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len);
@@ -302,11 +276,8 @@ pub const AnalysisContext = struct {
             self.handle = self.store.getHandle(final_uri) orelse return null;
 
             self.tree.deinit();
-            if (try self.handle.saneTree(allocator)) |tree| {
-                self.tree = tree;
-                return &self.tree.root_node.base;
-            }
-            return null;
+            self.tree = try self.handle.tree(allocator);
+            return &self.tree.root_node.base;
         }
     }
 
@@ -318,11 +289,8 @@ pub const AnalysisContext = struct {
             self.handle = new_handle;
 
             self.tree.deinit();
-            if (try self.handle.saneTree(allocator)) |tree| {
-                self.tree = tree;
-                return &self.tree.root_node.base;
-            }
-            return null;
+            self.tree = try self.handle.tree(allocator);
+            return &self.tree.root_node.base;
         }
 
         // New document, read the file then call into openDocument.
@@ -358,11 +326,8 @@ pub const AnalysisContext = struct {
         // Free old tree, add new one if it exists.
        // If we return null, no one should access the tree.
         self.tree.deinit();
-        if (try self.handle.saneTree(allocator)) |tree| {
-            self.tree = tree;
-            return &self.tree.root_node.base;
-        }
-        return null;
+        self.tree = try self.handle.tree(allocator);
+        return &self.tree.root_node.base;
     }
 
     pub fn deinit(self: *AnalysisContext) void {
@@ -370,14 +335,12 @@ pub const AnalysisContext = struct {
     }
 };
 
-pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !?AnalysisContext {
-    const tree = (try handle.saneTree(self.allocator)) orelse return null;
-
+pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !AnalysisContext {
     return AnalysisContext{
         .store = self,
         .handle = handle,
         .arena = arena,
-        .tree = tree,
+        .tree = try handle.tree(self.allocator),
     };
 }
 
@@ -385,9 +348,6 @@ pub fn deinit(self: *DocumentStore) void {
     var entry_iterator = self.handles.iterator();
     while (entry_iterator.next()) |entry| {
         self.allocator.free(entry.value.document.mem);
-        if (entry.value.document.sane_text) |sane| {
-            self.allocator.free(sane);
-        }
 
         for (entry.value.import_uris.items) |uri| {
             self.allocator.free(uri);
diff --git a/src/main.zig b/src/main.zig
index c08048a..e79eae2 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -99,7 +99,7 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range {
 }
 
 fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
-    const tree = try handle.dirtyTree(allocator);
+    const tree = try handle.tree(allocator);
     defer tree.deinit();
 
     // Use an arena for our local memory allocations.
@@ -145,7 +145,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void {
                     std.mem.eql(u8, tree.tokenSlice(ident.token), "type")
                 else
                     false,
-                .InferErrorSet => false,
+                .InferErrorSet, .Invalid => false,
             };
 
             const func_name = tree.tokenSlice(name_token);
@@ -234,7 +234,7 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s
 }
 
 fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
-    var tree = (try handle.saneTree(allocator)) orelse return respondGeneric(id, no_completions_response);
+    var tree = try handle.tree(allocator);
     defer tree.deinit();
 
     // We use a local arena allocator to deallocate all temporary data without iterating
@@ -266,17 +266,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
 
-    var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse {
-        return send(types.Response{
-            .id = .{ .Integer = id },
-            .result = .{
-                .CompletionList = .{
-                    .isIncomplete = false,
-                    .items = &[_]types.CompletionItem{},
-                },
-            },
-        });
-    };
+    var analysis_ctx = try document_store.analysisContext(handle, &arena);
     defer analysis_ctx.deinit();
 
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
@@ -580,7 +570,7 @@ const debug_alloc: ?*std.testing.LeakCountAllocator = if (build_options.allocati
 pub fn main() anyerror!void {
     // TODO: Use a better purpose general allocator once std has one.
     // Probably after the generic composable allocators PR?
-    // This is not too bad for now since most allocations happen in local areans.
+    // This is not too bad for now since most allocations happen in local arenas.
    allocator = std.heap.page_allocator;
 
     if (build_options.allocation_info) {
@@ -626,7 +616,7 @@ pub fn main() anyerror!void {
         const bytes_read = conf_file.readAll(file_buf) catch break :config_read;
         if (bytes_read != conf_file_stat.size) break :config_read;
 
-        // TODO: Better errors? Doesnt seem like std.json can provide us positions or context.
+        // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
         config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| {
             std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
             break :config_read;
@@ -655,8 +645,7 @@ pub fn main() anyerror!void {
         const c = buffer.items[index];
         if (c >= '0' and c <= '9') {
             content_len = content_len * 10 + (c - '0');
-        }
-        if (c == '\r' and buffer.items[index + 1] == '\n') {
+        } else if (c == '\r' and buffer.items[index + 1] == '\n') {
            index += 2;
            break;
        }
diff --git a/src/types.zig b/src/types.zig
index deee0ef..80094ac 100644
--- a/src/types.zig
+++ b/src/types.zig
@@ -140,7 +140,6 @@ pub const TextDocument = struct {
     text: String,
     // This holds the memory that we have actually allocated.
     mem: []u8,
-    sane_text: ?String = null,
 
     pub fn positionToIndex(self: TextDocument, position: Position) !usize {
         var split_iterator = std.mem.split(self.text, "\n");
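
Illustrative sketch (not part of the patch): after this change a Handle always yields a tree, so call sites own a single deinit path and look at tree.errors directly instead of branching on a missing "sane" tree. The helper name and import path below are hypothetical; Handle.tree, tree.errors.len, handle.uri(), and std.debug.warn mirror the hunks above.

const std = @import("std");
const DocumentStore = @import("document_store.zig"); // assumed import path

// Hypothetical caller: parse the document, always clean up the tree,
// and report parse errors instead of falling back to older stored text.
fn checkDocument(handle: DocumentStore.Handle, allocator: *std.mem.Allocator) !void {
    const tree = try handle.tree(allocator);
    defer tree.deinit();

    if (tree.errors.len > 0) {
        std.debug.warn("Document {} has {} parse error(s)\n", .{ handle.uri(), tree.errors.len });
    }
}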