From 31116279233f6f85a831accde3732f08227073ed Mon Sep 17 00:00:00 2001 From: Vexu Date: Thu, 14 May 2020 17:22:15 +0300 Subject: [PATCH 01/14] parsing now always results in a AST --- src/analysis.zig | 16 +++++----- src/document_store.zig | 70 +++++++++--------------------------------- src/main.zig | 25 +++++---------- src/types.zig | 1 - 4 files changed, 29 insertions(+), 83 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 637bd89..a02af92 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -78,7 +78,8 @@ fn collectDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, doc_commen pub fn getFunctionSignature(tree: *ast.Tree, func: *ast.Node.FnProto) []const u8 { const start = tree.tokens.at(func.firstToken()).start; const end = tree.tokens.at(switch (func.return_type) { - .Explicit, .InferErrorSet => |node| node.lastToken() + .Explicit, .InferErrorSet => |node| node.lastToken(), + .Invalid => |r_paren| r_paren, }).end; return tree.source[start..end]; } @@ -200,6 +201,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type), + .Invalid => {}, } }, .Identifier => { @@ -260,7 +262,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: { - std.debug.warn("Error {} while proessing import {}\n", .{err, import_str}); + std.debug.warn("Error {} while processing import {}\n", .{err, import_str}); break :block null; }; }, @@ -359,17 +361,13 @@ pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool { switch (node.id) { .VarDecl => { const var_decl = node.cast(ast.Node.VarDecl).?; - if (var_decl.visib_token) |visib_token| { - return std.mem.eql(u8, tree.tokenSlice(visib_token), "pub"); - } else return false; + return var_decl.visib_token != null; }, .FnProto => { const func = node.cast(ast.Node.FnProto).?; - if (func.visib_token) |visib_token| { - return std.mem.eql(u8, tree.tokenSlice(visib_token), "pub"); - } else return false; + return func.visib_token != null; }, - .ContainerField => { + .ContainerField, .ErrorTag => { return true; }, else => { diff --git a/src/document_store.zig b/src/document_store.zig index 2ad2256..99178f1 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -14,24 +14,10 @@ pub const Handle = struct { return handle.document.uri; } - /// Returns the zig AST resulting from parsing the document's text, even - /// if it contains errors. - pub fn dirtyTree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree { + /// Returns a zig AST, with all its errors. + pub fn tree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree { return try std.zig.parse(allocator, handle.document.text); } - - /// Returns a zig AST with no errors, either from the current text or - /// the stored sane text, null if no such ast exists. 
- pub fn saneTree(handle: Handle, allocator: *std.mem.Allocator) !?*std.zig.ast.Tree { - var tree = try std.zig.parse(allocator, handle.document.text); - if (tree.errors.len == 0) return tree; - - tree.deinit(); - if (handle.document.sane_text) |sane| { - return try std.zig.parse(allocator, sane); - } - return null; - } }; allocator: *std.mem.Allocator, @@ -61,7 +47,7 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ? } } -/// This function assersts the document is not open yet and takes owneship +/// This function asserts the document is not open yet and takes ownership /// of the uri and text passed in. fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { std.debug.warn("Opened document: {}\n", .{uri}); @@ -78,7 +64,6 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { .uri = uri, .text = text, .mem = text, - .sane_text = null, }, }; try self.checkSanity(&handle); @@ -110,9 +95,6 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void { std.debug.warn("Freeing document: {}\n", .{uri}); self.allocator.free(entry.value.document.mem); - if (entry.value.document.sane_text) |sane| { - self.allocator.free(sane); - } for (entry.value.import_uris.items) |import_uri| { self.decrementCount(import_uri); @@ -141,18 +123,10 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle { // Check if the document text is now sane, move it to sane_text if so. fn checkSanity(self: *DocumentStore, handle: *Handle) !void { - const dirty_tree = try handle.dirtyTree(self.allocator); - defer dirty_tree.deinit(); - - if (dirty_tree.errors.len > 0) return; - - std.debug.warn("New sane text for document {}\n", .{handle.uri()}); - if (handle.document.sane_text) |sane| { - self.allocator.free(sane); - } - - handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text); + const tree = try handle.tree(self.allocator); + defer tree.deinit(); + std.debug.warn("New text for document {}\n", .{handle.uri()}); // TODO: Better algorithm or data structure? // Removing the imports is costly since they live in an array list // Perhaps we should use an AutoHashMap([]const u8, {}) ? @@ -160,7 +134,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { // Try to detect removed imports and decrement their counts. if (handle.import_uris.items.len == 0) return; - const import_strs = try analysis.collectImports(self.allocator, dirty_tree); + const import_strs = try analysis.collectImports(self.allocator, tree); defer self.allocator.free(import_strs); const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len); @@ -302,11 +276,8 @@ pub const AnalysisContext = struct { self.handle = self.store.getHandle(final_uri) orelse return null; self.tree.deinit(); - if (try self.handle.saneTree(allocator)) |tree| { - self.tree = tree; - return &self.tree.root_node.base; - } - return null; + self.tree = try self.handle.tree(allocator); + return &self.tree.root_node.base; } } @@ -318,11 +289,8 @@ pub const AnalysisContext = struct { self.handle = new_handle; self.tree.deinit(); - if (try self.handle.saneTree(allocator)) |tree| { - self.tree = tree; - return &self.tree.root_node.base; - } - return null; + self.tree = try self.handle.tree(allocator); + return &self.tree.root_node.base; } // New document, read the file then call into openDocument. @@ -358,11 +326,8 @@ pub const AnalysisContext = struct { // Free old tree, add new one if it exists. 
// If we return null, no one should access the tree. self.tree.deinit(); - if (try self.handle.saneTree(allocator)) |tree| { - self.tree = tree; - return &self.tree.root_node.base; - } - return null; + self.tree = try self.handle.tree(allocator); + return &self.tree.root_node.base; } pub fn deinit(self: *AnalysisContext) void { @@ -370,14 +335,12 @@ pub const AnalysisContext = struct { } }; -pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !?AnalysisContext { - const tree = (try handle.saneTree(self.allocator)) orelse return null; - +pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !AnalysisContext { return AnalysisContext{ .store = self, .handle = handle, .arena = arena, - .tree = tree, + .tree = try handle.tree(self.allocator), }; } @@ -385,9 +348,6 @@ pub fn deinit(self: *DocumentStore) void { var entry_iterator = self.handles.iterator(); while (entry_iterator.next()) |entry| { self.allocator.free(entry.value.document.mem); - if (entry.value.document.sane_text) |sane| { - self.allocator.free(sane); - } for (entry.value.import_uris.items) |uri| { self.allocator.free(uri); diff --git a/src/main.zig b/src/main.zig index c08048a..e79eae2 100644 --- a/src/main.zig +++ b/src/main.zig @@ -99,7 +99,7 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range { } fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { - const tree = try handle.dirtyTree(allocator); + const tree = try handle.tree(allocator); defer tree.deinit(); // Use an arena for our local memory allocations. @@ -145,7 +145,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { std.mem.eql(u8, tree.tokenSlice(ident.token), "type") else false, - .InferErrorSet => false, + .InferErrorSet, .Invalid => false, }; const func_name = tree.tokenSlice(name_token); @@ -234,7 +234,7 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s } fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { - var tree = (try handle.saneTree(allocator)) orelse return respondGeneric(id, no_completions_response); + var tree = try handle.tree(allocator); defer tree.deinit(); // We use a local arena allocator to deallocate all temporary data without iterating @@ -266,17 +266,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P var arena = std.heap.ArenaAllocator.init(allocator); defer arena.deinit(); - var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse { - return send(types.Response{ - .id = .{ .Integer = id }, - .result = .{ - .CompletionList = .{ - .isIncomplete = false, - .items = &[_]types.CompletionItem{}, - }, - }, - }); - }; + var analysis_ctx = try document_store.analysisContext(handle, &arena); defer analysis_ctx.deinit(); var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); @@ -580,7 +570,7 @@ const debug_alloc: ?*std.testing.LeakCountAllocator = if (build_options.allocati pub fn main() anyerror!void { // TODO: Use a better purpose general allocator once std has one. // Probably after the generic composable allocators PR? - // This is not too bad for now since most allocations happen in local areans. + // This is not too bad for now since most allocations happen in local arenas. 
allocator = std.heap.page_allocator; if (build_options.allocation_info) { @@ -626,7 +616,7 @@ pub fn main() anyerror!void { const bytes_read = conf_file.readAll(file_buf) catch break :config_read; if (bytes_read != conf_file_stat.size) break :config_read; - // TODO: Better errors? Doesnt seem like std.json can provide us positions or context. + // TODO: Better errors? Doesn't seem like std.json can provide us positions or context. config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| { std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err}); break :config_read; @@ -655,8 +645,7 @@ pub fn main() anyerror!void { const c = buffer.items[index]; if (c >= '0' and c <= '9') { content_len = content_len * 10 + (c - '0'); - } - if (c == '\r' and buffer.items[index + 1] == '\n') { + } else if (c == '\r' and buffer.items[index + 1] == '\n') { index += 2; break; } diff --git a/src/types.zig b/src/types.zig index deee0ef..80094ac 100644 --- a/src/types.zig +++ b/src/types.zig @@ -140,7 +140,6 @@ pub const TextDocument = struct { text: String, // This holds the memory that we have actually allocated. mem: []u8, - sane_text: ?String = null, pub fn positionToIndex(self: TextDocument, position: Position) !usize { var split_iterator = std.mem.split(self.text, "\n"); From 675aa13e8b9cf775eef2c57f4b3103549094def8 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 15 May 2020 15:35:28 +0200 Subject: [PATCH 02/14] Mention that zls must be built with zig master --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 6804a97..e396d5c 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ zig build Then, you can use the `zls` executable in an editor of your choice that has a Zig language server client! +*Note:`zls` itself must be built using the master branch of zig currently due to a bug in `std.json` which was [fixed](https://github.com/ziglang/zig/pull/5167) after 0.6.0 was released.* + ### Configuration Options You can configure zls by providing a zls.json file in the same directory as the executable. 
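For illustration, a zls.json using the options documented in this series (`enable_snippets` and `zig_lib_path` from the existing table, plus the `warn_style` flag introduced in PATCH 04) could look like the sketch below; the values, and the library path in particular, are placeholders rather than recommended settings.

```json
{
    "enable_snippets": true,
    "zig_lib_path": "/path/to/zig/lib",
    "warn_style": true
}
```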
From 3c6b3350e2907fa54cda1ead708dda0bdb5b0f24 Mon Sep 17 00:00:00 2001 From: markfirmware Date: Fri, 15 May 2020 12:44:49 -0400 Subject: [PATCH 03/14] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e396d5c..ac8b416 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![Zig Language Server](https://raw.githubusercontent.com/SuperAuguste/zls/master/.assets/zls.svg) +![Zig Language Server](https://raw.githubusercontent.com/zigtools/zls/master/.assets/zls.svg) ![CI](https://github.com/zigtools/zls/workflows/CI/badge.svg) ![Zig Tools](https://img.shields.io/static/v1?label=zigtools&message=for%20all%20of%20ziguanity&color=F7A41D&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAIAAACRXR/mAAAEDklEQVR4nOzYbUwbZRwA8Oe53vXuSltaa2lxc+KKBGcyBhLDgH3SiSMZ++TQRT8uJH4x8S0myL6YmUjUbIkfZvZtX3TJlAwjiYImxuBwa1hEtxAcQ8GFQrtBWXvXu17vTH1ux1lv99IeLcn6//Rw9/D0l+ft/28xsC2jyrISVZaV2KYsfCsGxSDYs5OIhPCAB0tlpFt3hF//yqYyUsVYrQ3Eaz2ew0/Tta7/rENOlCZnuTMTqZHLrJlxoF2ggAf7+FVff2eNfrf+U/HRaMZwNHtmqzGMf/NucNfDxqNFQqY+0QZWYxifGKoL1TrQnzlRGrvKXphio/M8ANLEUKjeL7+aW86e+5EpB4vEwRevBxTTtSX++Gd3rv6ZBQCEfdi3g3VqU8/J1dspsRysd454n3rUidq//MH1Dcc3WEkxNdUTalNsXTYFPNgr3TULcWE0qn0CStryXhoufPqIi8wfusWE0DEYW0sbm9Rvj52Oj1zROAElXacvd7mQCQAwdH4dmdwUNGkCAAwc9GiOXBKrp4VGjcWEcGFKXo6B59wmTQCA7mbSTWmsWEmstsflXfXdTEa8d4e375YfMpx46AM9EwDAgcGWXYSdLAyCkE8+Zdf/5pXnqxs51HCR2Pv9PgxqmJbXckr/HQGHnSx1cNnN9tnvU5msPHXHumvODjy0w194AvqGV5X+bkrDUDxLlPI3J2rXujb3x+9LwoufxNWymY/qC3Ybw22m7cTdnJ0sAMD8ioAaHU+Q6ucTv3FqmXJalRPQHnEqnW/GBJtZk7Mcajy/l/bSUEdWcCqP7pczejItXr+lwSr+lg/7sK5meZIoJ2x5jPhpli+QHTixcvxZd73fcfkGd2Y8hUqu1gbihX0U6vP1NCNqlWFF3vL/v8c7BmMsb/yPXhr+cKJOyVed78VQAi2IYhZRM7eYMflr4MjbQcV0/ue0pqkYln6+o53wwJNkwT5Dl9zR/fTUyXBnk7zuiwnhzXPr9/sUa3vLZA7OZKXxGfbSHJ9kRIqAe3YSB/dS6iIxsZHrG47rFDkW9pb5ukA/ri3xL52+fUPrXlDC7GzZYmI48dTY3eGLG5weyTTLkmluOTs5y3U1k5EQ7vg3I64kc9F5fnwm8/lkGhWJhmHMsmpSvy06DE5iRUwGrEqZ9FgYBF++EayISY91pJ1qu1dnltmkx+ptlev0JCOW2aTH8rvlWvbKPFdmkx5rNSkXjZ1NZGMYL6dJL/kc2kd99VYQtRlOvDTHt0ecys9DW2rKfyO634ubK0J3M9kQzM8TgcPdIZwiYHlMeiwJgNEo+0yjE8mUmF7gD38Y31KTcQWBQdDbSvW20XVex1paHJtmL0ZZzTL3gYht+ktzlWUlqiwrUWVZiX8CAAD//7jyYLmjqPd4AAAAAElFTkSuQmCC) @@ -20,7 +20,7 @@ Zig Language Server, or `zls`, is a language server for Zig. The Zig wiki states Installing `zls` is pretty simple; ```bash -git clone https://github.com/SuperAuguste/zls +git clone https://github.com/zigtools/zls cd zls zig build ``` From 3d5e775bf75e001428bfd4fe13d6d365dc8201c0 Mon Sep 17 00:00:00 2001 From: Noam Preil Date: Fri, 15 May 2020 15:10:53 -0400 Subject: [PATCH 04/14] Make style guideline warnings opt-in --- README.md | 1 + src/config.zig | 4 ++++ src/main.zig | 54 ++++++++++++++++++++++++++------------------------ 3 files changed, 33 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index ac8b416..56762a1 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ The following options are currently available. | --- | --- | --- | --- | | `enable_snippets` | `bool` | `true` | Enables snippet completion, set to false for compatibility with language clients that do not support snippets (such as ale). | | `zig_lib_path` | `?[]const u8` | `null` | zig library path, used to analyze std library imports. 
| +| `warn_style` | `bool` | `false` | Enables warnings for style *guideline* mismatches | ## Usage diff --git a/src/config.zig b/src/config.zig index 1b70383..36c94cd 100644 --- a/src/config.zig +++ b/src/config.zig @@ -5,3 +5,7 @@ enable_snippets: bool = true, /// zig library path zig_lib_path: ?[]const u8 = null, + +/// Whether to pay attention to style issues. This is opt-in since the style +/// guide explicitly states that the style info provided is a guideline only. +warn_style: bool = false, diff --git a/src/main.zig b/src/main.zig index 7e25fc5..6db997a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -137,34 +137,36 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { if (is_extern) break :blk; - if (func.name_token) |name_token| { - const loc = tree.tokenLocation(0, name_token); + if (config.warn_style) { + if (func.name_token) |name_token| { + const loc = tree.tokenLocation(0, name_token); - const is_type_function = switch (func.return_type) { - .Explicit => |node| if (node.cast(std.zig.ast.Node.Identifier)) |ident| - std.mem.eql(u8, tree.tokenSlice(ident.token), "type") - else - false, - .InferErrorSet, .Invalid => false, - }; + const is_type_function = switch (func.return_type) { + .Explicit => |node| if (node.cast(std.zig.ast.Node.Identifier)) |ident| + std.mem.eql(u8, tree.tokenSlice(ident.token), "type") + else + false, + .InferErrorSet, .Invalid => false, + }; - const func_name = tree.tokenSlice(name_token); - if (!is_type_function and !analysis.isCamelCase(func_name)) { - try diagnostics.append(.{ - .range = astLocationToRange(loc), - .severity = .Information, - .code = "BadStyle", - .source = "zls", - .message = "Functions should be camelCase", - }); - } else if (is_type_function and !analysis.isPascalCase(func_name)) { - try diagnostics.append(.{ - .range = astLocationToRange(loc), - .severity = .Information, - .code = "BadStyle", - .source = "zls", - .message = "Type functions should be PascalCase", - }); + const func_name = tree.tokenSlice(name_token); + if (!is_type_function and !analysis.isCamelCase(func_name)) { + try diagnostics.append(.{ + .range = astLocationToRange(loc), + .severity = .Information, + .code = "BadStyle", + .source = "zls", + .message = "Functions should be camelCase", + }); + } else if (is_type_function and !analysis.isPascalCase(func_name)) { + try diagnostics.append(.{ + .range = astLocationToRange(loc), + .severity = .Information, + .code = "BadStyle", + .source = "zls", + .message = "Type functions should be PascalCase", + }); + } } } }, From 5a2043ded5e0bef984a70e8db6b86f72624e51fb Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Sat, 16 May 2020 17:21:42 +0300 Subject: [PATCH 05/14] Fixed the build for the latest std.zig parser fixes --- src/analysis.zig | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index a02af92..0a1797a 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -120,21 +120,22 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: try buffer.appendSlice(": "); } - if (param_decl.var_args_token) |_| { - try buffer.appendSlice("..."); - continue; - } + switch (param_decl.param_type) { + .var_args => try buffer.appendSlice("..."), + .var_type => try buffer.appendSlice("var"), + .type_expr => |type_expr| { + var curr_tok = type_expr.firstToken(); + var end_tok =type_expr.lastToken(); + while (curr_tok <= end_tok) : (curr_tok += 1) { + const id = tree.tokens.at(curr_tok).id; + const 
is_comma = tree.tokens.at(curr_tok).id == .Comma; - var curr_tok = param_decl.type_node.firstToken(); - var end_tok = param_decl.type_node.lastToken(); - while (curr_tok <= end_tok) : (curr_tok += 1) { - const id = tree.tokens.at(curr_tok).id; - const is_comma = tree.tokens.at(curr_tok).id == .Comma; + if (curr_tok == end_tok and is_comma) continue; - if (curr_tok == end_tok and is_comma) continue; - - try buffer.appendSlice(tree.tokenSlice(curr_tok)); - if (is_comma or id == .Keyword_const) try buffer.append(' '); + try buffer.appendSlice(tree.tokenSlice(curr_tok)); + if (is_comma or id == .Keyword_const) try buffer.append(' '); + } + } } try buffer.append('}'); From a7b97552017b1bb1f91edb1ea1e91961da5c953c Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Sat, 16 May 2020 17:24:41 +0300 Subject: [PATCH 06/14] Add a space --- src/analysis.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/analysis.zig b/src/analysis.zig index 0a1797a..4cf5324 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -125,7 +125,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: .var_type => try buffer.appendSlice("var"), .type_expr => |type_expr| { var curr_tok = type_expr.firstToken(); - var end_tok =type_expr.lastToken(); + var end_tok = type_expr.lastToken(); while (curr_tok <= end_tok) : (curr_tok += 1) { const id = tree.tokens.at(curr_tok).id; const is_comma = tree.tokens.at(curr_tok).id == .Comma; From 220d6689290312c910f18df195d0b626273dc1ac Mon Sep 17 00:00:00 2001 From: SuperAuguste Date: Sat, 16 May 2020 14:06:48 -0400 Subject: [PATCH 07/14] local var completion --- src/analysis.zig | 122 +++++++++++++++++++++++++++++++++++++++-- src/document_store.zig | 7 ++- src/main.zig | 24 ++++++-- 3 files changed, 140 insertions(+), 13 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 4cf5324..1a21e72 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -149,9 +149,13 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: pub fn getVariableSignature(tree: *ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 { const start = tree.tokens.at(var_decl.firstToken()).start; const end = tree.tokens.at(var_decl.semicolon_token).start; - // var end = - // if (var_decl.init_n) |body| tree.tokens.at(body.firstToken()).start - // else tree.tokens.at(var_decl.name_token).end; + return tree.source[start..end]; +} + +/// Gets a param signature +pub fn getParamSignature(tree: *ast.Tree, param: *ast.Node.ParamDecl) []const u8 { + const start = tree.tokens.at(param.firstToken()).start; + const end = tree.tokens.at(param.lastToken()).end; return tree.source[start..end]; } @@ -191,13 +195,51 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { return null; } +/// Gets the child of slice +pub fn getChildOfSlice(tree: *ast.Tree, nodes: []*ast.Node, name: []const u8) ?*ast.Node { + // var index: usize = 0; + for (nodes) |child| { + switch (child.id) { + .VarDecl => { + const vari = child.cast(ast.Node.VarDecl).?; + if (std.mem.eql(u8, tree.tokenSlice(vari.name_token), name)) return child; + }, + .ParamDecl => { + const decl = child.cast(ast.Node.ParamDecl).?; + if (decl.name_token != null and std.mem.eql(u8, tree.tokenSlice(decl.name_token.?), name)) return child; + }, + .FnProto => { + const func = child.cast(ast.Node.FnProto).?; + if (func.name_token != null and std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return child; + }, + .ContainerField => { + const field = 
child.cast(ast.Node.ContainerField).?; + if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; + }, + else => {} + } + // index += 1; + } + return null; +} + /// Resolves the type of a node pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node { + std.debug.warn("NODE {}\n", .{node}); switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; return resolveTypeOfNode(analysis_ctx, vari.type_node orelse vari.init_node.?) orelse null; }, + .ParamDecl => { + const decl = node.cast(ast.Node.ParamDecl).?; + switch (decl.param_type) { + .var_type, .type_expr => |var_type| { + return resolveTypeOfNode(analysis_ctx, var_type) orelse null; + }, + else => {} + } + }, .FnProto => { const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { @@ -206,7 +248,9 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. } }, .Identifier => { - if (getChild(analysis_ctx.tree, &analysis_ctx.tree.root_node.base, analysis_ctx.tree.getNodeSource(node))) |child| { + // std.debug.warn("IDENTIFIER {}\n", .{analysis_ctx.tree.getNodeSource(node)}); + if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, analysis_ctx.tree.getNodeSource(node))) |child| { + // std.debug.warn("CHILD {}\n", .{child}); return resolveTypeOfNode(analysis_ctx, child); } else return null; }, @@ -331,7 +375,7 @@ pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zi .Identifier => { // var root = current_node.cast(ast.Node.Root).?; // current_node. - if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { + if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, tokenizer.buffer[next.start..next.end])) |child| { if (resolveTypeOfNode(analysis_ctx, child)) |node_type| { current_node = node_type; } else return null; @@ -406,3 +450,71 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { return null; } + +pub fn declsFromIndexInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node, nodes: *std.ArrayList(*ast.Node)) anyerror!void { + switch (node.id) { + .FnProto => { + const func = node.cast(ast.Node.FnProto).?; + + var param_index: usize = 0; + while (param_index < func.params.len) : (param_index += 1) + try declsFromIndexInternal(allocator, tree, func.params.at(param_index).*, nodes); + + if (func.body_node) |body_node| + try declsFromIndexInternal(allocator, tree, body_node, nodes); + }, + .Block => { + var index: usize = 0; + + while (node.iterate(index)) |inode| { + try declsFromIndexInternal(allocator, tree, inode, nodes); + index += 1; + } + }, + .VarDecl => { + try nodes.append(node); + }, + .ParamDecl => { + try nodes.append(node); + }, + else => { + try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node)); + } + } +} + +pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) ![]*ast.Node { + var nodes = std.ArrayList(*ast.Node).init(allocator); + + var index: usize = 0; + while (node.iterate(index)) |child_node| { + try nodes.append(child_node); + + index += 1; + } + + return nodes.items; +} + +pub fn declsFromIndex(allocator: *std.mem.Allocator, tree: *ast.Tree, index: usize) ![]*ast.Node { + var iindex: usize = 0; + + var node = &tree.root_node.base; + var nodes = std.ArrayList(*ast.Node).init(allocator); + + try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node)); + + while (node.iterate(iindex)) |inode| { + if 
(tree.tokens.at(inode.firstToken()).start < index and index < tree.tokens.at(inode.lastToken()).start) { + try declsFromIndexInternal(allocator, tree, inode, &nodes); + } + + iindex += 1; + } + + if (tree.tokens.at(node.firstToken()).start < index and index < tree.tokens.at(node.lastToken()).start) { + return nodes.items; + } + + return nodes.items; +} diff --git a/src/document_store.zig b/src/document_store.zig index 99178f1..80f0dfc 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -260,6 +260,7 @@ pub const AnalysisContext = struct { // not for the tree allocations. arena: *std.heap.ArenaAllocator, tree: *std.zig.ast.Tree, + scope_nodes: []*std.zig.ast.Node, pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; @@ -335,12 +336,14 @@ pub const AnalysisContext = struct { } }; -pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !AnalysisContext { +pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator, position: types.Position) !AnalysisContext { + const tree = try handle.tree(self.allocator); return AnalysisContext{ .store = self, .handle = handle, .arena = arena, - .tree = try handle.tree(self.allocator), + .tree = tree, + .scope_nodes = try analysis.declsFromIndex(&arena.allocator, tree, try handle.document.positionToIndex(position)) }; } diff --git a/src/main.zig b/src/main.zig index 6db997a..46f1c7d 100644 --- a/src/main.zig +++ b/src/main.zig @@ -223,6 +223,16 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s .detail = analysis.getVariableSignature(tree, var_decl), }; }, + .ParamDecl => { + const param = decl.cast(std.zig.ast.Node.ParamDecl).?; + if (param.name_token) |name_token| + return types.CompletionItem{ + .label = tree.tokenSlice(name_token), + .kind = .Variable, + .documentation = doc, + .detail = analysis.getParamSignature(tree, param), + }; + }, else => if (analysis.nodeToString(tree, decl)) |string| { return types.CompletionItem{ .label = string, @@ -235,7 +245,7 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s return null; } -fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { +fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, config: Config) !void { var tree = try handle.tree(allocator); defer tree.deinit(); @@ -245,10 +255,11 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { // Deallocate all temporary data. 
defer arena.deinit(); - var decls = tree.root_node.decls.iterator(0); - while (decls.next()) |decl_ptr| { + // var decls = tree.root_node.decls.iterator(0); + var decls = try analysis.declsFromIndex(&arena.allocator, tree, pos_index); + for (decls) |decl_ptr| { var decl = decl_ptr.*; - if (try nodeToCompletion(&arena.allocator, tree, decl, config)) |completion| { + if (try nodeToCompletion(&arena.allocator, tree, decl_ptr, config)) |completion| { try completions.append(completion); } } @@ -268,7 +279,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P var arena = std.heap.ArenaAllocator.init(allocator); defer arena.deinit(); - var analysis_ctx = try document_store.analysisContext(handle, &arena); + var analysis_ctx = try document_store.analysisContext(handle, &arena, position); defer analysis_ctx.deinit(); var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); @@ -276,6 +287,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P var line = try handle.document.getLine(@intCast(usize, position.line)); var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]); + // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position)); if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { @@ -537,7 +549,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v }, }, }), - .var_access, .empty => try completeGlobal(id, handle.*, config), + .var_access, .empty => try completeGlobal(id, pos_index, handle.*, config), .field_access => |start_idx| try completeFieldAccess(id, handle, pos, start_idx, config), else => try respondGeneric(id, no_completions_response), } From 15a277918f24d28c9d500fbf90b8fb35920fcbc0 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Sun, 17 May 2020 01:30:59 +0300 Subject: [PATCH 08/14] Correctly resolve type of field access --- src/analysis.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/analysis.zig b/src/analysis.zig index 4cf5324..5b2f7e3 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -239,7 +239,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. // Use the analysis context temporary arena to store the rhs string. rhs_str = std.mem.dupe(&analysis_ctx.arena.allocator, u8, rhs_str) catch return null; const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null; - return getChild(analysis_ctx.tree, left, rhs_str); + return resolveTypeOfNode(analysis_ctx, getChild(analysis_ctx.tree, left, rhs_str) orelse return null); }, else => {} } From 843db497901c3d68a11df467840f1fcbd2dc67f3 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Sun, 17 May 2020 14:40:32 +0300 Subject: [PATCH 09/14] Better debug allocator --- src/debug_allocator.zig | 124 ++++++++++++++++++++++++++++++++++++++++ src/main.zig | 9 +-- 2 files changed, 129 insertions(+), 4 deletions(-) create mode 100644 src/debug_allocator.zig diff --git a/src/debug_allocator.zig b/src/debug_allocator.zig new file mode 100644 index 0000000..29f2591 --- /dev/null +++ b/src/debug_allocator.zig @@ -0,0 +1,124 @@ +//! 
This allocator collects information about allocation sizes + +const std = @import("std"); + +const DebugAllocator = @This(); + +fn toMB(value: var) f64 { + return switch (@TypeOf(value)) { + f64 => value / (1024 * 1024), + else => @intToFloat(f64, value) / (1024 * 1024), + }; +} + +const Stats = struct { + mean: f64 = 0, + mean_of_squares: f64 = 0, + total: usize = 0, + count: usize = 0, + + fn addSample(self: *Stats, value: usize) void { + const count_f64 = @intToFloat(f64, self.count); + self.mean = (self.mean * count_f64 + @intToFloat(f64, value)) / (count_f64 + 1); + self.mean_of_squares = (self.mean_of_squares * count_f64 + @intToFloat(f64, value * value)) / (count_f64 + 1); + self.total += value; + self.count += 1; + } + + fn stdDev(self: Stats) f64 { + return std.math.sqrt(self.mean_of_squares - self.mean * self.mean); + } +}; + +pub const AllocationInfo = struct { + allocation_stats: Stats = Stats{}, + deallocation_count: usize = 0, + deallocation_total: usize = 0, + + reallocation_stats: Stats = Stats{}, + shrink_stats: Stats = Stats{}, + + pub fn format( + self: AllocationInfo, + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + out_stream: var, + ) !void { + @setEvalBranchQuota(2000); + + // TODO: Make these behave like {Bi}, which doesnt work on floating point numbers. + return std.fmt.format( + out_stream, + \\------------------------------------------ Allocation info ------------------------------------------ + \\{} total allocations (total: {d:.2} MB, mean: {d:.2} MB, std. dev: {d:.2} MB), {} deallocations + \\{} current allocations ({d:.2} MB) + \\{} reallocations (total: {d:.2} MB, mean: {d:.2} MB, std. dev: {d:.2} MB) + \\{} shrinks (total: {d:.2} MB, mean: {d:.2} MB, std. dev: {d:.2} MB) + \\----------------------------------------------------------------------------------------------------- + , + .{ + self.allocation_stats.count, + toMB(self.allocation_stats.total), + toMB(self.allocation_stats.mean), + toMB(self.allocation_stats.stdDev()), + self.deallocation_count, + self.allocation_stats.count - self.deallocation_count, + toMB(self.allocation_stats.total + self.reallocation_stats.total - self.deallocation_total - self.shrink_stats.total), + self.reallocation_stats.count, + toMB(self.reallocation_stats.total), + toMB(self.reallocation_stats.mean), + toMB(self.reallocation_stats.stdDev()), + self.shrink_stats.count, + toMB(self.shrink_stats.total), + toMB(self.shrink_stats.mean), + toMB(self.shrink_stats.stdDev()), + }, + ); + } +}; + +base_allocator: *std.mem.Allocator, +info: AllocationInfo, + +// Interface implementation +allocator: std.mem.Allocator, + +pub fn init(base_allocator: *std.mem.Allocator) DebugAllocator { + return .{ + .base_allocator = base_allocator, + .info = .{}, + .allocator = .{ + .reallocFn = realloc, + .shrinkFn = shrink, + }, + }; +} + +fn realloc(allocator: *std.mem.Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) ![]u8 { + const self = @fieldParentPtr(DebugAllocator, "allocator", allocator); + var data = try self.base_allocator.reallocFn(self.base_allocator, old_mem, old_align, new_size, new_align); + if (old_mem.len == 0) { + self.info.allocation_stats.addSample(new_size); + } else if (new_size > old_mem.len) { + self.info.reallocation_stats.addSample(new_size - old_mem.len); + } else if (new_size < old_mem.len) { + self.info.shrink_stats.addSample(old_mem.len - new_size); + } + return data; +} + +fn shrink(allocator: *std.mem.Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) 
[]u8 { + const self = @fieldParentPtr(DebugAllocator, "allocator", allocator); + if (new_size == 0) { + if (self.info.allocation_stats.count == self.info.deallocation_count) { + @panic("error - too many calls to free, most likely double free"); + } + self.info.deallocation_total += old_mem.len; + self.info.deallocation_count += 1; + } else if (new_size < old_mem.len) { + self.info.shrink_stats.addSample(old_mem.len - new_size); + } else if (new_size > old_mem.len) { + @panic("error - trying to shrink to a bigger size"); + } + return self.base_allocator.shrinkFn(self.base_allocator, old_mem, old_align, new_size, new_align); +} diff --git a/src/main.zig b/src/main.zig index 6db997a..ead1763 100644 --- a/src/main.zig +++ b/src/main.zig @@ -3,6 +3,7 @@ const build_options = @import("build_options"); const Config = @import("config.zig"); const DocumentStore = @import("document_store.zig"); +const DebugAllocator = @import("debug_allocator.zig"); const data = @import("data/" ++ build_options.data_version ++ ".zig"); const types = @import("types.zig"); const analysis = @import("analysis.zig"); @@ -565,9 +566,9 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v } } -var debug_alloc_state: std.testing.LeakCountAllocator = undefined; +var debug_alloc_state: DebugAllocator = undefined; // We can now use if(leak_count_alloc) |alloc| { ... } as a comptime check. -const debug_alloc: ?*std.testing.LeakCountAllocator = if (build_options.allocation_info) &debug_alloc_state else null; +const debug_alloc: ?*DebugAllocator = if (build_options.allocation_info) &debug_alloc_state else null; pub fn main() anyerror!void { // TODO: Use a better purpose general allocator once std has one. @@ -578,7 +579,7 @@ pub fn main() anyerror!void { if (build_options.allocation_info) { // TODO: Use a better debugging allocator, track size in bytes, memory reserved etc.. // Initialize the leak counting allocator. - debug_alloc_state = std.testing.LeakCountAllocator.init(allocator); + debug_alloc_state = DebugAllocator.init(allocator); allocator = &debug_alloc_state.allocator; } @@ -708,7 +709,7 @@ pub fn main() anyerror!void { offset += bytes_read; if (debug_alloc) |dbg| { - try log("Allocations alive: {}", .{dbg.count}); + try log("{}", .{dbg.info}); } } } From 38213e1322f389198c24dead49f388c4b13670b1 Mon Sep 17 00:00:00 2001 From: SuperAuguste Date: Sun, 17 May 2020 09:40:20 -0400 Subject: [PATCH 10/14] fixed struct init --- src/analysis.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/analysis.zig b/src/analysis.zig index bb064b2..e6aaadb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -267,7 +267,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. 
.SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { - .Call => { + .Call, .StructInitializer => { return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node); }, else => {} From 6e8165804f7661bf84f091dc423b7af2ab2c6690 Mon Sep 17 00:00:00 2001 From: Vexu Date: Sun, 17 May 2020 17:23:04 +0300 Subject: [PATCH 11/14] add completion for arrays and slices --- src/analysis.zig | 77 ++++++++++++++++++++++----------------- src/main.zig | 93 +++++++++++++++++++++++++++--------------------- 2 files changed, 97 insertions(+), 73 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index e6aaadb..91a2f00 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -12,7 +12,7 @@ pub fn getFunctionByName(tree: *ast.Tree, name: []const u8) ?*ast.Node.FnProto { const func = decl.cast(ast.Node.FnProto).?; if (std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return func; }, - else => {} + else => {}, } } @@ -55,9 +55,9 @@ pub fn getDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast const param = node.cast(ast.Node.ParamDecl).?; if (param.doc_comments) |doc_comments| { return try collectDocComments(allocator, tree, doc_comments); - } + } }, - else => {} + else => {}, } return null; } @@ -102,8 +102,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: const param = param_ptr.*; const param_decl = param.cast(ast.Node.ParamDecl).?; - if (param_num != 1) try buffer.appendSlice(", ${") - else try buffer.appendSlice("${"); + if (param_num != 1) try buffer.appendSlice(", ${") else try buffer.appendSlice("${"); try buf_stream.print("{}:", .{param_num}); @@ -135,7 +134,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: try buffer.appendSlice(tree.tokenSlice(curr_tok)); if (is_comma or id == .Keyword_const) try buffer.append(' '); } - } + }, } try buffer.append('}'); @@ -159,6 +158,16 @@ pub fn getParamSignature(tree: *ast.Tree, param: *ast.Node.ParamDecl) []const u8 return tree.source[start..end]; } +pub fn isTypeFunction(tree: *ast.Tree, func: *ast.Node.FnProto) bool { + switch (func.return_type) { + .Explicit => |node| return if (node.cast(std.zig.ast.Node.Identifier)) |ident| + std.mem.eql(u8, tree.tokenSlice(ident.token), "type") + else + false, + .InferErrorSet, .Invalid => return false, + } +} + // STYLE pub fn isCamelCase(name: []const u8) bool { @@ -188,7 +197,7 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { const field = child.cast(ast.Node.ContainerField).?; if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; }, - else => {} + else => {}, } index += 1; } @@ -216,7 +225,7 @@ pub fn getChildOfSlice(tree: *ast.Tree, nodes: []*ast.Node, name: []const u8) ?* const field = child.cast(ast.Node.ContainerField).?; if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; }, - else => {} + else => {}, } // index += 1; } @@ -237,7 +246,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. .var_type, .type_expr => |var_type| { return resolveTypeOfNode(analysis_ctx, var_type) orelse null; }, - else => {} + else => {}, } }, .FnProto => { @@ -270,7 +279,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. .Call, .StructInitializer => { return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node); }, - else => {} + else => {}, } }, .InfixOp => { @@ -285,16 +294,22 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. 
const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null; return resolveTypeOfNode(analysis_ctx, getChild(analysis_ctx.tree, left, rhs_str) orelse return null); }, - else => {} + else => {}, } }, .PrefixOp => { const prefix_op = node.cast(ast.Node.PrefixOp).?; switch (prefix_op.op) { + .SliceType, .ArrayType => return node, .PtrType => { - return resolveTypeOfNode(analysis_ctx, prefix_op.rhs); + const op_token = analysis_ctx.tree.tokens.at(prefix_op.op_token); + switch (op_token.id) { + .Asterisk => return resolveTypeOfNode(analysis_ctx, prefix_op.rhs), + .LBracket, .AsteriskAsterisk => return null, + else => unreachable, + } }, - else => {} + else => {}, } }, .BuiltinCall => { @@ -307,13 +322,16 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: { - std.debug.warn("Error {} while processing import {}\n", .{err, import_str}); + std.debug.warn("Error {} while processing import {}\n", .{ err, import_str }); break :block null; }; }, + .MultilineStringLiteral, .StringLiteral => { + return node; + }, else => { std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); - } + }, } return null; } @@ -340,16 +358,16 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const if (decl.id != .VarDecl) continue; const var_decl = decl.cast(ast.Node.VarDecl).?; if (var_decl.init_node == null) continue; - - switch(var_decl.init_node.?.id) { + + switch (var_decl.init_node.?.id) { .BuiltinCall => { const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?; try maybeCollectImport(tree, builtin_call, &arr); }, .InfixOp => { const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?; - - switch(infix_op.op) { + + switch (infix_op.op) { .Period => {}, else => continue, } @@ -395,7 +413,7 @@ pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zi }, else => { std.debug.warn("Not implemented; {}\n", .{next.id}); - } + }, } } @@ -412,15 +430,8 @@ pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool { const func = node.cast(ast.Node.FnProto).?; return func.visib_token != null; }, - .ContainerField, .ErrorTag => { - return true; - }, - else => { - return false; - } + else => return true, } - - return false; } pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { @@ -445,9 +456,9 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { }, else => { std.debug.warn("INVALID: {}\n", .{node.id}); - } + }, } - + return null; } @@ -461,7 +472,7 @@ pub fn declsFromIndexInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no try declsFromIndexInternal(allocator, tree, func.params.at(param_index).*, nodes); if (func.body_node) |body_node| - try declsFromIndexInternal(allocator, tree, body_node, nodes); + try declsFromIndexInternal(allocator, tree, body_node, nodes); }, .Block => { var index: usize = 0; @@ -479,7 +490,7 @@ pub fn declsFromIndexInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, no }, else => { try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node)); - } + }, } } @@ -489,7 +500,7 @@ pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *ast.Tree, no var index: usize = 0; while (node.iterate(index)) |child_node| { try nodes.append(child_node); - + index += 1; } diff --git a/src/main.zig b/src/main.zig index 380f7e5..739b06a 
100644 --- a/src/main.zig +++ b/src/main.zig @@ -142,13 +142,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { if (func.name_token) |name_token| { const loc = tree.tokenLocation(0, name_token); - const is_type_function = switch (func.return_type) { - .Explicit => |node| if (node.cast(std.zig.ast.Node.Identifier)) |ident| - std.mem.eql(u8, tree.tokenSlice(ident.token), "type") - else - false, - .InferErrorSet, .Invalid => false, - }; + const is_type_function = analysis.isTypeFunction(tree, func); const func_name = tree.tokenSlice(name_token); if (!is_type_function and !analysis.isCamelCase(func_name)) { @@ -187,8 +181,17 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { }); } -fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *std.zig.ast.Node, config: Config) !?types.CompletionItem { - var doc = if (try analysis.getDocComments(alloc, tree, decl)) |doc_comments| +fn containerToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, container: *std.zig.ast.Node, config: Config) !void { + var index: usize = 0; + while (container.iterate(index)) |child_node| : (index+=1) { + if (analysis.isNodePublic(tree, child_node)) { + try nodeToCompletion(list, tree, child_node, config); + } + } +} + +fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, config: Config) error{OutOfMemory}!void { + var doc = if (try analysis.getDocComments(list.allocator, tree, node)) |doc_comments| types.MarkupContent{ .kind = .Markdown, .value = doc_comments, @@ -196,54 +199,74 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s else null; - switch (decl.id) { + switch (node.id) { + .ErrorSetDecl, .Root, .ContainerDecl => { + try containerToCompletion(list, tree, node, config); + }, .FnProto => { - const func = decl.cast(std.zig.ast.Node.FnProto).?; + const func = node.cast(std.zig.ast.Node.FnProto).?; if (func.name_token) |name_token| { const insert_text = if (config.enable_snippets) - try analysis.getFunctionSnippet(alloc, tree, func) + try analysis.getFunctionSnippet(list.allocator, tree, func) else null; - return types.CompletionItem{ + const is_type_function = analysis.isTypeFunction(tree, func); + + try list.append(.{ .label = tree.tokenSlice(name_token), - .kind = .Function, + .kind = if (is_type_function) .Struct else .Function, .documentation = doc, .detail = analysis.getFunctionSignature(tree, func), .insertText = insert_text, .insertTextFormat = if (config.enable_snippets) .Snippet else .PlainText, - }; + }); } }, .VarDecl => { - const var_decl = decl.cast(std.zig.ast.Node.VarDecl).?; - return types.CompletionItem{ + const var_decl = node.cast(std.zig.ast.Node.VarDecl).?; + const is_const = tree.tokens.at(var_decl.mut_token).id == .Keyword_const; + try list.append(.{ .label = tree.tokenSlice(var_decl.name_token), - .kind = .Variable, + .kind = if (is_const) .Constant else .Variable, .documentation = doc, .detail = analysis.getVariableSignature(tree, var_decl), - }; + }); }, .ParamDecl => { - const param = decl.cast(std.zig.ast.Node.ParamDecl).?; + const param = node.cast(std.zig.ast.Node.ParamDecl).?; if (param.name_token) |name_token| - return types.CompletionItem{ + try list.append(.{ .label = tree.tokenSlice(name_token), - .kind = .Variable, + .kind = .Constant, .documentation = doc, .detail = analysis.getParamSignature(tree, param), - }; + }); }, - else => if (analysis.nodeToString(tree, decl)) |string| { 
- return types.CompletionItem{ + .PrefixOp => { + try list.append(.{ + .label = "len", + .kind = .Field, + }); + try list.append(.{ + .label = "ptr", + .kind = .Field, + }); + }, + .StringLiteral => { + try list.append(.{ + .label = "len", + .kind = .Field, + }); + }, + else => if (analysis.nodeToString(tree, node)) |string| { + try list.append(.{ .label = string, .kind = .Field, .documentation = doc, - }; + }); }, } - - return null; } fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, config: Config) !void { @@ -260,9 +283,7 @@ fn completeGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle, confi var decls = try analysis.declsFromIndex(&arena.allocator, tree, pos_index); for (decls) |decl_ptr| { var decl = decl_ptr.*; - if (try nodeToCompletion(&arena.allocator, tree, decl_ptr, config)) |completion| { - try completions.append(completion); - } + try nodeToCompletion(&completions, tree, decl_ptr, config); } try send(types.Response{ @@ -290,15 +311,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position)); if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| { - var index: usize = 0; - while (node.iterate(index)) |child_node| { - if (analysis.isNodePublic(analysis_ctx.tree, child_node)) { - if (try nodeToCompletion(&arena.allocator, analysis_ctx.tree, child_node, config)) |completion| { - try completions.append(completion); - } - } - index += 1; - } + try nodeToCompletion(&completions, analysis_ctx.tree, node, config); } try send(types.Response{ From c1599c755d661bbefb8501954af4cb2f657b47b5 Mon Sep 17 00:00:00 2001 From: daurnimator Date: Mon, 18 May 2020 00:39:04 +1000 Subject: [PATCH 12/14] Misc cleanups --- src/document_store.zig | 19 +++++++------------ src/main.zig | 29 +++++++++++++---------------- src/types.zig | 10 +++++----- src/uri.zig | 9 ++++----- 4 files changed, 29 insertions(+), 38 deletions(-) diff --git a/src/document_store.zig b/src/document_store.zig index 80f0dfc..7d52f57 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -52,11 +52,6 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ? 
fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { std.debug.warn("Opened document: {}\n", .{uri}); - errdefer { - self.allocator.free(uri); - self.allocator.free(text); - } - var handle = Handle{ .count = 1, .import_uris = std.ArrayList([]const u8).init(self.allocator), @@ -67,8 +62,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { }, }; try self.checkSanity(&handle); - try self.handles.putNoClobber(uri, handle); - return &(self.handles.get(uri) orelse unreachable).value; + const kv = try self.handles.getOrPutValue(uri, handle); + return &kv.value; } pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle { @@ -84,7 +79,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H const duped_uri = try std.mem.dupe(self.allocator, u8, uri); errdefer self.allocator.free(duped_uri); - return self.newDocument(duped_uri, duped_text); + return try self.newDocument(duped_uri, duped_text); } fn decrementCount(self: *DocumentStore, uri: []const u8) void { @@ -145,7 +140,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { } for (import_strs) |str| { - const uri = (try uriFromImportStr(self, handle, str)) orelse continue; + const uri = (try uriFromImportStr(self, handle.*, str)) orelse continue; defer self.allocator.free(uri); var idx: usize = 0; @@ -175,7 +170,7 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { } pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void { - var document = &handle.document; + const document = &handle.document; for (content_changes.items) |change| { if (change.Object.getValue("range")) |range| { @@ -230,7 +225,7 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. try self.checkSanity(handle); } -fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 { +fn uriFromImportStr(store: *DocumentStore, handle: Handle, import_str: []const u8) !?[]const u8 { return if (std.mem.eql(u8, import_str, "std")) if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri) else { @@ -264,7 +259,7 @@ pub const AnalysisContext = struct { pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; - const final_uri = (try uriFromImportStr(self.store, self.handle, import_str)) orelse return null; + const final_uri = (try uriFromImportStr(self.store, self.handle.*, import_str)) orelse return null; std.debug.warn("Import final URI: {}\n", .{final_uri}); var consumed_final_uri = false; diff --git a/src/main.zig b/src/main.zig index 380f7e5..9f61426 100644 --- a/src/main.zig +++ b/src/main.zig @@ -285,7 +285,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); - var line = try handle.document.getLine(@intCast(usize, position.line)); + const line = try handle.document.getLine(@intCast(usize, position.line)); var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]); // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position)); @@ -607,9 +607,10 @@ pub fn main() anyerror!void { const stdin = std.io.getStdIn().inStream(); stdout = std.io.getStdOut().outStream(); - // Read he configuration, if any. - var config = Config{}; + // Read the configuration, if any. 
const config_parse_options = std.json.ParseOptions{ .allocator = allocator }; + var config = Config{}; + defer std.json.parseFree(Config, config, config_parse_options); // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it. config_read: { @@ -619,30 +620,26 @@ pub fn main() anyerror!void { var exec_dir = std.fs.cwd().openDir(exec_dir_path, .{}) catch break :config_read; defer exec_dir.close(); - var conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read; + const conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read; defer conf_file.close(); - const conf_file_stat = conf_file.stat() catch break :config_read; - - // Allocate enough memory for the whole file. - var file_buf = try allocator.alloc(u8, conf_file_stat.size); + // Max 1MB + const file_buf = conf_file.inStream().readAllAlloc(allocator, 0x1000000) catch break :config_read; defer allocator.free(file_buf); - const bytes_read = conf_file.readAll(file_buf) catch break :config_read; - if (bytes_read != conf_file_stat.size) break :config_read; - // TODO: Better errors? Doesn't seem like std.json can provide us positions or context. config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| { std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err}); break :config_read; }; } - defer std.json.parseFree(Config, config, config_parse_options); - if (config.zig_lib_path != null and !std.fs.path.isAbsolute(config.zig_lib_path.?)) { - std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{}); - allocator.free(config.zig_lib_path.?); - config.zig_lib_path = null; + if (config.zig_lib_path) |zig_lib_path| { + if (!std.fs.path.isAbsolute(zig_lib_path)) { + std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{}); + allocator.free(zig_lib_path); + config.zig_lib_path = null; + } } try document_store.init(allocator, config.zig_lib_path); diff --git a/src/types.zig b/src/types.zig index 80094ac..97c1f2f 100644 --- a/src/types.zig +++ b/src/types.zig @@ -185,11 +185,11 @@ pub const MarkupKind = enum(u1) { options: json.StringifyOptions, out_stream: var, ) !void { - if (@enumToInt(value) == 0) { - try json.stringify("plaintext", options, out_stream); - } else { - try json.stringify("markdown", options, out_stream); - } + const str = switch (value) { + .PlainText => "plaintext", + .Markdown => "markdown", + }; + try json.stringify(str, options, out_stream); } }; diff --git a/src/uri.zig b/src/uri.zig index a331cb3..ad9d4fb 100644 --- a/src/uri.zig +++ b/src/uri.zig @@ -14,7 +14,7 @@ pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 { var buf = std.ArrayList(u8).init(allocator); try buf.appendSlice(prefix); - var out_stream = buf.outStream(); + const out_stream = buf.outStream(); for (path) |char| { if (char == std.fs.path.sep) { @@ -55,17 +55,16 @@ fn parseHex(c: u8) !u8 { pub fn parse(allocator: *std.mem.Allocator, str: []const u8) ![]u8 { if (str.len < 7 or !std.mem.eql(u8, "file://", str[0..7])) return error.UriBadScheme; - var uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7)); + const uri = try allocator.alloc(u8, str.len - (if (std.fs.path.sep == '\\') 8 else 7)); errdefer allocator.free(uri); const path = if (std.fs.path.sep == '\\') str[8..] 
else str[7..]; var i: usize = 0; var j: usize = 0; - var e: usize = path.len; - while (j < e) : (i += 1) { + while (j < path.len) : (i += 1) { if (path[j] == '%') { - if (j + 2 >= e) return error.UriBadEscape; + if (j + 2 >= path.len) return error.UriBadEscape; const upper = try parseHex(path[j + 1]); const lower = try parseHex(path[j + 2]); uri[i] = (upper << 4) + lower; From 0e67ffd8c48569ad1006fe537d10c60bfcfae447 Mon Sep 17 00:00:00 2001 From: daurnimator Date: Mon, 18 May 2020 00:50:13 +1000 Subject: [PATCH 13/14] Rewrite request reading --- src/header.zig | 44 +++++++++++++++++++++++++++++ src/main.zig | 77 ++++++++------------------------------------------ 2 files changed, 55 insertions(+), 66 deletions(-) create mode 100644 src/header.zig diff --git a/src/header.zig b/src/header.zig new file mode 100644 index 0000000..3483e24 --- /dev/null +++ b/src/header.zig @@ -0,0 +1,44 @@ +const std = @import("std"); +const mem = std.mem; + +const RequestHeader = struct { + content_length: usize, + + /// null implies "application/vscode-jsonrpc; charset=utf-8" + content_type: ?[]const u8, + + pub fn deinit(self: @This(), allocator: *mem.Allocator) void { + if (self.content_type) |ct| allocator.free(ct); + } +}; + +pub fn readRequestHeader(allocator: *mem.Allocator, instream: var) !RequestHeader { + var r = RequestHeader{ + .content_length = undefined, + .content_type = null, + }; + errdefer r.deinit(allocator); + + var has_content_length = false; + while (true) { + const header = try instream.readUntilDelimiterAlloc(allocator, '\n', 0x100); + defer allocator.free(header); + if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn; + if (header.len == 1) break; + + const header_name = header[0..mem.indexOf(u8, header, ": ") orelse return error.MissingColon]; + const header_value = header[header_name.len + 2..header.len-1]; + if (mem.eql(u8, header_name, "Content-Length")) { + if (header_value.len == 0) return error.MissingHeaderValue; + r.content_length = std.fmt.parseInt(usize, header_value, 10) catch return error.InvalidContentLength; + has_content_length = true; + } else if (mem.eql(u8, header_name, "Content-Type")) { + r.content_type = try mem.dupe(allocator, u8, header_value); + } else { + return error.UnknownHeader; + } + } + if (!has_content_length) return error.MissingContentLength; + + return r; +} diff --git a/src/main.zig b/src/main.zig index 9f61426..f8470b1 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4,6 +4,7 @@ const build_options = @import("build_options"); const Config = @import("config.zig"); const DocumentStore = @import("document_store.zig"); const DebugAllocator = @import("debug_allocator.zig"); +const readRequestHeader = @import("header.zig").readRequestHeader; const data = @import("data/" ++ build_options.data_version ++ ".zig"); const types = @import("types.zig"); const analysis = @import("analysis.zig"); @@ -649,73 +650,17 @@ pub fn main() anyerror!void { var json_parser = std.json.Parser.init(allocator, false); defer json_parser.deinit(); - var offset: usize = 0; - var bytes_read: usize = 0; - - var index: usize = 0; - var content_len: usize = 0; - - stdin_poll: while (true) { - if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) { - index = 16; - while (index <= offset + 10) : (index += 1) { - const c = buffer.items[index]; - if (c >= '0' and c <= '9') { - content_len = content_len * 10 + (c - '0'); - } else if (c == '\r' and buffer.items[index + 1] == '\n') { - index += 2; - break; - } - } - - if 
(buffer.items[index] == '\r') { - index += 2; - if (buffer.items.len < index + content_len) { - try buffer.resize(index + content_len); - } - - body_poll: while (offset < content_len + index) { - bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]); - if (bytes_read == 0) { - try log("0 bytes read; exiting!", .{}); - return; - } - - offset += bytes_read; - } - - try processJsonRpc(&json_parser, buffer.items[index .. index + content_len], config); - json_parser.reset(); - - offset = 0; - content_len = 0; - } else { - try log("\\r not found", .{}); - } - } else if (offset >= 16) { - try log("Offset is greater than 16!", .{}); + while (true) { + const headers = readRequestHeader(allocator, stdin) catch |err| { + try log("{}; exiting!", .{@errorName(err)}); return; - } - - if (offset < 16) { - bytes_read = try stdin.readAll(buffer.items[offset..25]); - } else { - if (offset == buffer.items.len) { - try buffer.resize(buffer.items.len * 2); - } - if (index + content_len > buffer.items.len) { - bytes_read = try stdin.readAll(buffer.items[offset..buffer.items.len]); - } else { - bytes_read = try stdin.readAll(buffer.items[offset .. index + content_len]); - } - } - - if (bytes_read == 0) { - try log("0 bytes read; exiting!", .{}); - return; - } - - offset += bytes_read; + }; + defer headers.deinit(allocator); + const buf = try allocator.alloc(u8, headers.content_length); + defer allocator.free(buf); + try stdin.readNoEof(buf); + try processJsonRpc(&json_parser, buf, config); + json_parser.reset(); if (debug_alloc) |dbg| { try log("{}", .{dbg.info}); From 14749ce02b96951a1c830810deb24148aa944852 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Sun, 17 May 2020 18:21:02 +0300 Subject: [PATCH 14/14] Added missing errdefer --- src/document_store.zig | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/document_store.zig b/src/document_store.zig index 7d52f57..478fd08 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -316,7 +316,9 @@ pub const AnalysisContext = struct { // Swap handles and get new tree. // This takes ownership of the passed uri and text. - self.handle = try newDocument(self.store, try std.mem.dupe(allocator, u8, final_uri), file_contents); + const duped_final_uri = try std.mem.dupe(allocator, u8, final_uri); + errdefer allocator.free(duped_final_uri); + self.handle = try newDocument(self.store, duped_final_uri, file_contents); } // Free old tree, add new one if it exists.
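
For reference, below is a minimal sketch of driving readRequestHeader from header.zig over a fixed buffer, mirroring the loop that now replaces the hand-rolled parser in main.zig. The fixed-buffer stream, the sample message, and the test wrapper are illustrative assumptions (std.io naming shifted between Zig releases of this era); only readRequestHeader itself is defined by the patch.

const std = @import("std");
const readRequestHeader = @import("header.zig").readRequestHeader;

test "readRequestHeader reads one LSP-style message" {
    // Illustrative message: headers end at the empty "\r\n" line, and
    // Content-Length counts the bytes of the JSON body that follows.
    const message: []const u8 = "Content-Length: 17\r\n\r\n{\"jsonrpc\":\"2.0\"}";
    var fbs = std.io.fixedBufferStream(message);
    const in_stream = fbs.inStream();
    const allocator = std.heap.page_allocator;

    const header = try readRequestHeader(allocator, in_stream);
    defer header.deinit(allocator);
    std.debug.assert(header.content_length == 17);
    std.debug.assert(header.content_type == null);

    // The main loop then reads exactly content_length bytes and hands
    // them to processJsonRpc.
    const body = try allocator.alloc(u8, header.content_length);
    defer allocator.free(body);
    try in_stream.readNoEof(body);
    std.debug.assert(std.mem.eql(u8, body, "{\"jsonrpc\":\"2.0\"}"));
}

Compared with the removed buffer-scanning code, malformed input (a missing carriage return, an unknown header, or a bad Content-Length) now surfaces as a distinct error from readRequestHeader, and the server logs the error name and exits rather than trying to resynchronize the buffer.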