From be95849a158a8f56a37ead2f0f5f5ddade149656 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Wed, 13 May 2020 22:35:14 +0300 Subject: [PATCH 01/14] Added pointer type resolution --- src/analysis.zig | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/analysis.zig b/src/analysis.zig index 19a818f..fdff10f 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -223,6 +223,15 @@ pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std else => {} } }, + .PrefixOp => { + const prefix_op = node.cast(std.zig.ast.Node.PrefixOp).?; + switch (prefix_op.op) { + .PtrType => { + return resolveTypeOfNode(tree, prefix_op.rhs); + }, + else => {} + } + }, else => { std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); } @@ -247,7 +256,7 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token if (resolveTypeOfNode(tree, child)) |child_type| { current_node = child_type; } else return null; - } + } else return null; } else return null; }, .Period => { From 307dceb7032cb03921678b987b74425335e6c01f Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 02:10:41 +0300 Subject: [PATCH 02/14] Preparation for imports, abstracted document hashmap into a document storage type --- src/analysis.zig | 85 ++++++++------ src/config.zig | 3 + src/document_store.zig | 202 +++++++++++++++++++++++++++++++++ src/main.zig | 245 +++++++++++++---------------------------- src/types.zig | 5 +- 5 files changed, 334 insertions(+), 206 deletions(-) create mode 100644 src/document_store.zig diff --git a/src/analysis.zig b/src/analysis.zig index fdff10f..cde68b1 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1,5 +1,5 @@ const std = @import("std"); - +const types = @import("types.zig"); const ast = std.zig.ast; /// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! 
(only for testing) @@ -154,20 +154,20 @@ pub fn isPascalCase(name: []const u8) bool { // ANALYSIS ENGINE /// Gets the child of node -pub fn getChild(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, name: []const u8) ?*std.zig.ast.Node { +pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { var index: usize = 0; while (node.iterate(index)) |child| { switch (child.id) { .VarDecl => { - const vari = child.cast(std.zig.ast.Node.VarDecl).?; + const vari = child.cast(ast.Node.VarDecl).?; if (std.mem.eql(u8, tree.tokenSlice(vari.name_token), name)) return child; }, .FnProto => { - const func = child.cast(std.zig.ast.Node.FnProto).?; + const func = child.cast(ast.Node.FnProto).?; if (func.name_token != null and std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return child; }, .ContainerField => { - const field = child.cast(std.zig.ast.Node.ContainerField).?; + const field = child.cast(ast.Node.ContainerField).?; if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; }, else => {} @@ -178,44 +178,44 @@ pub fn getChild(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, name: []const } /// Resolves the type of a node -pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std.zig.ast.Node { +pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.ImportCtx) ?*ast.Node { switch (node.id) { .VarDecl => { - const vari = node.cast(std.zig.ast.Node.VarDecl).?; - return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?) 
orelse null; + const vari = node.cast(ast.Node.VarDecl).?; + return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null; }, .FnProto => { - const func = node.cast(std.zig.ast.Node.FnProto).?; + const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { - .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type);} + .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type, import_ctx);} } }, .Identifier => { if (getChild(tree, &tree.root_node.base, tree.getNodeSource(node))) |child| { - return resolveTypeOfNode(tree, child); + return resolveTypeOfNode(tree, child, import_ctx); } else return null; }, .ContainerDecl => { return node; }, .ContainerField => { - const field = node.cast(std.zig.ast.Node.ContainerField).?; - return resolveTypeOfNode(tree, field.type_expr.?); + const field = node.cast(ast.Node.ContainerField).?; + return resolveTypeOfNode(tree, field.type_expr.?, import_ctx); }, .SuffixOp => { - const suffix_op = node.cast(std.zig.ast.Node.SuffixOp).?; + const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { .Call => { - return resolveTypeOfNode(tree, suffix_op.lhs.node); + return resolveTypeOfNode(tree, suffix_op.lhs.node, import_ctx); }, else => {} } }, .InfixOp => { - const infix_op = node.cast(std.zig.ast.Node.InfixOp).?; + const infix_op = node.cast(ast.Node.InfixOp).?; switch (infix_op.op) { .Period => { - var left = resolveTypeOfNode(tree, infix_op.lhs).?; + var left = resolveTypeOfNode(tree, infix_op.lhs, import_ctx) orelse return null; if (nodeToString(tree, infix_op.rhs)) |string| { return getChild(tree, left, string); } else return null; @@ -224,14 +224,27 @@ pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std } }, .PrefixOp => { - const prefix_op = node.cast(std.zig.ast.Node.PrefixOp).?; + const prefix_op = node.cast(ast.Node.PrefixOp).?; switch (prefix_op.op) { .PtrType => { - return 
resolveTypeOfNode(tree, prefix_op.rhs); + return resolveTypeOfNode(tree, prefix_op.rhs, import_ctx); }, else => {} } }, + .BuiltinCall => { + const builtin_call = node.cast(ast.Node.BuiltinCall).?; + if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; + if (builtin_call.params.len > 1) return null; + + const import_param = builtin_call.params.at(0).*; + if (import_param.id != .StringLiteral) return null; + + var import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + import_str = import_str[1 .. import_str.len - 1]; + + return resolveImport(import_str); + }, else => { std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); } @@ -239,8 +252,14 @@ pub fn resolveTypeOfNode(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?*std return null; } -pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, tokenizer: *std.zig.Tokenizer) ?*std.zig.ast.Node { - var current_node = node; +fn resolveImport(import: []const u8) ?*ast.Node { + // @TODO: Write this + std.debug.warn("Resolving import {}\n", .{import}); + return null; +} + +pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *types.ImportCtx) ?*ast.Node { + var current_node = &tree.root_node.base; while (true) { var next = tokenizer.next(); @@ -249,13 +268,11 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token return current_node; }, .Identifier => { - // var root = current_node.cast(std.zig.ast.Node.Root).?; + // var root = current_node.cast(ast.Node.Root).?; // current_node. 
if (getChild(tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { - if (resolveTypeOfNode(tree, child)) |node_type| { - if (resolveTypeOfNode(tree, child)) |child_type| { - current_node = child_type; - } else return null; + if (resolveTypeOfNode(tree, child, import_ctx)) |node_type| { + current_node = node_type; } else return null; } else return null; }, @@ -265,7 +282,7 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token return current_node; } else if (after_period.id == .Identifier) { if (getChild(tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { - if (resolveTypeOfNode(tree, child)) |child_type| { + if (resolveTypeOfNode(tree, child, import_ctx)) |child_type| { current_node = child_type; } else return null; } else return null; @@ -280,8 +297,8 @@ pub fn getNodeFromTokens(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node, token return current_node; } -pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ![]*std.zig.ast.Node { - var nodes = std.ArrayList(*std.zig.ast.Node).init(allocator); +pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) ![]*ast.Node { + var nodes = std.ArrayList(*ast.Node).init(allocator); var index: usize = 0; while (node.iterate(index)) |child_node| { @@ -293,18 +310,18 @@ pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *std.zig.ast. 
return nodes.items; } -pub fn nodeToString(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?[]const u8 { +pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { switch (node.id) { .ContainerField => { - const field = node.cast(std.zig.ast.Node.ContainerField).?; + const field = node.cast(ast.Node.ContainerField).?; return tree.tokenSlice(field.name_token); }, .Identifier => { - const field = node.cast(std.zig.ast.Node.Identifier).?; + const field = node.cast(ast.Node.Identifier).?; return tree.tokenSlice(field.token); }, .FnProto => { - const func = node.cast(std.zig.ast.Node.FnProto).?; + const func = node.cast(ast.Node.FnProto).?; if (func.name_token) |name_token| { return tree.tokenSlice(name_token); } @@ -317,7 +334,7 @@ pub fn nodeToString(tree: *std.zig.ast.Tree, node: *std.zig.ast.Node) ?[]const u return null; } -pub fn nodesToString(tree: *std.zig.ast.Tree, maybe_nodes: ?[]*std.zig.ast.Node) void { +pub fn nodesToString(tree: *ast.Tree, maybe_nodes: ?[]*ast.Node) void { if (maybe_nodes) |nodes| { for (nodes) |node| { std.debug.warn("- {}\n", .{nodeToString(tree, node)}); diff --git a/src/config.zig b/src/config.zig index 9c2ff4e..a27baf4 100644 --- a/src/config.zig +++ b/src/config.zig @@ -2,3 +2,6 @@ /// Whether to enable snippet completions enable_snippets: bool = true, + +/// zig installation path +zig_path: ?[]const u8 = null, diff --git a/src/document_store.zig b/src/document_store.zig new file mode 100644 index 0000000..58a73eb --- /dev/null +++ b/src/document_store.zig @@ -0,0 +1,202 @@ +const std = @import("std"); +const types = @import("types.zig"); + +const DocumentStore = @This(); + +pub const Handle = struct { + document: types.TextDocument, + count: usize, + import_uris: [][]const u8, + + pub fn uri(handle: Handle) []const u8 { + return handle.document.uri; + } + + /// Returns the zig AST resulting from parsing the document's text, even + /// if it contains errors. 
+ pub fn dirtyTree(handle: Handle, allocator: *std.mem.Allocator) !*std.zig.ast.Tree { + return try std.zig.parse(allocator, handle.document.text); + } + + /// Returns a zig AST with no errors, either from the current text or + /// the stored sane text, null if no such ast exists. + pub fn saneTree(handle: Handle, allocator: *std.mem.Allocator) !?*std.zig.ast.Tree { + var tree = try std.zig.parse(allocator, handle.document.text); + if (tree.errors.len == 0) return tree; + + tree.deinit(); + if (handle.document.sane_text) |sane| { + return try std.zig.parse(allocator, sane); + } + return null; + } +}; + +allocator: *std.mem.Allocator, +handles: std.StringHashMap(Handle), +std_path: ?[]const u8, + +pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_path: ?[]const u8) void { + self.allocator = allocator; + self.handles = std.StringHashMap(Handle).init(allocator); + errdefer self.handles.deinit(); + + if (zig_path) |zpath| { + // pub fn resolve(allocator: *Allocator, paths: []const []const u8) ![]u8 + self.std_path = std.fs.path.resolve(allocator, &[_][]const u8 { + zpath, "lib/zig/std" + }) catch |err| block: { + std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err}); + break :block null; + }; + } else { + self.std_path = null; + } +} + +pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle { + if (self.handles.get(uri)) |entry| { + std.debug.warn("Document already open: {}, incrementing count\n", .{uri}); + entry.value.count += 1; + std.debug.warn("New count: {}\n", .{entry.value.count}); + self.allocator.free(uri); + return &entry.value; + } + + std.debug.warn("Opened document: {}\n", .{uri}); + const duped_text = try std.mem.dupe(self.allocator, u8, text); + errdefer self.allocator.free(duped_text); + const duped_uri = try std.mem.dupe(self.allocator, u8, uri); + errdefer self.allocator.free(duped_uri); + + var handle = Handle{ + .count = 1, + .import_uris = &[_][]const u8 {}, + .document = .{ 
+ .uri = duped_uri, + .text = duped_text, + .mem = duped_text, + .sane_text = null, + }, + }; + try self.checkSanity(&handle); + try self.handles.putNoClobber(duped_uri, handle); + return &(self.handles.get(duped_uri) orelse unreachable).value; +} + +fn decrementCount(self: *DocumentStore, uri: []const u8) void { + if (self.handles.get(uri)) |entry| { + entry.value.count -= 1; + if (entry.value.count == 0) { + std.debug.warn("Freeing document: {}\n", .{uri}); + } + + self.allocator.free(entry.value.document.uri); + self.allocator.free(entry.value.document.mem); + if (entry.value.document.sane_text) |sane| { + self.allocator.free(sane); + } + + for (entry.value.import_uris) |import_uri| { + self.decrementCount(import_uri); + self.allocator.free(import_uri); + } + + if (entry.value.import_uris.len > 0) { + self.allocator.free(entry.value.import_uris); + } + + const uri_key = entry.key; + self.handles.removeAssertDiscard(uri); + self.allocator.free(uri_key); + } +} + +pub fn closeDocument(self: *DocumentStore, uri: []const u8) void { + self.decrementCount(uri); +} + +pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle { + if (self.handles.get(uri)) |entry| { + return &entry.value; + } + + return null; +} + +// Check if the document text is now sane, move it to sane_text if so. 
+fn checkSanity(self: *DocumentStore, handle: *Handle) !void { + const dirty_tree = try handle.dirtyTree(self.allocator); + defer dirty_tree.deinit(); + + if (dirty_tree.errors.len == 0) { + std.debug.warn("New sane text for document {}\n", .{handle.uri()}); + if (handle.document.sane_text) |sane| { + self.allocator.free(sane); + } + + handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text); + } +} + +pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void { + var document = &handle.document; + + for (content_changes.items) |change| { + if (change.Object.getValue("range")) |range| { + const start_pos = types.Position{ + .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer, + .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer + }; + const end_pos = types.Position{ + .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer, + .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer + }; + + const change_text = change.Object.getValue("text").?.String; + const start_index = try document.positionToIndex(start_pos); + const end_index = try document.positionToIndex(end_pos); + + const old_len = document.text.len; + const new_len = old_len + change_text.len; + if (new_len > document.mem.len) { + // We need to reallocate memory. + // We reallocate twice the current filesize or the new length, if it's more than that + // so that we can reduce the amount of realloc calls. + // We can tune this to find a better size if needed. + const realloc_len = std.math.max(2 * old_len, new_len); + document.mem = try self.allocator.realloc(document.mem, realloc_len); + } + + // The first part of the string, [0 .. start_index] need not be changed. + // We then copy the last part of the string, [end_index ..] to its + // new position, [start_index + change_len .. 
] + std.mem.copy(u8, document.mem[start_index + change_text.len..][0 .. old_len - end_index], document.mem[end_index .. old_len]); + // Finally, we copy the changes over. + std.mem.copy(u8, document.mem[start_index..][0 .. change_text.len], change_text); + + // Reset the text substring. + document.text = document.mem[0 .. new_len]; + } else { + const change_text = change.Object.getValue("text").?.String; + const old_len = document.text.len; + + if (change_text.len > document.mem.len) { + // Like above. + const realloc_len = std.math.max(2 * old_len, change_text.len); + document.mem = try self.allocator.realloc(document.mem, realloc_len); + } + + std.mem.copy(u8, document.mem[0 .. change_text.len], change_text); + document.text = document.mem[0 .. change_text.len]; + } + } + + try self.checkSanity(handle); +} + +pub fn deinit(self: *DocumentStore) void { + // @TODO: Deinit everything! + + self.handles.deinit(); +} diff --git a/src/main.zig b/src/main.zig index 4b8d8fc..fa7123d 100644 --- a/src/main.zig +++ b/src/main.zig @@ -2,7 +2,7 @@ const std = @import("std"); const build_options = @import("build_options"); const Config = @import("config.zig"); -const Uri = @import("uri.zig"); +const DocumentStore = @import("document_store.zig"); const data = @import("data/" ++ build_options.data_version ++ ".zig"); const types = @import("types.zig"); const analysis = @import("analysis.zig"); @@ -12,8 +12,7 @@ const analysis = @import("analysis.zig"); var stdout: std.fs.File.OutStream = undefined; var allocator: *std.mem.Allocator = undefined; -/// Documents hashmap, types.DocumentUri:types.TextDocument -var documents: std.StringHashMap(types.TextDocument) = undefined; +var document_store: DocumentStore = undefined; const initialize_response = 
\\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}} ; @@ -78,48 +77,6 @@ fn respondGeneric(id: i64, response: []const u8) !void { try stdout.writeAll(response); } -fn freeDocument(document: types.TextDocument) void { - allocator.free(document.uri); - allocator.free(document.mem); - if (document.sane_text) |str| { - allocator.free(str); - } -} - -fn openDocument(uri: []const u8, text: []const u8) !void { - const duped_uri = try std.mem.dupe(allocator, u8, uri); - const duped_text = try std.mem.dupe(allocator, u8, text); - - const res = try documents.put(duped_uri, .{ - .uri = duped_uri, - .text = duped_text, - .mem = duped_text, - }); - - if (res) |entry| { - try log("Document already open: {}, closing old.", .{uri}); - freeDocument(entry.value); - } else { - try log("Opened document: {}", .{uri}); - } -} - -fn closeDocument(uri: []const u8) !void { - if (documents.remove(uri)) |entry| { - try log("Closing document: {}", .{uri}); - freeDocument(entry.value); - } -} - -fn cacheSane(document: *types.TextDocument) !void { - try log("Caching sane text for document: {}", .{document.uri}); - - if (document.sane_text) |old_sane| { - allocator.free(old_sane); - } - document.sane_text = try std.mem.dupe(allocator, u8, document.text); -} - // TODO: Is this correct or can we get a better end? 
fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range { return .{ @@ -134,8 +91,8 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range { }; } -fn publishDiagnostics(document: *types.TextDocument, config: Config) !void { - const tree = try std.zig.parse(allocator, document.text); +fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { + const tree = try handle.dirtyTree(allocator); defer tree.deinit(); // Use an arena for our local memory allocations. @@ -163,7 +120,6 @@ fn publishDiagnostics(document: *types.TextDocument, config: Config) !void { } if (tree.errors.len == 0) { - try cacheSane(document); var decls = tree.root_node.decls.iterator(0); while (decls.next()) |decl_ptr| { var decl = decl_ptr.*; @@ -214,7 +170,7 @@ fn publishDiagnostics(document: *types.TextDocument, config: Config) !void { .method = "textDocument/publishDiagnostics", .params = .{ .PublishDiagnosticsParams = .{ - .uri = document.uri, + .uri = handle.uri(), .diagnostics = diagnostics.items, }, }, @@ -268,18 +224,8 @@ fn nodeToCompletion(alloc: *std.mem.Allocator, tree: *std.zig.ast.Tree, decl: *s return null; } -fn completeGlobal(id: i64, document: *types.TextDocument, config: Config) !void { - // The tree uses its own arena, so we just pass our main allocator. 
- var tree = try std.zig.parse(allocator, document.text); - - if (tree.errors.len > 0) { - if (document.sane_text) |sane_text| { - tree.deinit(); - tree = try std.zig.parse(allocator, sane_text); - } else return try respondGeneric(id, no_completions_response); - } - else try cacheSane(document); - +fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { + var tree = (try handle.saneTree(allocator)) orelse return respondGeneric(id, no_completions_response); defer tree.deinit(); // We use a local arena allocator to deallocate all temporary data without iterating @@ -307,40 +253,8 @@ fn completeGlobal(id: i64, document: *types.TextDocument, config: Config) !void }); } -fn completeFieldAccess(id: i64, document: *types.TextDocument, position: types.Position, config: Config) !void { - if (document.sane_text) |sane_text| { - var tree = try std.zig.parse(allocator, sane_text); - defer tree.deinit(); - - // We use a local arena allocator to deallocate all temporary data without iterating - var arena = std.heap.ArenaAllocator.init(allocator); - var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); - // Deallocate all temporary data. 
- defer arena.deinit(); - - var line = try document.getLine(@intCast(usize, position.line)); - var tokenizer = std.zig.Tokenizer.init(line); - - if (analysis.getNodeFromTokens(tree, &tree.root_node.base, &tokenizer)) |node| { - var index: usize = 0; - while (node.iterate(index)) |child_node| { - if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| { - try completions.append(completion); - } - index += 1; - } - } - - try send(types.Response{ - .id = .{.Integer = id}, - .result = .{ - .CompletionList = .{ - .isIncomplete = false, - .items = completions.items, - }, - }, - }); - } else { +fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Position, config: Config) !void { + const tree = (try handle.saneTree(allocator)) orelse { return try send(types.Response{ .id = .{.Integer = id}, .result = .{ @@ -350,7 +264,38 @@ fn completeFieldAccess(id: i64, document: *types.TextDocument, position: types.P }, }, }); + }; + defer tree.deinit(); + + // We use a local arena allocator to deallocate all temporary data without iterating + var arena = std.heap.ArenaAllocator.init(allocator); + var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); + // Deallocate all temporary data. + defer arena.deinit(); + + var line = try handle.document.getLine(@intCast(usize, position.line)); + var tokenizer = std.zig.Tokenizer.init(line); + + // @TODO Pass import ctx. + if (analysis.getFieldAccessTypeNode(tree, &tokenizer, {})) |node| { + var index: usize = 0; + while (node.iterate(index)) |child_node| { + if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| { + try completions.append(completion); + } + index += 1; + } } + + try send(types.Response{ + .id = .{.Integer = id}, + .result = .{ + .CompletionList = .{ + .isIncomplete = false, + .items = completions.items, + }, + }, + }); } // Compute builtin completions at comptime. 
@@ -524,73 +469,27 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v const uri = document.getValue("uri").?.String; const text = document.getValue("text").?.String; - try openDocument(uri, text); - try publishDiagnostics(&(documents.get(uri).?.value), config); + const handle = try document_store.openDocument(uri, text); + try publishDiagnostics(handle.*, config); } else if (std.mem.eql(u8, method, "textDocument/didChange")) { const text_document = params.getValue("textDocument").?.Object; const uri = text_document.getValue("uri").?.String; - - var document = &(documents.get(uri).?.value); const content_changes = params.getValue("contentChanges").?.Array; - for (content_changes.items) |change| { - if (change.Object.getValue("range")) |range| { - const start_pos = types.Position{ - .line = range.Object.getValue("start").?.Object.getValue("line").?.Integer, - .character = range.Object.getValue("start").?.Object.getValue("character").?.Integer - }; - const end_pos = types.Position{ - .line = range.Object.getValue("end").?.Object.getValue("line").?.Integer, - .character = range.Object.getValue("end").?.Object.getValue("character").?.Integer - }; + const handle = document_store.getHandle(uri) orelse { + try log("Trying to change non existent document {}", .{uri}); + return; + }; - const change_text = change.Object.getValue("text").?.String; - const start_index = try document.positionToIndex(start_pos); - const end_index = try document.positionToIndex(end_pos); - - const old_len = document.text.len; - const new_len = old_len + change_text.len; - if (new_len > document.mem.len) { - // We need to reallocate memory. - // We reallocate twice the current filesize or the new length, if it's more than that - // so that we can reduce the amount of realloc calls. - // We can tune this to find a better size if needed. 
- const realloc_len = std.math.max(2 * old_len, new_len); - document.mem = try allocator.realloc(document.mem, realloc_len); - } - - // The first part of the string, [0 .. start_index] need not be changed. - // We then copy the last part of the string, [end_index ..] to its - // new position, [start_index + change_len .. ] - std.mem.copy(u8, document.mem[start_index + change_text.len..][0 .. old_len - end_index], document.mem[end_index .. old_len]); - // Finally, we copy the changes over. - std.mem.copy(u8, document.mem[start_index..][0 .. change_text.len], change_text); - - // Reset the text substring. - document.text = document.mem[0 .. new_len]; - } else { - const change_text = change.Object.getValue("text").?.String; - const old_len = document.text.len; - - if (change_text.len > document.mem.len) { - // Like above. - const realloc_len = std.math.max(2 * old_len, change_text.len); - document.mem = try allocator.realloc(document.mem, realloc_len); - } - - std.mem.copy(u8, document.mem[0 .. change_text.len], change_text); - document.text = document.mem[0 .. 
change_text.len]; - } - } - - try publishDiagnostics(document, config); + try document_store.applyChanges(handle, content_changes); + try publishDiagnostics(handle.*, config); } else if (std.mem.eql(u8, method, "textDocument/didSave")) { // noop } else if (std.mem.eql(u8, method, "textDocument/didClose")) { const document = params.getValue("textDocument").?.Object; const uri = document.getValue("uri").?.String; - try closeDocument(uri); + document_store.closeDocument(uri); } // Autocomplete / Signatures else if (std.mem.eql(u8, method, "textDocument/completion")) { @@ -598,14 +497,18 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v const uri = text_document.getValue("uri").?.String; const position = params.getValue("position").?.Object; - var document = &(documents.get(uri).?.value); + const handle = document_store.getHandle(uri) orelse { + try log("Trying to complete in non existent document {}", .{uri}); + return; + }; + const pos = types.Position{ .line = position.getValue("line").?.Integer, .character = position.getValue("character").?.Integer - 1, }; if (pos.character >= 0) { - const pos_index = try document.positionToIndex(pos); - const pos_context = documentPositionContext(document.*, pos_index); + const pos_index = try handle.document.positionToIndex(pos); + const pos_context = documentPositionContext(handle.document, pos_index); if (pos_context == .builtin) { try send(types.Response{ @@ -618,9 +521,9 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v }, }); } else if (pos_context == .var_access or pos_context == .empty) { - try completeGlobal(id, document, config); + try completeGlobal(id, handle.*, config); } else if (pos_context == .field_access) { - try completeFieldAccess(id, document, pos, config); + try completeFieldAccess(id, handle.*, pos, config); } else { try respondGeneric(id, no_completions_response); } @@ -628,18 +531,18 @@ fn processJsonRpc(parser: *std.json.Parser, json: 
[]const u8, config: Config) !v try respondGeneric(id, no_completions_response); } } else if (std.mem.eql(u8, method, "textDocument/signatureHelp")) { - try respondGeneric(id, - \\,"result":{"signatures":[{ - \\"label": "nameOfFunction(aNumber: u8)", - \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"}, - \\"parameters": [ - \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}} - \\] - \\}]}} - ); // try respondGeneric(id, - // \\,"result":{"signatures":[]}} + // \\,"result":{"signatures":[{ + // \\"label": "nameOfFunction(aNumber: u8)", + // \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"}, + // \\"parameters": [ + // \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}} + // \\] + // \\}]}} // ); + try respondGeneric(id, + \\,"result":{"signatures":[]}} + ); } else if (root.Object.getValue("id")) |_| { try log("Method with return value not implemented: {}", .{method}); try respondGeneric(id, not_implemented_response); @@ -677,11 +580,9 @@ pub fn main() anyerror!void { const stdin = std.io.getStdIn().inStream(); stdout = std.io.getStdOut().outStream(); - - documents = std.StringHashMap(types.TextDocument).init(allocator); - // Read he configuration, if any. var config = Config{}; + const config_parse_options = std.json.ParseOptions{ .allocator=allocator }; // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it. config_read: { @@ -704,13 +605,15 @@ pub fn main() anyerror!void { if (bytes_read != conf_file_stat.size) break :config_read; // TODO: Better errors? Doesnt seem like std.json can provide us positions or context. - // Note that we don't need to pass an allocator to parse since we are not using pointer or slice fields. - // Thus, we don't need to even call parseFree. 
- config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{}) catch |err| { + config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| { std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err}); break :config_read; }; } + defer std.json.parseFree(Config, config, config_parse_options); + + document_store.init(allocator, config.zig_path); + defer document_store.deinit(); // This JSON parser is passed to processJsonRpc and reset. var json_parser = std.json.Parser.init(allocator, false); diff --git a/src/types.zig b/src/types.zig index 8375cec..3226d31 100644 --- a/src/types.zig +++ b/src/types.zig @@ -3,6 +3,9 @@ const std = @import("std"); const json = std.json; +// @TODO +pub const ImportCtx = void; + // JSON Types pub const String = []const u8; @@ -135,7 +138,7 @@ pub const PublishDiagnosticsParams = struct { }; pub const TextDocument = struct { - uri: DocumentUri, + uri: String, // This is a substring of mem starting at 0 text: String, // This holds the memory that we have actually allocated. From 31f1d2fa3eea55c19b73133172fc90bed13d8f12 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 04:54:05 +0300 Subject: [PATCH 03/14] First draft of imports --- src/analysis.zig | 19 ++--- src/config.zig | 2 +- src/document_store.zig | 158 +++++++++++++++++++++++++++++++++++------ src/main.zig | 11 +-- src/types.zig | 5 +- src/uri.zig | 32 ++++++++- 6 files changed, 183 insertions(+), 44 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index cde68b1..9042b22 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const types = @import("types.zig"); +const ImportContext = @import("document_store.zig").ImportContext; const ast = std.zig.ast; /// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! 
(only for testing) @@ -178,7 +178,7 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { } /// Resolves the type of a node -pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.ImportCtx) ?*ast.Node { +pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportContext) ?*ast.Node { switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; @@ -240,10 +240,9 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.Im const import_param = builtin_call.params.at(0).*; if (import_param.id != .StringLiteral) return null; - var import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); - import_str = import_str[1 .. import_str.len - 1]; - - return resolveImport(import_str); + const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + // @TODO: Handle error better. + return (import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch unreachable); }, else => { std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); @@ -252,13 +251,7 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *types.Im return null; } -fn resolveImport(import: []const u8) ?*ast.Node { - // @TODO: Write this - std.debug.warn("Resolving import {}\n", .{import}); - return null; -} - -pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *types.ImportCtx) ?*ast.Node { +pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *ImportContext) ?*ast.Node { var current_node = &tree.root_node.base; while (true) { diff --git a/src/config.zig b/src/config.zig index a27baf4..5913b6f 100644 --- a/src/config.zig +++ b/src/config.zig @@ -4,4 +4,4 @@ enable_snippets: bool = true, /// zig installation path -zig_path: ?[]const u8 = null, +zig_lib_path: ?[]const u8 = null, diff --git a/src/document_store.zig b/src/document_store.zig index 
58a73eb..0824ace 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -1,12 +1,13 @@ const std = @import("std"); const types = @import("types.zig"); +const URI = @import("uri.zig"); const DocumentStore = @This(); pub const Handle = struct { document: types.TextDocument, count: usize, - import_uris: [][]const u8, + import_uris: std.ArrayList([]const u8), pub fn uri(handle: Handle) []const u8 { return handle.document.uri; @@ -34,32 +35,37 @@ pub const Handle = struct { allocator: *std.mem.Allocator, handles: std.StringHashMap(Handle), -std_path: ?[]const u8, +std_uri: ?[]const u8, -pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_path: ?[]const u8) void { +pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !void { self.allocator = allocator; self.handles = std.StringHashMap(Handle).init(allocator); errdefer self.handles.deinit(); - if (zig_path) |zpath| { - // pub fn resolve(allocator: *Allocator, paths: []const []const u8) ![]u8 - self.std_path = std.fs.path.resolve(allocator, &[_][]const u8 { - zpath, "lib/zig/std" + if (zig_lib_path) |zpath| { + const std_path = std.fs.path.resolve(allocator, &[_][]const u8 { + zpath, "./std/std.zig" }) catch |err| block: { std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err}); - break :block null; + self.std_uri = null; + return; }; + + defer allocator.free(std_path); + // Get the std_path as a URI, so we can just append to it! 
+ self.std_uri = try URI.fromPath(allocator, std_path); + std.debug.warn("Standard library base uri: {}\n", .{self.std_uri}); } else { - self.std_path = null; + self.std_uri = null; } } +// TODO: Normalize URIs somehow, probably just lowercase pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle { if (self.handles.get(uri)) |entry| { std.debug.warn("Document already open: {}, incrementing count\n", .{uri}); entry.value.count += 1; std.debug.warn("New count: {}\n", .{entry.value.count}); - self.allocator.free(uri); return &entry.value; } @@ -71,7 +77,7 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H var handle = Handle{ .count = 1, - .import_uris = &[_][]const u8 {}, + .import_uris = std.ArrayList([]const u8).init(self.allocator), .document = .{ .uri = duped_uri, .text = duped_text, @@ -97,14 +103,12 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void { self.allocator.free(sane); } - for (entry.value.import_uris) |import_uri| { + for (entry.value.import_uris.items) |import_uri| { self.decrementCount(import_uri); self.allocator.free(import_uri); } - if (entry.value.import_uris.len > 0) { - self.allocator.free(entry.value.import_uris); - } + entry.value.import_uris.deinit(); const uri_key = entry.key; self.handles.removeAssertDiscard(uri); @@ -129,14 +133,14 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { const dirty_tree = try handle.dirtyTree(self.allocator); defer dirty_tree.deinit(); - if (dirty_tree.errors.len == 0) { - std.debug.warn("New sane text for document {}\n", .{handle.uri()}); - if (handle.document.sane_text) |sane| { - self.allocator.free(sane); - } + if (dirty_tree.errors.len > 0) return; - handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text); + std.debug.warn("New sane text for document {}\n", .{handle.uri()}); + if (handle.document.sane_text) |sane| { + self.allocator.free(sane); } + + handle.document.sane_text = try 
std.mem.dupe(self.allocator, u8, handle.document.text); } pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void { @@ -195,6 +199,118 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. try self.checkSanity(handle); } +// @TODO: We only reduce the count upon closing, +// find a way to reduce it when removing imports. +// Perhaps on new sane text we can go through imports +// and remove those that are in the import_uris table +// but not in the file anymore. +pub const ImportContext = struct { + store: *DocumentStore, + handle: *Handle, + trees: std.ArrayList(*std.zig.ast.Tree), + + pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node { + const allocator = self.store.allocator; + + const final_uri = if (std.mem.eql(u8, import_str, "std")) + if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) + else { + std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); + return null; + } + else b: { + // Find relative uri + const path = try URI.parse(allocator, self.handle.uri()); + defer allocator.free(path); + + const dir_path = std.fs.path.dirname(path) orelse ""; + const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 { + dir_path, import_str + }); + + break :b import_path; + }; + + // @TODO Clean up code, lots of repetition + { + errdefer allocator.free(final_uri); + + // Check if we already imported this. + for (self.handle.import_uris.items) |uri| { + // If we did, set our new handle and return the parsed tree root node. + if (std.mem.eql(u8, uri, final_uri)) { + self.handle = self.store.getHandle(final_uri) orelse return null; + if (try self.handle.saneTree(allocator)) |tree| { + try self.trees.append(tree); + return &tree.root_node.base; + } + return null; + } + } + } + + // New import. + // Add to import table of current handle. 
+ try self.handle.import_uris.append(final_uri); + + // Check if the import is already opened by others. + if (self.store.getHandle(final_uri)) |new_handle| { + // If it is, increment the count, set our new handle and return the parsed tree root node. + new_handle.count += 1; + self.handle = new_handle; + if (try self.handle.saneTree(allocator)) |tree| { + try self.trees.append(tree); + return &tree.root_node.base; + } + return null; + } + + // New document, read the file then call into openDocument. + const file_path = try URI.parse(allocator, final_uri); + defer allocator.free(file_path); + + var file = std.fs.cwd().openFile(file_path, .{}) catch { + std.debug.warn("Cannot open import file {}", .{file_path}); + return null; + }; + + defer file.close(); + const size = std.math.cast(usize, try file.getEndPos()) catch std.math.maxInt(usize); + + // TODO: This is wasteful, we know we don't need to copy the text on this openDocument call + const file_contents = try allocator.alloc(u8, size); + defer allocator.free(file_contents); + + file.inStream().readNoEof(file_contents) catch { + std.debug.warn("Could not read from file {}", .{file_path}); + return null; + }; + + self.handle = try openDocument(self.store, final_uri, file_contents); + if (try self.handle.saneTree(allocator)) |tree| { + try self.trees.append(tree); + return &tree.root_node.base; + } + return null; + } + + pub fn deinit(self: *ImportContext) void { + for (self.trees.items) |tree| { + tree.deinit(); + } + + self.trees.deinit(); + } +}; + +pub fn importContext(self: *DocumentStore, handle: *Handle) ImportContext { + return .{ + .store = self, + .handle = handle, + .trees = std.ArrayList(*std.zig.ast.Tree).init(self.allocator), + }; +} + pub fn deinit(self: *DocumentStore) void { // @TODO: Deinit everything! 
diff --git a/src/main.zig b/src/main.zig index fa7123d..0a1f7a5 100644 --- a/src/main.zig +++ b/src/main.zig @@ -253,7 +253,7 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { }); } -fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Position, config: Config) !void { +fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void { const tree = (try handle.saneTree(allocator)) orelse { return try send(types.Response{ .id = .{.Integer = id}, @@ -277,7 +277,9 @@ fn completeFieldAccess(id: i64, handle: DocumentStore.Handle, position: types.Po var tokenizer = std.zig.Tokenizer.init(line); // @TODO Pass import ctx. - if (analysis.getFieldAccessTypeNode(tree, &tokenizer, {})) |node| { + var import_ctx = document_store.importContext(handle); + defer import_ctx.deinit(); + if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| { @@ -523,7 +525,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v } else if (pos_context == .var_access or pos_context == .empty) { try completeGlobal(id, handle.*, config); } else if (pos_context == .field_access) { - try completeFieldAccess(id, handle.*, pos, config); + try completeFieldAccess(id, handle, pos, config); } else { try respondGeneric(id, no_completions_response); } @@ -612,7 +614,8 @@ pub fn main() anyerror!void { } defer std.json.parseFree(Config, config, config_parse_options); - document_store.init(allocator, config.zig_path); + // @TODO Check is_absolute + try document_store.init(allocator, config.zig_lib_path); defer document_store.deinit(); // This JSON parser is passed to processJsonRpc and reset. 
diff --git a/src/types.zig b/src/types.zig index 3226d31..8375cec 100644 --- a/src/types.zig +++ b/src/types.zig @@ -3,9 +3,6 @@ const std = @import("std"); const json = std.json; -// @TODO -pub const ImportCtx = void; - // JSON Types pub const String = []const u8; @@ -138,7 +135,7 @@ pub const PublishDiagnosticsParams = struct { }; pub const TextDocument = struct { - uri: String, + uri: DocumentUri, // This is a substring of mem starting at 0 text: String, // This holds the memory that we have actually allocated. diff --git a/src/uri.zig b/src/uri.zig index 971c866..e1ad483 100644 --- a/src/uri.zig +++ b/src/uri.zig @@ -1,7 +1,37 @@ const std = @import("std"); -// Original code: https://github.com/andersfr/zig-lsp/blob/master/uri.zig +const reserved_chars = &[_]u8 { + '!', '#', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', ':', + ';', '=', '?', '@', '[', ']', +}; +/// Returns a URI from a path, caller owns the memory allocated with `allocator` +pub fn fromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 { + if (path.len == 0) return ""; + const prefix = if (std.builtin.os.tag == .windows) "file:///" else "file://"; + + var buf = std.ArrayList(u8).init(allocator); + try buf.appendSlice(prefix); + + var out_stream = buf.outStream(); + + for (path) |char| { + if (char == std.fs.path.sep) { + try buf.append('/'); + } else if (std.mem.indexOfScalar(u8, reserved_chars, char) != null) { + // Write '%' + hex with uppercase + try buf.append('%'); + try std.fmt.format(out_stream, "{X}", .{char}); + } else { + try buf.append(std.ascii.toLower(char)); + } + } + + return buf.toOwnedSlice(); +} + +// Original code: https://github.com/andersfr/zig-lsp/blob/master/uri.zig fn parseHex(c: u8) !u8 { return switch(c) { '0'...'9' => c - '0', From 86d264f4885e12fb7374ada2c02d23adf8f9e619 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 05:00:00 +0300 Subject: [PATCH 04/14] 1 level deep imports now work but we crash when adding levels --- 
src/document_store.zig | 5 +++++ src/main.zig | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/document_store.zig b/src/document_store.zig index 0824ace..46f1b3e 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -209,6 +209,11 @@ pub const ImportContext = struct { handle: *Handle, trees: std.ArrayList(*std.zig.ast.Tree), + pub fn lastTree(self: *ImportContext) *std.zig.ast.Tree { + std.debug.assert(self.trees.items.len > 0); + return self.trees.items[self.trees.items.len - 1]; + } + pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; diff --git a/src/main.zig b/src/main.zig index 0a1f7a5..1f25a52 100644 --- a/src/main.zig +++ b/src/main.zig @@ -282,7 +282,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { - if (try nodeToCompletion(&arena.allocator, tree, child_node, config)) |completion| { + if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree(), child_node, config)) |completion| { try completions.append(completion); } index += 1; From 95b6e6cd6a9c3ff409b44daed6b398ba42e324be Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 05:14:37 +0300 Subject: [PATCH 05/14] Fixed relative URI code, get latest tree in analysis --- src/analysis.zig | 37 ++++++++++++++++++++----------------- src/document_store.zig | 10 +++++++--- src/main.zig | 2 +- 3 files changed, 28 insertions(+), 21 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 9042b22..3a8ffa2 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -179,20 +179,22 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { /// Resolves the type of a node pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportContext) ?*ast.Node { + var 
latest_tree = import_ctx.lastTree() orelse tree; + switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; - return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null; + return resolveTypeOfNode(latest_tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null; }, .FnProto => { const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { - .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type, import_ctx);} + .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(latest_tree, return_type, import_ctx);} } }, .Identifier => { - if (getChild(tree, &tree.root_node.base, tree.getNodeSource(node))) |child| { - return resolveTypeOfNode(tree, child, import_ctx); + if (getChild(latest_tree, &latest_tree.root_node.base, latest_tree.getNodeSource(node))) |child| { + return resolveTypeOfNode(latest_tree, child, import_ctx); } else return null; }, .ContainerDecl => { @@ -200,13 +202,13 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo }, .ContainerField => { const field = node.cast(ast.Node.ContainerField).?; - return resolveTypeOfNode(tree, field.type_expr.?, import_ctx); + return resolveTypeOfNode(latest_tree, field.type_expr.?, import_ctx); }, .SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { .Call => { - return resolveTypeOfNode(tree, suffix_op.lhs.node, import_ctx); + return resolveTypeOfNode(latest_tree, suffix_op.lhs.node, import_ctx); }, else => {} } @@ -215,9 +217,9 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo const infix_op = node.cast(ast.Node.InfixOp).?; switch (infix_op.op) { .Period => { - var left = resolveTypeOfNode(tree, infix_op.lhs, import_ctx) orelse return null; - if (nodeToString(tree, infix_op.rhs)) |string| { - return getChild(tree, left, string); + var left = resolveTypeOfNode(latest_tree, infix_op.lhs, import_ctx) 
orelse return null; + if (nodeToString(latest_tree, infix_op.rhs)) |string| { + return getChild(latest_tree, left, string); } else return null; }, else => {} @@ -227,21 +229,21 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo const prefix_op = node.cast(ast.Node.PrefixOp).?; switch (prefix_op.op) { .PtrType => { - return resolveTypeOfNode(tree, prefix_op.rhs, import_ctx); + return resolveTypeOfNode(latest_tree, prefix_op.rhs, import_ctx); }, else => {} } }, .BuiltinCall => { const builtin_call = node.cast(ast.Node.BuiltinCall).?; - if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; + if (!std.mem.eql(u8, latest_tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; if (builtin_call.params.len > 1) return null; const import_param = builtin_call.params.at(0).*; if (import_param.id != .StringLiteral) return null; - const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); - // @TODO: Handle error better. + const import_str = latest_tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + // @TODO: Handle this error better. return (import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch unreachable); }, else => { @@ -255,6 +257,7 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im var current_node = &tree.root_node.base; while (true) { + var latest_tree = import_ctx.lastTree() orelse tree; var next = tokenizer.next(); switch (next.id) { .Eof => { @@ -263,8 +266,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im .Identifier => { // var root = current_node.cast(ast.Node.Root).?; // current_node. 
- if (getChild(tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { - if (resolveTypeOfNode(tree, child, import_ctx)) |node_type| { + if (getChild(latest_tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { + if (resolveTypeOfNode(latest_tree, child, import_ctx)) |node_type| { current_node = node_type; } else return null; } else return null; @@ -274,8 +277,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im if (after_period.id == .Eof) { return current_node; } else if (after_period.id == .Identifier) { - if (getChild(tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { - if (resolveTypeOfNode(tree, child, import_ctx)) |child_type| { + if (getChild(latest_tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { + if (resolveTypeOfNode(latest_tree, child, import_ctx)) |child_type| { current_node = child_type; } else return null; } else return null; diff --git a/src/document_store.zig b/src/document_store.zig index 46f1b3e..f42d375 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -209,8 +209,8 @@ pub const ImportContext = struct { handle: *Handle, trees: std.ArrayList(*std.zig.ast.Tree), - pub fn lastTree(self: *ImportContext) *std.zig.ast.Tree { - std.debug.assert(self.trees.items.len > 0); + pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree { + if (self.trees.items.len == 0) return null; return self.trees.items[self.trees.items.len - 1]; } @@ -233,9 +233,13 @@ pub const ImportContext = struct { dir_path, import_str }); - break :b import_path; + defer allocator.free(import_path); + + break :b (try URI.fromPath(allocator, import_path)); }; + std.debug.warn("Import final URI: {}\n", .{final_uri}); + // @TODO Clean up code, lots of repetition { errdefer allocator.free(final_uri); diff --git a/src/main.zig b/src/main.zig index 1f25a52..0f19a99 100644 --- a/src/main.zig +++ b/src/main.zig @@ -282,7 +282,7 @@ fn 
completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { - if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree(), child_node, config)) |completion| { + if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree() orelse tree, child_node, config)) |completion| { try completions.append(completion); } index += 1; From 60ffc5f5515094b8ea94ea22ef322d9088ef9f38 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 11:40:17 +0300 Subject: [PATCH 06/14] Fixed crash while closing document (uri double free) --- src/document_store.zig | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/src/document_store.zig b/src/document_store.zig index f42d375..698ee82 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -60,7 +60,6 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ? } } -// TODO: Normalize URIs somehow, probably just lowercase pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle { if (self.handles.get(uri)) |entry| { std.debug.warn("Document already open: {}, incrementing count\n", .{uri}); @@ -93,11 +92,10 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H fn decrementCount(self: *DocumentStore, uri: []const u8) void { if (self.handles.get(uri)) |entry| { entry.value.count -= 1; - if (entry.value.count == 0) { - std.debug.warn("Freeing document: {}\n", .{uri}); - } + if (entry.value.count > 0) + return; - self.allocator.free(entry.value.document.uri); + std.debug.warn("Freeing document: {}\n", .{uri}); self.allocator.free(entry.value.document.mem); if (entry.value.document.sane_text) |sane| { self.allocator.free(sane); @@ -204,6 +202,9 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. 
// Perhaps on new sane text we can go through imports // and remove those that are in the import_uris table // but not in the file anymore. +// @TODO: Make this hold a single tree, remove tree param +// from analysis functions that take an import_context. +// (can we reset-reuse it or do we need to deinit-init a new one?) pub const ImportContext = struct { store: *DocumentStore, handle: *Handle, @@ -239,11 +240,11 @@ pub const ImportContext = struct { }; std.debug.warn("Import final URI: {}\n", .{final_uri}); + var consumed_final_uri = false; + defer if (!consumed_final_uri) allocator.free(final_uri); // @TODO Clean up code, lots of repetition { - errdefer allocator.free(final_uri); - // Check if we already imported this. for (self.handle.import_uris.items) |uri| { // If we did, set our new handle and return the parsed tree root node. @@ -259,9 +260,6 @@ pub const ImportContext = struct { } // New import. - // Add to import table of current handle. - try self.handle.import_uris.append(final_uri); - // Check if the import is already opened by others. if (self.store.getHandle(final_uri)) |new_handle| { // If it is, increment the count, set our new handle and return the parsed tree root node. @@ -279,7 +277,7 @@ pub const ImportContext = struct { defer allocator.free(file_path); var file = std.fs.cwd().openFile(file_path, .{}) catch { - std.debug.warn("Cannot open import file {}", .{file_path}); + std.debug.warn("Cannot open import file {}\n", .{file_path}); return null; }; @@ -291,11 +289,16 @@ pub const ImportContext = struct { defer allocator.free(file_contents); file.inStream().readNoEof(file_contents) catch { - std.debug.warn("Could not read from file {}", .{file_path}); + std.debug.warn("Could not read from file {}\n", .{file_path}); return null; }; + // Add to import table of current handle. + try self.handle.import_uris.append(final_uri); + consumed_final_uri = true; + // Swap handles and get new tree. 
self.handle = try openDocument(self.store, final_uri, file_contents); + if (try self.handle.saneTree(allocator)) |tree| { try self.trees.append(tree); return &tree.root_node.base; From 76e9a079a339a9cb30545f6c3bc5c1c3209de2fa Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 11:51:49 +0300 Subject: [PATCH 07/14] Avoid unnecessary copy of document text when opening new imports --- src/document_store.zig | 88 ++++++++++++++++++++++++------------------ 1 file changed, 50 insertions(+), 38 deletions(-) diff --git a/src/document_store.zig b/src/document_store.zig index 698ee82..a737dc9 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -60,6 +60,31 @@ pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ? } } +/// This function assersts the document is not open yet and takes owneship +/// of the uri and text passed in. +fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { + std.debug.warn("Opened document: {}\n", .{uri}); + + errdefer { + self.allocator.free(uri); + self.allocator.free(text); + } + + var handle = Handle{ + .count = 1, + .import_uris = std.ArrayList([]const u8).init(self.allocator), + .document = .{ + .uri = uri, + .text = text, + .mem = text, + .sane_text = null, + }, + }; + try self.checkSanity(&handle); + try self.handles.putNoClobber(uri, handle); + return &(self.handles.get(uri) orelse unreachable).value; +} + pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*Handle { if (self.handles.get(uri)) |entry| { std.debug.warn("Document already open: {}, incrementing count\n", .{uri}); @@ -68,25 +93,12 @@ pub fn openDocument(self: *DocumentStore, uri: []const u8, text: []const u8) !*H return &entry.value; } - std.debug.warn("Opened document: {}\n", .{uri}); const duped_text = try std.mem.dupe(self.allocator, u8, text); errdefer self.allocator.free(duped_text); const duped_uri = try std.mem.dupe(self.allocator, u8, uri); errdefer 
self.allocator.free(duped_uri); - var handle = Handle{ - .count = 1, - .import_uris = std.ArrayList([]const u8).init(self.allocator), - .document = .{ - .uri = duped_uri, - .text = duped_text, - .mem = duped_text, - .sane_text = null, - }, - }; - try self.checkSanity(&handle); - try self.handles.putNoClobber(duped_uri, handle); - return &(self.handles.get(duped_uri) orelse unreachable).value; + return self.newDocument(duped_uri, duped_text); } fn decrementCount(self: *DocumentStore, uri: []const u8) void { @@ -243,19 +255,16 @@ pub const ImportContext = struct { var consumed_final_uri = false; defer if (!consumed_final_uri) allocator.free(final_uri); - // @TODO Clean up code, lots of repetition - { - // Check if we already imported this. - for (self.handle.import_uris.items) |uri| { - // If we did, set our new handle and return the parsed tree root node. - if (std.mem.eql(u8, uri, final_uri)) { - self.handle = self.store.getHandle(final_uri) orelse return null; - if (try self.handle.saneTree(allocator)) |tree| { - try self.trees.append(tree); - return &tree.root_node.base; - } - return null; + // Check if we already imported this. + for (self.handle.import_uris.items) |uri| { + // If we did, set our new handle and return the parsed tree root node. 
+ if (std.mem.eql(u8, uri, final_uri)) { + self.handle = self.store.getHandle(final_uri) orelse return null; + if (try self.handle.saneTree(allocator)) |tree| { + try self.trees.append(tree); + return &tree.root_node.base; } + return null; } } @@ -284,20 +293,23 @@ pub const ImportContext = struct { defer file.close(); const size = std.math.cast(usize, try file.getEndPos()) catch std.math.maxInt(usize); - // TODO: This is wasteful, we know we don't need to copy the text on this openDocument call - const file_contents = try allocator.alloc(u8, size); - defer allocator.free(file_contents); + { + const file_contents = try allocator.alloc(u8, size); + errdefer allocator.free(file_contents); - file.inStream().readNoEof(file_contents) catch { - std.debug.warn("Could not read from file {}\n", .{file_path}); - return null; - }; + file.inStream().readNoEof(file_contents) catch { + std.debug.warn("Could not read from file {}\n", .{file_path}); + return null; + }; - // Add to import table of current handle. - try self.handle.import_uris.append(final_uri); - consumed_final_uri = true; - // Swap handles and get new tree. - self.handle = try openDocument(self.store, final_uri, file_contents); + // Add to import table of current handle. + try self.handle.import_uris.append(final_uri); + consumed_final_uri = true; + + // Swap handles and get new tree. + // This takes ownership of the passed uri and text. 
+ self.handle = try newDocument(self.store, try std.mem.dupe(allocator, u8, final_uri), file_contents); + } if (try self.handle.saneTree(allocator)) |tree| { try self.trees.append(tree); From 7a8a4e1ec5350160fca3ae8be47b134640f1c5fa Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 12:12:04 +0300 Subject: [PATCH 08/14] Fixed crash when completing import with dot access --- src/analysis.zig | 53 ++++++++++++++++++++++-------------------- src/document_store.zig | 44 ++++++++++++++++++++--------------- src/main.zig | 13 ++++------- 3 files changed, 58 insertions(+), 52 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 3a8ffa2..e3a3341 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -178,23 +178,21 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { } /// Resolves the type of a node -pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportContext) ?*ast.Node { - var latest_tree = import_ctx.lastTree() orelse tree; - +pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node { switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; - return resolveTypeOfNode(latest_tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null; + return resolveTypeOfNode(import_ctx, vari.type_node orelse vari.init_node.?) 
orelse null; }, .FnProto => { const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { - .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(latest_tree, return_type, import_ctx);} + .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(import_ctx, return_type), } }, .Identifier => { - if (getChild(latest_tree, &latest_tree.root_node.base, latest_tree.getNodeSource(node))) |child| { - return resolveTypeOfNode(latest_tree, child, import_ctx); + if (getChild(import_ctx.tree, &import_ctx.tree.root_node.base, import_ctx.tree.getNodeSource(node))) |child| { + return resolveTypeOfNode(import_ctx, child); } else return null; }, .ContainerDecl => { @@ -202,13 +200,13 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo }, .ContainerField => { const field = node.cast(ast.Node.ContainerField).?; - return resolveTypeOfNode(latest_tree, field.type_expr.?, import_ctx); + return resolveTypeOfNode(import_ctx, field.type_expr.?); }, .SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { .Call => { - return resolveTypeOfNode(latest_tree, suffix_op.lhs.node, import_ctx); + return resolveTypeOfNode(import_ctx, suffix_op.lhs.node); }, else => {} } @@ -217,10 +215,14 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo const infix_op = node.cast(ast.Node.InfixOp).?; switch (infix_op.op) { .Period => { - var left = resolveTypeOfNode(latest_tree, infix_op.lhs, import_ctx) orelse return null; - if (nodeToString(latest_tree, infix_op.rhs)) |string| { - return getChild(latest_tree, left, string); - } else return null; + // Save the child string from this tree since the tree may switch when processing + // an import lhs. 
+ var rhs_str = nodeToString(import_ctx.tree, infix_op.rhs) orelse return null; + // @TODO: This is hackish, pass an explicit allocator or smth + rhs_str = std.mem.dupe(import_ctx.store.allocator, u8, rhs_str) catch return null; + defer import_ctx.store.allocator.free(rhs_str); + const left = resolveTypeOfNode(import_ctx, infix_op.lhs) orelse return null; + return getChild(import_ctx.tree, left, rhs_str); }, else => {} } @@ -229,22 +231,24 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo const prefix_op = node.cast(ast.Node.PrefixOp).?; switch (prefix_op.op) { .PtrType => { - return resolveTypeOfNode(latest_tree, prefix_op.rhs, import_ctx); + return resolveTypeOfNode(import_ctx, prefix_op.rhs); }, else => {} } }, .BuiltinCall => { const builtin_call = node.cast(ast.Node.BuiltinCall).?; - if (!std.mem.eql(u8, latest_tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; + if (!std.mem.eql(u8, import_ctx.tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; if (builtin_call.params.len > 1) return null; const import_param = builtin_call.params.at(0).*; if (import_param.id != .StringLiteral) return null; - const import_str = latest_tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); - // @TODO: Handle this error better. - return (import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch unreachable); + const import_str = import_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + return import_ctx.onImport(import_str[1 .. 
import_str.len - 1]) catch |err| block: { + std.debug.warn("Error {} while proessing import {}\n", .{err, import_str}); + break :block null; + }; }, else => { std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); @@ -253,11 +257,10 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo return null; } -pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, import_ctx: *ImportContext) ?*ast.Node { - var current_node = &tree.root_node.base; +pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { + var current_node = &import_ctx.tree.root_node.base; while (true) { - var latest_tree = import_ctx.lastTree() orelse tree; var next = tokenizer.next(); switch (next.id) { .Eof => { @@ -266,8 +269,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im .Identifier => { // var root = current_node.cast(ast.Node.Root).?; // current_node. - if (getChild(latest_tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { - if (resolveTypeOfNode(latest_tree, child, import_ctx)) |node_type| { + if (getChild(import_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { + if (resolveTypeOfNode(import_ctx, child)) |node_type| { current_node = node_type; } else return null; } else return null; @@ -277,8 +280,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im if (after_period.id == .Eof) { return current_node; } else if (after_period.id == .Identifier) { - if (getChild(latest_tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { - if (resolveTypeOfNode(latest_tree, child, import_ctx)) |child_type| { + if (getChild(import_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { + if (resolveTypeOfNode(import_ctx, child)) |child_type| { current_node = child_type; } else return null; } else return null; diff --git 
a/src/document_store.zig b/src/document_store.zig index a737dc9..d0fb4bd 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -220,12 +220,13 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. pub const ImportContext = struct { store: *DocumentStore, handle: *Handle, - trees: std.ArrayList(*std.zig.ast.Tree), + tree: *std.zig.ast.Tree, - pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree { - if (self.trees.items.len == 0) return null; - return self.trees.items[self.trees.items.len - 1]; - } + // @TODO RemoveMe + // pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree { + // if (self.trees.items.len == 0) return null; + // return self.trees.items[self.trees.items.len - 1]; + // } pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; @@ -260,9 +261,11 @@ pub const ImportContext = struct { // If we did, set our new handle and return the parsed tree root node. if (std.mem.eql(u8, uri, final_uri)) { self.handle = self.store.getHandle(final_uri) orelse return null; + + self.tree.deinit(); if (try self.handle.saneTree(allocator)) |tree| { - try self.trees.append(tree); - return &tree.root_node.base; + self.tree = tree; + return &self.tree.root_node.base; } return null; } @@ -274,9 +277,11 @@ pub const ImportContext = struct { // If it is, increment the count, set our new handle and return the parsed tree root node. new_handle.count += 1; self.handle = new_handle; + + self.tree.deinit(); if (try self.handle.saneTree(allocator)) |tree| { - try self.trees.append(tree); - return &tree.root_node.base; + self.tree = tree; + return &self.tree.root_node.base; } return null; } @@ -311,27 +316,28 @@ pub const ImportContext = struct { self.handle = try newDocument(self.store, try std.mem.dupe(allocator, u8, final_uri), file_contents); } + // Free old tree, add new one if it exists. + // If we return null, no one should access the tree. 
+ self.tree.deinit(); if (try self.handle.saneTree(allocator)) |tree| { - try self.trees.append(tree); - return &tree.root_node.base; + self.tree = tree; + return &self.tree.root_node.base; } return null; } pub fn deinit(self: *ImportContext) void { - for (self.trees.items) |tree| { - tree.deinit(); - } - - self.trees.deinit(); + self.tree.deinit(); } }; -pub fn importContext(self: *DocumentStore, handle: *Handle) ImportContext { - return .{ +pub fn importContext(self: *DocumentStore, handle: *Handle) !?ImportContext { + const tree = (try handle.saneTree(self.allocator)) orelse return null; + + return ImportContext{ .store = self, .handle = handle, - .trees = std.ArrayList(*std.zig.ast.Tree).init(self.allocator), + .tree = tree, }; } diff --git a/src/main.zig b/src/main.zig index 0f19a99..8f4d8a1 100644 --- a/src/main.zig +++ b/src/main.zig @@ -254,8 +254,8 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { } fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void { - const tree = (try handle.saneTree(allocator)) orelse { - return try send(types.Response{ + var import_ctx = (try document_store.importContext(handle)) orelse { + return send(types.Response{ .id = .{.Integer = id}, .result = .{ .CompletionList = .{ @@ -265,7 +265,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P }, }); }; - defer tree.deinit(); + defer import_ctx.deinit(); // We use a local arena allocator to deallocate all temporary data without iterating var arena = std.heap.ArenaAllocator.init(allocator); @@ -276,13 +276,10 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P var line = try handle.document.getLine(@intCast(usize, position.line)); var tokenizer = std.zig.Tokenizer.init(line); - // @TODO Pass import ctx. 
- var import_ctx = document_store.importContext(handle); - defer import_ctx.deinit(); - if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| { + if (analysis.getFieldAccessTypeNode(&import_ctx, &tokenizer)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { - if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree() orelse tree, child_node, config)) |completion| { + if (try nodeToCompletion(&arena.allocator, import_ctx.tree, child_node, config)) |completion| { try completions.append(completion); } index += 1; From 6db3c74550b33e1b05eb2dfb73e436ef3b7274cf Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 12:23:20 +0300 Subject: [PATCH 09/14] Renamed import context to analysis context, added arena for temporary allocations while analyzing --- src/analysis.zig | 47 +++++++++++++++++++++--------------------- src/document_store.zig | 20 ++++++++---------- src/main.zig | 15 +++++++------- 3 files changed, 39 insertions(+), 43 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index e3a3341..5cf4b65 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const ImportContext = @import("document_store.zig").ImportContext; +const AnalysisContext = @import("document_store.zig").AnalysisContext; const ast = std.zig.ast; /// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! (only for testing) @@ -178,21 +178,21 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { } /// Resolves the type of a node -pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node { +pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node { switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; - return resolveTypeOfNode(import_ctx, vari.type_node orelse vari.init_node.?) orelse null; + return resolveTypeOfNode(analysis_ctx, vari.type_node orelse vari.init_node.?) 
orelse null; }, .FnProto => { const func = node.cast(ast.Node.FnProto).?; switch (func.return_type) { - .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(import_ctx, return_type), + .Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type), } }, .Identifier => { - if (getChild(import_ctx.tree, &import_ctx.tree.root_node.base, import_ctx.tree.getNodeSource(node))) |child| { - return resolveTypeOfNode(import_ctx, child); + if (getChild(analysis_ctx.tree, &analysis_ctx.tree.root_node.base, analysis_ctx.tree.getNodeSource(node))) |child| { + return resolveTypeOfNode(analysis_ctx, child); } else return null; }, .ContainerDecl => { @@ -200,13 +200,13 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node }, .ContainerField => { const field = node.cast(ast.Node.ContainerField).?; - return resolveTypeOfNode(import_ctx, field.type_expr.?); + return resolveTypeOfNode(analysis_ctx, field.type_expr.?); }, .SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { .Call => { - return resolveTypeOfNode(import_ctx, suffix_op.lhs.node); + return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node); }, else => {} } @@ -217,12 +217,11 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node .Period => { // Save the child string from this tree since the tree may switch when processing // an import lhs. 
- var rhs_str = nodeToString(import_ctx.tree, infix_op.rhs) orelse return null; - // @TODO: This is hackish, pass an explicit allocator or smth - rhs_str = std.mem.dupe(import_ctx.store.allocator, u8, rhs_str) catch return null; - defer import_ctx.store.allocator.free(rhs_str); - const left = resolveTypeOfNode(import_ctx, infix_op.lhs) orelse return null; - return getChild(import_ctx.tree, left, rhs_str); + var rhs_str = nodeToString(analysis_ctx.tree, infix_op.rhs) orelse return null; + // Use the analysis context temporary arena to store the rhs string. + rhs_str = std.mem.dupe(&analysis_ctx.arena.allocator, u8, rhs_str) catch return null; + const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null; + return getChild(analysis_ctx.tree, left, rhs_str); }, else => {} } @@ -231,21 +230,21 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node const prefix_op = node.cast(ast.Node.PrefixOp).?; switch (prefix_op.op) { .PtrType => { - return resolveTypeOfNode(import_ctx, prefix_op.rhs); + return resolveTypeOfNode(analysis_ctx, prefix_op.rhs); }, else => {} } }, .BuiltinCall => { const builtin_call = node.cast(ast.Node.BuiltinCall).?; - if (!std.mem.eql(u8, import_ctx.tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; + if (!std.mem.eql(u8, analysis_ctx.tree.tokenSlice(builtin_call.builtin_token), "@import")) return null; if (builtin_call.params.len > 1) return null; const import_param = builtin_call.params.at(0).*; if (import_param.id != .StringLiteral) return null; - const import_str = import_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); - return import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: { + const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + return analysis_ctx.onImport(import_str[1 .. 
import_str.len - 1]) catch |err| block: { std.debug.warn("Error {} while proessing import {}\n", .{err, import_str}); break :block null; }; @@ -257,8 +256,8 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node return null; } -pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { - var current_node = &import_ctx.tree.root_node.base; +pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { + var current_node = &analysis_ctx.tree.root_node.base; while (true) { var next = tokenizer.next(); @@ -269,8 +268,8 @@ pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.To .Identifier => { // var root = current_node.cast(ast.Node.Root).?; // current_node. - if (getChild(import_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { - if (resolveTypeOfNode(import_ctx, child)) |node_type| { + if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| { + if (resolveTypeOfNode(analysis_ctx, child)) |node_type| { current_node = node_type; } else return null; } else return null; @@ -280,8 +279,8 @@ pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.To if (after_period.id == .Eof) { return current_node; } else if (after_period.id == .Identifier) { - if (getChild(import_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { - if (resolveTypeOfNode(import_ctx, child)) |child_type| { + if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { + if (resolveTypeOfNode(analysis_ctx, child)) |child_type| { current_node = child_type; } else return null; } else return null; diff --git a/src/document_store.zig b/src/document_store.zig index d0fb4bd..1b142f5 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -217,18 +217,15 @@ pub fn applyChanges(self: 
*DocumentStore, handle: *Handle, content_changes: std. // @TODO: Make this hold a single tree, remove tree param // from analysis functions that take an import_context. // (can we reset-reuse it or do we need to deinit-init a new one?) -pub const ImportContext = struct { +pub const AnalysisContext = struct { store: *DocumentStore, handle: *Handle, + // This arena is used for temporary allocations while analyzing, + // not for the tree allocations. + arena: *std.heap.ArenaAllocator, tree: *std.zig.ast.Tree, - // @TODO RemoveMe - // pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree { - // if (self.trees.items.len == 0) return null; - // return self.trees.items[self.trees.items.len - 1]; - // } - - pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node { + pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; const final_uri = if (std.mem.eql(u8, import_str, "std")) @@ -326,17 +323,18 @@ pub const ImportContext = struct { return null; } - pub fn deinit(self: *ImportContext) void { + pub fn deinit(self: *AnalysisContext) void { self.tree.deinit(); } }; -pub fn importContext(self: *DocumentStore, handle: *Handle) !?ImportContext { +pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !?AnalysisContext { const tree = (try handle.saneTree(self.allocator)) orelse return null; - return ImportContext{ + return AnalysisContext{ .store = self, .handle = handle, + .arena = arena, .tree = tree, }; } diff --git a/src/main.zig b/src/main.zig index 8f4d8a1..e33676d 100644 --- a/src/main.zig +++ b/src/main.zig @@ -254,7 +254,10 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void { } fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void { - var import_ctx = (try document_store.importContext(handle)) orelse { + var arena = 
std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse { return send(types.Response{ .id = .{.Integer = id}, .result = .{ @@ -265,21 +268,17 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P }, }); }; - defer import_ctx.deinit(); + defer analysis_ctx.deinit(); - // We use a local arena allocator to deallocate all temporary data without iterating - var arena = std.heap.ArenaAllocator.init(allocator); var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator); - // Deallocate all temporary data. - defer arena.deinit(); var line = try handle.document.getLine(@intCast(usize, position.line)); var tokenizer = std.zig.Tokenizer.init(line); - if (analysis.getFieldAccessTypeNode(&import_ctx, &tokenizer)) |node| { + if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| { var index: usize = 0; while (node.iterate(index)) |child_node| { - if (try nodeToCompletion(&arena.allocator, import_ctx.tree, child_node, config)) |completion| { + if (try nodeToCompletion(&arena.allocator, analysis_ctx.tree, child_node, config)) |completion| { try completions.append(completion); } index += 1; From 112d38e7faff3ad47356439aed96c34227b2ec39 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 13:57:55 +0300 Subject: [PATCH 10/14] Fixed crash in snippet generation of varargs functions --- src/analysis.zig | 1 + src/document_store.zig | 5 +---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 5cf4b65..e1c39aa 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -110,6 +110,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: if (param_decl.var_args_token) |_| { try buffer.appendSlice("..."); + continue; } var curr_tok = param_decl.type_node.firstToken(); diff --git a/src/document_store.zig b/src/document_store.zig index 
1b142f5..8eb9da9 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -214,9 +214,6 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. // Perhaps on new sane text we can go through imports // and remove those that are in the import_uris table // but not in the file anymore. -// @TODO: Make this hold a single tree, remove tree param -// from analysis functions that take an import_context. -// (can we reset-reuse it or do we need to deinit-init a new one?) pub const AnalysisContext = struct { store: *DocumentStore, handle: *Handle, @@ -227,7 +224,7 @@ pub const AnalysisContext = struct { pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; - + const final_uri = if (std.mem.eql(u8, import_str, "std")) if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) else { From f5e4586c760093445888f11e51c36bf3861666ba Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 14:51:07 +0300 Subject: [PATCH 11/14] Detect import removal and decrement document reference count --- src/analysis.zig | 45 ++++++++++++++++++++ src/document_store.zig | 97 +++++++++++++++++++++++++++++++----------- 2 files changed, 116 insertions(+), 26 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index e1c39aa..ff58b41 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -257,6 +257,51 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.
return null; } +fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: *std.ArrayList([]const u8)) !void { + if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return; + if (builtin_call.params.len > 1) return; + + const import_param = builtin_call.params.at(0).*; + if (import_param.id != .StringLiteral) return; + + const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token); + try arr.append(import_str[1 .. import_str.len - 1]); +} + +/// Collects all imports we can find into a slice of import paths (without quotes). +/// The import paths are valid as long as the tree is. +pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const u8 { + // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;` + var arr = std.ArrayList([]const u8).init(allocator); + + var idx: usize = 0; + while (tree.root_node.iterate(idx)) |decl| : (idx += 1) { + if (decl.id != .VarDecl) continue; + const var_decl = decl.cast(ast.Node.VarDecl).?; + if (var_decl.init_node == null) continue; + + switch(var_decl.init_node.?.id) { + .BuiltinCall => { + const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?; + try maybeCollectImport(tree, builtin_call, &arr); + }, + .InfixOp => { + const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?; + + switch(infix_op.op) { + .Period => {}, + else => continue, + } + if (infix_op.lhs.id != .BuiltinCall) continue; + try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, &arr); + }, + else => {}, + } + } + + return arr.toOwnedSlice(); +} + pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { var current_node = &analysis_ctx.tree.root_node.base; diff --git a/src/document_store.zig b/src/document_store.zig index 8eb9da9..af73839 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -1,6 +1,7 @@ const std = @import("std"); const
types = @import("types.zig"); const URI = @import("uri.zig"); +const analysis = @import("analysis.zig"); const DocumentStore = @This(); @@ -151,6 +152,52 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { } handle.document.sane_text = try std.mem.dupe(self.allocator, u8, handle.document.text); + + // TODO: Better algorithm or data structure? + // Removing the imports is costly since they live in an array list + // Perhaps we should use an AutoHashMap([]const u8, {}) ? + + // Try to detect removed imports and decrement their counts. + if (handle.import_uris.items.len == 0) return; + + const import_strs = try analysis.collectImports(self.allocator, dirty_tree); + defer self.allocator.free(import_strs); + + const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len); + defer self.allocator.free(still_exist); + + for (still_exist) |*ex| { + ex.* = false; + } + + for (import_strs) |str| { + const uri = (try uriFromImportStr(self, handle, str)) orelse continue; + defer self.allocator.free(uri); + + var idx: usize = 0; + exists_loop: while (idx < still_exist.len) : (idx += 1) { + if (still_exist[idx]) continue; + + if (std.mem.eql(u8, handle.import_uris.items[idx], uri)) { + still_exist[idx] = true; + break :exists_loop; + } + } + } + + // Go through still_exist, remove the items that are false and decrement their handle counts. + var offset: usize = 0; + var idx: usize = 0; + while (idx < still_exist.len) : (idx += 1) { + if (still_exist[idx]) continue; + + std.debug.warn("Import removed: {}\n", .{handle.import_uris.items[idx - offset]}); + const uri = handle.import_uris.orderedRemove(idx - offset); + offset += 1; + + self.closeDocument(uri); + self.allocator.free(uri); + } } pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void { @@ -209,11 +256,29 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. 
try self.checkSanity(handle); } -// @TODO: We only reduce the count upon closing, -// find a way to reduce it when removing imports. -// Perhaps on new sane text we can go through imports -// and remove those that are in the import_uris table -// but not in the file anymore. +fn uriFromImportStr(store: *DocumentStore, handle: *Handle, import_str: []const u8) !?[]const u8 { + return if (std.mem.eql(u8, import_str, "std")) + if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri) + else { + std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); + return null; + } + else b: { + // Find relative uri + const path = try URI.parse(store.allocator, handle.uri()); + defer store.allocator.free(path); + + const dir_path = std.fs.path.dirname(path) orelse ""; + const import_path = try std.fs.path.resolve(store.allocator, &[_][]const u8 { + dir_path, import_str + }); + + defer store.allocator.free(import_path); + + break :b (try URI.fromPath(store.allocator, import_path)); + }; +} + pub const AnalysisContext = struct { store: *DocumentStore, handle: *Handle, @@ -224,27 +289,7 @@ pub const AnalysisContext = struct { pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; - - const final_uri = if (std.mem.eql(u8, import_str, "std")) - if (self.store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) - else { - std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); - return null; - } - else b: { - // Find relative uri - const path = try URI.parse(allocator, self.handle.uri()); - defer allocator.free(path); - - const dir_path = std.fs.path.dirname(path) orelse ""; - const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 { - dir_path, import_str - }); - - defer allocator.free(import_path); - - break :b (try URI.fromPath(allocator, import_path)); - }; + const final_uri = (try uriFromImportStr(self.store, 
self.handle, import_str)) orelse return null; std.debug.warn("Import final URI: {}\n", .{final_uri}); var consumed_final_uri = false; From 942b6178654d6dc44f47d6b2659f01709b5180e8 Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 15:09:27 +0300 Subject: [PATCH 12/14] Updated README, config.zig doc comment --- README.md | 1 + src/config.zig | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 3913b01..783fb26 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,7 @@ The following options are currently available. | Option | Type | Default value | What it Does | | --- | --- | --- | --- | | `enable_snippets` | `bool` | `true` | Enables snippet completion, set to false for compatibility with language clients that do not support snippets (such as ale). | +| `zig_lib_path` | `?[]const u8` | `null` | zig library path, used to analyze std library imports. | ## Usage diff --git a/src/config.zig b/src/config.zig index 5913b6f..1b70383 100644 --- a/src/config.zig +++ b/src/config.zig @@ -3,5 +3,5 @@ /// Whether to enable snippet completions enable_snippets: bool = true, -/// zig installation path +/// zig library path zig_lib_path: ?[]const u8 = null, From ca0bc205e4cbf6bfa5c50c7fd24f66513598937c Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 15:15:27 +0300 Subject: [PATCH 13/14] Fixed crash with enum fields --- src/analysis.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/analysis.zig b/src/analysis.zig index ff58b41..0ce83b9 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -201,7 +201,7 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. 
}, .ContainerField => { const field = node.cast(ast.Node.ContainerField).?; - return resolveTypeOfNode(analysis_ctx, field.type_expr.?); + return resolveTypeOfNode(analysis_ctx, field.type_expr orelse return null); }, .SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; From 197ba84a82587daac20f1fb1b0da04038e071b8c Mon Sep 17 00:00:00 2001 From: Alexandros Naskos Date: Thu, 14 May 2020 15:26:10 +0300 Subject: [PATCH 14/14] Finished DocumentStore.deinit() --- src/document_store.zig | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/document_store.zig b/src/document_store.zig index af73839..2ad2256 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -382,7 +382,23 @@ pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.A } pub fn deinit(self: *DocumentStore) void { - // @TODO: Deinit everything! + var entry_iterator = self.handles.iterator(); + while (entry_iterator.next()) |entry| { + self.allocator.free(entry.value.document.mem); + if (entry.value.document.sane_text) |sane| { + self.allocator.free(sane); + } + + for (entry.value.import_uris.items) |uri| { + self.allocator.free(uri); + } + + entry.value.import_uris.deinit(); + self.allocator.free(entry.key); + } self.handles.deinit(); + if (self.std_uri) |uri| { + self.allocator.free(uri); + } }