diff --git a/README.md b/README.md index 56762a1..91e237e 100644 --- a/README.md +++ b/README.md @@ -27,9 +27,9 @@ zig build ### Build Options -| Option | Type | What it Does | -| --- | --- | --- | -| `-Ddata_version` | `string` | The data file version. Any files in the `src/data` file that correspond with the Zig version you want the language server to build for (0.6.0, master). +| Option | Type | Default Value | What it Does | +| --- | --- | --- | --- | +| `-Ddata_version` | `string` (master or 0.6.0) | 0.6.0 | The data file version. This selects the files in the `src/data` folder that correspond to the Zig version being served. Then, you can use the `zls` executable in an editor of your choice that has a Zig language server client! diff --git a/src/analysis.zig b/src/analysis.zig index 351b4b8..00b8d4e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -180,61 +180,57 @@ pub fn isPascalCase(name: []const u8) bool { // ANALYSIS ENGINE +pub fn getDeclNameToken(tree: *ast.Tree, node: *ast.Node) ?ast.TokenIndex { + switch (node.id) { + .VarDecl => { + const vari = node.cast(ast.Node.VarDecl).?; + return vari.name_token; + }, + .ParamDecl => { + const decl = node.cast(ast.Node.ParamDecl).?; + if (decl.name_token == null) return null; + return decl.name_token.?; + }, + .FnProto => { + const func = node.cast(ast.Node.FnProto).?; + if (func.name_token == null) return null; + return func.name_token.?; + }, + .ContainerField => { + const field = node.cast(ast.Node.ContainerField).?; + return field.name_token; + }, + else => {}, + } + + return null; +} + +fn getDeclName(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { + return tree.tokenSlice(getDeclNameToken(tree, node) orelse return null); +} + /// Gets the child of node pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node { var index: usize = 0; - while (node.iterate(index)) |child| { - switch (child.id) { - .VarDecl => { - const vari = child.cast(ast.Node.VarDecl).?; - if (std.mem.eql(u8, 
tree.tokenSlice(vari.name_token), name)) return child; - }, - .FnProto => { - const func = child.cast(ast.Node.FnProto).?; - if (func.name_token != null and std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return child; - }, - .ContainerField => { - const field = child.cast(ast.Node.ContainerField).?; - if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; - }, - else => {}, - } - index += 1; + while (node.iterate(index)) |child| : (index += 1) { + const child_name = getDeclName(tree, child) orelse continue; + if (std.mem.eql(u8, child_name, name)) return child; } return null; } /// Gets the child of slice pub fn getChildOfSlice(tree: *ast.Tree, nodes: []*ast.Node, name: []const u8) ?*ast.Node { - // var index: usize = 0; for (nodes) |child| { - switch (child.id) { - .VarDecl => { - const vari = child.cast(ast.Node.VarDecl).?; - if (std.mem.eql(u8, tree.tokenSlice(vari.name_token), name)) return child; - }, - .ParamDecl => { - const decl = child.cast(ast.Node.ParamDecl).?; - if (decl.name_token != null and std.mem.eql(u8, tree.tokenSlice(decl.name_token.?), name)) return child; - }, - .FnProto => { - const func = child.cast(ast.Node.FnProto).?; - if (func.name_token != null and std.mem.eql(u8, tree.tokenSlice(func.name_token.?), name)) return child; - }, - .ContainerField => { - const field = child.cast(ast.Node.ContainerField).?; - if (std.mem.eql(u8, tree.tokenSlice(field.name_token), name)) return child; - }, - else => {}, - } - // index += 1; + const child_name = getDeclName(tree, child) orelse continue; + if (std.mem.eql(u8, child_name, name)) return child; } return null; } /// Resolves the type of a node pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node { - std.debug.warn("NODE {}\n", .{node}); switch (node.id) { .VarDecl => { const vari = node.cast(ast.Node.VarDecl).?; @@ -254,22 +250,14 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. 
return node; }, .Identifier => { - // std.debug.warn("IDENTIFIER {}\n", .{analysis_ctx.tree.getNodeSource(node)}); if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, analysis_ctx.tree.getNodeSource(node))) |child| { - // std.debug.warn("CHILD {}\n", .{child}); return resolveTypeOfNode(analysis_ctx, child); } else return null; }, - .ContainerDecl => { - return node; - }, .ContainerField => { const field = node.cast(ast.Node.ContainerField).?; return resolveTypeOfNode(analysis_ctx, field.type_expr orelse return null); }, - .ErrorSetDecl => { - return node; - }, .SuffixOp => { const suffix_op = node.cast(ast.Node.SuffixOp).?; switch (suffix_op.op) { @@ -337,12 +325,8 @@ pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast. break :block null; }; }, - .MultilineStringLiteral, .StringLiteral => { - return node; - }, - else => { - std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}); - }, + .MultilineStringLiteral, .StringLiteral, .ContainerDecl, .ErrorSetDecl => return node, + else => std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}), } return null; } @@ -360,10 +344,8 @@ fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: /// Collects all imports we can find into a slice of import paths (without quotes). /// The import paths are valid as long as the tree is. 
-pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const u8 { - // TODO: Currently only detects `const smth = @import("string literal")<.SometThing>;` - var arr = std.ArrayList([]const u8).init(allocator); - +pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !void { + // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;` var idx: usize = 0; while (tree.root_node.iterate(idx)) |decl| : (idx += 1) { if (decl.id != .VarDecl) continue; @@ -373,7 +355,7 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const switch (var_decl.init_node.?.id) { .BuiltinCall => { const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?; - try maybeCollectImport(tree, builtin_call, &arr); + try maybeCollectImport(tree, builtin_call, import_arr); }, .InfixOp => { const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?; @@ -383,27 +365,25 @@ pub fn collectImports(allocator: *std.mem.Allocator, tree: *ast.Tree) ![][]const else => continue, } if (infix_op.lhs.id != .BuiltinCall) continue; - try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, &arr); + try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, import_arr); }, else => {}, } } - - return arr.toOwnedSlice(); } -pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node { +pub fn getFieldAccessTypeNode( + analysis_ctx: *AnalysisContext, + tokenizer: *std.zig.Tokenizer, + line_length: usize, +) ?*ast.Node { var current_node = &analysis_ctx.tree.root_node.base; while (true) { var next = tokenizer.next(); switch (next.id) { - .Eof => { - return current_node; - }, + .Eof => return current_node, .Identifier => { - // var root = current_node.cast(ast.Node.Root).?; - // current_node. 
if (getChildOfSlice(analysis_ctx.tree, analysis_ctx.scope_nodes, tokenizer.buffer[next.start..next.end])) |child| { if (resolveTypeOfNode(analysis_ctx, child)) |node_type| { current_node = node_type; @@ -412,9 +392,12 @@ pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zi }, .Period => { var after_period = tokenizer.next(); - if (after_period.id == .Eof) { + if (after_period.id == .Eof or after_period.id == .Comma) { return current_node; } else if (after_period.id == .Identifier) { + // TODO: This works for now, maybe we should filter based on the partial identifier ourselves? + if (after_period.end == line_length) return current_node; + if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| { if (resolveTypeOfNode(analysis_ctx, child)) |child_type| { current_node = child_type; @@ -422,9 +405,7 @@ pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zi } else return null; } }, - else => { - std.debug.warn("Not implemented; {}\n", .{next.id}); - }, + else => std.debug.warn("Not implemented; {}\n", .{next.id}), } } @@ -473,70 +454,44 @@ pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 { return null; } -pub fn declsFromIndexInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node, nodes: *std.ArrayList(*ast.Node)) anyerror!void { +pub fn declsFromIndexInternal(decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, node: *ast.Node) anyerror!void { switch (node.id) { .FnProto => { const func = node.cast(ast.Node.FnProto).?; var param_index: usize = 0; while (param_index < func.params.len) : (param_index += 1) - try declsFromIndexInternal(allocator, tree, func.params.at(param_index).*, nodes); + try declsFromIndexInternal(decls, tree, func.params.at(param_index).*); if (func.body_node) |body_node| - try declsFromIndexInternal(allocator, tree, body_node, nodes); + try declsFromIndexInternal(decls, tree, body_node); }, .Block => { var 
index: usize = 0; - - while (node.iterate(index)) |inode| { - try declsFromIndexInternal(allocator, tree, inode, nodes); - index += 1; + while (node.iterate(index)) |inode| : (index += 1) { + try declsFromIndexInternal(decls, tree, inode); } }, - .VarDecl => { - try nodes.append(node); - }, - .ParamDecl => { - try nodes.append(node); - }, - else => { - try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node)); - }, + .VarDecl, .ParamDecl => try decls.append(node), + else => try addChildrenNodes(decls, tree, node), } } -pub fn getCompletionsFromNode(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) ![]*ast.Node { - var nodes = std.ArrayList(*ast.Node).init(allocator); - +pub fn addChildrenNodes(decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, node: *ast.Node) !void { var index: usize = 0; - while (node.iterate(index)) |child_node| { - try nodes.append(child_node); - - index += 1; + while (node.iterate(index)) |child_node| : (index += 1) { + try decls.append(child_node); } - - return nodes.items; } -pub fn declsFromIndex(allocator: *std.mem.Allocator, tree: *ast.Tree, index: usize) ![]*ast.Node { - var iindex: usize = 0; - +pub fn declsFromIndex(decls: *std.ArrayList(*ast.Node), tree: *ast.Tree, index: usize) !void { var node = &tree.root_node.base; - var nodes = std.ArrayList(*ast.Node).init(allocator); - try nodes.appendSlice(try getCompletionsFromNode(allocator, tree, node)); - - while (node.iterate(iindex)) |inode| { - if (tree.tokens.at(inode.firstToken()).start < index and index < tree.tokens.at(inode.lastToken()).start) { - try declsFromIndexInternal(allocator, tree, inode, &nodes); + try addChildrenNodes(decls, tree, node); + var node_index: usize = 0; + while (node.iterate(node_index)) |inode| : (node_index += 1) { + if (tree.tokens.at(inode.firstToken()).start < index and index < tree.tokens.at(inode.lastToken()).end) { + try declsFromIndexInternal(decls, tree, inode); } - - iindex += 1; } - - if 
(tree.tokens.at(node.firstToken()).start < index and index < tree.tokens.at(node.lastToken()).start) { - return nodes.items; - } - - return nodes.items; } diff --git a/src/document_store.zig b/src/document_store.zig index 0fd2856..0e05fd1 100644 --- a/src/document_store.zig +++ b/src/document_store.zig @@ -64,7 +64,6 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: []u8) !*Handle { .mem = text, }, }; - try self.checkSanity(handle); const kv = try self.handles.getOrPutValue(uri, handle); return kv.value; } @@ -121,10 +120,7 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle { } // Check if the document text is now sane, move it to sane_text if so. -fn checkSanity(self: *DocumentStore, handle: *Handle) !void { - const tree = try handle.tree(self.allocator); - defer tree.deinit(); - +fn removeOldImports(self: *DocumentStore, handle: *Handle) !void { std.debug.warn("New text for document {}\n", .{handle.uri()}); // TODO: Better algorithm or data structure? // Removing the imports is costly since they live in an array list @@ -133,19 +129,22 @@ fn checkSanity(self: *DocumentStore, handle: *Handle) !void { // Try to detect removed imports and decrement their counts. 
if (handle.import_uris.items.len == 0) return; - const import_strs = try analysis.collectImports(self.allocator, tree); - defer self.allocator.free(import_strs); + const tree = try handle.tree(self.allocator); + defer tree.deinit(); - const still_exist = try self.allocator.alloc(bool, handle.import_uris.items.len); - defer self.allocator.free(still_exist); + var arena = std.heap.ArenaAllocator.init(self.allocator); + defer arena.deinit(); + var import_strs = std.ArrayList([]const u8).init(&arena.allocator); + try analysis.collectImports(&import_strs, tree); + + const still_exist = try arena.allocator.alloc(bool, handle.import_uris.items.len); for (still_exist) |*ex| { ex.* = false; } - for (import_strs) |str| { - const uri = (try uriFromImportStr(self, handle.*, str)) orelse continue; - defer self.allocator.free(uri); + for (import_strs.items) |str| { + const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str)) orelse continue; var idx: usize = 0; exists_loop: while (idx < still_exist.len) : (idx += 1) { @@ -226,28 +225,29 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std. 
} } - try self.checkSanity(handle); + try self.removeOldImports(handle); } -fn uriFromImportStr(store: *DocumentStore, handle: Handle, import_str: []const u8) !?[]const u8 { +fn uriFromImportStr(store: *DocumentStore, allocator: *std.mem.Allocator, handle: Handle, import_str: []const u8) !?[]const u8 { return if (std.mem.eql(u8, import_str, "std")) - if (store.std_uri) |std_root_uri| try std.mem.dupe(store.allocator, u8, std_root_uri) else { + if (store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) + else { std.debug.warn("Cannot resolve std library import, path is null.\n", .{}); return null; } else b: { // Find relative uri - const path = try URI.parse(store.allocator, handle.uri()); - defer store.allocator.free(path); + const path = try URI.parse(allocator, handle.uri()); + defer allocator.free(path); const dir_path = std.fs.path.dirname(path) orelse ""; - const import_path = try std.fs.path.resolve(store.allocator, &[_][]const u8{ - dir_path, import_str, + const import_path = try std.fs.path.resolve(allocator, &[_][]const u8 { + dir_path, import_str }); - defer store.allocator.free(import_path); + defer allocator.free(import_path); - break :b (try URI.fromPath(store.allocator, import_path)); + break :b (try URI.fromPath(allocator, import_path)); }; } @@ -262,9 +262,15 @@ pub const AnalysisContext = struct { tree: *std.zig.ast.Tree, scope_nodes: []*std.zig.ast.Node, + fn refreshScopeNodes(self: *AnalysisContext) !void { + var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&self.arena.allocator); + try analysis.addChildrenNodes(&scope_nodes, self.tree, &self.tree.root_node.base); + self.scope_nodes = scope_nodes.items; + } + pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node { const allocator = self.store.allocator; - const final_uri = (try uriFromImportStr(self.store, self.handle.*, import_str)) orelse return null; + const final_uri = (try uriFromImportStr(self.store, self.store.allocator, 
self.handle.*, import_str)) orelse return null; std.debug.warn("Import final URI: {}\n", .{final_uri}); var consumed_final_uri = false; @@ -278,6 +284,7 @@ pub const AnalysisContext = struct { self.tree.deinit(); self.tree = try self.handle.tree(allocator); + try self.refreshScopeNodes(); return &self.tree.root_node.base; } } @@ -291,6 +298,7 @@ pub const AnalysisContext = struct { self.tree.deinit(); self.tree = try self.handle.tree(allocator); + try self.refreshScopeNodes(); return &self.tree.root_node.base; } @@ -330,6 +338,7 @@ pub const AnalysisContext = struct { // If we return null, no one should access the tree. self.tree.deinit(); self.tree = try self.handle.tree(allocator); + try self.refreshScopeNodes(); return &self.tree.root_node.base; } @@ -352,12 +361,16 @@ pub const AnalysisContext = struct { pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator, position: types.Position) !AnalysisContext { const tree = try handle.tree(self.allocator); + + var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator); + try analysis.declsFromIndex(&scope_nodes, tree, try handle.document.positionToIndex(position)); + return AnalysisContext{ .store = self, .handle = handle, .arena = arena, .tree = tree, - .scope_nodes = try analysis.declsFromIndex(&arena.allocator, tree, try handle.document.positionToIndex(position)) + .scope_nodes = scope_nodes.items, }; } diff --git a/src/main.zig b/src/main.zig index 5ba88e4..2baaa82 100644 --- a/src/main.zig +++ b/src/main.zig @@ -11,13 +11,13 @@ const analysis = @import("analysis.zig"); // Code is largely based off of https://github.com/andersfr/zig-lsp/blob/master/server.zig -var stdout: std.fs.File.OutStream = undefined; +var stdout: std.io.BufferedOutStream(4096, std.fs.File.OutStream) = undefined; var allocator: *std.mem.Allocator = undefined; var document_store: DocumentStore = undefined; const initialize_response = - 
\\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"workspace":{"workspaceFolders":{"supported":true}}}}} + \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"workspace":{"workspaceFolders":{"supported":true}}}}} ; const not_implemented_response = @@ -46,26 +46,11 @@ fn send(reqOrRes: var) !void { var mem_buffer: [1024 * 128]u8 = undefined; var fbs = std.io.fixedBufferStream(&mem_buffer); try std.json.stringify(reqOrRes, std.json.StringifyOptions{}, fbs.outStream()); - try stdout.print("Content-Length: {}\r\n\r\n", .{fbs.pos}); - try stdout.writeAll(fbs.getWritten()); -} -fn log(comptime fmt: []const u8, args: var) !void { - // Disable logs on Release modes. 
- if (std.builtin.mode != .Debug) return; - - var message = try std.fmt.allocPrint(allocator, fmt, args); - defer allocator.free(message); - - try send(types.Notification{ - .method = "window/logMessage", - .params = .{ - .LogMessageParams = .{ - .@"type" = .Log, - .message = message, - }, - }, - }); + const stdout_stream = stdout.outStream(); + try stdout_stream.print("Content-Length: {}\r\n\r\n", .{fbs.pos}); + try stdout_stream.writeAll(fbs.getWritten()); + try stdout.flush(); } fn respondGeneric(id: i64, response: []const u8) !void { @@ -82,8 +67,11 @@ fn respondGeneric(id: i64, response: []const u8) !void { // Numbers of character that will be printed from this string: len - 3 brackets // 1 from the beginning (escaped) and the 2 from the arg {} const json_fmt = "{{\"jsonrpc\":\"2.0\",\"id\":{}"; - try stdout.print("Content-Length: {}\r\n\r\n" ++ json_fmt, .{ response.len + id_digits + json_fmt.len - 3, id }); - try stdout.writeAll(response); + + const stdout_stream = stdout.outStream(); + try stdout_stream.print("Content-Length: {}\r\n\r\n" ++ json_fmt, .{ response.len + id_digits + json_fmt.len - 3, id }); + try stdout_stream.writeAll(response); + try stdout.flush(); } // TODO: Is this correct or can we get a better end? 
@@ -184,7 +172,7 @@ fn publishDiagnostics(handle: DocumentStore.Handle, config: Config) !void { fn containerToCompletion(list: *std.ArrayList(types.CompletionItem), analysis_ctx: *DocumentStore.AnalysisContext, container: *std.zig.ast.Node, config: Config) !void { var index: usize = 0; - while (container.iterate(index)) |child_node| : (index+=1) { + while (container.iterate(index)) |child_node| : (index += 1) { if (analysis.isNodePublic(analysis_ctx.tree, child_node)) { try nodeToCompletion(list, analysis_ctx, child_node, config); } @@ -283,6 +271,78 @@ fn nodeToCompletion(list: *std.ArrayList(types.CompletionItem), analysis_ctx: *D } } +fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []const u8 { + var start_idx = pos_index; + while (start_idx > 0 and + (std.ascii.isAlNum(handle.document.text[start_idx]) or handle.document.text[start_idx] == '_')) : (start_idx -= 1) + {} + + var end_idx = pos_index; + while (end_idx < handle.document.text.len and + (std.ascii.isAlNum(handle.document.text[end_idx]) or handle.document.text[end_idx] == '_')) : (end_idx += 1) + {} + + return handle.document.text[start_idx + 1 .. 
end_idx]; +} + +fn gotoDefinitionGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle) !void { + var tree = try handle.tree(allocator); + defer tree.deinit(); + + const name = identifierFromPosition(pos_index, handle); + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + var decl_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator); + try analysis.declsFromIndex(&decl_nodes, tree, pos_index); + + const decl = analysis.getChildOfSlice(tree, decl_nodes.items, name) orelse return try respondGeneric(id, null_result_response); + const name_token = analysis.getDeclNameToken(tree, decl) orelse unreachable; + + try send(types.Response{ + .id = .{ .Integer = id }, + .result = .{ + .Location = .{ + .uri = handle.document.uri, + .range = astLocationToRange(tree.tokenLocation(0, name_token)), + }, + }, + }); +} + +fn gotoDefinitionFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, line_start_idx: usize) !void { + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + var analysis_ctx = try document_store.analysisContext(handle, &arena, position); + defer analysis_ctx.deinit(); + + const pos_index = try handle.document.positionToIndex(position); + var name = identifierFromPosition(pos_index, handle.*); + + const line = try handle.document.getLine(@intCast(usize, position.line)); + var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]); + + const line_length = @ptrToInt(name.ptr) - @ptrToInt(line.ptr) + name.len - line_start_idx; + name = try std.mem.dupe(&arena.allocator, u8, name); + + if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer, line_length)) |container| { + const decl = analysis.getChild(analysis_ctx.tree, container, name) orelse return try respondGeneric(id, null_result_response); + const name_token = analysis.getDeclNameToken(analysis_ctx.tree, decl) orelse unreachable; + return try send(types.Response{ + .id = .{ .Integer = id }, + .result = .{ 
+ .Location = .{ + .uri = analysis_ctx.handle.document.uri, + .range = astLocationToRange(analysis_ctx.tree.tokenLocation(0, name_token)), + }, + }, + }); + } + + try respondGeneric(id, null_result_response); +} + fn completeGlobal(id: i64, pos_index: usize, handle: *DocumentStore.Handle, config: Config) !void { var tree = try handle.tree(allocator); defer tree.deinit(); @@ -296,9 +356,9 @@ fn completeGlobal(id: i64, pos_index: usize, handle: *DocumentStore.Handle, conf var analysis_ctx = try document_store.analysisContext(handle, &arena, types.Position{.line = 0, .character = 0,}); defer analysis_ctx.deinit(); - // var decls = tree.root_node.decls.iterator(0); - var decls = try analysis.declsFromIndex(&arena.allocator, tree, pos_index); - for (decls) |decl_ptr| { + var decl_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator); + try analysis.declsFromIndex(&decl_nodes, tree, pos_index); + for (decl_nodes.items) |decl_ptr| { var decl = decl_ptr.*; try nodeToCompletion(&completions, &analysis_ctx, decl_ptr, config); } @@ -334,39 +394,10 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P const line = try handle.document.getLine(@intCast(usize, position.line)); var tokenizer = std.zig.Tokenizer.init(line[line_start_idx..]); + const line_length = line.len - line_start_idx; - // var decls = try analysis.declsFromIndex(&arena.allocator, analysis_ctx.tree, try handle.document.positionToIndex(position)); - if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| { - try nodeToCompletion(&completions, &analysis_ctx, node, config); - // var index: usize = 0; - // while (node.iterate(index)) |child_node| { - // if (analysis.isNodePublic(analysis_ctx.tree, child_node)) { - // // TODO: Not great to allocate it again and again inside a loop - // // Creating a new context, so that we don't destroy the tree that is iterated above when resolving imports - // const initial_handle = analysis_ctx.handle; - // 
std.debug.warn("\ncompleteFieldAccess calling resolveTypeOfNode for {}\n", .{analysis_ctx.tree.getNodeSource(child_node)}); - // var node_analysis_ctx = try document_store.analysisContext(initial_handle, &arena, nodePosition(analysis_ctx.tree, node)); - // defer node_analysis_ctx.deinit(); - - // const resolved_node = analysis.resolveTypeOfNode(&node_analysis_ctx, child_node); - // if (resolved_node) |n| { - // std.debug.warn("completeFieldAccess resolveTypeOfNode result = {}\n", .{resolved_node}); - // } - - // const completion_node: struct { node: *std.zig.ast.Node, context: *DocumentStore.AnalysisContext } = blk: { - // if (resolved_node) |n| { - // break :blk .{ .node = n, .context = &node_analysis_ctx }; - // } - - // break :blk .{ .node = child_node, .context = &analysis_ctx }; - // }; - - // std.debug.warn("completeFieldAccess resolved_node = {}\n", .{completion_node.context.tree.getNodeSource(completion_node.node)}); - - // try nodeToCompletion(&completions, completion_node.context.tree, completion_node.node, config); - // } - // index += 1; - // } + if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer, line_length)) |node| { + try nodeToCompletion(&completions, analysis_ctx, node, config); } try send(types.Response{ .id = .{ .Integer = id }, @@ -574,7 +605,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v const content_changes = params.getValue("contentChanges").?.Array; const handle = document_store.getHandle(uri) orelse { - try log("Trying to change non existent document {}", .{uri}); + std.debug.warn("Trying to change non existent document {}", .{uri}); return; }; @@ -595,8 +626,8 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v const position = params.getValue("position").?.Object; const handle = document_store.getHandle(uri) orelse { - try log("Trying to complete in non existent document {}", .{uri}); - return; + std.debug.warn("Trying to complete in non existent document 
{}", .{uri}); +            return try respondGeneric(id, no_completions_response); };  const pos = types.Position{ @@ -625,23 +656,41 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v try respondGeneric(id, no_completions_response); } } else if (std.mem.eql(u8, method, "textDocument/signatureHelp")) { - // try respondGeneric(id, - // \\,"result":{"signatures":[{ - // \\"label": "nameOfFunction(aNumber: u8)", - // \\"documentation": {"kind": "markdown", "value": "Description of the function in **Markdown**!"}, - // \\"parameters": [ - // \\{"label": [15, 27], "documentation": {"kind": "markdown", "value": "An argument"}} - // \\] - // \\}]}} - // ); try respondGeneric(id, \\,"result":{"signatures":[]}} ); + } else if (std.mem.eql(u8, method, "textDocument/definition") or + std.mem.eql(u8, method, "textDocument/declaration") or + std.mem.eql(u8, method, "textDocument/typeDefinition")) + { + const document = params.getValue("textDocument").?.Object; + const uri = document.getValue("uri").?.String; + const position = params.getValue("position").?.Object; + + const handle = document_store.getHandle(uri) orelse { + std.debug.warn("Trying to go to definition in non existent document {}", .{uri}); + return try respondGeneric(id, null_result_response); + }; + + const pos = types.Position{ + .line = position.getValue("line").?.Integer, + .character = position.getValue("character").?.Integer - 1, + }; + if (pos.character >= 0) { + const pos_index = try handle.document.positionToIndex(pos); + const pos_context = documentPositionContext(handle.document, pos_index); + + switch (pos_context) { + .var_access => try gotoDefinitionGlobal(id, pos_index, handle.*), + .field_access => |start_idx| try gotoDefinitionFieldAccess(id, handle, pos, start_idx), + else => try respondGeneric(id, null_result_response), + } + } } else if (root.Object.getValue("id")) |_| { - try log("Method with return value not implemented: {}", .{method}); + std.debug.warn("Method with
return value not implemented: {}", .{method}); try respondGeneric(id, not_implemented_response); } else { - try log("Method without return value not implemented: {}", .{method}); + std.debug.warn("Method without return value not implemented: {}", .{method}); } } @@ -662,17 +711,9 @@ pub fn main() anyerror!void { allocator = &debug_alloc_state.allocator; } - // Init buffer for stdin read - - var buffer = std.ArrayList(u8).init(allocator); - defer buffer.deinit(); - - try buffer.resize(4096); - // Init global vars - - const stdin = std.io.getStdIn().inStream(); - stdout = std.io.getStdOut().outStream(); + const in_stream = std.io.getStdIn().inStream(); + stdout = std.io.bufferedOutStream(std.io.getStdOut().outStream()); // Read the configuration, if any. const config_parse_options = std.json.ParseOptions{ .allocator = allocator }; @@ -717,19 +758,19 @@ pub fn main() anyerror!void { defer json_parser.deinit(); while (true) { - const headers = readRequestHeader(allocator, stdin) catch |err| { - try log("{}; exiting!", .{@errorName(err)}); + const headers = readRequestHeader(allocator, in_stream) catch |err| { + std.debug.warn("{}; exiting!", .{@errorName(err)}); return; }; defer headers.deinit(allocator); const buf = try allocator.alloc(u8, headers.content_length); defer allocator.free(buf); - try stdin.readNoEof(buf); + try in_stream.readNoEof(buf); try processJsonRpc(&json_parser, buf, config); json_parser.reset(); if (debug_alloc) |dbg| { - try log("{}", .{dbg.info}); + std.debug.warn("{}\n", .{dbg.info}); } } } diff --git a/src/types.zig b/src/types.zig index 97c1f2f..342329f 100644 --- a/src/types.zig +++ b/src/types.zig @@ -51,7 +51,8 @@ pub const NotificationParams = union(enum) { /// Params of a response (result) pub const ResponseParams = union(enum) { - CompletionList: CompletionList + CompletionList: CompletionList, + Location: Location, }; /// JSONRPC error @@ -282,4 +283,3 @@ pub const CompletionItem = struct { documentation: ?MarkupContent = null // 
filterText: String = .NotDefined, }; -