From faae689ff7ad24040c3c5131faecba0e036751cc Mon Sep 17 00:00:00 2001 From: travisstaloch Date: Sun, 25 Dec 2022 14:44:25 -0800 Subject: [PATCH 01/18] prevent underflow in ast.zig#fullWhile() (#848) --- src/ast.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ast.zig b/src/ast.zig index 9a1709c..81ece0a 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -167,7 +167,7 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While { .else_token = undefined, .error_token = null, }; - var tok_i = info.while_token - 1; + var tok_i = info.while_token -| 1; if (token_tags[tok_i] == .keyword_inline) { result.inline_token = tok_i; tok_i -= 1; From 3139a787a1d51e763595e07eaa6f34f4c09c0d50 Mon Sep 17 00:00:00 2001 From: travisstaloch Date: Mon, 26 Dec 2022 21:45:52 -0800 Subject: [PATCH 02/18] fix another underflow in ast.zig#fullWhile() (#853) --- src/ast.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ast.zig b/src/ast.zig index 81ece0a..3705147 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -173,9 +173,9 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While { tok_i -= 1; } if (token_tags[tok_i] == .colon and - token_tags[tok_i - 1] == .identifier) + token_tags[tok_i -| 1] == .identifier) { - result.label_token = tok_i - 1; + result.label_token = tok_i -| 1; } const last_cond_token = lastToken(tree, info.cond_expr); if (token_tags[last_cond_token + 2] == .pipe) { From 941882371cecc1e9abc0e866c1747d8ad682e7e2 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Tue, 27 Dec 2022 05:52:15 +0000 Subject: [PATCH 03/18] fix memory lifetime issues (#851) --- src/DocumentStore.zig | 82 ++++++++++++++++++++++++------------------- src/analysis.zig | 7 +--- src/references.zig | 2 +- 3 files changed, 47 insertions(+), 44 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index a92f877..d10bf45 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -125,11 +125,11 @@ fn getOrLoadHandleInternal(self: *DocumentStore, uri: Uri) !?*const Handle { var handle = try self.allocator.create(Handle); errdefer self.allocator.destroy(handle); - const dependency_uri = try self.allocator.dupe(u8, uri); - handle.* = (try self.createDocumentFromURI(dependency_uri, false)) orelse return error.Unknown; // error name doesn't matter + handle.* = (try self.createDocumentFromURI(uri, false)) orelse return error.Unknown; // error name doesn't matter + errdefer handle.deinit(self.allocator); const gop = try self.handles.getOrPutValue(self.allocator, handle.uri, handle); - std.debug.assert(!gop.found_existing); + if (gop.found_existing) return error.Unknown; return gop.value_ptr.*; } @@ -149,16 +149,14 @@ pub fn openDocument(self: *DocumentStore, uri: Uri, text: []const u8) error{OutO const duped_text = try self.allocator.dupeZ(u8, text); errdefer self.allocator.free(duped_text); - const duped_uri = try self.allocator.dupeZ(u8, uri); - errdefer self.allocator.free(duped_uri); var handle = try self.allocator.create(Handle); errdefer self.allocator.destroy(handle); - handle.* = try self.createDocument(duped_uri, duped_text, true); + handle.* = try self.createDocument(uri, duped_text, true); errdefer handle.deinit(self.allocator); - try self.handles.putNoClobber(self.allocator, duped_uri, handle); + try self.handles.putNoClobber(self.allocator, handle.uri, handle); return handle.*; } @@ -251,33 +249,29 @@ fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void { const tracy_zone = 
tracy.trace(@src()); defer tracy_zone.end(); + var arena = std.heap.ArenaAllocator.init(self.allocator); + defer arena.deinit(); + var reachable_handles = std.StringHashMapUnmanaged(void){}; - defer reachable_handles.deinit(self.allocator); + defer reachable_handles.deinit(arena.allocator()); var queue = std.ArrayListUnmanaged(Uri){}; - defer { - for (queue.items) |uri| { - self.allocator.free(uri); - } - queue.deinit(self.allocator); - } for (self.handles.values()) |handle| { if (!handle.open) continue; - try reachable_handles.put(self.allocator, handle.uri, {}); + try reachable_handles.put(arena.allocator(), handle.uri, {}); - try self.collectDependencies(self.allocator, handle.*, &queue); + try self.collectDependencies(arena.allocator(), handle.*, &queue); } while (queue.popOrNull()) |uri| { - if (reachable_handles.contains(uri)) continue; - - try reachable_handles.putNoClobber(self.allocator, uri, {}); + const gop = try reachable_handles.getOrPut(arena.allocator(), uri); + if (gop.found_existing) continue; const handle = self.handles.get(uri) orelse continue; - try self.collectDependencies(self.allocator, handle.*, &queue); + try self.collectDependencies(arena.allocator(), handle.*, &queue); } var i: usize = 0; @@ -451,6 +445,7 @@ fn loadBuildConfiguration( const parse_options = std.json.ParseOptions{ .allocator = allocator }; var token_stream = std.json.TokenStream.init(zig_run_result.stdout); var build_config = std.json.parse(BuildConfig, &token_stream, parse_options) catch return error.RunFailed; + errdefer std.json.parseFree(BuildConfig, build_config, parse_options); for (build_config.packages) |*pkg| { const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ directory_path, pkg.path }); @@ -601,15 +596,17 @@ fn uriInImports( return false; } -/// takes ownership of the uri and text passed in. +/// takes ownership of the text passed in. fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) error{OutOfMemory}!Handle { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); var handle: Handle = blk: { - errdefer self.allocator.free(uri); errdefer self.allocator.free(text); + var duped_uri = try self.allocator.dupe(u8, uri); + errdefer self.allocator.free(duped_uri); + var tree = try std.zig.parse(self.allocator, text); errdefer tree.deinit(self.allocator); @@ -618,7 +615,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro break :blk Handle{ .open = open, - .uri = uri, + .uri = duped_uri, .text = text, .tree = tree, .document_scope = document_scope, @@ -642,13 +639,12 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro // TODO: Better logic for detecting std or subdirectories? 
const in_std = std.mem.indexOf(u8, uri, "/std/") != null; if (self.config.zig_exe_path != null and std.mem.endsWith(u8, uri, "/build.zig") and !in_std) { - const dupe_uri = try self.allocator.dupe(u8, uri); - if (self.createBuildFile(dupe_uri)) |build_file| { - try self.build_files.put(self.allocator, dupe_uri, build_file); - handle.is_build_file = true; - } else |err| { - log.debug("Failed to load build file {s}: (error: {})", .{ uri, err }); - } + errdefer |err| log.debug("Failed to load build file {s}: (error: {})", .{ uri, err }); + const duped_uri = try self.allocator.dupe(u8, uri); + var build_file = try self.createBuildFile(duped_uri); + errdefer build_file.deinit(self.allocator); + try self.build_files.putNoClobber(self.allocator, build_file.uri, build_file); + handle.is_build_file = true; } else if (self.config.zig_exe_path != null and !std.mem.endsWith(u8, uri, "/builtin.zig") and !in_std) blk: { log.debug("Going to walk down the tree towards: {s}", .{uri}); // walk down the tree towards the uri. When we hit build.zig files @@ -665,17 +661,24 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro log.debug("found build path: {s}", .{build_path}); - const build_file_uri = URI.fromPath(self.allocator, build_path) catch unreachable; - const gop = try self.build_files.getOrPut(self.allocator, build_file_uri); + const build_file_uri = try URI.fromPath(self.allocator, build_path); + const gop = self.build_files.getOrPut(self.allocator, build_file_uri) catch |err| { + self.allocator.free(build_file_uri); + return err; + }; + if (!gop.found_existing) { + errdefer self.build_files.swapRemoveAt(gop.index); gop.value_ptr.* = try self.createBuildFile(build_file_uri); + } else { + self.allocator.free(build_file_uri); } if (try self.uriAssociatedWithBuild(gop.value_ptr.*, uri)) { - handle.associated_build_file = build_file_uri; + handle.associated_build_file = gop.key_ptr.*; break; } else { - prev_build_file = build_file_uri; + prev_build_file = gop.key_ptr.*; } } @@ -690,7 +693,6 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro return handle; } -/// takes ownership of the uri passed in. fn createDocumentFromURI(self: *DocumentStore, uri: Uri, open: bool) error{OutOfMemory}!?Handle { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); @@ -706,15 +708,21 @@ fn createDocumentFromURI(self: *DocumentStore, uri: Uri, open: bool) error{OutOf return try self.createDocument(uri, file_contents, open); } +/// Caller owns returned memory. fn collectImportUris(self: *const DocumentStore, handle: Handle) error{OutOfMemory}!std.ArrayListUnmanaged(Uri) { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); var imports = try analysis.collectImports(self.allocator, handle.tree); - errdefer imports.deinit(self.allocator); + + var i: usize = 0; + errdefer { + // only free the uris + for (imports.items[0..i]) |uri| self.allocator.free(uri); + imports.deinit(self.allocator); + } // Convert to URIs - var i: usize = 0; while (i < imports.items.len) { const maybe_uri = try self.uriFromImportStr(self.allocator, handle, imports.items[i]); diff --git a/src/analysis.zig b/src/analysis.zig index 7eadf06..3ff791d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1148,12 +1148,7 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator, /// Collects all `@import`'s we can find into a slice of import paths (without quotes). 
pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}!std.ArrayListUnmanaged([]const u8) { var imports = std.ArrayListUnmanaged([]const u8){}; - errdefer { - for (imports.items) |imp| { - allocator.free(imp); - } - imports.deinit(allocator); - } + errdefer imports.deinit(allocator); const tags = tree.tokens.items(.tag); diff --git a/src/references.zig b/src/references.zig index e04df2f..4323c44 100644 --- a/src/references.zig +++ b/src/references.zig @@ -488,7 +488,7 @@ pub fn symbolReferences( } var handle_dependencies = std.ArrayListUnmanaged([]const u8){}; - try store.collectDependencies(store.allocator, handle.*, &handle_dependencies); + try store.collectDependencies(arena.allocator(), handle.*, &handle_dependencies); for (handle_dependencies.items) |uri| { try dependencies.put(arena.allocator(), uri, {}); From 61c0981294c52820d185afe41a0965a722c3e314 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Tue, 27 Dec 2022 06:47:57 +0000 Subject: [PATCH 04/18] Use zig-lsp-codegen (#850) * add lsp.zig * change references from types.zig to lsp.zig * remove types.zig and requests.zig * add tres as a submodule * transition codebase from types.zig to lsp.zig * update lsp.zig * completely overhaul message handler * fix memory errors * partially transition tests to lsp.zig * update lsp.zig * more test fixes * disable failing tests * fix message handling bugs * fix remaining tests * access correct union in diff.applyTextEdits * more message handler fixes * run zig fmt * update tres submodule * fix memory access to freed memory * simplify initialize_msg for testing * check if publishDiagnostics is supported --- .gitmodules | 3 + build.zig | 3 + src/DocumentStore.zig | 3 +- src/{header.zig => Header.zig} | 38 +- src/Server.zig | 1675 ++--- src/analysis.zig | 35 +- src/code_actions.zig | 27 +- src/data/snippets.zig | 4 +- src/diff.zig | 13 +- src/inlay_hints.zig | 36 +- src/lsp.zig | 7852 ++++++++++++++++++++++++ src/main.zig | 36 +- src/offsets.zig | 14 +- src/references.zig | 2 +- src/requests.zig | 324 - src/signature_help.zig | 30 +- src/tres | 1 + src/types.zig | 536 -- src/zls.zig | 5 +- tests/context.zig | 116 +- tests/language_features/cimport.zig | 4 +- tests/lsp_features/completion.zig | 34 +- tests/lsp_features/folding_range.zig | 12 +- tests/lsp_features/inlay_hints.zig | 16 +- tests/lsp_features/references.zig | 14 +- tests/lsp_features/selection_range.zig | 13 +- tests/lsp_features/semantic_tokens.zig | 18 +- tests/utility/offsets.zig | 32 +- 28 files changed, 9015 insertions(+), 1881 deletions(-) rename src/{header.zig => Header.zig} (55%) create mode 100644 src/lsp.zig delete mode 100644 src/requests.zig create mode 160000 src/tres delete mode 100644 src/types.zig diff --git a/.gitmodules b/.gitmodules index afc8517..7e91343 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,3 +4,6 @@ [submodule "src/tracy"] path = src/tracy url = https://github.com/wolfpld/tracy +[submodule "src/tres"] + path = src/tres + url = https://github.com/ziglibs/tres.git diff --git a/build.zig b/build.zig index 370093f..710dc65 100644 --- a/build.zig +++ b/build.zig @@ -92,6 +92,8 @@ pub fn build(b: *std.build.Builder) !void { const known_folders_path = b.option([]const u8, "known-folders", "Path to known-folders package (default: " ++ KNOWN_FOLDERS_DEFAULT_PATH ++ ")") orelse KNOWN_FOLDERS_DEFAULT_PATH; exe.addPackage(.{ .name = "known-folders", .source = .{ .path = known_folders_path } }); + exe.addPackage(.{ .name = "tres", .source = .{ 
.path = "src/tres/tres.zig" } }); + if (enable_tracy) { const client_cpp = "src/tracy/TracyClient.cpp"; @@ -146,6 +148,7 @@ pub fn build(b: *std.build.Builder) !void { } tests.addPackage(.{ .name = "zls", .source = .{ .path = "src/zls.zig" }, .dependencies = exe.packages.items }); + tests.addPackage(.{ .name = "tres", .source = .{ .path = "src/tres/tres.zig" } }); tests.setBuildMode(.Debug); tests.setTarget(target); test_step.dependOn(&tests.step); diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index d10bf45..3165859 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -1,7 +1,6 @@ const std = @import("std"); const builtin = @import("builtin"); -const types = @import("types.zig"); -const requests = @import("requests.zig"); +const types = @import("lsp.zig"); const URI = @import("uri.zig"); const analysis = @import("analysis.zig"); const offsets = @import("offsets.zig"); diff --git a/src/header.zig b/src/Header.zig similarity index 55% rename from src/header.zig rename to src/Header.zig index 3e19971..d0b7d6e 100644 --- a/src/header.zig +++ b/src/Header.zig @@ -1,18 +1,19 @@ const std = @import("std"); -const RequestHeader = struct { - content_length: usize, +const Header = @This(); - /// null implies "application/vscode-jsonrpc; charset=utf-8" - content_type: ?[]const u8, +content_length: usize, - pub fn deinit(self: @This(), allocator: std.mem.Allocator) void { - if (self.content_type) |ct| allocator.free(ct); - } -}; +/// null implies "application/vscode-jsonrpc; charset=utf-8" +content_type: ?[]const u8 = null, -pub fn readRequestHeader(allocator: std.mem.Allocator, instream: anytype) !RequestHeader { - var r = RequestHeader{ +pub fn deinit(self: @This(), allocator: std.mem.Allocator) void { + if (self.content_type) |ct| allocator.free(ct); +} + +// Caller owns returned memory. 
+pub fn parse(allocator: std.mem.Allocator, reader: anytype) !Header { + var r = Header{ .content_length = undefined, .content_type = null, }; @@ -20,7 +21,7 @@ pub fn readRequestHeader(allocator: std.mem.Allocator, instream: anytype) !Reque var has_content_length = false; while (true) { - const header = try instream.readUntilDelimiterAlloc(allocator, '\n', 0x100); + const header = try reader.readUntilDelimiterAlloc(allocator, '\n', 0x100); defer allocator.free(header); if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn; if (header.len == 1) break; @@ -41,3 +42,18 @@ pub fn readRequestHeader(allocator: std.mem.Allocator, instream: anytype) !Reque return r; } + +pub fn format( + header: Header, + comptime unused_fmt_string: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, +) @TypeOf(writer).Error!void { + _ = options; + std.debug.assert(unused_fmt_string.len == 0); + try writer.print("Content-Length: {}\r\n", .{header.content_length}); + if (header.content_type) |content_type| { + try writer.print("Content-Type: {s}\r\n", .{content_type}); + } + try writer.writeAll("\r\n"); +} diff --git a/src/Server.zig b/src/Server.zig index 588af8e..45610b0 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -6,8 +6,7 @@ const build_options = @import("build_options"); const Config = @import("Config.zig"); const configuration = @import("configuration.zig"); const DocumentStore = @import("DocumentStore.zig"); -const requests = @import("requests.zig"); -const types = @import("types.zig"); +const types = @import("lsp.zig"); const analysis = @import("analysis.zig"); const ast = @import("ast.zig"); const references = @import("references.zig"); @@ -25,17 +24,20 @@ const ComptimeInterpreter = @import("ComptimeInterpreter.zig"); const data = @import("data/data.zig"); const snipped_data = @import("data/snippets.zig"); +const tres = @import("tres/tres.zig"); + const log = std.log.scoped(.server); // Server fields config: *Config, allocator: std.mem.Allocator = undefined, -arena: std.heap.ArenaAllocator = undefined, +arena: *std.heap.ArenaAllocator = undefined, document_store: DocumentStore = undefined, builtin_completions: std.ArrayListUnmanaged(types.CompletionItem), client_capabilities: ClientCapabilities = .{}, -offset_encoding: offsets.Encoding = .utf16, +outgoing_messages: std.ArrayListUnmanaged([]const u8) = .{}, +offset_encoding: offsets.Encoding = .@"utf-16", status: enum { /// the server has not received a `initialize` request uninitialized, @@ -55,111 +57,138 @@ const ClientCapabilities = struct { supports_inlay_hints: bool = false, supports_will_save: bool = false, supports_will_save_wait_until: bool = false, + supports_publish_diagnostics: bool = false, hover_supports_md: bool = false, completion_doc_supports_md: bool = false, label_details_support: bool = false, supports_configuration: bool = false, }; -const not_implemented_response = - \\,"error":{"code":-32601,"message":"NotImplemented"}} -; +/// TODO remove anyerror +pub const Error = anyerror || std.mem.Allocator.Error || error{ + ParseError, + InvalidRequest, + MethodNotFound, + InvalidParams, + InternalError, + /// Error code indicating that a server received a notification or + /// request before the server has received the `initialize` request. + ServerNotInitialized, + /// A request failed but it was syntactically correct, e.g the + /// method name was known and the parameters were valid. The error + /// message should contain human readable information about why + /// the request failed. 
+ /// + /// @since 3.17.0 + RequestFailed, + /// The server cancelled the request. This error code should + /// only be used for requests that explicitly support being + /// server cancellable. + /// + /// @since 3.17.0 + ServerCancelled, + /// The server detected that the content of a document got + /// modified outside normal conditions. A server should + /// NOT send this error code if it detects a content change + /// in it unprocessed messages. The result even computed + /// on an older state might still be useful for the client. + /// + /// If a client decides that a result is not of any use anymore + /// the client should cancel the request. + ContentModified, + /// The client has canceled a request and a server as detected + /// the cancel. + RequestCancelled, +}; -const null_result_response = - \\,"result":null} -; -const empty_result_response = - \\,"result":{}} -; -const empty_array_response = - \\,"result":[]} -; -const edit_not_applied_response = - \\,"result":{"applied":false,"failureReason":"feature not implemented"}} -; -const no_completions_response = - \\,"result":{"isIncomplete":false,"items":[]}} -; -const no_signatures_response = - \\,"result":{"signatures":[]}} -; -const no_semantic_tokens_response = - \\,"result":{"data":[]}} -; +fn sendResponse(server: *Server, id: types.RequestId, result: anytype) void { + // TODO validate result type is a possible response + // TODO validate response is from a client to server request + // TODO validate result type -/// Sends a request or response -fn send(writer: anytype, allocator: std.mem.Allocator, reqOrRes: anytype) !void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - var arr = std.ArrayListUnmanaged(u8){}; - defer arr.deinit(allocator); - - try std.json.stringify(reqOrRes, .{}, arr.writer(allocator)); - - try writer.print("Content-Length: {}\r\n\r\n", .{arr.items.len}); - try writer.writeAll(arr.items); + server.sendInternal(id, null, null, "result", result) catch {}; } -pub fn sendErrorResponse(writer: anytype, allocator: std.mem.Allocator, code: types.ErrorCodes, message: []const u8) !void { - try send(writer, allocator, .{ - .@"error" = types.ResponseError{ - .code = @enumToInt(code), - .message = message, - .data = .Null, - }, - }); +fn sendRequest(server: *Server, id: types.RequestId, method: []const u8, params: anytype) void { + // TODO validate method is a request + // TODO validate method is server to client + // TODO validate params type + + server.sendInternal(id, method, null, "params", params) catch {}; } -fn respondGeneric(writer: anytype, id: types.RequestId, response: []const u8) !void { - var buffered_writer = std.io.bufferedWriter(writer); - const buf_writer = buffered_writer.writer(); +fn sendNotification(server: *Server, method: []const u8, params: anytype) void { + // TODO validate method is a notification + // TODO validate method is server to client + // TODO validate params type - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); + server.sendInternal(null, method, null, "params", params) catch {}; +} - const id_len = switch (id) { - .Integer => |id_val| blk: { - if (id_val == 0) break :blk 1; - var digits: usize = 1; - var value = @divTrunc(id_val, 10); - while (value != 0) : (value = @divTrunc(value, 10)) { - digits += 1; - } - break :blk digits; - }, - .String => |str_val| str_val.len + 2, - }; +fn sendResponseError(server: *Server, id: types.RequestId, err: ?types.ResponseError) void { + server.sendInternal(id, null, err, "", void) catch {}; +} - // 
Numbers of character that will be printed from this string: len - 1 brackets - const json_fmt = "{{\"jsonrpc\":\"2.0\",\"id\":"; - - try buf_writer.print("Content-Length: {}\r\n\r\n" ++ json_fmt, .{response.len + id_len + json_fmt.len - 1}); - switch (id) { - .Integer => |int| try buf_writer.print("{}", .{int}), - .String => |str| try buf_writer.print("\"{s}\"", .{str}), +fn sendInternal( + server: *Server, + maybe_id: ?types.RequestId, + maybe_method: ?[]const u8, + maybe_err: ?types.ResponseError, + extra_name: []const u8, + extra: anytype, +) !void { + var buffer = std.ArrayListUnmanaged(u8){}; + var writer = buffer.writer(server.allocator); + try writer.writeAll( + \\{"jsonrpc":"2.0" + ); + if (maybe_id) |id| { + try writer.writeAll( + \\,"id": + ); + try tres.stringify(id, .{}, writer); } + if (maybe_method) |method| { + try writer.writeAll( + \\,"method": + ); + try tres.stringify(method, .{}, writer); + } + if (@TypeOf(extra) != @TypeOf(void)) { + try writer.print( + \\,"{s}": + , .{extra_name}); + try tres.stringify(extra, .{ + .emit_null_optional_fields = false, + }, writer); + } + if (maybe_err) |err| { + try writer.writeAll( + \\,"error": + ); + try tres.stringify(err, .{}, writer); + } + try writer.writeByte('}'); - try buf_writer.writeAll(response); - try buffered_writer.flush(); + const message = try buffer.toOwnedSlice(server.allocator); + errdefer server.allocator.free(message); + + try server.outgoing_messages.append(server.allocator, message); } -fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType, message: []const u8) !void { - try send(writer, server.arena.allocator(), types.Notification{ - .method = "window/showMessage", - .params = .{ - .ShowMessage = .{ - .type = message_type, - .message = message, - }, - }, +fn showMessage(server: *Server, message_type: types.MessageType, message: []const u8) void { + server.sendNotification("window/showMessage", types.ShowMessageParams{ + .type = message_type, + .message = message, }); } -fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void { +fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) !types.PublishDiagnosticsParams { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + std.debug.assert(server.client_capabilities.supports_publish_diagnostics); + const tree = handle.tree; var allocator = server.arena.allocator(); @@ -173,7 +202,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.tokenToRange(tree, err.token, server.offset_encoding), .severity = .Error, - .code = @tagName(err.tag), + .code = .{ .string = @tagName(err.tag) }, .source = "zls", .message = try server.arena.allocator().dupe(u8, fbs.getWritten()), // .relatedInformation = undefined @@ -205,7 +234,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.tokenToRange(tree, import_str_token, server.offset_encoding), .severity = .Hint, - .code = "dot_slash_import", + .code = .{ .string = "dot_slash_import" }, .source = "zls", .message = "A ./ is not needed in imports", }); @@ -236,7 +265,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.tokenToRange(tree, name_token, server.offset_encoding), .severity = .Hint, - .code = "bad_style", + .code = .{ .string = "bad_style" }, .source = "zls", .message = "Functions 
should be camelCase", }); @@ -244,7 +273,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.tokenToRange(tree, name_token, server.offset_encoding), .severity = .Hint, - .code = "bad_style", + .code = .{ .string = "bad_style" }, .source = "zls", .message = "Type functions should be PascalCase", }); @@ -271,7 +300,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.nodeToRange(handle.tree, node, server.offset_encoding), .severity = .Error, - .code = "cImport", + .code = .{ .string = "cImport" }, .source = "zls", .message = try allocator.dupe(u8, pos_and_diag_iterator.rest()), }); @@ -296,7 +325,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.tokenToRange(tree, decl_main_token, server.offset_encoding), .severity = .Hint, - .code = "highlight_global_var_declarations", + .code = .{ .string = "highlight_global_var_declarations" }, .source = "zls", .message = "Global var declaration", }); @@ -315,7 +344,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha try diagnostics.append(allocator, .{ .range = offsets.nodeToRange(tree, err.key_ptr.*, server.offset_encoding), .severity = .Error, - .code = err.value_ptr.code, + .code = .{ .string = err.value_ptr.code }, .source = "zls", .message = err.value_ptr.message, }); @@ -323,15 +352,10 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha } // try diagnostics.appendSlice(allocator, handle.interpreter.?.diagnostics.items); - try send(writer, server.arena.allocator(), types.Notification{ - .method = "textDocument/publishDiagnostics", - .params = .{ - .PublishDiagnostics = .{ - .uri = handle.uri, - .diagnostics = diagnostics.items, - }, - }, - }); + return .{ + .uri = handle.uri, + .diagnostics = diagnostics.items, + }; } fn getAstCheckDiagnostics( @@ -366,6 +390,11 @@ fn getAstCheckDiagnostics( if (term != .Exited) return; + var last_diagnostic: ?types.Diagnostic = null; + // we dont store DiagnosticRelatedInformation in last_diagnostic instead + // its stored in last_related_diagnostics because we need an ArrayList + var last_related_diagnostics: std.ArrayListUnmanaged(types.DiagnosticRelatedInformation) = .{}; + // NOTE: I believe that with color off it's one diag per line; is this correct? 
var line_iterator = std.mem.split(u8, stderr_bytes, "\n"); @@ -384,46 +413,44 @@ fn getAstCheckDiagnostics( }; // zig uses utf-8 encoding for character offsets - const position = offsets.convertPositionEncoding(handle.text, utf8_position, .utf8, server.offset_encoding); + const position = offsets.convertPositionEncoding(handle.text, utf8_position, .@"utf-8", server.offset_encoding); const range = offsets.tokenPositionToRange(handle.text, position, server.offset_encoding); const msg = pos_and_diag_iterator.rest()[1..]; + if (std.mem.startsWith(u8, msg, "note: ")) { + try last_related_diagnostics.append(allocator, .{ + .location = .{ + .uri = handle.uri, + .range = range, + }, + .message = try server.arena.allocator().dupe(u8, msg["note: ".len..]), + }); + continue; + } + + if (last_diagnostic) |*diagnostic| { + diagnostic.relatedInformation = try last_related_diagnostics.toOwnedSlice(allocator); + try diagnostics.append(allocator, diagnostic.*); + last_diagnostic = null; + } + if (std.mem.startsWith(u8, msg, "error: ")) { - try diagnostics.append(allocator, .{ + last_diagnostic = types.Diagnostic{ .range = range, .severity = .Error, - .code = "ast_check", + .code = .{ .string = "ast_check" }, .source = "zls", .message = try server.arena.allocator().dupe(u8, msg["error: ".len..]), - }); - } else if (std.mem.startsWith(u8, msg, "note: ")) { - var latestDiag = &diagnostics.items[diagnostics.items.len - 1]; - - var fresh = if (latestDiag.relatedInformation) |related_information| - try server.arena.allocator().realloc(@ptrCast([]types.DiagnosticRelatedInformation, related_information), related_information.len + 1) - else - try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1); - - const location = types.Location{ - .uri = handle.uri, - .range = range, }; - - fresh[fresh.len - 1] = .{ - .location = location, - .message = try server.arena.allocator().dupe(u8, msg["note: ".len..]), - }; - - latestDiag.relatedInformation = fresh; } else { - try diagnostics.append(allocator, .{ + last_diagnostic = types.Diagnostic{ .range = range, .severity = .Error, - .code = "ast_check", + .code = .{ .string = "ast_check" }, .source = "zls", .message = try server.arena.allocator().dupe(u8, msg), - }); + }; } } } @@ -434,7 +461,7 @@ fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Documen try getAstCheckDiagnostics(server, handle.*, &diagnostics); var builder = code_actions.Builder{ - .arena = &server.arena, + .arena = server.arena, .document_store = &server.document_store, .handle = handle, .offset_encoding = server.offset_encoding, @@ -447,12 +474,18 @@ fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const Documen var text_edits = std.ArrayListUnmanaged(types.TextEdit){}; for (actions.items) |action| { - if (action.kind != .SourceFixAll) continue; + std.debug.assert(action.kind != null); + std.debug.assert(action.edit != null); + std.debug.assert(action.edit.?.changes != null); - if (action.edit.changes.size != 1) continue; - const edits = action.edit.changes.get(handle.uri) orelse continue; + if (action.kind.? 
!= .@"source.fixAll") continue; - try text_edits.appendSlice(allocator, edits.items); + const changes = action.edit.?.changes.?; + if (changes.count() != 1) continue; + + const edits: []const types.TextEdit = changes.get(handle.uri) orelse continue; + + try text_edits.appendSlice(allocator, edits); } return text_edits; @@ -566,23 +599,22 @@ fn nodeToCompletion( const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); - const doc_kind: types.MarkupContent.Kind = if (server.client_capabilities.completion_doc_supports_md) - .Markdown + const doc_kind: types.MarkupKind = if (server.client_capabilities.completion_doc_supports_md) + .markdown else - .PlainText; + .plaintext; - const doc = if (try analysis.getDocComments( + const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation); + + const doc: Documentation = if (try analysis.getDocComments( allocator, handle.tree, node, doc_kind, - )) |doc_comments| - types.MarkupContent{ - .kind = doc_kind, - .value = doc_comments, - } - else - null; + )) |doc_comments| .{ .MarkupContent = types.MarkupContent{ + .kind = doc_kind, + .value = doc_comments, + } } else null; if (ast.isContainer(handle.tree, node)) { const context = DeclToCompletionContext{ @@ -593,7 +625,7 @@ fn nodeToCompletion( }; try analysis.iterateSymbolsContainer( &server.document_store, - &server.arena, + server.arena, node_handle, orig_handle, declToCompletion, @@ -617,7 +649,7 @@ fn nodeToCompletion( const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets; const insert_text = if (use_snippets) blk: { const skip_self_param = !(parent_is_type_val orelse true) and - try analysis.hasSelfParam(&server.arena, &server.document_store, handle, func); + try analysis.hasSelfParam(server.arena, &server.document_store, handle, func); break :blk try analysis.getFunctionSnippet(server.arena.allocator(), tree, func, skip_self_param); } else tree.tokenSlice(func.name_token.?); @@ -641,7 +673,7 @@ fn nodeToCompletion( const var_decl = ast.varDecl(tree, node).?; const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const; - if (try analysis.resolveVarDeclAlias(&server.document_store, &server.arena, node_handle)) |result| { + if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, node_handle)) |result| { const context = DeclToCompletionContext{ .server = server, .completions = list, @@ -792,7 +824,7 @@ fn gotoDefinitionSymbol( const name_token = switch (decl_handle.decl.*) { .ast_node => |node| block: { if (resolve_alias) { - if (try analysis.resolveVarDeclAlias(&server.document_store, &server.arena, .{ .node = node, .handle = handle })) |result| { + if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, .{ .node = node, .handle = handle })) |result| { handle = result.handle; break :block result.nameToken(); @@ -817,12 +849,12 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO const handle = decl_handle.handle; const tree = handle.tree; - const hover_kind: types.MarkupContent.Kind = if (server.client_capabilities.hover_supports_md) .Markdown else .PlainText; + const hover_kind: types.MarkupKind = if (server.client_capabilities.hover_supports_md) .markdown else .plaintext; var doc_str: ?[]const u8 = null; const def_str = switch (decl_handle.decl.*) { .ast_node => |node| def: { - if (try analysis.resolveVarDeclAlias(&server.document_store, &server.arena, .{ .node = node, .handle = handle })) |result| { + if (try 
analysis.resolveVarDeclAlias(&server.document_store, server.arena, .{ .node = node, .handle = handle })) |result| { return try server.hoverSymbol(result); } doc_str = try analysis.getDocComments(server.arena.allocator(), tree, node, hover_kind); @@ -856,7 +888,7 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO }; var bound_type_params = analysis.BoundTypeParams{}; - const resolved_type = try decl_handle.resolveType(&server.document_store, &server.arena, &bound_type_params); + const resolved_type = try decl_handle.resolveType(&server.document_store, server.arena, &bound_type_params); const resolved_type_str = if (resolved_type) |rt| if (rt.type.is_type_val) switch (rt.type.data) { @@ -903,7 +935,7 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO "unknown"; var hover_text: []const u8 = undefined; - if (hover_kind == .Markdown) { + if (hover_kind == .markdown) { hover_text = if (doc_str) |doc| try std.fmt.allocPrint(server.arena.allocator(), "```zig\n{s}\n```\n```zig\n({s})\n```\n{s}", .{ def_str, resolved_type_str, doc }) @@ -918,7 +950,10 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO } return types.Hover{ - .contents = .{ .value = hover_text }, + .contents = .{ .MarkupContent = .{ + .kind = hover_kind, + .value = hover_text, + } }, }; } @@ -943,7 +978,7 @@ fn getSymbolGlobal( const name = identifierFromPosition(pos_index, handle.*); if (name.len == 0) return null; - return try analysis.lookupSymbolGlobal(&server.document_store, &server.arena, handle, name, pos_index); + return try analysis.lookupSymbolGlobal(&server.document_store, server.arena, handle, name, pos_index); } fn gotoDefinitionLabel( @@ -990,11 +1025,14 @@ fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *const Docu if (std.mem.eql(u8, builtin.name[1..], name)) { return types.Hover{ .contents = .{ - .value = try std.fmt.allocPrint( - server.arena.allocator(), - "```zig\n{s}\n```\n{s}", - .{ builtin.signature, builtin.documentation }, - ), + .MarkupContent = .{ + .kind = .markdown, + .value = try std.fmt.allocPrint( + server.arena.allocator(), + "```zig\n{s}\n```\n{s}", + .{ builtin.signature, builtin.documentation }, + ), + }, }, }; } @@ -1026,7 +1064,7 @@ fn getSymbolFieldAccess( var held_range = try server.arena.allocator().dupeZ(u8, offsets.locToSlice(handle.text, loc)); var tokenizer = std.zig.Tokenizer.init(held_range); - if (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) |result| { + if (try analysis.getFieldAccessType(&server.document_store, server.arena, handle, source_index, &tokenizer)) |result| { const container_handle = result.unwrapped orelse result.original; const container_handle_node = switch (container_handle.type.data) { .other => |n| n, @@ -1034,7 +1072,7 @@ fn getSymbolFieldAccess( }; return try analysis.lookupSymbolContainer( &server.document_store, - &server.arena, + server.arena, .{ .node = container_handle_node, .handle = container_handle.handle }, name, true, @@ -1114,15 +1152,14 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl context.parent_is_type_val, ), .param_payload => |pay| { + const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation); + const param = pay.param; - const doc_kind: types.MarkupContent.Kind = if (context.server.client_capabilities.completion_doc_supports_md) .Markdown else .PlainText; - const doc = if (param.first_doc_comment) 
|doc_comments| - types.MarkupContent{ - .kind = doc_kind, - .value = try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false), - } - else - null; + const doc_kind: types.MarkupKind = if (context.server.client_capabilities.completion_doc_supports_md) .markdown else .plaintext; + const doc: Documentation = if (param.first_doc_comment) |doc_comments| .{ .MarkupContent = types.MarkupContent{ + .kind = doc_kind, + .value = try analysis.collectDocComments(allocator, tree, doc_comments, doc_kind, false), + } } else null; const first_token = ast.paramFirstToken(tree, param); const last_token = ast.paramLastToken(tree, param); @@ -1209,7 +1246,7 @@ fn completeGlobal(server: *Server, pos_index: usize, handle: *const DocumentStor .completions = &completions, .orig_handle = handle, }; - try analysis.iterateSymbolsGlobal(&server.document_store, &server.arena, handle, pos_index, declToCompletion, context); + try analysis.iterateSymbolsGlobal(&server.document_store, server.arena, handle, pos_index, declToCompletion, context); try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.generic, server.config.*, null); if (server.client_capabilities.label_details_support) { @@ -1232,7 +1269,7 @@ fn completeFieldAccess(server: *Server, handle: *const DocumentStore.Handle, sou var held_loc = try allocator.dupeZ(u8, offsets.locToSlice(handle.text, loc)); var tokenizer = std.zig.Tokenizer.init(held_loc); - const result = (try analysis.getFieldAccessType(&server.document_store, &server.arena, handle, source_index, &tokenizer)) orelse return null; + const result = (try analysis.getFieldAccessType(&server.document_store, server.arena, handle, source_index, &tokenizer)) orelse return null; try server.typeToCompletion(&completions, result, handle); if (server.client_capabilities.label_details_support) { for (completions.items) |*item| { @@ -1249,6 +1286,7 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! // things a little bit, wich is quite messy // but it works, it provide decent results + std.debug.assert(item.kind != null); if (item.detail == null) return; @@ -1281,14 +1319,14 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! var s: usize = std.mem.indexOf(u8, it, "(") orelse return; var e: usize = std.mem.lastIndexOf(u8, it, ")") orelse return; if (e < s) { - log.warn("something wrong when trying to build label detail for {s} kind: {}", .{ it, item.kind }); + log.warn("something wrong when trying to build label detail for {s} kind: {}", .{ it, item.kind.? }); return; } item.detail = item.label; item.labelDetails = .{ .detail = it[s .. e + 1], .description = it[e + 1 ..] }; - if (item.kind == .Constant) { + if (item.kind.? == .Constant) { if (std.mem.indexOf(u8, it, "= struct")) |_| { item.labelDetails.?.description = "struct"; } else if (std.mem.indexOf(u8, it, "= union")) |_| { @@ -1302,7 +1340,7 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! item.labelDetails.?.description = it[us - 5 .. ue + 1]; } } - } else if ((item.kind == .Variable or item.kind == .Constant) and (isVar or isConst)) { + } else if ((item.kind.? == .Variable or item.kind.? == .Constant) and (isVar or isConst)) { item.insertText = item.label; item.insertTextFormat = .PlainText; item.detail = item.label; @@ -1329,7 +1367,7 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! .description = it[start + 2 .. 
it.len], // right }; } - } else if (item.kind == .Variable) { + } else if (item.kind.? == .Variable) { var s: usize = std.mem.indexOf(u8, it, ":") orelse return; var e: usize = std.mem.indexOf(u8, it, "=") orelse return; @@ -1353,7 +1391,7 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! .detail = "", // left .description = it, // right }; - } else if (item.kind == .Constant or item.kind == .Field) { + } else if (item.kind.? == .Constant or item.kind.? == .Field) { var s: usize = std.mem.indexOf(u8, it, " ") orelse return; var e: usize = std.mem.indexOf(u8, it, "=") orelse it.len; if (e < s) { @@ -1392,7 +1430,7 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) ! } else if (std.mem.indexOf(u8, it, "= enum")) |_| { item.labelDetails.?.description = "enum"; } - } else if (item.kind == .Field and isValue) { + } else if (item.kind.? == .Field and isValue) { item.insertText = item.label; item.insertTextFormat = .PlainText; item.detail = item.label; @@ -1415,7 +1453,7 @@ fn completeError(server: *Server, handle: *const DocumentStore.Handle) ![]types. return try server.document_store.errorCompletionItems(server.arena.allocator(), handle.*); } -fn kindToSortScore(kind: types.CompletionItem.Kind) ?[]const u8 { +fn kindToSortScore(kind: types.CompletionItemKind) ?[]const u8 { return switch (kind) { .Module => "1_", // use for packages .Folder => "2_", @@ -1437,7 +1475,7 @@ fn kindToSortScore(kind: types.CompletionItem.Kind) ?[]const u8 { => "6_", else => { - std.log.debug(@typeName(types.CompletionItem.Kind) ++ "{s} has no sort score specified!", .{@tagName(kind)}); + std.log.debug(@typeName(types.CompletionItemKind) ++ "{s} has no sort score specified!", .{@tagName(kind)}); return null; }, }; @@ -1508,21 +1546,11 @@ fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, store: *const D return completions.toOwnedSlice(allocator); } -fn documentSymbol(server: *Server, writer: anytype, id: types.RequestId, handle: *const DocumentStore.Handle) !void { +fn initializeHandler(server: *Server, request: types.InitializeParams) !types.InitializeResult { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ .DocumentSymbols = try analysis.getDocumentSymbols(server.arena.allocator(), handle.tree, server.offset_encoding) }, - }); -} - -fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.Initialize) !void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - if (req.params.clientInfo) |clientInfo| { + if (request.clientInfo) |clientInfo| { std.log.info("client is '{s}-{s}'", .{ clientInfo.name, clientInfo.version orelse "" }); if (std.mem.eql(u8, clientInfo.name, "Sublime Text LSP")) blk: { @@ -1538,53 +1566,60 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req: } } - if (req.params.capabilities.general) |general| { + if (request.capabilities.general) |general| { var supports_utf8 = false; var supports_utf16 = false; var supports_utf32 = false; - for (general.positionEncodings.value) |encoding| { - if (std.mem.eql(u8, encoding, "utf-8")) { - supports_utf8 = true; - } else if (std.mem.eql(u8, encoding, "utf-16")) { - supports_utf16 = true; - } else if (std.mem.eql(u8, encoding, "utf-32")) { - supports_utf32 = true; + if (general.positionEncodings) |position_encodings| { + for (position_encodings) |encoding| { + switch (encoding) { + .@"utf-8" => 
supports_utf8 = true, + .@"utf-16" => supports_utf16 = true, + .@"utf-32" => supports_utf32 = true, + } } } if (supports_utf8) { - server.offset_encoding = .utf8; + server.offset_encoding = .@"utf-8"; } else if (supports_utf32) { - server.offset_encoding = .utf32; + server.offset_encoding = .@"utf-32"; } else { - server.offset_encoding = .utf16; + server.offset_encoding = .@"utf-16"; } } - if (req.params.capabilities.textDocument) |textDocument| { - server.client_capabilities.supports_semantic_tokens = textDocument.semanticTokens.exists; - server.client_capabilities.supports_inlay_hints = textDocument.inlayHint.exists; + if (request.capabilities.textDocument) |textDocument| { + server.client_capabilities.supports_semantic_tokens = textDocument.semanticTokens != null; + server.client_capabilities.supports_inlay_hints = textDocument.inlayHint != null; + server.client_capabilities.supports_publish_diagnostics = textDocument.publishDiagnostics != null; if (textDocument.hover) |hover| { - for (hover.contentFormat.value) |format| { - if (std.mem.eql(u8, "markdown", format)) { - server.client_capabilities.hover_supports_md = true; + if (hover.contentFormat) |content_format| { + for (content_format) |format| { + if (format == .markdown) { + server.client_capabilities.hover_supports_md = true; + break; + } } } } if (textDocument.completion) |completion| { if (completion.completionItem) |completionItem| { - server.client_capabilities.label_details_support = completionItem.labelDetailsSupport.value; - server.client_capabilities.supports_snippets = completionItem.snippetSupport.value; - for (completionItem.documentationFormat.value) |documentationFormat| { - if (std.mem.eql(u8, "markdown", documentationFormat)) { - server.client_capabilities.completion_doc_supports_md = true; + server.client_capabilities.label_details_support = completionItem.labelDetailsSupport orelse false; + server.client_capabilities.supports_snippets = completionItem.snippetSupport orelse false; + if (completionItem.documentationFormat) |documentation_format| { + for (documentation_format) |format| { + if (format == .markdown) { + server.client_capabilities.completion_doc_supports_md = true; + break; + } } } } } if (textDocument.synchronization) |synchronization| { - server.client_capabilities.supports_will_save = synchronization.willSave.value; - server.client_capabilities.supports_will_save_wait_until = synchronization.willSaveWaitUntil.value; + server.client_capabilities.supports_will_save = synchronization.willSave orelse false; + server.client_capabilities.supports_will_save_wait_until = synchronization.willSaveWaitUntil orelse false; } } @@ -1596,86 +1631,12 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req: } } - try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ - .InitializeResult = .{ - .serverInfo = .{ - .name = "zls", - .version = build_options.version, - }, - .capabilities = .{ - .positionEncoding = server.offset_encoding, - .signatureHelpProvider = .{ - .triggerCharacters = &.{"("}, - .retriggerCharacters = &.{","}, - }, - .textDocumentSync = .{ - .openClose = true, - .change = .Incremental, - .save = true, - .willSave = true, - .willSaveWaitUntil = true, - }, - .renameProvider = true, - .completionProvider = .{ .resolveProvider = false, .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" }, .completionItem = .{ .labelDetailsSupport = true } }, - .documentHighlightProvider = true, - .hoverProvider = true, - .codeActionProvider = true, - 
.declarationProvider = true, - .definitionProvider = true, - .typeDefinitionProvider = true, - .implementationProvider = false, - .referencesProvider = true, - .documentSymbolProvider = true, - .colorProvider = false, - .documentFormattingProvider = true, - .documentRangeFormattingProvider = false, - .foldingRangeProvider = true, - .selectionRangeProvider = true, - .workspaceSymbolProvider = false, - .rangeProvider = false, - .documentProvider = true, - .workspace = .{ - .workspaceFolders = .{ - .supported = false, - .changeNotifications = false, - }, - }, - .semanticTokensProvider = .{ - .full = true, - .range = false, - .legend = .{ - .tokenTypes = comptime block: { - const tokTypeFields = std.meta.fields(semantic_tokens.TokenType); - var names: [tokTypeFields.len][]const u8 = undefined; - for (tokTypeFields) |field, i| { - names[i] = field.name; - } - break :block &names; - }, - .tokenModifiers = comptime block: { - const tokModFields = std.meta.fields(semantic_tokens.TokenModifiers); - var names: [tokModFields.len][]const u8 = undefined; - for (tokModFields) |field, i| { - names[i] = field.name; - } - break :block &names; - }, - }, - }, - .inlayHintProvider = true, - }, - }, - }, - }); - - server.status = .initializing; - - if (req.params.capabilities.workspace) |workspace| { - server.client_capabilities.supports_configuration = workspace.configuration.value; - if (workspace.didChangeConfiguration != null and workspace.didChangeConfiguration.?.dynamicRegistration.value) { - try server.registerCapability(writer, "workspace/didChangeConfiguration"); + if (request.capabilities.workspace) |workspace| { + server.client_capabilities.supports_configuration = workspace.configuration orelse false; + if (workspace.didChangeConfiguration) |did_change| { + if (did_change.dynamicRegistration orelse false) { + try server.registerCapability("workspace/didChangeConfiguration"); + } } } @@ -1683,6 +1644,8 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req: log.info("{}", .{server.client_capabilities}); log.info("Using offset encoding: {s}", .{std.meta.tagName(server.offset_encoding)}); + server.status = .initializing; + if (server.config.zig_exe_path) |exe_path| blk: { // TODO avoid having to call getZigEnv twice // once in init and here @@ -1697,20 +1660,94 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req: "ZLS was built with Zig {}, but your Zig version is {s}. Update Zig to avoid unexpected behavior.", .{ zig_builtin.zig_version, env.version }, ); - try server.showMessage(writer, .Warning, version_mismatch_message); + server.showMessage(.Warning, version_mismatch_message); } } else { - try server.showMessage( - writer, + server.showMessage( .Warning, \\ZLS failed to find Zig. Please add Zig to your PATH or set the zig_exe_path config option in your zls.json. 
, ); } + + return .{ + .serverInfo = .{ + .name = "zls", + .version = build_options.version, + }, + .capabilities = .{ + .positionEncoding = server.offset_encoding, + .signatureHelpProvider = .{ + .triggerCharacters = &.{"("}, + .retriggerCharacters = &.{","}, + }, + .textDocumentSync = .{ + .TextDocumentSyncOptions = .{ + .openClose = true, + .change = .Incremental, + .save = .{ .bool = true }, + .willSave = true, + .willSaveWaitUntil = true, + }, + }, + .renameProvider = .{ .bool = true }, + .completionProvider = .{ + .resolveProvider = false, + .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" }, + .completionItem = .{ .labelDetailsSupport = true }, + }, + .documentHighlightProvider = .{ .bool = true }, + .hoverProvider = .{ .bool = true }, + .codeActionProvider = .{ .bool = true }, + .declarationProvider = .{ .bool = true }, + .definitionProvider = .{ .bool = true }, + .typeDefinitionProvider = .{ .bool = true }, + .implementationProvider = .{ .bool = false }, + .referencesProvider = .{ .bool = true }, + .documentSymbolProvider = .{ .bool = true }, + .colorProvider = .{ .bool = false }, + .documentFormattingProvider = .{ .bool = true }, + .documentRangeFormattingProvider = .{ .bool = false }, + .foldingRangeProvider = .{ .bool = true }, + .selectionRangeProvider = .{ .bool = true }, + .workspaceSymbolProvider = .{ .bool = false }, + .workspace = .{ + .workspaceFolders = .{ + .supported = false, + .changeNotifications = .{ .bool = false }, + }, + }, + .semanticTokensProvider = .{ + .SemanticTokensOptions = .{ + .full = .{ .bool = true }, + .range = .{ .bool = false }, + .legend = .{ + .tokenTypes = comptime block: { + const tokTypeFields = std.meta.fields(semantic_tokens.TokenType); + var names: [tokTypeFields.len][]const u8 = undefined; + for (tokTypeFields) |field, i| { + names[i] = field.name; + } + break :block &names; + }, + .tokenModifiers = comptime block: { + const tokModFields = std.meta.fields(semantic_tokens.TokenModifiers); + var names: [tokModFields.len][]const u8 = undefined; + for (tokModFields) |field, i| { + names[i] = field.name; + } + break :block &names; + }, + }, + }, + }, + .inlayHintProvider = .{ .bool = true }, + }, + }; } -fn initializedHandler(server: *Server, writer: anytype, id: types.RequestId) !void { - _ = id; +fn initializedHandler(server: *Server, notification: types.InitializedParams) !void { + _ = notification; if (server.status != .initializing) { std.log.warn("received a initialized notification but the server has not send a initialize request!", .{}); @@ -1719,26 +1756,17 @@ fn initializedHandler(server: *Server, writer: anytype, id: types.RequestId) !vo server.status = .initialized; if (server.client_capabilities.supports_configuration) - try server.requestConfiguration(writer); + try server.requestConfiguration(); } -fn shutdownHandler(server: *Server, writer: anytype, id: types.RequestId) !void { - if (server.status != .initialized) { - return try sendErrorResponse( - writer, - server.arena.allocator(), - types.ErrorCodes.InvalidRequest, - "received a shutdown request but the server is not initialized!", - ); - } +fn shutdownHandler(server: *Server, _: void) !?void { + if (server.status != .initialized) return error.InvalidRequest; // received a shutdown request but the server is not initialized! 
// Technically we should deinitialize first and send possible errors to the client - return try respondGeneric(writer, id, null_result_response); + return null; } -fn exitHandler(server: *Server, writer: anytype, id: types.RequestId) noreturn { - _ = writer; - _ = id; +fn exitHandler(server: *Server, _: void) noreturn { log.info("Server exiting...", .{}); // Technically we should deinitialize first and send possible errors to the client @@ -1750,36 +1778,34 @@ fn exitHandler(server: *Server, writer: anytype, id: types.RequestId) noreturn { std.os.exit(error_code); } -fn cancelRequestHandler(server: *Server, writer: anytype, id: types.RequestId) !void { - _ = id; - _ = writer; +fn cancelRequestHandler(server: *Server, request: types.CancelParams) !void { _ = server; + _ = request; // TODO implement $/cancelRequest } -fn registerCapability(server: *Server, writer: anytype, method: []const u8) !void { - const id = try std.fmt.allocPrint(server.arena.allocator(), "register-{s}", .{method}); +fn registerCapability(server: *Server, method: []const u8) !void { + const allocator = server.arena.allocator(); + + const id = try std.fmt.allocPrint(allocator, "register-{s}", .{method}); log.debug("Dynamically registering method '{s}'", .{method}); - try send(writer, server.arena.allocator(), types.Request{ - .id = .{ .String = id }, - .method = "client/registerCapability", - .params = types.ResponseParams{ - .RegistrationParams = types.RegistrationParams{ - .registrations = &.{ - .{ - .id = id, - .method = method, - }, - }, - }, - }, - }); + var registrations = try allocator.alloc(types.Registration, 1); + registrations[0] = .{ + .id = id, + .method = method, + }; + + server.sendRequest( + .{ .string = id }, + "client/registerCapability", + types.RegistrationParams{ .registrations = registrations }, + ); } -fn requestConfiguration(server: *Server, writer: anytype) !void { +fn requestConfiguration(server: *Server) !void { const configuration_items = comptime confi: { - var comp_confi: [std.meta.fields(Config).len]types.ConfigurationParams.ConfigurationItem = undefined; + var comp_confi: [std.meta.fields(Config).len]types.ConfigurationItem = undefined; inline for (std.meta.fields(Config)) |field, index| { comp_confi[index] = .{ .section = "zls." ++ field.name, @@ -1789,48 +1815,105 @@ fn requestConfiguration(server: *Server, writer: anytype) !void { break :confi comp_confi; }; - try send(writer, server.arena.allocator(), types.Request{ - .id = .{ .String = "i_haz_configuration" }, - .method = "workspace/configuration", - .params = types.ResponseParams{ - .ConfigurationParams = .{ - .items = &configuration_items, - }, + server.sendRequest( + .{ .string = "i_haz_configuration" }, + "workspace/configuration", + types.ConfigurationParams{ + .items = &configuration_items, }, - }); + ); } -fn openDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.OpenDocument) !void { +fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory}!void { + log.info("Setting configuration...", .{}); + + // NOTE: Does this work with other editors? 
+ // Yes, String ids are officially supported by LSP + // but not sure how standard this "standard" really is + + const result = json.Array; + + inline for (std.meta.fields(Config)) |field, index| { + const value = result.items[index]; + const ft = if (@typeInfo(field.type) == .Optional) + @typeInfo(field.type).Optional.child + else + field.type; + const ti = @typeInfo(ft); + + if (value != .Null) { + const new_value: field.type = switch (ft) { + []const u8 => switch (value) { + .String => |s| blk: { + if (s.len == 0) { + if (field.type == ?[]const u8) { + break :blk null; + } else { + break :blk s; + } + } + var nv = try server.allocator.dupe(u8, s); + if (@field(server.config, field.name)) |prev_val| server.allocator.free(prev_val); + break :blk nv; + }, // TODO: Allocation model? (same with didChangeConfiguration); imo this isn't *that* bad but still + else => @panic("Invalid configuration value"), // TODO: Handle this + }, + else => switch (ti) { + .Int => switch (value) { + .Integer => |s| std.math.cast(ft, s) orelse @panic("Invalid configuration value"), + else => @panic("Invalid configuration value"), // TODO: Handle this + }, + .Bool => switch (value) { + .Bool => |b| b, + else => @panic("Invalid configuration value"), // TODO: Handle this + }, + else => @compileError("Not implemented for " ++ @typeName(ft)), + }, + }; + log.debug("setting configuration option '{s}' to '{any}'", .{ field.name, new_value }); + @field(server.config, field.name) = new_value; + } + } + + configuration.configChanged(server.config, server.allocator, null) catch |err| { + log.err("failed to update configuration: {}", .{err}); + }; +} + +fn openDocumentHandler(server: *Server, notification: types.DidOpenTextDocumentParams) !void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - _ = id; - - const handle = try server.document_store.openDocument(req.params.textDocument.uri, req.params.textDocument.text); - try server.publishDiagnostics(writer, handle); + const handle = try server.document_store.openDocument(notification.textDocument.uri, notification.textDocument.text); + + if (server.client_capabilities.supports_publish_diagnostics) { + const diagnostics = try server.generateDiagnostics(handle); + server.sendNotification("textDocument/publishDiagnostics", diagnostics); + } } -fn changeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.ChangeDocument) !void { +fn changeDocumentHandler(server: *Server, notification: types.DidChangeTextDocumentParams) !void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - _ = id; + const handle = server.document_store.getHandle(notification.textDocument.uri) orelse return; - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse return; - - const new_text = try diff.applyTextEdits(server.allocator, handle.text, req.params.contentChanges, server.offset_encoding); + const new_text = try diff.applyTextEdits(server.allocator, handle.text, notification.contentChanges, server.offset_encoding); try server.document_store.refreshDocument(handle.uri, new_text); - try server.publishDiagnostics(writer, handle.*); + + if (server.client_capabilities.supports_publish_diagnostics) { + const diagnostics = try server.generateDiagnostics(handle.*); + server.sendNotification("textDocument/publishDiagnostics", diagnostics); + } } -fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) !void { +fn saveDocumentHandler(server: *Server, notification: 
types.DidSaveTextDocumentParams) !void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - _ = id; const allocator = server.arena.allocator(); - const uri = req.params.textDocument.uri; + const uri = notification.textDocument.uri; const handle = server.document_store.getHandle(uri) orelse return; try server.document_store.applySave(handle); @@ -1841,108 +1924,82 @@ fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, re if (server.client_capabilities.supports_will_save) return; if (server.client_capabilities.supports_will_save_wait_until) return; - const text_edits = try server.autofix(allocator, handle); + var text_edits = try server.autofix(allocator, handle); var workspace_edit = types.WorkspaceEdit{ .changes = .{} }; - try workspace_edit.changes.putNoClobber(allocator, uri, text_edits); + try workspace_edit.changes.?.putNoClobber(allocator, uri, try text_edits.toOwnedSlice(allocator)); - try send(writer, allocator, types.Request{ - .id = .{ .String = "apply_edit" }, - .method = "workspace/applyEdit", - .params = .{ - .ApplyEdit = .{ - .label = "autofix", - .edit = workspace_edit, - }, + server.sendRequest( + .{ .string = "apply_edit" }, + "workspace/applyEdit", + types.ApplyWorkspaceEditParams{ + .label = "autofix", + .edit = workspace_edit, }, - }); + ); } -fn closeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CloseDocument) error{}!void { +fn closeDocumentHandler(server: *Server, notification: types.DidCloseTextDocumentParams) error{}!void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - _ = id; - _ = writer; - server.document_store.closeDocument(req.params.textDocument.uri); + server.document_store.closeDocument(notification.textDocument.uri); } -fn willSaveHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.WillSave) !void { +fn willSaveHandler(server: *Server, request: types.WillSaveTextDocumentParams) !?[]types.TextEdit { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - if (server.client_capabilities.supports_will_save_wait_until) return; - try willSaveWaitUntilHandler(server, writer, id, req); + if (server.client_capabilities.supports_will_save_wait_until) return null; + return try willSaveWaitUntilHandler(server, request); } -fn willSaveWaitUntilHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.WillSave) !void { +fn willSaveWaitUntilHandler(server: *Server, request: types.WillSaveTextDocumentParams) !?[]types.TextEdit { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); const allocator = server.arena.allocator(); - b: { - if (!server.config.enable_ast_check_diagnostics or !server.config.enable_autofix) - break :b; + if (!server.config.enable_ast_check_diagnostics) return null; + if (!server.config.enable_autofix) return null; - const uri = req.params.textDocument.uri; + const uri = request.textDocument.uri; - const handle = server.document_store.getHandle(uri) orelse break :b; - if (handle.tree.errors.len != 0) break :b; + const handle = server.document_store.getHandle(uri) orelse return null; + if (handle.tree.errors.len != 0) return null; - var text_edits = try server.autofix(allocator, handle); + var text_edits = try server.autofix(allocator, handle); - return try send(writer, allocator, types.Response{ - .id = id, - .result = .{ .TextEdits = try text_edits.toOwnedSlice(allocator) }, - }); - } - - return try send(writer, allocator, types.Response{ - .id = id, - .result = .{ .TextEdits = &.{} }, - 
}); + return try text_edits.toOwnedSlice(allocator); } -fn semanticTokensFullHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SemanticTokensFull) !void { +fn semanticTokensFullHandler(server: *Server, request: types.SemanticTokensParams) !?types.SemanticTokens { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - if (!server.config.enable_semantic_tokens) return try respondGeneric(writer, id, no_semantic_tokens_response); + if (!server.config.enable_semantic_tokens) return null; - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, no_semantic_tokens_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - const token_array = try semantic_tokens.writeAllSemanticTokens(&server.arena, &server.document_store, handle, server.offset_encoding); + const token_array = try semantic_tokens.writeAllSemanticTokens(server.arena, &server.document_store, handle, server.offset_encoding); - return try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ .SemanticTokensFull = .{ .data = token_array } }, - }); + return .{ .data = token_array }; } -fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.Completion) !void { +fn completionHandler(server: *Server, request: types.CompletionParams) !?types.CompletionList { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, no_completions_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - if (req.params.position.character == 0) { + if (request.position.character == 0) { var completions = std.ArrayListUnmanaged(types.CompletionItem){}; try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.top_level_decl_data, server.config.*, null); - return try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ - .CompletionList = .{ .isIncomplete = false, .items = completions.items }, - }, - }); + return .{ .isIncomplete = false, .items = completions.items }; } - const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding); + const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); const maybe_completions = switch (pos_context) { @@ -1962,7 +2019,7 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req: else => null, }; - const completions = maybe_completions orelse return try respondGeneric(writer, id, no_completions_response); + const completions = maybe_completions orelse return null; // truncate completions for (completions) |*item| { @@ -1975,113 +2032,95 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req: // TODO: config for sorting rule? for (completions) |*c| { - const prefix = kindToSortScore(c.kind) orelse continue; + const prefix = kindToSortScore(c.kind.?) 
orelse continue; c.sortText = try std.fmt.allocPrint(server.arena.allocator(), "{s}{s}", .{ prefix, c.label }); } - try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ - .CompletionList = .{ - .isIncomplete = false, - .items = completions, - }, - }, - }); + return .{ .isIncomplete = false, .items = completions }; } -fn signatureHelpHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SignatureHelp) !void { +fn signatureHelpHandler(server: *Server, request: types.SignatureHelpParams) !?types.SignatureHelp { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); const getSignatureInfo = @import("signature_help.zig").getSignatureInfo; - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, no_signatures_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - if (req.params.position.character == 0) - return try respondGeneric(writer, id, no_signatures_response); + if (request.position.character == 0) return null; - const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding); - if (try getSignatureInfo( + const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); + + const signature_info = (try getSignatureInfo( &server.document_store, - &server.arena, + server.arena, handle, source_index, data, - )) |sig_info| { - return try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ - .SignatureHelp = .{ - .signatures = &[1]types.SignatureInformation{sig_info}, - .activeSignature = 0, - .activeParameter = sig_info.activeParameter, - }, - }, - }); - } - return try respondGeneric(writer, id, no_signatures_response); + )) orelse return null; + + var signatures = try server.arena.allocator().alloc(types.SignatureInformation, 1); + signatures[0] = signature_info; + + return .{ + .signatures = signatures, + .activeSignature = 0, + .activeParameter = signature_info.activeParameter, + }; } -fn gotoHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.GotoDefinition, resolve_alias: bool) !void { +fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resolve_alias: bool) !?types.Location { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - if (req.params.position.character == 0) return try respondGeneric(writer, id, null_result_response); + if (request.position.character == 0) return null; - const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding); + const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); - const maybe_location = switch (pos_context) { + return switch (pos_context) { .var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias), .field_access => |loc| try server.gotoDefinitionFieldAccess(handle, source_index, loc, resolve_alias), .import_string_literal => try server.gotoDefinitionString(source_index, handle), .label => try server.gotoDefinitionLabel(source_index, handle), 
else => null, }; - - const location = maybe_location orelse return try respondGeneric(writer, id, null_result_response); - - try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ .Location = location }, - }); } -fn gotoDefinitionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.GotoDefinition) !void { +fn gotoDefinitionHandler( + server: *Server, + request: types.TextDocumentPositionParams, +) !?types.Location { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try server.gotoHandler(writer, id, req, true); + return try server.gotoHandler(request, true); } -fn gotoDeclarationHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.GotoDeclaration) !void { +fn gotoDeclarationHandler( + server: *Server, + request: types.TextDocumentPositionParams, +) !?types.Location { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try server.gotoHandler(writer, id, req, false); + return try server.gotoHandler(request, false); } -fn hoverHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.Hover) !void { +fn hoverHandler(server: *Server, request: types.HoverParams) !?types.Hover { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - if (req.params.position.character == 0) return try respondGeneric(writer, id, null_result_response); + if (request.position.character == 0) return null; - const source_index = offsets.positionToIndex(handle.text, req.params.position, server.offset_encoding); + const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding); const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); - const maybe_hover = switch (pos_context) { + const response = switch (pos_context) { .builtin => try server.hoverDefinitionBuiltin(source_index, handle), .var_access => try server.hoverDefinitionGlobal(source_index, handle), .field_access => |loc| try server.hoverDefinitionFieldAccess(handle, source_index, loc), @@ -2089,57 +2128,47 @@ fn hoverHandler(server: *Server, writer: anytype, id: types.RequestId, req: requ else => null, }; - const hover = maybe_hover orelse return try respondGeneric(writer, id, null_result_response); + // TODO: Figure out a better solution for comptime interpreter diags - try server.publishDiagnostics(writer, handle.*); - - try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ .Hover = hover }, - }); -} - -fn documentSymbolsHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.DocumentSymbols) !void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; - try server.documentSymbol(writer, id, handle); -} - -fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.Formatting) !void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; - - if (handle.tree.errors.len != 0) { - 
return try respondGeneric(writer, id, null_result_response); + if (server.client_capabilities.supports_publish_diagnostics) { + const diagnostics = try server.generateDiagnostics(handle.*); + server.sendNotification("textDocument/publishDiagnostics", diagnostics); } + return response; +} + +fn documentSymbolsHandler(server: *Server, request: types.DocumentSymbolParams) !?[]types.DocumentSymbol { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + + return try analysis.getDocumentSymbols(server.arena.allocator(), handle.tree, server.offset_encoding); +} + +fn formattingHandler(server: *Server, request: types.DocumentFormattingParams) !?[]types.TextEdit { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + + if (handle.tree.errors.len != 0) return null; + const formatted = try handle.tree.render(server.allocator); defer server.allocator.free(formatted); - if (std.mem.eql(u8, handle.text, formatted)) return try respondGeneric(writer, id, null_result_response); + if (std.mem.eql(u8, handle.text, formatted)) return null; // avoid computing diffs if the output is small const maybe_edits = if (formatted.len <= 512) null else diff.edits(server.arena.allocator(), handle.text, formatted) catch null; const edits = maybe_edits orelse { // if edits have been computed we replace the entire file with the formatted text - return try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ - .TextEdits = &[1]types.TextEdit{.{ - .range = offsets.locToRange(handle.text, .{ .start = 0, .end = handle.text.len }, server.offset_encoding), - .newText = formatted, - }}, - }, - }); + return &[1]types.TextEdit{.{ + .range = offsets.locToRange(handle.text, .{ .start = 0, .end = handle.text.len }, server.offset_encoding), + .newText = formatted, + }}; }; // Convert from `[]diff.Edit` to `[]types.TextEdit` @@ -2151,24 +2180,15 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req: }); } - return try send( - writer, - server.arena.allocator(), - types.Response{ - .id = id, - .result = .{ .TextEdits = text_edits.items }, - }, - ); + return text_edits.items; } -fn didChangeConfigurationHandler(server: *Server, writer: anytype, id: types.RequestId, req: configuration.DidChangeConfigurationParams) !void { +fn didChangeConfigurationHandler(server: *Server, request: configuration.DidChangeConfigurationParams) !void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - _ = id; - // NOTE: VS Code seems to always respond with null - if (req.settings) |cfg| { + if (request.settings) |cfg| { inline for (std.meta.fields(configuration.Configuration)) |field| { if (@field(cfg, field.name)) |value| { blk: { @@ -2185,66 +2205,73 @@ fn didChangeConfigurationHandler(server: *Server, writer: anytype, id: types.Req try configuration.configChanged(server.config, server.allocator, null); } else if (server.client_capabilities.supports_configuration) { - try server.requestConfiguration(writer); + try server.requestConfiguration(); } } -fn renameHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.Rename) !void { +fn renameHandler(server: *Server, request: types.RenameParams) !?types.WorkspaceEdit { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try generalReferencesHandler(server, writer, id, .{ .rename = req 
}); + const response = try generalReferencesHandler(server, .{ .rename = request }); + return if (response) |rep| rep.rename else null; } -fn referencesHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.References) !void { +fn referencesHandler(server: *Server, request: types.ReferenceParams) !?[]types.Location { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try generalReferencesHandler(server, writer, id, .{ .references = req }); + const response = try generalReferencesHandler(server, .{ .references = request }); + return if (response) |rep| rep.references else null; } -fn documentHighlightHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.DocumentHighlight) !void { +fn documentHighlightHandler(server: *Server, request: types.DocumentHighlightParams) !?[]types.DocumentHighlight { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - try generalReferencesHandler(server, writer, id, .{ .highlight = req }); + const response = try generalReferencesHandler(server, .{ .highlight = request }); + return if (response) |rep| rep.highlight else null; } const GeneralReferencesRequest = union(enum) { - rename: requests.Rename, - references: requests.References, - highlight: requests.DocumentHighlight, + rename: types.RenameParams, + references: types.ReferenceParams, + highlight: types.DocumentHighlightParams, pub fn uri(self: @This()) []const u8 { return switch (self) { - .rename => |rename| rename.params.textDocument.uri, - .references => |ref| ref.params.textDocument.uri, - .highlight => |highlight| highlight.params.textDocument.uri, + .rename => |rename| rename.textDocument.uri, + .references => |ref| ref.textDocument.uri, + .highlight => |highlight| highlight.textDocument.uri, }; } pub fn position(self: @This()) types.Position { return switch (self) { - .rename => |rename| rename.params.position, - .references => |ref| ref.params.position, - .highlight => |highlight| highlight.params.position, + .rename => |rename| rename.position, + .references => |ref| ref.position, + .highlight => |highlight| highlight.position, }; } }; -fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestId, req: GeneralReferencesRequest) !void { +const GeneralReferencesResponse = union { + rename: types.WorkspaceEdit, + references: []types.Location, + highlight: []types.DocumentHighlight, +}; + +fn generalReferencesHandler(server: *Server, request: GeneralReferencesRequest) !?GeneralReferencesResponse { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); const allocator = server.arena.allocator(); - const handle = server.document_store.getHandle(req.uri()) orelse { - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.uri()) orelse return null; - if (req.position().character <= 0) return try respondGeneric(writer, id, null_result_response); + if (request.position().character <= 0) return null; - const source_index = offsets.positionToIndex(handle.text, req.position(), server.offset_encoding); + const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding); const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index); const decl = switch (pos_context) { @@ -2252,10 +2279,10 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI .field_access => |range| try server.getSymbolFieldAccess(handle, source_index, range), .label => 
try getLabelGlobal(source_index, handle), else => null, - } orelse return try respondGeneric(writer, id, null_result_response); + } orelse return null; - const include_decl = switch (req) { - .references => |ref| ref.params.context.includeDeclaration, + const include_decl = switch (request) { + .references => |ref| ref.context.includeDeclaration, else => true, }; @@ -2263,29 +2290,40 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI try references.labelReferences(allocator, decl, server.offset_encoding, include_decl) else try references.symbolReferences( - &server.arena, + server.arena, &server.document_store, decl, server.offset_encoding, include_decl, server.config.skip_std_references, - req != .highlight, // scan the entire workspace except for highlight + request != .highlight, // scan the entire workspace except for highlight ); - const result: types.ResponseParams = switch (req) { - .rename => |rename| blk: { - var edits: types.WorkspaceEdit = .{ .changes = .{} }; + switch (request) { + .rename => |rename| { + var changes = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(types.TextEdit)){}; + for (locations.items) |loc| { - const gop = try edits.changes.getOrPutValue(allocator, loc.uri, .{}); + const gop = try changes.getOrPutValue(allocator, loc.uri, .{}); try gop.value_ptr.append(allocator, .{ .range = loc.range, - .newText = rename.params.newName, + .newText = rename.newName, }); } - break :blk .{ .WorkspaceEdit = edits }; + + // TODO can we avoid having to move map from `changes` to `new_changes`? + var new_changes: types.Map(types.DocumentUri, []const types.TextEdit) = .{}; + try new_changes.ensureTotalCapacity(allocator, @intCast(u32, changes.count())); + + var changes_it = changes.iterator(); + while (changes_it.next()) |entry| { + new_changes.putAssumeCapacityNoClobber(entry.key_ptr.*, try entry.value_ptr.toOwnedSlice(allocator)); + } + + return .{ .rename = .{ .changes = new_changes } }; }, - .references => .{ .Locations = locations.items }, - .highlight => blk: { + .references => return .{ .references = locations.items }, + .highlight => { var highlights = try std.ArrayListUnmanaged(types.DocumentHighlight).initCapacity(allocator, locations.items.len); const uri = handle.uri; for (locations.items) |loc| { @@ -2295,14 +2333,9 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI .kind = .Text, }); } - break :blk .{ .DocumentHighlight = highlights.items }; + return .{ .highlight = highlights.items }; }, - }; - - try send(writer, allocator, types.Response{ - .id = id, - .result = result, - }); + } } fn isPositionBefore(lhs: types.Position, rhs: types.Position) bool { @@ -2313,68 +2346,53 @@ fn isPositionBefore(lhs: types.Position, rhs: types.Position) bool { } } -fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.InlayHint) !void { +fn inlayHintHandler(server: *Server, request: types.InlayHintParams) !?[]types.InlayHint { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - if (!server.config.enable_inlay_hints) return try respondGeneric(writer, id, null_result_response); + if (!server.config.enable_inlay_hints) return null; - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; - const hover_kind: types.MarkupContent.Kind = if 
(server.client_capabilities.hover_supports_md) .Markdown else .PlainText; + const hover_kind: types.MarkupKind = if (server.client_capabilities.hover_supports_md) .markdown else .plaintext; // TODO cache hints per document // because the function could be stored in a different document // we need the regenerate hints when the document itself or its imported documents change // with caching it would also make sense to generate all hints instead of only the visible ones const hints = try inlay_hints.writeRangeInlayHint( - &server.arena, + server.arena, server.config.*, &server.document_store, handle, - req.params.range, + request.range, hover_kind, server.offset_encoding, ); - defer { - for (hints) |hint| { - server.allocator.free(hint.tooltip.value); - } - server.allocator.free(hints); - } // and only convert and return all hints in range for every request var visible_hints = hints; // small_hints should roughly be sorted by position for (hints) |hint, i| { - if (isPositionBefore(hint.position, req.params.range.start)) continue; + if (isPositionBefore(hint.position, request.range.start)) continue; visible_hints = hints[i..]; break; } for (visible_hints) |hint, i| { - if (isPositionBefore(hint.position, req.params.range.end)) continue; + if (isPositionBefore(hint.position, request.range.end)) continue; visible_hints = visible_hints[0..i]; break; } - return try send(writer, server.arena.allocator(), types.Response{ - .id = id, - .result = .{ .InlayHint = visible_hints }, - }); + return visible_hints; } -fn codeActionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CodeAction) !void { - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - return try respondGeneric(writer, id, null_result_response); - }; - - const allocator = server.arena.allocator(); +fn codeActionHandler(server: *Server, request: types.CodeActionParams) !?[]types.CodeAction { + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; var builder = code_actions.Builder{ - .arena = &server.arena, + .arena = server.arena, .document_store = &server.document_store, .handle = handle, .offset_encoding = server.offset_encoding, @@ -2382,29 +2400,23 @@ fn codeActionHandler(server: *Server, writer: anytype, id: types.RequestId, req: var actions = std.ArrayListUnmanaged(types.CodeAction){}; - for (req.params.context.diagnostics) |diagnostic| { + for (request.context.diagnostics) |diagnostic| { try builder.generateCodeAction(diagnostic, &actions); } for (actions.items) |*action| { // TODO query whether SourceFixAll is supported by the server - if (action.kind == .SourceFixAll) action.kind = .QuickFix; + if (action.kind.? 
== .@"source.fixAll") action.kind = .quickfix; } - return try send(writer, allocator, types.Response{ - .id = id, - .result = .{ .CodeAction = actions.items }, - }); + return actions.items; } -fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.FoldingRange) !void { +fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]types.FoldingRange { const Token = std.zig.Token; const Node = Ast.Node; const allocator = server.arena.allocator(); - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - log.warn("Trying to get folding ranges of non existent document {s}", .{req.params.textDocument.uri}); - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; const helper = struct { const Inclusivity = enum { inclusive, exclusive }; @@ -2415,10 +2427,11 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re start: Ast.TokenIndex, end: Ast.TokenIndex, end_reach: Inclusivity, + encoding: offsets.Encoding, ) std.mem.Allocator.Error!bool { const can_add = start < end and !tree.tokensOnSameLine(start, end); if (can_add) { - try addTokRange(p_ranges, tree, start, end, end_reach); + try addTokRange(p_ranges, tree, start, end, end_reach, encoding); } return can_add; } @@ -2428,17 +2441,16 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re start: Ast.TokenIndex, end: Ast.TokenIndex, end_reach: Inclusivity, + encoding: offsets.Encoding, ) std.mem.Allocator.Error!void { std.debug.assert(!std.debug.runtime_safety or !tree.tokensOnSameLine(start, end)); - const start_loc = tree.tokenLocation(0, start); - const end_loc_rel = tree.tokenLocation(@intCast(Ast.ByteOffset, start_loc.line_start), end); - std.debug.assert(end_loc_rel.line != 0); + const start_line = offsets.tokenToPosition(tree, start, encoding).line; + const end_line = offsets.tokenToPosition(tree, end, encoding).line; try p_ranges.append(.{ - .startLine = start_loc.line, - .endLine = (start_loc.line + end_loc_rel.line) - - @boolToInt(end_reach == .exclusive), + .startLine = start_line, + .endLine = end_line - @boolToInt(end_reach == .exclusive), }); } }; @@ -2449,7 +2461,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re const token_tags: []const Token.Tag = handle.tree.tokens.items(.tag); const node_tags: []const Node.Tag = handle.tree.nodes.items(.tag); - if (token_tags.len == 0) return; + if (token_tags.len == 0) return null; if (token_tags[0] == .container_doc_comment) { var tok: Ast.TokenIndex = 1; while (tok < token_tags.len) : (tok += 1) { @@ -2476,14 +2488,14 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re const start_tok_1 = handle.tree.lastToken(if_full.ast.cond_expr); const end_tok_1 = handle.tree.lastToken(if_full.ast.then_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); if (if_full.ast.else_expr == 0) continue; const start_tok_2 = if_full.else_token; const end_tok_2 = handle.tree.lastToken(if_full.ast.else_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); }, // same as if/else @@ 
-2497,13 +2509,13 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re const start_tok_1 = handle.tree.lastToken(loop_full.ast.cond_expr); const end_tok_1 = handle.tree.lastToken(loop_full.ast.then_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); if (loop_full.ast.else_expr == 0) continue; const start_tok_2 = loop_full.else_token; const end_tok_2 = handle.tree.lastToken(loop_full.ast.else_expr); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); }, .global_var_decl, @@ -2531,7 +2543,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re start_doc_tok -= 1; } - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive, server.offset_encoding); } // Function prototype folding regions @@ -2544,7 +2556,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk; try ranges.ensureUnusedCapacity(1 + fn_proto.ast.params.len); // best guess, doesn't include anytype params - helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive) catch |err| switch (err) { + helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive, server.offset_encoding) catch |err| switch (err) { error.OutOfMemory => unreachable, }; @@ -2556,7 +2568,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re while (token_tags[doc_end_tok + 1] == .doc_comment) doc_end_tok += 1; - _ = try helper.maybeAddTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive, server.offset_encoding); } }, @@ -2568,7 +2580,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re => { const start_tok = handle.tree.firstToken(node); const end_tok = handle.tree.lastToken(node); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive, server.offset_encoding); }, // most other trivial cases can go through here. @@ -2615,7 +2627,7 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re const start_tok = handle.tree.firstToken(node); const end_tok = handle.tree.lastToken(node); - _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive); + _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive, server.offset_encoding); }, } } @@ -2625,10 +2637,10 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re // We add opened folding regions to a stack as we go and pop one off when we find a closing brace. // As an optimization we start with a reasonable capacity, which should work well in most cases since // people will almost never have nesting that deep. 
- var stack = try std.ArrayList(usize).initCapacity(allocator, 10); + var stack = try std.ArrayList(u32).initCapacity(allocator, 10); var i: usize = 0; - var lines_count: usize = 0; + var lines_count: u32 = 0; while (i < handle.tree.source.len) : (i += 1) { const slice = handle.tree.source[i..]; @@ -2655,18 +2667,17 @@ fn foldingRangeHandler(server: *Server, writer: anytype, id: types.RequestId, re } } - try send(writer, allocator, types.Response{ - .id = id, - .result = .{ .FoldingRange = ranges.items }, - }); + return ranges.items; } -fn selectionRangeHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SelectionRange) !void { +pub const SelectionRange = struct { + range: types.Range, + parent: ?*SelectionRange, +}; + +fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) !?[]*SelectionRange { const allocator = server.arena.allocator(); - const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse { - log.warn("Trying to get selection range of non existent document {s}", .{req.params.textDocument.uri}); - return try respondGeneric(writer, id, null_result_response); - }; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; // For each of the input positons, we need to compute the stack of AST // nodes/ranges which contain the position. At the moment, we do this in a @@ -2675,17 +2686,17 @@ fn selectionRangeHandler(server: *Server, writer: anytype, id: types.RequestId, // // A faster algorithm would be to walk the tree starting from the root, // descending into the child containing the position at every step. - var result = try allocator.alloc(*types.SelectionRange, req.params.positions.len); + var result = try allocator.alloc(*SelectionRange, request.positions.len); var locs = try std.ArrayListUnmanaged(offsets.Loc).initCapacity(allocator, 32); - for (req.params.positions) |position, position_index| { + for (request.positions) |position, position_index| { const index = offsets.positionToIndex(handle.text, position, server.offset_encoding); locs.clearRetainingCapacity(); for (handle.tree.nodes.items(.data)) |_, i| { - const node = @intCast(u32, i); + const node = @intCast(Ast.Node.Index, i); const loc = offsets.nodeToLoc(handle.tree, node); if (loc.start <= index and index <= loc.end) { - (try locs.addOne(allocator)).* = loc; + try locs.append(allocator, loc); } } @@ -2701,7 +2712,7 @@ fn selectionRangeHandler(server: *Server, writer: anytype, id: types.RequestId, } } - var selection_ranges = try allocator.alloc(types.SelectionRange, locs.items.len); + var selection_ranges = try allocator.alloc(SelectionRange, locs.items.len); for (selection_ranges) |*range, i| { range.range = offsets.locToRange(handle.text, locs.items[i], server.offset_encoding); range.parent = if (i + 1 < selection_ranges.len) &selection_ranges[i + 1] else null; @@ -2709,122 +2720,222 @@ fn selectionRangeHandler(server: *Server, writer: anytype, id: types.RequestId, result[position_index] = &selection_ranges[0]; } - try send(writer, allocator, types.Response{ - .id = id, - .result = .{ .SelectionRange = result }, - }); + return result; } fn shorterLocsFirst(_: void, lhs: offsets.Loc, rhs: offsets.Loc) bool { return (lhs.end - lhs.start) < (rhs.end - rhs.start); } -// Needed for the hack seen below. 
-fn extractErr(val: anytype) anyerror { - val catch |e| return e; - return error.HackDone; +/// return true if there is a request with the given method name +fn requestMethodExists(method: []const u8) bool { + const methods = comptime blk: { + var methods: [types.request_metadata.len][]const u8 = undefined; + for (types.request_metadata) |meta, i| { + methods[i] = meta.method; + } + break :blk methods; + }; + + return for (methods) |name| { + if (std.mem.eql(u8, method, name)) break true; + } else false; } -pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void { +/// return true if there is a notification with the given method name +fn notificationMethodExists(method: []const u8) bool { + const methods = comptime blk: { + var methods: [types.notification_metadata.len][]const u8 = undefined; + for (types.notification_metadata) |meta, i| { + methods[i] = meta.method; + } + break :blk methods; + }; + + return for (methods) |name| { + if (std.mem.eql(u8, method, name)) break true; + } else false; +} + +const Message = union(enum) { + RequestMessage: struct { + id: types.RequestId, + method: []const u8, + /// may be null + params: types.LSPAny, + }, + NotificationMessage: struct { + method: []const u8, + /// may be null + params: types.LSPAny, + }, + ResponseMessage: struct { + id: types.RequestId, + /// non null on success + result: types.LSPAny, + @"error": ?types.ResponseError, + }, + + pub fn id(self: Message) ?types.RequestId { + return switch (self) { + .RequestMessage => |request| request.id, + .NotificationMessage => null, + .ResponseMessage => |response| response.id, + }; + } + + pub fn method(self: Message) ?[]const u8 { + return switch (self) { + .RequestMessage => |request| request.method, + .NotificationMessage => |notification| notification.method, + .ResponseMessage => null, + }; + } + + pub fn params(self: Message) ?types.LSPAny { + return switch (self) { + .RequestMessage => |request| request.params, + .NotificationMessage => |notification| notification.params, + .ResponseMessage => null, + }; + } + + pub fn fromJsonValueTree(tree: std.json.ValueTree) error{InvalidRequest}!Message { + if (tree.root != .Object) return error.InvalidRequest; + const object = tree.root.Object; + + if (object.get("id")) |id_obj| { + comptime std.debug.assert(!tres.isAllocatorRequired(types.RequestId)); + const msg_id = tres.parse(types.RequestId, id_obj, null) catch return error.InvalidRequest; + + if (object.get("method")) |method_obj| { + const msg_method = switch (method_obj) { + .String => |str| str, + else => return error.InvalidRequest, + }; + + const msg_params = object.get("params") orelse .Null; + + return .{ .RequestMessage = .{ + .id = msg_id, + .method = msg_method, + .params = msg_params, + } }; + } else { + const result = object.get("result") orelse .Null; + const error_obj = object.get("error") orelse .Null; + + comptime std.debug.assert(!tres.isAllocatorRequired(?types.ResponseError)); + const err = tres.parse(?types.ResponseError, error_obj, null) catch return error.InvalidRequest; + + if (result != .Null and err != null) return error.InvalidRequest; + + return .{ .ResponseMessage = .{ + .id = msg_id, + .result = result, + .@"error" = err, + } }; + } + } else { + const msg_method = switch (object.get("method") orelse return error.InvalidRequest) { + .String => |str| str, + else => return error.InvalidRequest, + }; + + const msg_params = object.get("params") orelse .Null; + + return .{ .NotificationMessage = .{ + .method = msg_method, + .params = msg_params, 
+ } }; + } + } +}; + +pub fn processJsonRpc( + server: *Server, + arena: *std.heap.ArenaAllocator, + json: []const u8, +) void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - server.arena = std.heap.ArenaAllocator.init(server.allocator); - defer server.arena.deinit(); + server.arena = arena; var parser = std.json.Parser.init(server.arena.allocator(), false); defer parser.deinit(); - var tree = try parser.parse(json); + var tree = parser.parse(json) catch { + std.log.err("failed to parse message!", .{}); + return; // maybe panic? + }; defer tree.deinit(); - const id = if (tree.root.Object.get("id")) |id| switch (id) { - .Integer => |int| types.RequestId{ .Integer = @intCast(i32, int) }, - .String => |str| types.RequestId{ .String = str }, - else => types.RequestId{ .Integer = 0 }, - } else types.RequestId{ .Integer = 0 }; + const message = Message.fromJsonValueTree(tree) catch { + std.log.err("failed to parse message!", .{}); + return; // maybe panic? + }; - if (id == .String and std.mem.startsWith(u8, id.String, "register")) - return; - if (id == .String and std.mem.startsWith(u8, id.String, "apply_edit")) - return; - if (id == .String and std.mem.eql(u8, id.String, "i_haz_configuration")) { - log.info("Setting configuration...", .{}); - - // NOTE: Does this work with other editors? - // Yes, String ids are officially supported by LSP - // but not sure how standard this "standard" really is - - if (tree.root.Object.get("error")) |_| return; - const result = tree.root.Object.get("result").?.Array; - - inline for (std.meta.fields(Config)) |field, index| { - const value = result.items[index]; - const ft = if (@typeInfo(field.type) == .Optional) - @typeInfo(field.type).Optional.child - else - field.type; - const ti = @typeInfo(ft); - - if (value != .Null) { - const new_value: field.type = switch (ft) { - []const u8 => switch (value) { - .String => |s| blk: { - if (s.len == 0) { - if (field.type == ?[]const u8) { - break :blk null; - } else { - break :blk s; - } - } - var nv = try server.allocator.dupe(u8, s); - if (@field(server.config, field.name)) |prev_val| server.allocator.free(prev_val); - break :blk nv; - }, // TODO: Allocation model? 
(same with didChangeConfiguration); imo this isn't *that* bad but still - else => @panic("Invalid configuration value"), // TODO: Handle this - }, - else => switch (ti) { - .Int => switch (value) { - .Integer => |s| std.math.cast(ft, s) orelse @panic("Invalid configuration value"), - else => @panic("Invalid configuration value"), // TODO: Handle this - }, - .Bool => switch (value) { - .Bool => |b| b, - else => @panic("Invalid configuration value"), // TODO: Handle this - }, - else => @compileError("Not implemented for " ++ @typeName(ft)), - }, - }; - log.debug("setting configuration option '{s}' to '{any}'", .{ field.name, new_value }); - @field(server.config, field.name) = new_value; - } + server.processMessage(message) catch |err| { + std.log.err("got {} while processing message!", .{err}); // TODO include message information + switch (message) { + .RequestMessage => |request| server.sendResponseError(request.id, .{ + .code = @errorToInt(err), + .message = @errorName(err), + }), + else => {}, } + }; +} - try configuration.configChanged(server.config, server.allocator, null); +fn processMessage(server: *Server, message: Message) Error!void { + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); - return; + switch (message) { + .RequestMessage => |request| { + if (!requestMethodExists(request.method)) return error.MethodNotFound; + }, + .NotificationMessage => |notification| { + if (!notificationMethodExists(notification.method)) return error.MethodNotFound; + }, + .ResponseMessage => |response| { + if (response.id != .string) return; + if (std.mem.startsWith(u8, response.id.string, "register")) return; + if (std.mem.eql(u8, response.id.string, "apply_edit")) return; + + if (std.mem.eql(u8, response.id.string, "i_haz_configuration")) { + if (response.@"error" != null) return; + try server.handleConfiguration(response.result); + return; + } + + std.log.warn("received response from client with id '{s}' that has no handler!", .{response.id.string}); + return; + }, } - const method = tree.root.Object.get("method").?.String; + const method = message.method().?; // message cannot be a ResponseMessage switch (server.status) { .uninitialized => blk: { if (std.mem.eql(u8, method, "initialize")) break :blk; if (std.mem.eql(u8, method, "exit")) break :blk; - // ignore notifications - if (tree.root.Object.get("id") == null) break :blk; - - return try sendErrorResponse(writer, server.arena.allocator(), .ServerNotInitialized, "server received a request before being initialized!"); + return error.ServerNotInitialized; // server received a request before being initialized! }, .initializing => blk: { if (std.mem.eql(u8, method, "initialized")) break :blk; if (std.mem.eql(u8, method, "$/progress")) break :blk; - return try sendErrorResponse(writer, server.arena.allocator(), .InvalidRequest, "server received a request during initialization!"); + return error.InvalidRequest; // server received a request during initialization! }, .initialized => {}, - .shutdown => return try sendErrorResponse(writer, server.arena.allocator(), .InvalidRequest, "server received a request after shutdown!"), + .shutdown => blk: { + if (std.mem.eql(u8, method, "exit")) break :blk; + + return error.InvalidRequest; // server received a request after shutdown! 
+ }, } const start_time = std.time.milliTimestamp(); @@ -2837,83 +2948,74 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void } const method_map = .{ - .{ "initialized", void, initializedHandler }, - .{ "initialize", requests.Initialize, initializeHandler }, - .{ "shutdown", void, shutdownHandler }, - .{ "exit", void, exitHandler }, - .{ "$/cancelRequest", void, cancelRequestHandler }, - .{ "textDocument/didOpen", requests.OpenDocument, openDocumentHandler }, - .{ "textDocument/didChange", requests.ChangeDocument, changeDocumentHandler }, - .{ "textDocument/didSave", requests.SaveDocument, saveDocumentHandler }, - .{ "textDocument/didClose", requests.CloseDocument, closeDocumentHandler }, - .{ "textDocument/willSave", requests.WillSave, willSaveHandler }, - .{ "textDocument/willSaveWaitUntil", requests.WillSave, willSaveWaitUntilHandler }, - .{ "textDocument/semanticTokens/full", requests.SemanticTokensFull, semanticTokensFullHandler }, - .{ "textDocument/inlayHint", requests.InlayHint, inlayHintHandler }, - .{ "textDocument/completion", requests.Completion, completionHandler }, - .{ "textDocument/signatureHelp", requests.SignatureHelp, signatureHelpHandler }, - .{ "textDocument/definition", requests.GotoDefinition, gotoDefinitionHandler }, - .{ "textDocument/typeDefinition", requests.GotoDefinition, gotoDefinitionHandler }, - .{ "textDocument/implementation", requests.GotoDefinition, gotoDefinitionHandler }, - .{ "textDocument/declaration", requests.GotoDeclaration, gotoDeclarationHandler }, - .{ "textDocument/hover", requests.Hover, hoverHandler }, - .{ "textDocument/documentSymbol", requests.DocumentSymbols, documentSymbolsHandler }, - .{ "textDocument/formatting", requests.Formatting, formattingHandler }, - .{ "textDocument/rename", requests.Rename, renameHandler }, - .{ "textDocument/references", requests.References, referencesHandler }, - .{ "textDocument/documentHighlight", requests.DocumentHighlight, documentHighlightHandler }, - .{ "textDocument/codeAction", requests.CodeAction, codeActionHandler }, - .{ "workspace/didChangeConfiguration", configuration.DidChangeConfigurationParams, didChangeConfigurationHandler }, - .{ "textDocument/foldingRange", requests.FoldingRange, foldingRangeHandler }, - .{ "textDocument/selectionRange", requests.SelectionRange, selectionRangeHandler }, + .{ "initialized", initializedHandler }, + .{ "initialize", initializeHandler }, + .{ "shutdown", shutdownHandler }, + .{ "exit", exitHandler }, + .{ "$/cancelRequest", cancelRequestHandler }, + .{ "textDocument/didOpen", openDocumentHandler }, + .{ "textDocument/didChange", changeDocumentHandler }, + .{ "textDocument/didSave", saveDocumentHandler }, + .{ "textDocument/didClose", closeDocumentHandler }, + .{ "textDocument/willSave", willSaveHandler }, + .{ "textDocument/willSaveWaitUntil", willSaveWaitUntilHandler }, + .{ "textDocument/semanticTokens/full", semanticTokensFullHandler }, + .{ "textDocument/inlayHint", inlayHintHandler }, + .{ "textDocument/completion", completionHandler }, + .{ "textDocument/signatureHelp", signatureHelpHandler }, + .{ "textDocument/definition", gotoDefinitionHandler }, + .{ "textDocument/typeDefinition", gotoDefinitionHandler }, + .{ "textDocument/implementation", gotoDefinitionHandler }, + .{ "textDocument/declaration", gotoDeclarationHandler }, + .{ "textDocument/hover", hoverHandler }, + .{ "textDocument/documentSymbol", documentSymbolsHandler }, + .{ "textDocument/formatting", formattingHandler }, + .{ "textDocument/rename", renameHandler 
}, + .{ "textDocument/references", referencesHandler }, + .{ "textDocument/documentHighlight", documentHighlightHandler }, + .{ "textDocument/codeAction", codeActionHandler }, + .{ "workspace/didChangeConfiguration", didChangeConfigurationHandler }, // types.DidChangeConfigurationParams + .{ "textDocument/foldingRange", foldingRangeHandler }, + .{ "textDocument/selectionRange", selectionRangeHandler }, }; + comptime { + inline for (method_map) |method_info| { + _ = method_info; + // TODO validate that the method actually exists + // TODO validate that direction is client_to_server + // TODO validate that the handler accepts and returns the correct types + // TODO validate that notification handler return Error!void + // TODO validate handler parameter names + } + } + + @setEvalBranchQuota(10000); inline for (method_map) |method_info| { if (std.mem.eql(u8, method, method_info[0])) { - if (method_info.len == 1) { - log.warn("method not mapped: {s}", .{method}); - } else if (method_info[1] != void) { - const ReqT = method_info[1]; - const request_obj = try requests.fromDynamicTree(&server.arena, ReqT, tree.root); - method_info[2](server, writer, id, request_obj) catch |err| { - log.err("failed to process request: {s}", .{@errorName(err)}); - }; - } else { - method_info[2](server, writer, id) catch |err| { - log.err("failed to process request: {s}", .{@errorName(err)}); - }; + const handler = method_info[1]; + + const handler_info: std.builtin.Type.Fn = @typeInfo(@TypeOf(handler)).Fn; + const ParamsType = handler_info.params[1].type.?; // TODO add error message on null + + const params: ParamsType = tres.parse(ParamsType, message.params().?, server.arena.allocator()) catch return error.InternalError; + const response = handler(server, params) catch return error.InternalError; + + if (@TypeOf(response) == void) return; + + if (message == .RequestMessage) { + server.sendResponse(message.RequestMessage.id, response); } + return; } } - // Boolean value is true if the method is a request (and thus the client - // needs a response) or false if the method is a notification (in which - // case it should be silently ignored) - const unimplemented_map = std.ComptimeStringMap(bool, .{ - .{ "textDocument/codeLens", true }, - .{ "textDocument/documentLink", true }, - .{ "textDocument/rangeFormatting", true }, - .{ "textDocument/onTypeFormatting", true }, - .{ "textDocument/prepareRename", true }, - .{ "textDocument/selectionRange", true }, - .{ "textDocument/semanticTokens/range", true }, - .{ "workspace/didChangeWorkspaceFolders", false }, - }); - - if (unimplemented_map.get(method)) |request| { - // TODO: Unimplemented methods, implement them and add them to server capabilities. 
- if (request) { - return try respondGeneric(writer, id, null_result_response); - } - - log.debug("Notification method {s} is not implemented", .{method}); - return; + switch (message) { + .RequestMessage => |request| server.sendResponse(request.id, null), + .NotificationMessage => return, + .ResponseMessage => unreachable, } - if (tree.root.Object.get("id")) |_| { - return try respondGeneric(writer, id, not_implemented_response); - } - log.debug("Method without return value not implemented: {s}", .{method}); } pub fn init( @@ -2947,8 +3049,10 @@ pub fn init( .insertText = if (config.include_at_in_builtins) insert_text else insert_text[1..], .insertTextFormat = if (config.enable_snippets) .Snippet else .PlainText, .documentation = .{ - .kind = .Markdown, - .value = builtin.documentation, + .MarkupContent = .{ + .kind = .markdown, + .value = builtin.documentation, + }, }, }); } @@ -2967,4 +3071,9 @@ pub fn deinit(server: *Server) void { analysis.deinit(); server.builtin_completions.deinit(server.allocator); + + for (server.outgoing_messages.items) |message| { + server.allocator.free(message); + } + server.outgoing_messages.deinit(server.allocator); } diff --git a/src/analysis.zig b/src/analysis.zig index 3ff791d..7b142f3 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1,7 +1,7 @@ const std = @import("std"); const DocumentStore = @import("DocumentStore.zig"); const Ast = std.zig.Ast; -const types = @import("types.zig"); +const types = @import("lsp.zig"); const offsets = @import("offsets.zig"); const log = std.log.scoped(.analysis); const ast = @import("ast.zig"); @@ -19,7 +19,7 @@ pub fn deinit() void { } /// Gets a declaration's doc comments. Caller owns returned memory. -pub fn getDocComments(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, format: types.MarkupContent.Kind) !?[]const u8 { +pub fn getDocComments(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, format: types.MarkupKind) !?[]const u8 { const base = tree.nodes.items(.main_token)[node]; const base_kind = tree.nodes.items(.tag)[node]; const tokens = tree.tokens.items(.tag); @@ -68,7 +68,7 @@ pub fn getDocCommentTokenIndex(tokens: []const std.zig.Token.Tag, base_token: As } else idx + 1; } -pub fn collectDocComments(allocator: std.mem.Allocator, tree: Ast, doc_comments: Ast.TokenIndex, format: types.MarkupContent.Kind, container_doc: bool) ![]const u8 { +pub fn collectDocComments(allocator: std.mem.Allocator, tree: Ast, doc_comments: Ast.TokenIndex, format: types.MarkupKind, container_doc: bool) ![]const u8 { var lines = std.ArrayList([]const u8).init(allocator); defer lines.deinit(); const tokens = tree.tokens.items(.tag); @@ -81,7 +81,7 @@ pub fn collectDocComments(allocator: std.mem.Allocator, tree: Ast, doc_comments: } else break; } - return try std.mem.join(allocator, if (format == .Markdown) " \n" else "\n", lines.items); + return try std.mem.join(allocator, if (format == .markdown) " \n" else "\n", lines.items); } /// Gets a function's keyword, name, arguments and return value. 
@@ -2417,11 +2417,17 @@ pub const DocumentScope = struct { } self.scopes.deinit(allocator); for (self.error_completions.entries.items(.key)) |item| { - if (item.documentation) |doc| allocator.free(doc.value); + switch (item.documentation orelse continue) { + .string => |str| allocator.free(str), + .MarkupContent => |content| allocator.free(content.value), + } } self.error_completions.deinit(allocator); for (self.enum_completions.entries.items(.key)) |item| { - if (item.documentation) |doc| allocator.free(doc.value); + switch (item.documentation orelse continue) { + .string => |str| allocator.free(str), + .MarkupContent => |content| allocator.free(content.value), + } } self.enum_completions.deinit(allocator); } @@ -2556,13 +2562,18 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx: if (container_field) |_| { if (!std.mem.eql(u8, name, "_")) { - var doc = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| - types.MarkupContent{ .kind = .Markdown, .value = docs } - else - null; - var gop_res = try context.enums.getOrPut(allocator, .{ .label = name, .kind = .Constant, .insertText = name, .insertTextFormat = .PlainText, .documentation = doc }); + const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation); + + var doc: Documentation = if (try getDocComments(allocator, tree, decl, .markdown)) |docs| .{ .MarkupContent = types.MarkupContent{ .kind = .markdown, .value = docs } } else null; + var gop_res = try context.enums.getOrPut(allocator, .{ + .label = name, + .kind = .Constant, + .insertText = name, + .insertTextFormat = .PlainText, + .documentation = doc, + }); if (gop_res.found_existing) { - if (doc) |d| allocator.free(d.value); + if (doc) |d| allocator.free(d.MarkupContent.value); } } } diff --git a/src/code_actions.zig b/src/code_actions.zig index e17fa57..2140222 100644 --- a/src/code_actions.zig +++ b/src/code_actions.zig @@ -5,8 +5,7 @@ const DocumentStore = @import("DocumentStore.zig"); const analysis = @import("analysis.zig"); const ast = @import("ast.zig"); -const types = @import("types.zig"); -const requests = @import("requests.zig"); +const types = @import("lsp.zig"); const offsets = @import("offsets.zig"); pub const Builder = struct { @@ -55,11 +54,9 @@ pub const Builder = struct { } pub fn createWorkspaceEdit(self: *Builder, edits: []const types.TextEdit) error{OutOfMemory}!types.WorkspaceEdit { - var text_edits = std.ArrayListUnmanaged(types.TextEdit){}; - try text_edits.appendSlice(self.arena.allocator(), edits); - + const allocator = self.arena.allocator(); var workspace_edit = types.WorkspaceEdit{ .changes = .{} }; - try workspace_edit.changes.putNoClobber(self.arena.allocator(), self.handle.uri, text_edits); + try workspace_edit.changes.?.putNoClobber(allocator, self.handle.uri, try allocator.dupe(types.TextEdit, edits)); return workspace_edit; } @@ -74,7 +71,7 @@ fn handleNonCamelcaseFunction(builder: *Builder, actions: *std.ArrayListUnmanage const action1 = types.CodeAction{ .title = "make function name camelCase", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(loc, new_text)}), }; @@ -115,7 +112,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman const action1 = types.CodeAction{ .title = "discard function parameter", - .kind = .SourceFixAll, + .kind = .@"source.fixAll", .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}), }; @@ 
-123,7 +120,7 @@ fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnman // TODO fix formatting const action2 = types.CodeAction{ .title = "remove function parameter", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = false, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(getParamRemovalRange(tree, payload.param), "")}), }; @@ -162,7 +159,7 @@ fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnma try actions.append(builder.arena.allocator(), .{ .title = "discard value", - .kind = .SourceFixAll, + .kind = .@"source.fixAll", .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}), }); @@ -179,7 +176,7 @@ fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged( // TODO fix formatting try actions.append(builder.arena.allocator(), .{ .title = "remove capture", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}), }); @@ -188,7 +185,7 @@ fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged( // |v, _| -> |v| try actions.append(builder.arena.allocator(), .{ .title = "remove index capture", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc( .{ .start = capture_locs.value.end, .end = capture_locs.loc.end - 1 }, @@ -207,7 +204,7 @@ fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types // |v, i| -> |_, i| try actions.append(builder.arena.allocator(), .{ .title = "discard capture", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.value, "_")}), }); @@ -216,7 +213,7 @@ fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types // TODO fix formatting try actions.append(builder.arena.allocator(), .{ .title = "remove capture", - .kind = .QuickFix, + .kind = .quickfix, .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}), }); @@ -228,7 +225,7 @@ fn handlePointlessDiscard(builder: *Builder, actions: *std.ArrayListUnmanaged(ty try actions.append(builder.arena.allocator(), .{ .title = "remove pointless discard", - .kind = .SourceFixAll, + .kind = .@"source.fixAll", .isPreferred = true, .edit = try builder.createWorkspaceEdit(&.{ builder.createTextEditLoc(edit_loc, ""), diff --git a/src/data/snippets.zig b/src/data/snippets.zig index 023e221..cd0e31c 100644 --- a/src/data/snippets.zig +++ b/src/data/snippets.zig @@ -1,8 +1,8 @@ -const types = @import("../types.zig"); +const types = @import("../lsp.zig"); pub const Snipped = struct { label: []const u8, - kind: types.CompletionItem.Kind, + kind: types.CompletionItemKind, text: ?[]const u8 = null, }; diff --git a/src/diff.zig b/src/diff.zig index 0d1f5ac..4c0ad40 100644 --- a/src/diff.zig +++ b/src/diff.zig @@ -1,6 +1,5 @@ const std = @import("std"); -const types = @import("types.zig"); -const requests = @import("requests.zig"); +const types = @import("lsp.zig"); const offsets = @import("offsets.zig"); pub const Error = error{ OutOfMemory, InvalidRange }; @@ -357,14 +356,14 @@ fn char_pos_to_range( pub fn applyTextEdits( allocator: std.mem.Allocator, text: []const u8, - content_changes: []const requests.TextDocumentContentChangeEvent, + content_changes: []const types.TextDocumentContentChangeEvent, 
encoding: offsets.Encoding, ) ![:0]const u8 { var last_full_text_change: ?usize = null; var i: usize = content_changes.len; while (i > 0) { i -= 1; - if (content_changes[i].range == null) { + if (content_changes[i] == .literal_1) { last_full_text_change = i; continue; } @@ -373,16 +372,16 @@ pub fn applyTextEdits( var text_array = std.ArrayListUnmanaged(u8){}; errdefer text_array.deinit(allocator); - try text_array.appendSlice(allocator, if (last_full_text_change) |index| content_changes[index].text else text); + try text_array.appendSlice(allocator, if (last_full_text_change) |index| content_changes[index].literal_1.text else text); // don't even bother applying changes before a full text change const changes = content_changes[if (last_full_text_change) |index| index + 1 else 0..]; for (changes) |item| { - const range = item.range.?; // every element is guaranteed to have `range` set + const range = item.literal_0.range; const loc = offsets.rangeToLoc(text_array.items, range, encoding); - try text_array.replaceRange(allocator, loc.start, loc.end - loc.start, item.text); + try text_array.replaceRange(allocator, loc.start, loc.end - loc.start, item.literal_0.text); } return try text_array.toOwnedSliceSentinel(allocator, 0); diff --git a/src/inlay_hints.zig b/src/inlay_hints.zig index c524903..1bc2747 100644 --- a/src/inlay_hints.zig +++ b/src/inlay_hints.zig @@ -2,7 +2,7 @@ const std = @import("std"); const zig_builtin = @import("builtin"); const DocumentStore = @import("DocumentStore.zig"); const analysis = @import("analysis.zig"); -const types = @import("types.zig"); +const types = @import("lsp.zig"); const offsets = @import("offsets.zig"); const Ast = std.zig.Ast; const log = std.log.scoped(.inlay_hint); @@ -32,20 +32,13 @@ fn isNodeInRange(tree: Ast, node: Ast.Node.Index, range: types.Range) bool { } const Builder = struct { - allocator: std.mem.Allocator, + arena: std.mem.Allocator, config: *const Config, handle: *const DocumentStore.Handle, hints: std.ArrayListUnmanaged(types.InlayHint), - hover_kind: types.MarkupContent.Kind, + hover_kind: types.MarkupKind, encoding: offsets.Encoding, - fn deinit(self: *Builder) void { - for (self.hints.items) |hint| { - self.allocator.free(hint.tooltip.value); - } - self.hints.deinit(self.allocator); - } - fn appendParameterHint(self: *Builder, position: types.Position, label: []const u8, tooltip: []const u8, tooltip_noalias: bool, tooltip_comptime: bool) !void { // TODO allocation could be avoided by extending InlayHint.jsonStringify // adding tooltip_noalias & tooltip_comptime to InlayHint should be enough @@ -53,28 +46,28 @@ const Builder = struct { if (tooltip.len == 0) break :blk ""; const prefix = if (tooltip_noalias) if (tooltip_comptime) "noalias comptime " else "noalias " else if (tooltip_comptime) "comptime " else ""; - if (self.hover_kind == .Markdown) { - break :blk try std.fmt.allocPrint(self.allocator, "```zig\n{s}{s}\n```", .{ prefix, tooltip }); + if (self.hover_kind == .markdown) { + break :blk try std.fmt.allocPrint(self.arena, "```zig\n{s}{s}\n```", .{ prefix, tooltip }); } - break :blk try std.fmt.allocPrint(self.allocator, "{s}{s}", .{ prefix, tooltip }); + break :blk try std.fmt.allocPrint(self.arena, "{s}{s}", .{ prefix, tooltip }); }; - try self.hints.append(self.allocator, .{ + try self.hints.append(self.arena, .{ .position = position, - .label = label, + .label = .{ .string = label }, .kind = types.InlayHintKind.Parameter, - .tooltip = .{ + .tooltip = .{ .MarkupContent = .{ .kind = self.hover_kind, .value = tooltip_text, - 
}, + } }, .paddingLeft = false, .paddingRight = true, }); } fn toOwnedSlice(self: *Builder) error{OutOfMemory}![]types.InlayHint { - return self.hints.toOwnedSlice(self.allocator); + return self.hints.toOwnedSlice(self.arena); } }; @@ -689,26 +682,23 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: /// creates a list of `InlayHint`'s from the given document /// only parameter hints are created /// only hints in the given range are created -/// Caller owns returned memory. -/// `InlayHint.tooltip.value` has to deallocated separately pub fn writeRangeInlayHint( arena: *std.heap.ArenaAllocator, config: Config, store: *DocumentStore, handle: *const DocumentStore.Handle, range: types.Range, - hover_kind: types.MarkupContent.Kind, + hover_kind: types.MarkupKind, encoding: offsets.Encoding, ) error{OutOfMemory}![]types.InlayHint { var builder: Builder = .{ - .allocator = arena.child_allocator, + .arena = arena.allocator(), .config = &config, .handle = handle, .hints = .{}, .hover_kind = hover_kind, .encoding = encoding, }; - errdefer builder.deinit(); var buf: [2]Ast.Node.Index = undefined; for (ast.declMembers(handle.tree, 0, &buf)) |child| { diff --git a/src/lsp.zig b/src/lsp.zig new file mode 100644 index 0000000..9585783 --- /dev/null +++ b/src/lsp.zig @@ -0,0 +1,7852 @@ +//! generated by zig-lsp-codegen + +const std = @import("std"); +const tres = @import("tres"); +const Undefinedable = tres.Undefinedable; + +const URI = []const u8; +/// The URI of a document +pub const DocumentUri = []const u8; +/// A JavaScript regular expression; never used +pub const RegExp = []const u8; + +pub const LSPAny = std.json.Value; +pub const LSPArray = []LSPAny; +pub const LSPObject = std.json.ObjectMap; + +pub fn Map(comptime Key: type, comptime Value: type) type { + if (Key == []const u8) + return std.StringHashMapUnmanaged(Value) + else + return std.AutoHashMapUnmanaged(Key, Value); +} + +pub const RequestId = union(enum) { + integer: i64, + string: []const u8, +}; + +pub const ResponseError = struct { + /// A number indicating the error type that occurred. + code: i64, + /// A string providing a short description of the error. + message: []const u8, + + /// A primitive or structured value that contains additional + /// information about the error. Can be omitted. + data: std.json.Value = .Null, +}; + +pub const MessageDirection = enum { + client_to_server, + server_to_client, + bidirectional, +}; + +pub const RegistrationMetadata = struct { + method: ?[]const u8, + Options: ?type, +}; + +pub const NotificationMetadata = struct { + method: []const u8, + documentation: ?[]const u8, + direction: MessageDirection, + Params: ?type, + registration: RegistrationMetadata, +}; + +pub const RequestMetadata = struct { + method: []const u8, + documentation: ?[]const u8, + direction: MessageDirection, + Params: ?type, + Result: type, + PartialResult: ?type, + ErrorData: ?type, + registration: RegistrationMetadata, +}; + +test { + _ = @field(@This(), "notification_metadata"); + _ = @field(@This(), "request_metadata"); +} + +// Type Aliases + +/// The definition of a symbol represented as one or many {@link Location locations}. +/// For most programming languages there is only one location at which a symbol is +/// defined. +/// +/// Servers should prefer returning `DefinitionLink` over `Definition` if supported +/// by the client. +pub const Definition = union(enum) { + Location: Location, + array_of_Location: []const Location, +}; + +/// Information about where a symbol is defined. 
+/// +/// Provides additional metadata over normal {@link Location location} definitions, including the range of +/// the defining symbol +pub const DefinitionLink = LocationLink; + +/// The declaration of a symbol representation as one or many {@link Location locations}. +pub const Declaration = union(enum) { + Location: Location, + array_of_Location: []const Location, +}; + +/// Information about where a symbol is declared. +/// +/// Provides additional metadata over normal {@link Location location} declarations, including the range of +/// the declaring symbol. +/// +/// Servers should prefer returning `DeclarationLink` over `Declaration` if supported +/// by the client. +pub const DeclarationLink = LocationLink; + +/// Inline value information can be provided by different means: +/// - directly as a text value (class InlineValueText). +/// - as a name to use for a variable lookup (class InlineValueVariableLookup) +/// - as an evaluatable expression (class InlineValueEvaluatableExpression) +/// The InlineValue types combines all inline value types into one type. +/// +/// @since 3.17.0 +pub const InlineValue = union(enum) { + InlineValueText: InlineValueText, + InlineValueVariableLookup: InlineValueVariableLookup, + InlineValueEvaluatableExpression: InlineValueEvaluatableExpression, +}; + +/// The result of a document diagnostic pull request. A report can +/// either be a full report containing all diagnostics for the +/// requested document or an unchanged report indicating that nothing +/// has changed in terms of diagnostics in comparison to the last +/// pull request. +/// +/// @since 3.17.0 +pub const DocumentDiagnosticReport = union(enum) { + RelatedFullDocumentDiagnosticReport: RelatedFullDocumentDiagnosticReport, + RelatedUnchangedDocumentDiagnosticReport: RelatedUnchangedDocumentDiagnosticReport, +}; + +pub const PrepareRenameResult = union(enum) { + Range: Range, + literal_1: struct { + range: Range, + placeholder: []const u8, + }, + literal_2: struct { + defaultBehavior: bool, + }, +}; + +/// A document selector is the combination of one or many document filters. +/// +/// @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`; +/// +/// The use of a string as a document filter is deprecated @since 3.16.0. +pub const DocumentSelector = []const DocumentFilter; + +pub const ProgressToken = union(enum) { + integer: i32, + string: []const u8, +}; + +/// An identifier to refer to a change annotation stored with a workspace edit. +pub const ChangeAnnotationIdentifier = []const u8; + +/// A workspace diagnostic document report. +/// +/// @since 3.17.0 +pub const WorkspaceDocumentDiagnosticReport = union(enum) { + WorkspaceFullDocumentDiagnosticReport: WorkspaceFullDocumentDiagnosticReport, + WorkspaceUnchangedDocumentDiagnosticReport: WorkspaceUnchangedDocumentDiagnosticReport, +}; + +/// An event describing a change to a text document. If only a text is provided +/// it is considered to be the full content of the document. +pub const TextDocumentContentChangeEvent = union(enum) { + literal_0: struct { + /// The range of the document that changed. + range: Range, + /// The optional length of the range that got replaced. + /// + /// @deprecated use range instead. + /// field can be undefined, but this possible state is non-critical + rangeLength: ?u32 = null, + /// The new text for the provided range. + text: []const u8, + }, + literal_1: struct { + /// The new text of the whole document. 
+ text: []const u8, + }, +}; + +/// MarkedString can be used to render human readable text. It is either a markdown string +/// or a code-block that provides a language and a code snippet. The language identifier +/// is semantically equal to the optional language identifier in fenced code blocks in GitHub +/// issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting +/// +/// The pair of a language and a value is an equivalent to markdown: +/// ```${language} +/// ${value} +/// ``` +/// +/// Note that markdown strings will be sanitized - that means html will be escaped. +/// @deprecated use MarkupContent instead. +pub const MarkedString = union(enum) { + string: []const u8, + literal_1: struct { + language: []const u8, + value: []const u8, + }, +}; + +/// A document filter describes a top level text document or +/// a notebook cell document. +/// +/// @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter. +pub const DocumentFilter = union(enum) { + TextDocumentFilter: TextDocumentFilter, + NotebookCellTextDocumentFilter: NotebookCellTextDocumentFilter, +}; + +/// The glob pattern. Either a string pattern or a relative pattern. +/// +/// @since 3.17.0 +pub const GlobPattern = union(enum) { + Pattern: Pattern, + RelativePattern: RelativePattern, +}; + +/// A document filter denotes a document by different properties like +/// the {@link TextDocument.languageId language}, the {@link Uri.scheme scheme} of +/// its resource, or a glob-pattern that is applied to the {@link TextDocument.fileName path}. +/// +/// Glob patterns can have the following syntax: +/// - `*` to match one or more characters in a path segment +/// - `?` to match on one character in a path segment +/// - `**` to match any number of path segments, including none +/// - `{}` to group sub patterns into an OR expression. (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files) +/// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +/// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) +/// +/// @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }` +/// @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }` +/// +/// @since 3.17.0 +pub const TextDocumentFilter = union(enum) { + literal_0: struct { + /// A language id, like `typescript`. + language: []const u8, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + /// field can be undefined, but this possible state is non-critical + scheme: ?[]const u8 = null, + /// A glob pattern, like `*.{ts,js}`. + /// field can be undefined, but this possible state is non-critical + pattern: ?[]const u8 = null, + }, + literal_1: struct { + /// A language id, like `typescript`. + /// field can be undefined, but this possible state is non-critical + language: ?[]const u8 = null, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + scheme: []const u8, + /// A glob pattern, like `*.{ts,js}`. + /// field can be undefined, but this possible state is non-critical + pattern: ?[]const u8 = null, + }, + literal_2: struct { + /// A language id, like `typescript`. 
+ /// field can be undefined, but this possible state is non-critical + language: ?[]const u8 = null, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + /// field can be undefined, but this possible state is non-critical + scheme: ?[]const u8 = null, + /// A glob pattern, like `*.{ts,js}`. + pattern: []const u8, + }, +}; + +/// A notebook document filter denotes a notebook document by +/// different properties. The properties will be match +/// against the notebook's URI (same as with documents) +/// +/// @since 3.17.0 +pub const NotebookDocumentFilter = union(enum) { + literal_0: struct { + /// The type of the enclosing notebook. + notebookType: []const u8, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + /// field can be undefined, but this possible state is non-critical + scheme: ?[]const u8 = null, + /// A glob pattern. + /// field can be undefined, but this possible state is non-critical + pattern: ?[]const u8 = null, + }, + literal_1: struct { + /// The type of the enclosing notebook. + /// field can be undefined, but this possible state is non-critical + notebookType: ?[]const u8 = null, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + scheme: []const u8, + /// A glob pattern. + /// field can be undefined, but this possible state is non-critical + pattern: ?[]const u8 = null, + }, + literal_2: struct { + /// The type of the enclosing notebook. + /// field can be undefined, but this possible state is non-critical + notebookType: ?[]const u8 = null, + /// A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. + /// field can be undefined, but this possible state is non-critical + scheme: ?[]const u8 = null, + /// A glob pattern. + pattern: []const u8, + }, +}; + +/// The glob pattern to watch relative to the base path. Glob patterns can have the following syntax: +/// - `*` to match one or more characters in a path segment +/// - `?` to match on one character in a path segment +/// - `**` to match any number of path segments, including none +/// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files) +/// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) +/// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) +/// +/// @since 3.17.0 +pub const Pattern = []const u8; + +// Enumerations + +/// A set of predefined token types. This set is not fixed +/// an clients can specify additional token types via the +/// corresponding client capabilities. +/// +/// @since 3.16.0 +pub const SemanticTokenTypes = enum { + pub const tres_string_enum = {}; + + namespace, + /// Represents a generic type. Acts as a fallback for types which can't be mapped to + /// a specific type like class or enum. + type, + class, + @"enum", + interface, + @"struct", + typeParameter, + parameter, + variable, + property, + enumMember, + event, + function, + method, + macro, + keyword, + modifier, + comment, + string, + number, + regexp, + operator, + /// @since 3.17.0 + decorator, +}; + +/// A set of predefined token modifiers. This set is not fixed +/// an clients can specify additional token types via the +/// corresponding client capabilities. 
+/// +/// @since 3.16.0 +pub const SemanticTokenModifiers = enum { + pub const tres_string_enum = {}; + + declaration, + definition, + readonly, + static, + deprecated, + abstract, + @"async", + modification, + documentation, + defaultLibrary, +}; + +/// The document diagnostic report kinds. +/// +/// @since 3.17.0 +pub const DocumentDiagnosticReportKind = enum { + pub const tres_string_enum = {}; + + /// A diagnostic report with a full + /// set of problems. + full, + /// A report indicating that the last + /// returned report is still accurate. + unchanged, +}; + +/// Predefined error codes. +pub const ErrorCodes = enum(i32) { + ParseError = -32700, + InvalidRequest = -32600, + MethodNotFound = -32601, + InvalidParams = -32602, + InternalError = -32603, + /// Error code indicating that a server received a notification or + /// request before the server has received the `initialize` request. + ServerNotInitialized = -32002, + UnknownErrorCode = -32001, +}; + +pub const LSPErrorCodes = enum(i32) { + /// A request failed but it was syntactically correct, e.g the + /// method name was known and the parameters were valid. The error + /// message should contain human readable information about why + /// the request failed. + /// + /// @since 3.17.0 + RequestFailed = -32803, + /// The server cancelled the request. This error code should + /// only be used for requests that explicitly support being + /// server cancellable. + /// + /// @since 3.17.0 + ServerCancelled = -32802, + /// The server detected that the content of a document got + /// modified outside normal conditions. A server should + /// NOT send this error code if it detects a content change + /// in it unprocessed messages. The result even computed + /// on an older state might still be useful for the client. + /// + /// If a client decides that a result is not of any use anymore + /// the client should cancel the request. + ContentModified = -32801, + /// The client has canceled a request and a server as detected + /// the cancel. + RequestCancelled = -32800, +}; + +/// A set of predefined range kinds. +pub const FoldingRangeKind = enum { + pub const tres_string_enum = {}; + + /// Folding range for a comment + comment, + /// Folding range for an import or include + imports, + /// Folding range for a region (e.g. `#region`) + region, +}; + +/// A symbol kind. +pub const SymbolKind = enum(u32) { + File = 1, + Module = 2, + Namespace = 3, + Package = 4, + Class = 5, + Method = 6, + Property = 7, + Field = 8, + Constructor = 9, + Enum = 10, + Interface = 11, + Function = 12, + Variable = 13, + Constant = 14, + String = 15, + Number = 16, + Boolean = 17, + Array = 18, + Object = 19, + Key = 20, + Null = 21, + EnumMember = 22, + Struct = 23, + Event = 24, + Operator = 25, + TypeParameter = 26, +}; + +/// Symbol tags are extra annotations that tweak the rendering of a symbol. +/// +/// @since 3.16 +pub const SymbolTag = enum(u32) { + /// Render a symbol as obsolete, usually using a strike-out. + Deprecated = 1, + placeholder__, // fixes alignment issue +}; + +/// Moniker uniqueness level to define scope of the moniker. +/// +/// @since 3.16.0 +pub const UniquenessLevel = enum { + pub const tres_string_enum = {}; + + /// The moniker is only unique inside a document + document, + /// The moniker is unique inside a project for which a dump got created + project, + /// The moniker is unique inside the group to which a project belongs + group, + /// The moniker is unique inside the moniker scheme. 
+ scheme, + /// The moniker is globally unique + global, +}; + +/// The moniker kind. +/// +/// @since 3.16.0 +pub const MonikerKind = enum { + pub const tres_string_enum = {}; + + /// The moniker represent a symbol that is imported into a project + import, + /// The moniker represents a symbol that is exported from a project + @"export", + /// The moniker represents a symbol that is local to a project (e.g. a local + /// variable of a function, a class not visible outside the project, ...) + local, +}; + +/// Inlay hint kinds. +/// +/// @since 3.17.0 +pub const InlayHintKind = enum(u32) { + /// An inlay hint that for a type annotation. + Type = 1, + /// An inlay hint that is for a parameter. + Parameter = 2, +}; + +/// The message type +pub const MessageType = enum(u32) { + /// An error message. + Error = 1, + /// A warning message. + Warning = 2, + /// An information message. + Info = 3, + /// A log message. + Log = 4, +}; + +/// Defines how the host (editor) should sync +/// document changes to the language server. +pub const TextDocumentSyncKind = enum(u32) { + /// Documents should not be synced at all. + None = 0, + /// Documents are synced by always sending the full content + /// of the document. + Full = 1, + /// Documents are synced by sending the full content on open. + /// After that only incremental updates to the document are + /// send. + Incremental = 2, +}; + +/// Represents reasons why a text document is saved. +pub const TextDocumentSaveReason = enum(u32) { + /// Manually triggered, e.g. by the user pressing save, by starting debugging, + /// or by an API call. + Manual = 1, + /// Automatic after a delay. + AfterDelay = 2, + /// When the editor lost focus. + FocusOut = 3, +}; + +/// The kind of a completion entry. +pub const CompletionItemKind = enum(u32) { + Text = 1, + Method = 2, + Function = 3, + Constructor = 4, + Field = 5, + Variable = 6, + Class = 7, + Interface = 8, + Module = 9, + Property = 10, + Unit = 11, + Value = 12, + Enum = 13, + Keyword = 14, + Snippet = 15, + Color = 16, + File = 17, + Reference = 18, + Folder = 19, + EnumMember = 20, + Constant = 21, + Struct = 22, + Event = 23, + Operator = 24, + TypeParameter = 25, +}; + +/// Completion item tags are extra annotations that tweak the rendering of a completion +/// item. +/// +/// @since 3.15.0 +pub const CompletionItemTag = enum(u32) { + /// Render a completion as obsolete, usually using a strike-out. + Deprecated = 1, + placeholder__, // fixes alignment issue +}; + +/// Defines whether the insert text in a completion item should be interpreted as +/// plain text or a snippet. +pub const InsertTextFormat = enum(u32) { + /// The primary text to be inserted is treated as a plain string. + PlainText = 1, + /// The primary text to be inserted is treated as a snippet. + /// + /// A snippet can define tab stops and placeholders with `$1`, `$2` + /// and `${3:foo}`. `$0` defines the final tab stop, it defaults to + /// the end of the snippet. Placeholders with equal identifiers are linked, + /// that is typing in one will update others too. + /// + /// See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax + Snippet = 2, +}; + +/// How whitespace and indentation is handled during completion +/// item insertion. +/// +/// @since 3.16.0 +pub const InsertTextMode = enum(u32) { + /// The insertion or replace strings is taken as it is. 
If the + /// value is multi line the lines below the cursor will be + /// inserted using the indentation defined in the string value. + /// The client will not apply any kind of adjustments to the + /// string. + asIs = 1, + /// The editor adjusts leading whitespace of new lines so that + /// they match the indentation up to the cursor of the line for + /// which the item is accepted. + /// + /// Consider a line like this: <2tabs><3tabs>foo. Accepting a + /// multi line completion item is indented using 2 tabs and all + /// following lines inserted will be indented using 2 tabs as well. + adjustIndentation = 2, +}; + +/// A document highlight kind. +pub const DocumentHighlightKind = enum(u32) { + /// A textual occurrence. + Text = 1, + /// Read-access of a symbol, like reading a variable. + Read = 2, + /// Write-access of a symbol, like writing to a variable. + Write = 3, +}; + +/// A set of predefined code action kinds +pub const CodeActionKind = enum { + pub const tres_string_enum = {}; + + /// Empty kind. + empty, + /// Base kind for quickfix actions: 'quickfix' + quickfix, + /// Base kind for refactoring actions: 'refactor' + refactor, + /// Base kind for refactoring extraction actions: 'refactor.extract' + /// + /// Example extract actions: + /// + /// - Extract method + /// - Extract function + /// - Extract variable + /// - Extract interface from class + /// - ... + @"refactor.extract", + /// Base kind for refactoring inline actions: 'refactor.inline' + /// + /// Example inline actions: + /// + /// - Inline function + /// - Inline variable + /// - Inline constant + /// - ... + @"refactor.inline", + /// Base kind for refactoring rewrite actions: 'refactor.rewrite' + /// + /// Example rewrite actions: + /// + /// - Convert JavaScript function to class + /// - Add or remove parameter + /// - Encapsulate field + /// - Make method static + /// - Move method to base class + /// - ... + @"refactor.rewrite", + /// Base kind for source actions: `source` + /// + /// Source code actions apply to the entire file. + source, + /// Base kind for an organize imports source action: `source.organizeImports` + @"source.organizeImports", + /// Base kind for auto-fix source actions: `source.fixAll`. + /// + /// Fix all actions automatically fix errors that have a clear fix that do not require user input. + /// They should not suppress errors or perform unsafe fixes such as generating new types or classes. + /// + /// @since 3.15.0 + @"source.fixAll", + + pub fn tresParse(json_value: std.json.Value, maybe_allocator: ?std.mem.Allocator) error{InvalidEnumTag}!@This() { + _ = maybe_allocator; + if (json_value != .String) return error.InvalidEnumTag; + if (json_value.String.len == 0) return .empty; + return std.meta.stringToEnum(@This(), json_value.String) orelse return error.InvalidEnumTag; + } +}; + +pub const TraceValues = enum { + pub const tres_string_enum = {}; + + /// Turn tracing off. + off, + /// Trace messages only. + messages, + /// Verbose message tracing. + verbose, +}; + +/// Describes the content type that a client supports in various +/// result literals like `Hover`, `ParameterInfo` or `CompletionItem`. +/// +/// Please note that `MarkupKinds` must not start with a `$`. This kinds +/// are reserved for internal usage. +pub const MarkupKind = enum { + pub const tres_string_enum = {}; + + /// Plain text is supported as a content format + plaintext, + /// Markdown is supported as a content format + markdown, +}; + +/// A set of predefined position encoding kinds. 
+/// +/// @since 3.17.0 +pub const PositionEncodingKind = enum { + pub const tres_string_enum = {}; + + /// Character offsets count UTF-8 code units. + @"utf-8", + /// Character offsets count UTF-16 code units. + /// + /// This is the default and must always be supported + /// by servers + @"utf-16", + /// Character offsets count UTF-32 code units. + /// + /// Implementation note: these are the same as Unicode code points, + /// so this `PositionEncodingKind` may also be used for an + /// encoding-agnostic representation of character offsets. + @"utf-32", +}; + +/// The file event type +pub const FileChangeType = enum(u32) { + /// The file got created. + Created = 1, + /// The file got changed. + Changed = 2, + /// The file got deleted. + Deleted = 3, +}; + +pub const WatchKind = enum(u32) { + /// Interested in create events. + Create = 1, + /// Interested in change events + Change = 2, + /// Interested in delete events + Delete = 4, +}; + +/// The diagnostic's severity. +pub const DiagnosticSeverity = enum(u32) { + /// Reports an error. + Error = 1, + /// Reports a warning. + Warning = 2, + /// Reports an information. + Information = 3, + /// Reports a hint. + Hint = 4, +}; + +/// The diagnostic tags. +/// +/// @since 3.15.0 +pub const DiagnosticTag = enum(u32) { + /// Unused or unnecessary code. + /// + /// Clients are allowed to render diagnostics with this tag faded out instead of having + /// an error squiggle. + Unnecessary = 1, + /// Deprecated or obsolete code. + /// + /// Clients are allowed to rendered diagnostics with this tag strike through. + Deprecated = 2, +}; + +/// How a completion was triggered +pub const CompletionTriggerKind = enum(u32) { + /// Completion was triggered by typing an identifier (24x7 code + /// complete), manual invocation (e.g Ctrl+Space) or via API. + Invoked = 1, + /// Completion was triggered by a trigger character specified by + /// the `triggerCharacters` properties of the `CompletionRegistrationOptions`. + TriggerCharacter = 2, + /// Completion was re-triggered as current completion list is incomplete + TriggerForIncompleteCompletions = 3, +}; + +/// How a signature help was triggered. +/// +/// @since 3.15.0 +pub const SignatureHelpTriggerKind = enum(u32) { + /// Signature help was invoked manually by the user or by a command. + Invoked = 1, + /// Signature help was triggered by a trigger character. + TriggerCharacter = 2, + /// Signature help was triggered by the cursor moving or by the document content changing. + ContentChange = 3, +}; + +/// The reason why code actions were requested. +/// +/// @since 3.17.0 +pub const CodeActionTriggerKind = enum(u32) { + /// Code actions were explicitly requested by the user or by an extension. + Invoked = 1, + /// Code actions were requested automatically. + /// + /// This typically happens when current selection in a file changes, but can + /// also be triggered when file content changes. + Automatic = 2, +}; + +/// A pattern kind describing if a glob pattern matches a file a folder or +/// both. +/// +/// @since 3.16.0 +pub const FileOperationPatternKind = enum { + pub const tres_string_enum = {}; + + /// The pattern matches a file only. + file, + /// The pattern matches a folder only. + folder, +}; + +/// A notebook cell kind. +/// +/// @since 3.17.0 +pub const NotebookCellKind = enum(u32) { + /// A markup-cell is formatted source that is used for display. + Markup = 1, + /// A code-cell is source code. 
+ Code = 2, +}; + +pub const ResourceOperationKind = enum { + pub const tres_string_enum = {}; + + /// Supports creating new files and folders. + create, + /// Supports renaming existing files and folders. + rename, + /// Supports deleting existing files and folders. + delete, +}; + +pub const FailureHandlingKind = enum { + pub const tres_string_enum = {}; + + /// Applying the workspace change is simply aborted if one of the changes provided + /// fails. All operations executed before the failing operation stay executed. + abort, + /// All operations are executed transactional. That means they either all + /// succeed or no changes at all are applied to the workspace. + transactional, + /// If the workspace edit contains only textual file changes they are executed transactional. + /// If resource changes (create, rename or delete file) are part of the change the failure + /// handling strategy is abort. + textOnlyTransactional, + /// The client tries to undo the operations already executed. But there is no + /// guarantee that this is succeeding. + undo, +}; + +pub const PrepareSupportDefaultBehavior = enum(u32) { + /// The client's default behavior is to select the identifier + /// according the to language's syntax rule. + Identifier = 1, + placeholder__, // fixes alignment issue +}; + +pub const TokenFormat = enum { + pub const tres_string_enum = {}; + + relative, + placeholder__, // fixes alignment issue +}; + +// Structures + +pub const ImplementationParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents a location inside a resource, such as a line +/// inside a text file. +pub const Location = struct { + uri: DocumentUri, + range: Range, +}; + +pub const ImplementationRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends ImplementationOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +pub const TypeDefinitionParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. 
+ /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +pub const TypeDefinitionRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends TypeDefinitionOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// A workspace folder inside a client. +pub const WorkspaceFolder = struct { + /// The associated URI for this workspace folder. + uri: URI, + /// The name of the workspace folder. Used to refer to this + /// workspace folder in the user interface. + name: []const u8, +}; + +/// The parameters of a `workspace/didChangeWorkspaceFolders` notification. +pub const DidChangeWorkspaceFoldersParams = struct { + /// The actual workspace folder change event. + event: WorkspaceFoldersChangeEvent, +}; + +/// The parameters of a configuration request. +pub const ConfigurationParams = struct { + items: []const ConfigurationItem, +}; + +/// Parameters for a {@link DocumentColorRequest}. +pub const DocumentColorParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents a color range from a document. +pub const ColorInformation = struct { + /// The range in the document where this color appears. + range: Range, + /// The actual color value for this color range. + color: Color, +}; + +pub const DocumentColorRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends DocumentColorOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// Parameters for a {@link ColorPresentationRequest}. +pub const ColorPresentationParams = struct { + /// The text document. 
+ textDocument: TextDocumentIdentifier, + /// The color to request presentations for. + color: Color, + /// The range where the color would be inserted. Serves as a context. + range: Range, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +pub const ColorPresentation = struct { + /// The label of this color presentation. It will be shown on the color + /// picker header. By default this is also the text that is inserted when selecting + /// this color presentation. + label: []const u8, + /// An {@link TextEdit edit} which is applied to a document when selecting + /// this presentation for the color. When `falsy` the {@link ColorPresentation.label label} + /// is used. + /// field can be undefined, but this possible state is non-critical + textEdit: ?TextEdit = null, + /// An optional array of additional {@link TextEdit text edits} that are applied when + /// selecting this color presentation. Edits must not overlap with the main {@link ColorPresentation.textEdit edit} nor with themselves. + /// field can be undefined, but this possible state is non-critical + additionalTextEdits: ?[]const TextEdit = null, +}; + +pub const WorkDoneProgressOptions = struct { + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// General text document registration options. +pub const TextDocumentRegistrationOptions = struct { + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, +}; + +/// Parameters for a {@link FoldingRangeRequest}. +pub const FoldingRangeParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents a folding range. To be valid, start and end line must be bigger than zero and smaller +/// than the number of lines in the document. Clients are free to ignore invalid ranges. +pub const FoldingRange = struct { + /// The zero-based start line of the range to fold. The folded area starts after the line's last character. + /// To be valid, the end must be zero or larger and smaller than the number of lines in the document. + startLine: u32, + /// The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line. + /// field can be undefined, but this possible state is non-critical + startCharacter: ?u32 = null, + /// The zero-based end line of the range to fold. The folded area ends with the line's last character. 
+ /// To be valid, the end must be zero or larger and smaller than the number of lines in the document. + endLine: u32, + /// The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line. + /// field can be undefined, but this possible state is non-critical + endCharacter: ?u32 = null, + /// Describes the kind of the folding range such as `comment' or 'region'. The kind + /// is used to categorize folding ranges and used by commands like 'Fold all comments'. + /// See {@link FoldingRangeKind} for an enumeration of standardized kinds. + /// field can be undefined, but this possible state is non-critical + kind: ?FoldingRangeKind = null, + /// The text that the client should show when the specified range is + /// collapsed. If not defined or not supported by the client, a default + /// will be chosen by the client. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + collapsedText: ?[]const u8 = null, +}; + +pub const FoldingRangeRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends FoldingRangeOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +pub const DeclarationParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +pub const DeclarationRegistrationOptions = struct { + // Extends DeclarationOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// A parameter literal used in selection range requests. +pub const SelectionRangeParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The positions inside the text document. 
+ positions: []const Position, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A selection range represents a part of a selection hierarchy. A selection range +/// may have a parent selection range that contains it. +pub const SelectionRange = struct { + /// The {@link Range range} of this selection range. + range: Range, + /// The parent selection range containing this range. Therefore `parent.range` must contain `this.range`. + /// field can be undefined, but this possible state is non-critical + parent: ?SelectionRange = null, +}; + +pub const SelectionRangeRegistrationOptions = struct { + // Extends SelectionRangeOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +pub const WorkDoneProgressCreateParams = struct { + /// The token to be used to report progress. + token: ProgressToken, +}; + +pub const WorkDoneProgressCancelParams = struct { + /// The token to be used to report progress. + token: ProgressToken, +}; + +/// The parameter of a `textDocument/prepareCallHierarchy` request. +/// +/// @since 3.16.0 +pub const CallHierarchyPrepareParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Represents programming constructs like functions or constructors in the context +/// of call hierarchy. +/// +/// @since 3.16.0 +pub const CallHierarchyItem = struct { + /// The name of this item. + name: []const u8, + /// The kind of this item. + kind: SymbolKind, + /// Tags for this item. + /// field can be undefined, but this possible state is non-critical + tags: ?[]const SymbolTag = null, + /// More detail for this item, e.g. the signature of a function. + /// field can be undefined, but this possible state is non-critical + detail: ?[]const u8 = null, + /// The resource identifier of this item. + uri: DocumentUri, + /// The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code. + range: Range, + /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function. + /// Must be contained by the {@link CallHierarchyItem.range `range`}. 
+ selectionRange: Range, + /// A data entry field that is preserved between a call hierarchy prepare and + /// incoming calls or outgoing calls requests. + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Call hierarchy options used during static or dynamic registration. +/// +/// @since 3.16.0 +pub const CallHierarchyRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends CallHierarchyOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// The parameter of a `callHierarchy/incomingCalls` request. +/// +/// @since 3.16.0 +pub const CallHierarchyIncomingCallsParams = struct { + item: CallHierarchyItem, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents an incoming call, e.g. a caller of a method or constructor. +/// +/// @since 3.16.0 +pub const CallHierarchyIncomingCall = struct { + /// The item that makes the call. + from: CallHierarchyItem, + /// The ranges at which the calls appear. This is relative to the caller + /// denoted by {@link CallHierarchyIncomingCall.from `this.from`}. + fromRanges: []const Range, +}; + +/// The parameter of a `callHierarchy/outgoingCalls` request. +/// +/// @since 3.16.0 +pub const CallHierarchyOutgoingCallsParams = struct { + item: CallHierarchyItem, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc. +/// +/// @since 3.16.0 +pub const CallHierarchyOutgoingCall = struct { + /// The item that is called. + to: CallHierarchyItem, + /// The range at which this item is called. This is the range relative to the caller, e.g the item + /// passed to {@link CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls `provideCallHierarchyOutgoingCalls`} + /// and not {@link CallHierarchyOutgoingCall.to `this.to`}. + fromRanges: []const Range, +}; + +/// @since 3.16.0 +pub const SemanticTokensParams = struct { + /// The text document. 
+ textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// @since 3.16.0 +pub const SemanticTokens = struct { + /// An optional result id. If provided and clients support delta updating + /// the client will include the result id in the next semantic token request. + /// A server can then instead of computing all semantic tokens again simply + /// send a delta. + /// field can be undefined, but this possible state is non-critical + resultId: ?[]const u8 = null, + /// The actual tokens. + data: []const u32, +}; + +/// @since 3.16.0 +pub const SemanticTokensPartialResult = struct { + data: []const u32, +}; + +/// @since 3.16.0 +pub const SemanticTokensRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends SemanticTokensOptions + /// The legend used by the server + legend: SemanticTokensLegend, + /// Server supports providing semantic tokens for a specific range + /// of a document. + /// field can be undefined, but this possible state is non-critical + range: ?union(enum) { + bool: bool, + literal_1: struct {}, + } = null, + /// Server supports providing semantic tokens for a full document. + /// field can be undefined, but this possible state is non-critical + full: ?union(enum) { + bool: bool, + literal_1: struct { + /// The server supports deltas for full documents. + /// field can be undefined, but this possible state is non-critical + delta: ?bool = null, + }, + } = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensDeltaParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The result id of a previous response. The result Id can either point to a full response + /// or a delta response depending on what was received last. + previousResultId: []const u8, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. 
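For context on the `data` field of `SemanticTokens` above: the LSP encodes each token as five consecutive unsigned integers (deltaLine, deltaStartChar, length, tokenType, tokenModifiers). A minimal sketch in Zig, assuming the generated types are in scope and that the token-type indices are positions in the server's `SemanticTokensLegend`:

// Two tokens on the same line: "const" (length 5) and "std" (length 3).
// The token-type indices 0 and 1 are hypothetical entries in the advertised legend.
const data = [_]u32{
    0, 0, 5, 0, 0, // line +0, column 0, length 5, tokenType 0, no modifiers
    0, 6, 3, 1, 0, // same line, 6 columns after the previous token's start, length 3, tokenType 1
};
const tokens = SemanticTokens{
    .resultId = "1",
    .data = &data,
};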
+ /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensDelta = struct { + /// field can be undefined, but this possible state is non-critical + resultId: ?[]const u8 = null, + /// The semantic token edits to transform a previous result into a new result. + edits: []const SemanticTokensEdit, +}; + +/// @since 3.16.0 +pub const SemanticTokensDeltaPartialResult = struct { + edits: []const SemanticTokensEdit, +}; + +/// @since 3.16.0 +pub const SemanticTokensRangeParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The range the semantic tokens are requested for. + range: Range, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Params to show a document. +/// +/// @since 3.16.0 +pub const ShowDocumentParams = struct { + /// The document uri to show. + uri: URI, + /// Indicates to show the resource in an external program. + /// To show for example `https://code.visualstudio.com/` + /// in the default WEB browser set `external` to `true`. + /// field can be undefined, but this possible state is non-critical + external: ?bool = null, + /// An optional property to indicate whether the editor + /// showing the document should take focus or not. + /// Clients might ignore this property if an external + /// program is started. + /// field can be undefined, but this possible state is non-critical + takeFocus: ?bool = null, + /// An optional selection range if the document is a text + /// document. Clients might ignore the property if an + /// external program is started or the file is not a text + /// file. + /// field can be undefined, but this possible state is non-critical + selection: ?Range = null, +}; + +/// The result of a showDocument request. +/// +/// @since 3.16.0 +pub const ShowDocumentResult = struct { + /// A boolean indicating if the show was successful. + success: bool, +}; + +pub const LinkedEditingRangeParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// The result of a linked editing range request. +/// +/// @since 3.16.0 +pub const LinkedEditingRanges = struct { + /// A list of ranges that can be edited together. The ranges must have + /// identical length and contain identical text content. The ranges cannot overlap. + ranges: []const Range, + /// An optional word pattern (regular expression) that describes valid contents for + /// the given ranges. If no pattern is provided, the client configuration's word + /// pattern will be used. 
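As a small illustration of `ShowDocumentParams` above, a server asking the client to open a URL inside the editor rather than an external program might build something like the following sketch (it assumes `URI` is a plain `[]const u8` alias in these bindings):

const show_doc = ShowDocumentParams{
    .uri = "https://ziglang.org/documentation/master/",
    .external = false, // show inside the editor instead of the default web browser
    .takeFocus = true, // ask the client to focus the opened document
};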
+ /// field can be undefined, but this possible state is non-critical + wordPattern: ?[]const u8 = null, +}; + +pub const LinkedEditingRangeRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends LinkedEditingRangeOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// The parameters sent in notifications/requests for user-initiated creation of +/// files. +/// +/// @since 3.16.0 +pub const CreateFilesParams = struct { + /// An array of all files/folders created in this operation. + files: []const FileCreate, +}; + +/// A workspace edit represents changes to many resources managed in the workspace. The edit +/// should either provide `changes` or `documentChanges`. If documentChanges are present +/// they are preferred over `changes` if the client can handle versioned document edits. +/// +/// Since version 3.13.0 a workspace edit can contain resource operations as well. If resource +/// operations are present clients need to execute the operations in the order in which they +/// are provided. So a workspace edit for example can consist of the following two changes: +/// (1) a create file a.txt and (2) a text document edit which insert text into file a.txt. +/// +/// An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will +/// cause failure of the operation. How the client recovers from the failure is described by +/// the client capability: `workspace.workspaceEdit.failureHandling` +pub const WorkspaceEdit = struct { + /// Holds changes to existing resources. + /// field can be undefined, but this possible state is non-critical + changes: ?Map(DocumentUri, []const TextEdit) = null, + /// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes + /// are either an array of `TextDocumentEdit`s to express changes to n different text documents + /// where each text document edit addresses a specific version of a text document. Or it can contain + /// above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations. + /// + /// Whether a client supports versioned document edits is expressed via + /// `workspace.workspaceEdit.documentChanges` client capability. + /// + /// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then + /// only plain `TextEdit`s using the `changes` property are supported. + /// field can be undefined, but this possible state is non-critical + documentChanges: ?[]const union(enum) { + TextDocumentEdit: TextDocumentEdit, + CreateFile: CreateFile, + RenameFile: RenameFile, + DeleteFile: DeleteFile, + } = null, + /// A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and + /// delete file / folder operations. + /// + /// Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`. 
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + changeAnnotations: ?Map(ChangeAnnotationIdentifier, ChangeAnnotation) = null, +}; + +/// The options to register for file operations. +/// +/// @since 3.16.0 +pub const FileOperationRegistrationOptions = struct { + /// The actual filters. + filters: []const FileOperationFilter, +}; + +/// The parameters sent in notifications/requests for user-initiated renames of +/// files. +/// +/// @since 3.16.0 +pub const RenameFilesParams = struct { + /// An array of all files/folders renamed in this operation. When a folder is renamed, only + /// the folder will be included, and not its children. + files: []const FileRename, +}; + +/// The parameters sent in notifications/requests for user-initiated deletes of +/// files. +/// +/// @since 3.16.0 +pub const DeleteFilesParams = struct { + /// An array of all files/folders deleted in this operation. + files: []const FileDelete, +}; + +pub const MonikerParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Moniker definition to match LSIF 0.5 moniker definition. +/// +/// @since 3.16.0 +pub const Moniker = struct { + /// The scheme of the moniker. For example tsc or .Net + scheme: []const u8, + /// The identifier of the moniker. The value is opaque in LSIF however + /// schema owners are allowed to define the structure if they want. + identifier: []const u8, + /// The scope in which the moniker is unique + unique: UniquenessLevel, + /// The moniker kind if known. + /// field can be undefined, but this possible state is non-critical + kind: ?MonikerKind = null, +}; + +pub const MonikerRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends MonikerOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameter of a `textDocument/prepareTypeHierarchy` request. +/// +/// @since 3.17.0 +pub const TypeHierarchyPrepareParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// @since 3.17.0 +pub const TypeHierarchyItem = struct { + /// The name of this item. + name: []const u8, + /// The kind of this item. + kind: SymbolKind, + /// Tags for this item. 
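To illustrate the user-initiated file-operation params above (`CreateFilesParams`, `RenameFilesParams`, `DeleteFilesParams`): a rename notification carries one entry per renamed file or folder, and only the folder itself when a folder is renamed. A sketch, assuming `FileRename` follows the LSP shape with `oldUri`/`newUri` string fields:

const rename = RenameFilesParams{
    .files = &[_]FileRename{
        .{
            .oldUri = "file:///project/src/old_name.zig",
            .newUri = "file:///project/src/new_name.zig",
        },
    },
};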
+ /// field can be undefined, but this possible state is non-critical + tags: ?[]const SymbolTag = null, + /// More detail for this item, e.g. the signature of a function. + /// field can be undefined, but this possible state is non-critical + detail: ?[]const u8 = null, + /// The resource identifier of this item. + uri: DocumentUri, + /// The range enclosing this symbol not including leading/trailing whitespace + /// but everything else, e.g. comments and code. + range: Range, + /// The range that should be selected and revealed when this symbol is being + /// picked, e.g. the name of a function. Must be contained by the + /// {@link TypeHierarchyItem.range `range`}. + selectionRange: Range, + /// A data entry field that is preserved between a type hierarchy prepare and + /// supertypes or subtypes requests. It could also be used to identify the + /// type hierarchy in the server, helping improve the performance on + /// resolving supertypes and subtypes. + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Type hierarchy options used during static or dynamic registration. +/// +/// @since 3.17.0 +pub const TypeHierarchyRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends TypeHierarchyOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// The parameter of a `typeHierarchy/supertypes` request. +/// +/// @since 3.17.0 +pub const TypeHierarchySupertypesParams = struct { + item: TypeHierarchyItem, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// The parameter of a `typeHierarchy/subtypes` request. +/// +/// @since 3.17.0 +pub const TypeHierarchySubtypesParams = struct { + item: TypeHierarchyItem, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A parameter literal used in inline value requests. +/// +/// @since 3.17.0 +pub const InlineValueParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The document range for which inline values should be computed. 
+ range: Range, + /// Additional information about the context in which inline values were + /// requested. + context: InlineValueContext, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Inline value options used during static or dynamic registration. +/// +/// @since 3.17.0 +pub const InlineValueRegistrationOptions = struct { + // Extends InlineValueOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// A parameter literal used in inlay hint requests. +/// +/// @since 3.17.0 +pub const InlayHintParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The document range for which inlay hints should be computed. + range: Range, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Inlay hint information. +/// +/// @since 3.17.0 +pub const InlayHint = struct { + /// The position of this hint. + position: Position, + /// The label of this hint. A human readable string or an array of + /// InlayHintLabelPart label parts. + /// + /// *Note* that neither the string nor the label part can be empty. + label: union(enum) { + string: []const u8, + array_of_InlayHintLabelPart: []const InlayHintLabelPart, + }, + /// The kind of this hint. Can be omitted in which case the client + /// should fall back to a reasonable default. + /// field can be undefined, but this possible state is non-critical + kind: ?InlayHintKind = null, + /// Optional text edits that are performed when accepting this inlay hint. + /// + /// *Note* that edits are expected to change the document so that the inlay + /// hint (or its nearest variant) is now part of the document and the inlay + /// hint itself is now obsolete. + /// field can be undefined, but this possible state is non-critical + textEdits: ?[]const TextEdit = null, + /// The tooltip text when you hover over this item. + /// field can be undefined, but this possible state is non-critical + tooltip: ?union(enum) { + string: []const u8, + MarkupContent: MarkupContent, + } = null, + /// Render padding before the hint. + /// + /// Note: Padding should use the editor's background color, not the + /// background color of the hint itself. That means padding can be used + /// to visually align/separate an inlay hint. + /// field can be undefined, but this possible state is non-critical + paddingLeft: ?bool = null, + /// Render padding after the hint. + /// + /// Note: Padding should use the editor's background color, not the + /// background color of the hint itself. That means padding can be used + /// to visually align/separate an inlay hint. 
+ /// field can be undefined, but this possible state is non-critical + paddingRight: ?bool = null, + /// A data entry field that is preserved on an inlay hint between + /// a `textDocument/inlayHint` and a `inlayHint/resolve` request. + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Inlay hint options used during static or dynamic registration. +/// +/// @since 3.17.0 +pub const InlayHintRegistrationOptions = struct { + // Extends InlayHintOptions + /// The server provides support to resolve additional + /// information for an inlay hint item. + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// Parameters of the document diagnostic request. +/// +/// @since 3.17.0 +pub const DocumentDiagnosticParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + /// The additional identifier provided during registration. + /// field can be undefined, but this possible state is non-critical + identifier: ?[]const u8 = null, + /// The result id of a previous response if provided. + /// field can be undefined, but this possible state is non-critical + previousResultId: ?[]const u8 = null, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A partial result for a document diagnostic report. +/// +/// @since 3.17.0 +pub const DocumentDiagnosticReportPartialResult = struct { + relatedDocuments: Map(DocumentUri, union(enum) { + FullDocumentDiagnosticReport: FullDocumentDiagnosticReport, + UnchangedDocumentDiagnosticReport: UnchangedDocumentDiagnosticReport, + }), +}; + +/// Cancellation data returned from a diagnostic request. +/// +/// @since 3.17.0 +pub const DiagnosticServerCancellationData = struct { + retriggerRequest: bool, +}; + +/// Diagnostic registration options. +/// +/// @since 3.17.0 +pub const DiagnosticRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends DiagnosticOptions + /// An optional identifier under which the diagnostics are + /// managed by the client. 
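A sketch of building the `InlayHint` described above, e.g. a type-annotation hint rendered after a variable name (assumes the generated types are in scope; `kind` and the other optional fields are left at their defaults):

const hint = InlayHint{
    .position = .{ .line = 12, .character = 8 }, // where the hint is rendered
    .label = .{ .string = ": u32" },             // plain-string variant of the label union
    .paddingLeft = true,                         // editor-background padding before the hint
};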
+ /// field can be undefined, but this possible state is non-critical + identifier: ?[]const u8 = null, + /// Whether the language has inter file dependencies meaning that + /// editing code in one file can result in a different diagnostic + /// set in another file. Inter file dependencies are common for + /// most programming languages and typically uncommon for linters. + interFileDependencies: bool, + /// The server provides support for workspace diagnostics as well. + workspaceDiagnostics: bool, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +/// Parameters of the workspace diagnostic request. +/// +/// @since 3.17.0 +pub const WorkspaceDiagnosticParams = struct { + /// The additional identifier provided during registration. + /// field can be undefined, but this possible state is non-critical + identifier: ?[]const u8 = null, + /// The currently known diagnostic reports with their + /// previous result ids. + previousResultIds: []const PreviousResultId, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A workspace diagnostic report. +/// +/// @since 3.17.0 +pub const WorkspaceDiagnosticReport = struct { + items: []const WorkspaceDocumentDiagnosticReport, +}; + +/// A partial result for a workspace diagnostic report. +/// +/// @since 3.17.0 +pub const WorkspaceDiagnosticReportPartialResult = struct { + items: []const WorkspaceDocumentDiagnosticReport, +}; + +/// The params sent in an open notebook document notification. +/// +/// @since 3.17.0 +pub const DidOpenNotebookDocumentParams = struct { + /// The notebook document that got opened. + notebookDocument: NotebookDocument, + /// The text documents that represent the content + /// of a notebook cell. + cellTextDocuments: []const TextDocumentItem, +}; + +/// The params sent in a change notebook document notification. +/// +/// @since 3.17.0 +pub const DidChangeNotebookDocumentParams = struct { + /// The notebook document that did change. The version number points + /// to the version after all provided changes have been applied. If + /// only the text document content of a cell changes the notebook version + /// doesn't necessarily have to change. + notebookDocument: VersionedNotebookDocumentIdentifier, + /// The actual changes to the notebook document. + /// + /// The changes describe single state changes to the notebook document. + /// So if there are two changes c1 (at array index 0) and c2 (at array + /// index 1) for a notebook in state S then c1 moves the notebook from + /// S to S' and c2 from S' to S''. So c1 is computed on the state S and + /// c2 is computed on the state S'. 
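For the workspace diagnostic request above, the server is handed the result ids it reported previously so it can answer with unchanged reports where possible. A sketch, assuming `PreviousResultId` follows the LSP shape with `uri`/`value` fields and `DocumentUri` is a string slice:

const params = WorkspaceDiagnosticParams{
    .previousResultIds = &[_]PreviousResultId{
        .{ .uri = "file:///project/src/main.zig", .value = "result-7" },
    },
};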
+ /// + /// To mirror the content of a notebook using change events use the following approach: + /// - start with the same initial content + /// - apply the 'notebookDocument/didChange' notifications in the order you receive them. + /// - apply the `NotebookChangeEvent`s in a single notification in the order + /// you receive them. + change: NotebookDocumentChangeEvent, +}; + +/// The params sent in a save notebook document notification. +/// +/// @since 3.17.0 +pub const DidSaveNotebookDocumentParams = struct { + /// The notebook document that got saved. + notebookDocument: NotebookDocumentIdentifier, +}; + +/// The params sent in a close notebook document notification. +/// +/// @since 3.17.0 +pub const DidCloseNotebookDocumentParams = struct { + /// The notebook document that got closed. + notebookDocument: NotebookDocumentIdentifier, + /// The text documents that represent the content + /// of a notebook cell that got closed. + cellTextDocuments: []const TextDocumentIdentifier, +}; + +pub const RegistrationParams = struct { + registrations: []const Registration, +}; + +pub const UnregistrationParams = struct { + unregisterations: []const Unregistration, +}; + +pub const InitializeParams = struct { + // Extends _InitializeParams + /// The process Id of the parent process that started + /// the server. + /// + /// Is `null` if the process has not been started by another process. + /// If the parent process is not alive then the server should exit. + processId: ?i32 = null, + /// Information about the client + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + clientInfo: ?struct { + /// The name of the client as defined by the client. + name: []const u8, + /// The client's version as defined by the client. + /// field can be undefined, but this possible state is non-critical + version: ?[]const u8 = null, + } = null, + /// The locale the client is currently showing the user interface + /// in. This must not necessarily be the locale of the operating + /// system. + /// + /// Uses IETF language tags as the value's syntax + /// (See https://en.wikipedia.org/wiki/IETF_language_tag) + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + locale: ?[]const u8 = null, + /// The rootPath of the workspace. Is null + /// if no folder is open. + /// + /// @deprecated in favour of rootUri. + /// field can be undefined, but this possible state is non-critical + rootPath: ?[]const u8 = null, + /// The rootUri of the workspace. Is null if no + /// folder is open. If both `rootPath` and `rootUri` are set + /// `rootUri` wins. + /// + /// @deprecated in favour of workspaceFolders. + rootUri: ?DocumentUri = null, + /// The capabilities provided by the client (editor or tool) + capabilities: ClientCapabilities, + /// User provided initialization options. + /// field can be undefined, but this possible state is non-critical + initializationOptions: ?LSPAny = null, + /// The initial trace setting. If omitted trace is disabled ('off'). + /// field can be undefined, but this possible state is non-critical + trace: ?TraceValues = null, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Extends WorkspaceFoldersInitializeParams + /// The workspace folders configured in the client when the server starts. 
+ /// + /// This property is only available if the client supports workspace folders. + /// It can be `null` if the client supports workspace folders but none are + /// configured. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + workspaceFolders: ?[]const WorkspaceFolder = null, +}; + +/// The result returned from an initialize request. +pub const InitializeResult = struct { + /// The capabilities the language server provides. + capabilities: ServerCapabilities, + /// Information about the server. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + serverInfo: ?struct { + /// The name of the server as defined by the server. + name: []const u8, + /// The server's version as defined by the server. + /// field can be undefined, but this possible state is non-critical + version: ?[]const u8 = null, + } = null, +}; + +/// The data type of the ResponseError if the +/// initialize request fails. +pub const InitializeError = struct { + /// Indicates whether the client execute the following retry logic: + /// (1) show the message provided by the ResponseError to the user + /// (2) user selects retry or cancel + /// (3) if user selected retry the initialize method is sent again. + retry: bool, +}; + +pub const InitializedParams = struct {}; + +/// The parameters of a change configuration notification. +pub const DidChangeConfigurationParams = struct { + /// The actual changed settings + settings: LSPAny, +}; + +pub const DidChangeConfigurationRegistrationOptions = struct { + /// field can be undefined, but this possible state is non-critical + section: ?union(enum) { + string: []const u8, + array_of_string: []const []const u8, + } = null, +}; + +/// The parameters of a notification message. +pub const ShowMessageParams = struct { + /// The message type. See {@link MessageType} + type: MessageType, + /// The actual message. + message: []const u8, +}; + +pub const ShowMessageRequestParams = struct { + /// The message type. See {@link MessageType} + type: MessageType, + /// The actual message. + message: []const u8, + /// The message action items to present. + /// field can be undefined, but this possible state is non-critical + actions: ?[]const MessageActionItem = null, +}; + +pub const MessageActionItem = struct { + /// A short title like 'Retry', 'Open Log' etc. + title: []const u8, +}; + +/// The log message parameters. +pub const LogMessageParams = struct { + /// The message type. See {@link MessageType} + type: MessageType, + /// The actual message. + message: []const u8, +}; + +/// The parameters sent in an open text document notification +pub const DidOpenTextDocumentParams = struct { + /// The document that was opened. + textDocument: TextDocumentItem, +}; + +/// The change text document notification's parameters. +pub const DidChangeTextDocumentParams = struct { + /// The document that did change. The version number points + /// to the version after all provided content changes have + /// been applied. + textDocument: VersionedTextDocumentIdentifier, + /// The actual content changes. The content changes describe single state changes + /// to the document. So if there are two content changes c1 (at array index 0) and + /// c2 (at array index 1) for a document in state S then c1 moves the document from + /// S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed + /// on the state S'. 
+ /// + /// To mirror the content of a document using change events use the following approach: + /// - start with the same initial content + /// - apply the 'textDocument/didChange' notifications in the order you receive them. + /// - apply the `TextDocumentContentChangeEvent`s in a single notification in the order + /// you receive them. + contentChanges: []const TextDocumentContentChangeEvent, +}; + +/// Describe options to be used when registered for text document change events. +pub const TextDocumentChangeRegistrationOptions = struct { + /// How documents are synced to the server. + syncKind: TextDocumentSyncKind, + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, +}; + +/// The parameters sent in a close text document notification +pub const DidCloseTextDocumentParams = struct { + /// The document that was closed. + textDocument: TextDocumentIdentifier, +}; + +/// The parameters sent in a save text document notification +pub const DidSaveTextDocumentParams = struct { + /// The document that was saved. + textDocument: TextDocumentIdentifier, + /// Optional the content when saved. Depends on the includeText value + /// when the save notification was requested. + /// field can be undefined, but this possible state is non-critical + text: ?[]const u8 = null, +}; + +/// Save registration options. +pub const TextDocumentSaveRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends SaveOptions + /// The client is supposed to include the content on save. + /// field can be undefined, but this possible state is non-critical + includeText: ?bool = null, +}; + +/// The parameters sent in a will save text document notification. +pub const WillSaveTextDocumentParams = struct { + /// The document that will be saved. + textDocument: TextDocumentIdentifier, + /// The 'TextDocumentSaveReason'. + reason: TextDocumentSaveReason, +}; + +/// A text edit applicable to a text document. +pub const TextEdit = struct { + /// The range of the text document to be manipulated. To insert + /// text into a document create a range where start === end. + range: Range, + /// The string to be inserted. For delete operations use an + /// empty string. + newText: []const u8, +}; + +/// The watched files change notification's parameters. +pub const DidChangeWatchedFilesParams = struct { + /// The actual file events. + changes: []const FileEvent, +}; + +/// Describe options to be used when registered for text document change events. +pub const DidChangeWatchedFilesRegistrationOptions = struct { + /// The watchers to register. + watchers: []const FileSystemWatcher, +}; + +/// The publish diagnostic notification's parameters. +pub const PublishDiagnosticsParams = struct { + /// The URI for which diagnostic information is reported. + uri: DocumentUri, + /// Optional the version number of the document the diagnostics are published for. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + version: ?i32 = null, + /// An array of diagnostic information items. 
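The `TextEdit` doc above encodes insertion and deletion as special cases of replacement; a short sketch (assuming the generated `TextEdit`, `Range`, and `Position` types are in scope):

// Insertion: a zero-length range (start == end) at line 3, column 0.
const insert_keyword = TextEdit{
    .range = .{
        .start = .{ .line = 3, .character = 0 },
        .end = .{ .line = 3, .character = 0 },
    },
    .newText = "const ",
};
// Deletion: replace the targeted range with the empty string.
const delete_span = TextEdit{
    .range = .{
        .start = .{ .line = 3, .character = 0 },
        .end = .{ .line = 3, .character = 6 },
    },
    .newText = "",
};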
+ diagnostics: []const Diagnostic, +}; + +/// Completion parameters +pub const CompletionParams = struct { + /// The completion context. This is only available it the client specifies + /// to send this using the client capability `textDocument.completion.contextSupport === true` + /// field can be undefined, but this possible state is non-critical + context: ?CompletionContext = null, + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A completion item represents a text snippet that is +/// proposed to complete text that is being typed. +pub const CompletionItem = struct { + /// The label of this completion item. + /// + /// The label property is also by default the text that + /// is inserted when selecting this completion. + /// + /// If label details are provided the label itself should + /// be an unqualified name of the completion item. + label: []const u8, + /// Additional details for the label + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + labelDetails: ?CompletionItemLabelDetails = null, + /// The kind of this completion item. Based of the kind + /// an icon is chosen by the editor. + /// field can be undefined, but this possible state is non-critical + kind: ?CompletionItemKind = null, + /// Tags for this completion item. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + tags: ?[]const CompletionItemTag = null, + /// A human-readable string with additional information + /// about this item, like type or symbol information. + /// field can be undefined, but this possible state is non-critical + detail: ?[]const u8 = null, + /// A human-readable string that represents a doc-comment. + /// field can be undefined, but this possible state is non-critical + documentation: ?union(enum) { + string: []const u8, + MarkupContent: MarkupContent, + } = null, + /// Indicates if this item is deprecated. + /// @deprecated Use `tags` instead. + /// field can be undefined, but this possible state is non-critical + deprecated: ?bool = null, + /// Select this item when showing. + /// + /// *Note* that only one completion item can be selected and that the + /// tool / client decides which item that is. The rule is that the *first* + /// item of those that match best is selected. + /// field can be undefined, but this possible state is non-critical + preselect: ?bool = null, + /// A string that should be used when comparing this item + /// with other items. When `falsy` the {@link CompletionItem.label label} + /// is used. + /// field can be undefined, but this possible state is non-critical + sortText: ?[]const u8 = null, + /// A string that should be used when filtering a set of + /// completion items. When `falsy` the {@link CompletionItem.label label} + /// is used. 
+ /// field can be undefined, but this possible state is non-critical + filterText: ?[]const u8 = null, + /// A string that should be inserted into a document when selecting + /// this completion. When `falsy` the {@link CompletionItem.label label} + /// is used. + /// + /// The `insertText` is subject to interpretation by the client side. + /// Some tools might not take the string literally. For example + /// VS Code when code complete is requested in this example + /// `con` and a completion item with an `insertText` of + /// `console` is provided it will only insert `sole`. Therefore it is + /// recommended to use `textEdit` instead since it avoids additional client + /// side interpretation. + /// field can be undefined, but this possible state is non-critical + insertText: ?[]const u8 = null, + /// The format of the insert text. The format applies to both the + /// `insertText` property and the `newText` property of a provided + /// `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`. + /// + /// Please note that the insertTextFormat doesn't apply to + /// `additionalTextEdits`. + /// field can be undefined, but this possible state is non-critical + insertTextFormat: ?InsertTextFormat = null, + /// How whitespace and indentation is handled during completion + /// item insertion. If not provided the clients default value depends on + /// the `textDocument.completion.insertTextMode` client capability. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + insertTextMode: ?InsertTextMode = null, + /// An {@link TextEdit edit} which is applied to a document when selecting + /// this completion. When an edit is provided the value of + /// {@link CompletionItem.insertText insertText} is ignored. + /// + /// Most editors support two different operations when accepting a completion + /// item. One is to insert a completion text and the other is to replace an + /// existing text with a completion text. Since this can usually not be + /// predetermined by a server it can report both ranges. Clients need to + /// signal support for `InsertReplaceEdits` via the + /// `textDocument.completion.insertReplaceSupport` client capability + /// property. + /// + /// *Note 1:* The text edit's range as well as both ranges from an insert + /// replace edit must be a [single line] and they must contain the position + /// at which completion has been requested. + /// *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range + /// must be a prefix of the edit's replace range, that means it must be + /// contained and starting at the same position. + /// + /// @since 3.16.0 additional type `InsertReplaceEdit` + /// field can be undefined, but this possible state is non-critical + textEdit: ?union(enum) { + TextEdit: TextEdit, + InsertReplaceEdit: InsertReplaceEdit, + } = null, + /// The edit text used if the completion item is part of a CompletionList and + /// CompletionList defines an item default for the text edit range. + /// + /// Clients will only honor this property if they opt into completion list + /// item defaults using the capability `completionList.itemDefaults`. + /// + /// If not provided and a list's default range is provided the label + /// property is used as a text. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + textEditText: ?[]const u8 = null, + /// An optional array of additional {@link TextEdit text edits} that are applied when + /// selecting this completion. 
Edits must not overlap (including the same insert position) + /// with the main {@link CompletionItem.textEdit edit} nor with themselves. + /// + /// Additional text edits should be used to change text unrelated to the current cursor position + /// (for example adding an import statement at the top of the file if the completion item will + /// insert an unqualified type). + /// field can be undefined, but this possible state is non-critical + additionalTextEdits: ?[]const TextEdit = null, + /// An optional set of characters that when pressed while this completion is active will accept it first and + /// then type that character. *Note* that all commit characters should have `length=1` and that superfluous + /// characters will be ignored. + /// field can be undefined, but this possible state is non-critical + commitCharacters: ?[]const []const u8 = null, + /// An optional {@link Command command} that is executed *after* inserting this completion. *Note* that + /// additional modifications to the current document should be described with the + /// {@link CompletionItem.additionalTextEdits additionalTextEdits}-property. + /// field can be undefined, but this possible state is non-critical + command: ?Command = null, + /// A data entry field that is preserved on a completion item between a + /// {@link CompletionRequest} and a {@link CompletionResolveRequest}. + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Represents a collection of {@link CompletionItem completion items} to be presented +/// in the editor. +pub const CompletionList = struct { + /// This list it not complete. Further typing results in recomputing this list. + /// + /// Recomputed lists have all their items replaced (not appended) in the + /// incomplete completion sessions. + isIncomplete: bool, + /// In many cases the items of an actual completion result share the same + /// value for properties like `commitCharacters` or the range of a text + /// edit. A completion list can therefore define item defaults which will + /// be used if a completion item itself doesn't specify the value. + /// + /// If a completion list specifies a default value and a completion item + /// also specifies a corresponding value the one from the item is used. + /// + /// Servers are only allowed to return default values if the client + /// signals support for this via the `completionList.itemDefaults` + /// capability. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + itemDefaults: ?struct { + /// A default commit character set. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + commitCharacters: ?[]const []const u8 = null, + /// A default edit range. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + editRange: ?union(enum) { + Range: Range, + literal_1: struct { + insert: Range, + replace: Range, + }, + } = null, + /// A default insert text format. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + insertTextFormat: ?InsertTextFormat = null, + /// A default insert text mode. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + insertTextMode: ?InsertTextMode = null, + /// A default data value. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, + } = null, + /// The completion items. 
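To make the insert-versus-replace distinction in `CompletionItem.textEdit` above concrete: with the cursor inside `con|sole` (the word spans characters 4..11, the cursor sits at 7), the insert range ends at the cursor while the replace range covers the whole word, and the insert range is a prefix of the replace range. A sketch, assuming `InsertReplaceEdit` carries the LSP-spec `newText`/`insert`/`replace` fields:

const item = CompletionItem{
    .label = "console",
    .textEdit = .{
        .InsertReplaceEdit = .{
            .newText = "console",
            .insert = .{
                .start = .{ .line = 0, .character = 4 },
                .end = .{ .line = 0, .character = 7 },
            },
            .replace = .{
                .start = .{ .line = 0, .character = 4 },
                .end = .{ .line = 0, .character = 11 },
            },
        },
    },
};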
+ items: []const CompletionItem, +}; + +/// Registration options for a {@link CompletionRequest}. +pub const CompletionRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends CompletionOptions + /// Most tools trigger completion request automatically without explicitly requesting + /// it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user + /// starts to type an identifier. For example if the user types `c` in a JavaScript file + /// code complete will automatically pop up present `console` besides others as a + /// completion item. Characters that make up identifiers don't need to be listed here. + /// + /// If code complete should automatically be trigger on characters not being valid inside + /// an identifier (for example `.` in JavaScript) list them in `triggerCharacters`. + /// field can be undefined, but this possible state is non-critical + triggerCharacters: ?[]const []const u8 = null, + /// The list of all possible characters that commit a completion. This field can be used + /// if clients don't support individual commit characters per completion item. See + /// `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport` + /// + /// If a server provides both `allCommitCharacters` and commit characters on an individual + /// completion item the ones on the completion item win. + /// + /// @since 3.2.0 + /// field can be undefined, but this possible state is non-critical + allCommitCharacters: ?[]const []const u8 = null, + /// The server provides support to resolve additional + /// information for a completion item. + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + /// The server supports the following `CompletionItem` specific + /// capabilities. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + completionItem: ?struct { + /// The server has support for completion item label + /// details (see also `CompletionItemLabelDetails`) when + /// receiving a completion item in a resolve call. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + labelDetailsSupport: ?bool = null, + } = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link HoverRequest}. +pub const HoverParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// The result of a hover request. +pub const Hover = struct { + /// The hover's content + contents: union(enum) { + MarkupContent: MarkupContent, + MarkedString: MarkedString, + array_of_MarkedString: []const MarkedString, + }, + /// An optional range inside the text document that is used to + /// visualize the hover, e.g. by changing the background color. 
+ /// field can be undefined, but this possible state is non-critical + range: ?Range = null, +}; + +/// Registration options for a {@link HoverRequest}. +pub const HoverRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends HoverOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link SignatureHelpRequest}. +pub const SignatureHelpParams = struct { + /// The signature help context. This is only available if the client specifies + /// to send this using the client capability `textDocument.signatureHelp.contextSupport === true` + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + context: ?SignatureHelpContext = null, + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Signature help represents the signature of something +/// callable. There can be multiple signature but only one +/// active and only one active parameter. +pub const SignatureHelp = struct { + /// One or more signatures. + signatures: []const SignatureInformation, + /// The active signature. If omitted or the value lies outside the + /// range of `signatures` the value defaults to zero or is ignored if + /// the `SignatureHelp` has no signatures. + /// + /// Whenever possible implementors should make an active decision about + /// the active signature and shouldn't rely on a default value. + /// + /// In future version of the protocol this property might become + /// mandatory to better express this. + /// field can be undefined, but this possible state is non-critical + activeSignature: ?u32 = null, + /// The active parameter of the active signature. If omitted or the value + /// lies outside the range of `signatures[activeSignature].parameters` + /// defaults to 0 if the active signature has parameters. If + /// the active signature has no parameters it is ignored. + /// In future version of the protocol this property might become + /// mandatory to better express the active parameter if the + /// active signature does have any. + /// field can be undefined, but this possible state is non-critical + activeParameter: ?u32 = null, +}; + +/// Registration options for a {@link SignatureHelpRequest}. +pub const SignatureHelpRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends SignatureHelpOptions + /// List of characters that trigger signature help automatically. + /// field can be undefined, but this possible state is non-critical + triggerCharacters: ?[]const []const u8 = null, + /// List of characters that re-trigger signature help. + /// + /// These trigger characters are only active when signature help is already showing. 
All trigger characters + /// are also counted as re-trigger characters. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + retriggerCharacters: ?[]const []const u8 = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link DefinitionRequest}. +pub const DefinitionParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Registration options for a {@link DefinitionRequest}. +pub const DefinitionRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends DefinitionOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link ReferencesRequest}. +pub const ReferenceParams = struct { + context: ReferenceContext, + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Registration options for a {@link ReferencesRequest}. +pub const ReferenceRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends ReferenceOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link DocumentHighlightRequest}. +pub const DocumentHighlightParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. 
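A sketch of the `SignatureHelp` result described above, highlighting the second parameter of the only signature (assumes `SignatureInformation` has at least the LSP-spec `label` field and that its remaining fields are optional with defaults):

const help = SignatureHelp{
    .signatures = &[_]SignatureInformation{
        .{ .label = "fn add(a: i32, b: i32) i32" },
    },
    .activeSignature = 0, // index into `signatures`
    .activeParameter = 1, // highlight the second parameter, `b`
};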
+ /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A document highlight is a range inside a text document which deserves +/// special attention. Usually a document highlight is visualized by changing +/// the background color of its range. +pub const DocumentHighlight = struct { + /// The range this highlight applies to. + range: Range, + /// The highlight kind, default is {@link DocumentHighlightKind.Text text}. + /// field can be undefined, but this possible state is non-critical + kind: ?DocumentHighlightKind = null, +}; + +/// Registration options for a {@link DocumentHighlightRequest}. +pub const DocumentHighlightRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends DocumentHighlightOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Parameters for a {@link DocumentSymbolRequest}. +pub const DocumentSymbolParams = struct { + /// The text document. + textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// Represents information about programming constructs like variables, classes, +/// interfaces etc. +pub const SymbolInformation = struct { + /// Indicates if this symbol is deprecated. + /// + /// @deprecated Use tags instead + /// field can be undefined, but this possible state is non-critical + deprecated: ?bool = null, + /// The location of this symbol. The location's range is used by a tool + /// to reveal the location in the editor. If the symbol is selected in the + /// tool the range's start information is used to position the cursor. So + /// the range usually spans more than the actual symbol's name and does + /// normally include things like visibility modifiers. + /// + /// The range doesn't have to denote a node range in the sense of an abstract + /// syntax tree. It can therefore not be used to re-construct a hierarchy of + /// the symbols. + location: Location, + // Extends BaseSymbolInformation + /// The name of this symbol. + name: []const u8, + /// The kind of this symbol. + kind: SymbolKind, + /// Tags for this symbol. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + tags: ?[]const SymbolTag = null, + /// The name of the symbol containing this symbol. This information is for + /// user interface purposes (e.g. to render a qualifier in the user interface + /// if necessary). It can't be used to re-infer a hierarchy for the document + /// symbols. 
+    /// field can be undefined, but this possible state is non-critical
+    containerName: ?[]const u8 = null,
+};
+
+/// Represents programming constructs like variables, classes, interfaces etc.
+/// that appear in a document. Document symbols can be hierarchical and they
+/// have two ranges: one that encloses its definition and one that points to
+/// its most interesting range, e.g. the range of an identifier.
+pub const DocumentSymbol = struct {
+    /// The name of this symbol. Will be displayed in the user interface and therefore must not be
+    /// an empty string or a string only consisting of white spaces.
+    name: []const u8,
+    /// More detail for this symbol, e.g. the signature of a function.
+    /// field can be undefined, but this possible state is non-critical
+    detail: ?[]const u8 = null,
+    /// The kind of this symbol.
+    kind: SymbolKind,
+    /// Tags for this document symbol.
+    ///
+    /// @since 3.16.0
+    /// field can be undefined, but this possible state is non-critical
+    tags: ?[]const SymbolTag = null,
+    /// Indicates if this symbol is deprecated.
+    ///
+    /// @deprecated Use tags instead
+    /// field can be undefined, but this possible state is non-critical
+    deprecated: ?bool = null,
+    /// The range enclosing this symbol not including leading/trailing whitespace but everything else
+    /// like comments. This information is typically used to determine if the client's cursor is
+    /// inside the symbol to reveal the symbol in the UI.
+    range: Range,
+    /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
+    /// Must be contained by the `range`.
+    selectionRange: Range,
+    /// Children of this symbol, e.g. properties of a class.
+    /// field can be undefined, but this possible state is non-critical
+    children: ?[]const DocumentSymbol = null,
+};
+
+/// Registration options for a {@link DocumentSymbolRequest}.
+pub const DocumentSymbolRegistrationOptions = struct {
+    // Extends TextDocumentRegistrationOptions
+    /// A document selector to identify the scope of the registration. If set to null
+    /// the document selector provided on the client side will be used.
+    documentSelector: ?DocumentSelector = null,
+
+    // Extends DocumentSymbolOptions
+    /// A human-readable string that is shown when multiple outline trees
+    /// are shown for the same document.
+    ///
+    /// @since 3.16.0
+    /// field can be undefined, but this possible state is non-critical
+    label: ?[]const u8 = null,
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// The parameters of a {@link CodeActionRequest}.
+pub const CodeActionParams = struct {
+    /// The document in which the command was invoked.
+    textDocument: TextDocumentIdentifier,
+    /// The range for which the command was invoked.
+    range: Range,
+    /// Context carrying additional information.
+    context: CodeActionContext,
+    // Uses mixin WorkDoneProgressParams
+    /// An optional token that a server can use to report work done progress.
+    /// field can be undefined, but this possible state is non-critical
+    workDoneToken: ?ProgressToken = null,
+
+    // Uses mixin PartialResultParams
+    /// An optional token that a server can use to report partial results (e.g. streaming) to
+    /// the client.
+    /// field can be undefined, but this possible state is non-critical
+    partialResultToken: ?ProgressToken = null,
+};
+
+/// Represents a reference to a command.
Provides a title which +/// will be used to represent a command in the UI and, optionally, +/// an array of arguments which will be passed to the command handler +/// function when invoked. +pub const Command = struct { + /// Title of the command, like `save`. + title: []const u8, + /// The identifier of the actual command handler. + command: []const u8, + /// Arguments that the command handler should be + /// invoked with. + /// field can be undefined, but this possible state is non-critical + arguments: ?[]const LSPAny = null, +}; + +/// A code action represents a change that can be performed in code, e.g. to fix a problem or +/// to refactor code. +/// +/// A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed. +pub const CodeAction = struct { + /// A short, human-readable, title for this code action. + title: []const u8, + /// The kind of the code action. + /// + /// Used to filter code actions. + /// field can be undefined, but this possible state is non-critical + kind: ?CodeActionKind = null, + /// The diagnostics that this code action resolves. + /// field can be undefined, but this possible state is non-critical + diagnostics: ?[]const Diagnostic = null, + /// Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted + /// by keybindings. + /// + /// A quick fix should be marked preferred if it properly addresses the underlying error. + /// A refactoring should be marked preferred if it is the most reasonable choice of actions to take. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + isPreferred: ?bool = null, + /// Marks that the code action cannot currently be applied. + /// + /// Clients should follow the following guidelines regarding disabled code actions: + /// + /// - Disabled code actions are not shown in automatic [lightbulbs](https://code.visualstudio.com/docs/editor/editingevolved#_code-action) + /// code action menus. + /// + /// - Disabled actions are shown as faded out in the code action menu when the user requests a more specific type + /// of code action, such as refactorings. + /// + /// - If the user has a [keybinding](https://code.visualstudio.com/docs/editor/refactoring#_keybindings-for-code-actions) + /// that auto applies a code action and only disabled code actions are returned, the client should show the user an + /// error message with `reason` in the editor. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + disabled: ?struct { + /// Human readable description of why the code action is currently disabled. + /// + /// This is displayed in the code actions UI. + reason: []const u8, + } = null, + /// The workspace edit this code action performs. + /// field can be undefined, but this possible state is non-critical + edit: ?WorkspaceEdit = null, + /// A command this code action executes. If a code action + /// provides an edit and a command, first the edit is + /// executed and then the command. + /// field can be undefined, but this possible state is non-critical + command: ?Command = null, + /// A data entry field that is preserved on a code action between + /// a `textDocument/codeAction` and a `codeAction/resolve` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Registration options for a {@link CodeActionRequest}. 
+pub const CodeActionRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends CodeActionOptions + /// CodeActionKinds that this server may return. + /// + /// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server + /// may list out every specific kind they provide. + /// field can be undefined, but this possible state is non-critical + codeActionKinds: ?[]const CodeActionKind = null, + /// The server provides support to resolve additional + /// information for a code action. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameters of a {@link WorkspaceSymbolRequest}. +pub const WorkspaceSymbolParams = struct { + /// A query string to filter symbols by. Clients may send an empty + /// string here to request all symbols. + query: []const u8, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A special workspace symbol that supports locations without a range. +/// +/// See also SymbolInformation. +/// +/// @since 3.17.0 +pub const WorkspaceSymbol = struct { + /// The location of the symbol. Whether a server is allowed to + /// return a location without a range depends on the client + /// capability `workspace.symbol.resolveSupport`. + /// + /// See SymbolInformation#location for more details. + location: union(enum) { + Location: Location, + literal_1: struct { + uri: DocumentUri, + }, + }, + /// A data entry field that is preserved on a workspace symbol between a + /// workspace symbol request and a workspace symbol resolve request. + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, + // Extends BaseSymbolInformation + /// The name of this symbol. + name: []const u8, + /// The kind of this symbol. + kind: SymbolKind, + /// Tags for this symbol. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + tags: ?[]const SymbolTag = null, + /// The name of the symbol containing this symbol. This information is for + /// user interface purposes (e.g. to render a qualifier in the user interface + /// if necessary). It can't be used to re-infer a hierarchy for the document + /// symbols. + /// field can be undefined, but this possible state is non-critical + containerName: ?[]const u8 = null, +}; + +/// Registration options for a {@link WorkspaceSymbolRequest}. +pub const WorkspaceSymbolRegistrationOptions = struct { + // Extends WorkspaceSymbolOptions + /// The server provides support to resolve additional + /// information for a workspace symbol. 
+ /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameters of a {@link CodeLensRequest}. +pub const CodeLensParams = struct { + /// The document to request code lens for. + textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A code lens represents a {@link Command command} that should be shown along with +/// source text, like the number of references, a way to run tests, etc. +/// +/// A code lens is _unresolved_ when no command is associated to it. For performance +/// reasons the creation of a code lens and resolving should be done in two stages. +pub const CodeLens = struct { + /// The range in which this code lens is valid. Should only span a single line. + range: Range, + /// The command this code lens represents. + /// field can be undefined, but this possible state is non-critical + command: ?Command = null, + /// A data entry field that is preserved on a code lens item between + /// a {@link CodeLensRequest} and a [CodeLensResolveRequest] + /// (#CodeLensResolveRequest) + /// field can be undefined, but this possible state is non-critical + data: ?LSPAny = null, +}; + +/// Registration options for a {@link CodeLensRequest}. +pub const CodeLensRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends CodeLensOptions + /// Code lens has a resolve provider as well. + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameters of a {@link DocumentLinkRequest}. +pub const DocumentLinkParams = struct { + /// The document to provide document links for. + textDocument: TextDocumentIdentifier, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, + + // Uses mixin PartialResultParams + /// An optional token that a server can use to report partial results (e.g. streaming) to + /// the client. + /// field can be undefined, but this possible state is non-critical + partialResultToken: ?ProgressToken = null, +}; + +/// A document link is a range in a text document that links to an internal or external resource, like another +/// text document or a web site. +pub const DocumentLink = struct { + /// The range this link applies to. + range: Range, + /// The uri this link points to. If missing a resolve request is sent later. 
+    /// field can be undefined, but this possible state is non-critical
+    target: ?[]const u8 = null,
+    /// The tooltip text when you hover over this link.
+    ///
+    /// If a tooltip is provided, it will be displayed in a string that includes instructions on how to
+    /// trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
+    /// user settings, and localization.
+    ///
+    /// @since 3.15.0
+    /// field can be undefined, but this possible state is non-critical
+    tooltip: ?[]const u8 = null,
+    /// A data entry field that is preserved on a document link between a
+    /// DocumentLinkRequest and a DocumentLinkResolveRequest.
+    /// field can be undefined, but this possible state is non-critical
+    data: ?LSPAny = null,
+};
+
+/// Registration options for a {@link DocumentLinkRequest}.
+pub const DocumentLinkRegistrationOptions = struct {
+    // Extends TextDocumentRegistrationOptions
+    /// A document selector to identify the scope of the registration. If set to null
+    /// the document selector provided on the client side will be used.
+    documentSelector: ?DocumentSelector = null,
+
+    // Extends DocumentLinkOptions
+    /// Document links have a resolve provider as well.
+    /// field can be undefined, but this possible state is non-critical
+    resolveProvider: ?bool = null,
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// The parameters of a {@link DocumentFormattingRequest}.
+pub const DocumentFormattingParams = struct {
+    /// The document to format.
+    textDocument: TextDocumentIdentifier,
+    /// The format options.
+    options: FormattingOptions,
+    // Uses mixin WorkDoneProgressParams
+    /// An optional token that a server can use to report work done progress.
+    /// field can be undefined, but this possible state is non-critical
+    workDoneToken: ?ProgressToken = null,
+};
+
+/// Registration options for a {@link DocumentFormattingRequest}.
+pub const DocumentFormattingRegistrationOptions = struct {
+    // Extends TextDocumentRegistrationOptions
+    /// A document selector to identify the scope of the registration. If set to null
+    /// the document selector provided on the client side will be used.
+    documentSelector: ?DocumentSelector = null,
+
+    // Extends DocumentFormattingOptions
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// The parameters of a {@link DocumentRangeFormattingRequest}.
+pub const DocumentRangeFormattingParams = struct {
+    /// The document to format.
+    textDocument: TextDocumentIdentifier,
+    /// The range to format
+    range: Range,
+    /// The format options
+    options: FormattingOptions,
+    // Uses mixin WorkDoneProgressParams
+    /// An optional token that a server can use to report work done progress.
+    /// field can be undefined, but this possible state is non-critical
+    workDoneToken: ?ProgressToken = null,
+};
+
+/// Registration options for a {@link DocumentRangeFormattingRequest}.
+pub const DocumentRangeFormattingRegistrationOptions = struct {
+    // Extends TextDocumentRegistrationOptions
+    /// A document selector to identify the scope of the registration. If set to null
+    /// the document selector provided on the client side will be used.
+ documentSelector: ?DocumentSelector = null, + + // Extends DocumentRangeFormattingOptions + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameters of a {@link DocumentOnTypeFormattingRequest}. +pub const DocumentOnTypeFormattingParams = struct { + /// The document to format. + textDocument: TextDocumentIdentifier, + /// The position around which the on type formatting should happen. + /// This is not necessarily the exact position where the character denoted + /// by the property `ch` got typed. + position: Position, + /// The character that has been typed that triggered the formatting + /// on type request. That is not necessarily the last character that + /// got inserted into the document since the client could auto insert + /// characters as well (e.g. like automatic brace completion). + ch: []const u8, + /// The formatting options. + options: FormattingOptions, +}; + +/// Registration options for a {@link DocumentOnTypeFormattingRequest}. +pub const DocumentOnTypeFormattingRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends DocumentOnTypeFormattingOptions + /// A character on which formatting should be triggered, like `{`. + firstTriggerCharacter: []const u8, + /// More trigger characters. + /// field can be undefined, but this possible state is non-critical + moreTriggerCharacter: ?[]const []const u8 = null, +}; + +/// The parameters of a {@link RenameRequest}. +pub const RenameParams = struct { + /// The document to rename. + textDocument: TextDocumentIdentifier, + /// The position at which this request was sent. + position: Position, + /// The new name of the symbol. If the given name is not valid the + /// request must return a {@link ResponseError} with an + /// appropriate message set. + newName: []const u8, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Registration options for a {@link RenameRequest}. +pub const RenameRegistrationOptions = struct { + // Extends TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + + // Extends RenameOptions + /// Renames should be checked and tested before being executed. + /// + /// @since version 3.12.0 + /// field can be undefined, but this possible state is non-critical + prepareProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +pub const PrepareRenameParams = struct { + // Extends TextDocumentPositionParams + /// The text document. + textDocument: TextDocumentIdentifier, + /// The position inside the text document. + position: Position, + + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. 
+ /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// The parameters of a {@link ExecuteCommandRequest}. +pub const ExecuteCommandParams = struct { + /// The identifier of the actual command handler. + command: []const u8, + /// Arguments that the command should be invoked with. + /// field can be undefined, but this possible state is non-critical + arguments: ?[]const LSPAny = null, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +/// Registration options for a {@link ExecuteCommandRequest}. +pub const ExecuteCommandRegistrationOptions = struct { + // Extends ExecuteCommandOptions + /// The commands to be executed on the server + commands: []const []const u8, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The parameters passed via a apply workspace edit request. +pub const ApplyWorkspaceEditParams = struct { + /// An optional label of the workspace edit. This label is + /// presented in the user interface for example on an undo + /// stack to undo the workspace edit. + /// field can be undefined, but this possible state is non-critical + label: ?[]const u8 = null, + /// The edits to apply. + edit: WorkspaceEdit, +}; + +/// The result returned from the apply workspace edit request. +/// +/// @since 3.17 renamed from ApplyWorkspaceEditResponse +pub const ApplyWorkspaceEditResult = struct { + /// Indicates whether the edit was applied or not. + applied: bool, + /// An optional textual description for why the edit was not applied. + /// This may be used by the server for diagnostic logging or to provide + /// a suitable error for a request that triggered the edit. + /// field can be undefined, but this possible state is non-critical + failureReason: ?[]const u8 = null, + /// Depending on the client's failure handling strategy `failedChange` might + /// contain the index of the change that failed. This property is only available + /// if the client signals a `failureHandlingStrategy` in its client capabilities. + /// field can be undefined, but this possible state is non-critical + failedChange: ?u32 = null, +}; + +pub const WorkDoneProgressBegin = struct { + comptime kind: []const u8 = "begin", + /// Mandatory title of the progress operation. Used to briefly inform about + /// the kind of operation being performed. + /// + /// Examples: "Indexing" or "Linking dependencies". + title: []const u8, + /// Controls if a cancel button should show to allow the user to cancel the + /// long running operation. Clients that don't support cancellation are allowed + /// to ignore the setting. + /// field can be undefined, but this possible state is non-critical + cancellable: ?bool = null, + /// Optional, more detailed associated progress message. Contains + /// complementary information to the `title`. + /// + /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". + /// If unset, the previous progress message (if any) is still valid. + /// field can be undefined, but this possible state is non-critical + message: ?[]const u8 = null, + /// Optional progress percentage to display (value 100 is considered 100%). 
+    /// If not provided infinite progress is assumed and clients are allowed
+    /// to ignore the `percentage` value in subsequent report notifications.
+    ///
+    /// The value should be steadily rising. Clients are free to ignore values
+    /// that are not following this rule. The value range is [0, 100].
+    /// field can be undefined, but this possible state is non-critical
+    percentage: ?u32 = null,
+};
+
+pub const WorkDoneProgressReport = struct {
+    comptime kind: []const u8 = "report",
+    /// Controls enablement state of a cancel button.
+    ///
+    /// Clients that don't support cancellation or don't support controlling the button's
+    /// enablement state are allowed to ignore the property.
+    /// field can be undefined, but this possible state is non-critical
+    cancellable: ?bool = null,
+    /// Optional, more detailed associated progress message. Contains
+    /// complementary information to the `title`.
+    ///
+    /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
+    /// If unset, the previous progress message (if any) is still valid.
+    /// field can be undefined, but this possible state is non-critical
+    message: ?[]const u8 = null,
+    /// Optional progress percentage to display (value 100 is considered 100%).
+    /// If not provided infinite progress is assumed and clients are allowed
+    /// to ignore the `percentage` value in subsequent report notifications.
+    ///
+    /// The value should be steadily rising. Clients are free to ignore values
+    /// that are not following this rule. The value range is [0, 100].
+    /// field can be undefined, but this possible state is non-critical
+    percentage: ?u32 = null,
+};
+
+pub const WorkDoneProgressEnd = struct {
+    comptime kind: []const u8 = "end",
+    /// Optional, a final message indicating, for example, the outcome
+    /// of the operation.
+    /// field can be undefined, but this possible state is non-critical
+    message: ?[]const u8 = null,
+};
+
+pub const SetTraceParams = struct {
+    value: TraceValues,
+};
+
+pub const LogTraceParams = struct {
+    message: []const u8,
+    /// field can be undefined, but this possible state is non-critical
+    verbose: ?[]const u8 = null,
+};
+
+pub const CancelParams = struct {
+    /// The request id to cancel.
+    id: union(enum) {
+        integer: i32,
+        string: []const u8,
+    },
+};
+
+pub const ProgressParams = struct {
+    /// The progress token provided by the client or server.
+    token: ProgressToken,
+    /// The progress data.
+    value: LSPAny,
+};
+
+/// A parameter literal used in requests to pass a text document and a position inside that
+/// document.
+pub const TextDocumentPositionParams = struct {
+    /// The text document.
+    textDocument: TextDocumentIdentifier,
+    /// The position inside the text document.
+    position: Position,
+};
+
+pub const WorkDoneProgressParams = struct {
+    /// An optional token that a server can use to report work done progress.
+    /// field can be undefined, but this possible state is non-critical
+    workDoneToken: ?ProgressToken = null,
+};
+
+pub const PartialResultParams = struct {
+    /// An optional token that a server can use to report partial results (e.g. streaming) to
+    /// the client.
+    /// field can be undefined, but this possible state is non-critical
+    partialResultToken: ?ProgressToken = null,
+};
+
+/// Represents the connection of two locations. Provides additional metadata over normal {@link Location locations},
+/// including an origin range.
+pub const LocationLink = struct {
+    /// Span of the origin of this link.
+ /// + /// Used as the underlined span for mouse interaction. Defaults to the word range at + /// the definition position. + /// field can be undefined, but this possible state is non-critical + originSelectionRange: ?Range = null, + /// The target resource identifier of this link. + targetUri: DocumentUri, + /// The full target range of this link. If the target for example is a symbol then target range is the + /// range enclosing this symbol not including leading/trailing whitespace but everything else + /// like comments. This information is typically used to highlight the range in the editor. + targetRange: Range, + /// The range that should be selected and revealed when this link is being followed, e.g the name of a function. + /// Must be contained by the `targetRange`. See also `DocumentSymbol#range` + targetSelectionRange: Range, +}; + +/// A range in a text document expressed as (zero-based) start and end positions. +/// +/// If you want to specify a range that contains a line including the line ending +/// character(s) then use an end position denoting the start of the next line. +/// For example: +/// ```ts +/// { +/// start: { line: 5, character: 23 } +/// end : { line 6, character : 0 } +/// } +/// ``` +pub const Range = struct { + /// The range's start position. + start: Position, + /// The range's end position. + end: Position, +}; + +pub const ImplementationOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Static registration options to be returned in the initialize +/// request. +pub const StaticRegistrationOptions = struct { + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +pub const TypeDefinitionOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The workspace folder change event. +pub const WorkspaceFoldersChangeEvent = struct { + /// The array of added workspace folders + added: []const WorkspaceFolder, + /// The array of the removed workspace folders + removed: []const WorkspaceFolder, +}; + +pub const ConfigurationItem = struct { + /// The scope to get the configuration section for. + /// field can be undefined, but this possible state is non-critical + scopeUri: ?[]const u8 = null, + /// The configuration section asked for. + /// field can be undefined, but this possible state is non-critical + section: ?[]const u8 = null, +}; + +/// A literal to identify a text document in the client. +pub const TextDocumentIdentifier = struct { + /// The text document's uri. + uri: DocumentUri, +}; + +/// Represents a color in RGBA space. +pub const Color = struct { + /// The red component of this color in the range [0-1]. + red: f32, + /// The green component of this color in the range [0-1]. + green: f32, + /// The blue component of this color in the range [0-1]. + blue: f32, + /// The alpha component of this color in the range [0-1]. 
+ alpha: f32, +}; + +pub const DocumentColorOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +pub const FoldingRangeOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +pub const DeclarationOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Position in a text document expressed as zero-based line and character +/// offset. Prior to 3.17 the offsets were always based on a UTF-16 string +/// representation. So a string of the form `a𐐀b` the character offset of the +/// character `a` is 0, the character offset of `𐐀` is 1 and the character +/// offset of b is 3 since `𐐀` is represented using two code units in UTF-16. +/// Since 3.17 clients and servers can agree on a different string encoding +/// representation (e.g. UTF-8). The client announces it's supported encoding +/// via the client capability [`general.positionEncodings`](#clientCapabilities). +/// The value is an array of position encodings the client supports, with +/// decreasing preference (e.g. the encoding at index `0` is the most preferred +/// one). To stay backwards compatible the only mandatory encoding is UTF-16 +/// represented via the string `utf-16`. The server can pick one of the +/// encodings offered by the client and signals that encoding back to the +/// client via the initialize result's property +/// [`capabilities.positionEncoding`](#serverCapabilities). If the string value +/// `utf-16` is missing from the client's capability `general.positionEncodings` +/// servers can safely assume that the client supports UTF-16. If the server +/// omits the position encoding in its initialize result the encoding defaults +/// to the string value `utf-16`. Implementation considerations: since the +/// conversion from one encoding into another requires the content of the +/// file / line the conversion is best done where the file is read which is +/// usually on the server side. +/// +/// Positions are line end character agnostic. So you can not specify a position +/// that denotes `\r|\n` or `\n|` where `|` represents the character offset. +/// +/// @since 3.17.0 - support for negotiated position encoding. +pub const Position = struct { + /// Line position in a document (zero-based). + /// + /// If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document. + /// If a line number is negative, it defaults to 0. + line: u32, + /// Character offset on a line in a document (zero-based). + /// + /// The meaning of this offset is determined by the negotiated + /// `PositionEncodingKind`. + /// + /// If the character value is greater than the line length it defaults back to the + /// line length. + character: u32, +}; + +pub const SelectionRangeOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Call hierarchy options used during static registration. 
+/// +/// @since 3.16.0 +pub const CallHierarchyOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensOptions = struct { + /// The legend used by the server + legend: SemanticTokensLegend, + /// Server supports providing semantic tokens for a specific range + /// of a document. + /// field can be undefined, but this possible state is non-critical + range: ?union(enum) { + bool: bool, + literal_1: struct {}, + } = null, + /// Server supports providing semantic tokens for a full document. + /// field can be undefined, but this possible state is non-critical + full: ?union(enum) { + bool: bool, + literal_1: struct { + /// The server supports deltas for full documents. + /// field can be undefined, but this possible state is non-critical + delta: ?bool = null, + }, + } = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensEdit = struct { + /// The start offset of the edit. + start: u32, + /// The count of elements to remove. + deleteCount: u32, + /// The elements to insert. + /// field can be undefined, but this possible state is non-critical + data: ?[]const u32 = null, +}; + +pub const LinkedEditingRangeOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Represents information on a file/folder create. +/// +/// @since 3.16.0 +pub const FileCreate = struct { + /// A file:// URI for the location of the file/folder being created. + uri: []const u8, +}; + +/// Describes textual changes on a text document. A TextDocumentEdit describes all changes +/// on a document version Si and after they are applied move the document to version Si+1. +/// So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any +/// kind of ordering. However the edits must be non overlapping. +pub const TextDocumentEdit = struct { + /// The text document to change. + textDocument: OptionalVersionedTextDocumentIdentifier, + /// The edits to be applied. + /// + /// @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a + /// client capability. + edits: []const union(enum) { + TextEdit: TextEdit, + AnnotatedTextEdit: AnnotatedTextEdit, + }, +}; + +/// Create file operation. +pub const CreateFile = struct { + /// A create + comptime kind: []const u8 = "create", + /// The resource to create. + uri: DocumentUri, + /// Additional options + /// field can be undefined, but this possible state is non-critical + options: ?CreateFileOptions = null, + // Extends ResourceOperation + /// An optional annotation identifier describing the operation. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + annotationId: ?ChangeAnnotationIdentifier = null, +}; + +/// Rename file operation +pub const RenameFile = struct { + /// A rename + comptime kind: []const u8 = "rename", + /// The old (existing) location. + oldUri: DocumentUri, + /// The new location. + newUri: DocumentUri, + /// Rename options. + /// field can be undefined, but this possible state is non-critical + options: ?RenameFileOptions = null, + // Extends ResourceOperation + /// An optional annotation identifier describing the operation. 
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + annotationId: ?ChangeAnnotationIdentifier = null, +}; + +/// Delete file operation +pub const DeleteFile = struct { + /// A delete + comptime kind: []const u8 = "delete", + /// The file to delete. + uri: DocumentUri, + /// Delete options. + /// field can be undefined, but this possible state is non-critical + options: ?DeleteFileOptions = null, + // Extends ResourceOperation + /// An optional annotation identifier describing the operation. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + annotationId: ?ChangeAnnotationIdentifier = null, +}; + +/// Additional information that describes document changes. +/// +/// @since 3.16.0 +pub const ChangeAnnotation = struct { + /// A human-readable string describing the actual change. The string + /// is rendered prominent in the user interface. + label: []const u8, + /// A flag which indicates that user confirmation is needed + /// before applying the change. + /// field can be undefined, but this possible state is non-critical + needsConfirmation: ?bool = null, + /// A human-readable string which is rendered less prominent in + /// the user interface. + /// field can be undefined, but this possible state is non-critical + description: ?[]const u8 = null, +}; + +/// A filter to describe in which file operation requests or notifications +/// the server is interested in receiving. +/// +/// @since 3.16.0 +pub const FileOperationFilter = struct { + /// A Uri scheme like `file` or `untitled`. + /// field can be undefined, but this possible state is non-critical + scheme: ?[]const u8 = null, + /// The actual file operation pattern. + pattern: FileOperationPattern, +}; + +/// Represents information on a file/folder rename. +/// +/// @since 3.16.0 +pub const FileRename = struct { + /// A file:// URI for the original location of the file/folder being renamed. + oldUri: []const u8, + /// A file:// URI for the new location of the file/folder being renamed. + newUri: []const u8, +}; + +/// Represents information on a file/folder delete. +/// +/// @since 3.16.0 +pub const FileDelete = struct { + /// A file:// URI for the location of the file/folder being deleted. + uri: []const u8, +}; + +pub const MonikerOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Type hierarchy options used during static registration. +/// +/// @since 3.17.0 +pub const TypeHierarchyOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// @since 3.17.0 +pub const InlineValueContext = struct { + /// The stack frame (as a DAP Id) where the execution has stopped. + frameId: i32, + /// The document range where execution has stopped. + /// Typically the end position of the range denotes the line where the inline values are shown. + stoppedLocation: Range, +}; + +/// Provide inline value as text. +/// +/// @since 3.17.0 +pub const InlineValueText = struct { + /// The document range for which the inline value applies. + range: Range, + /// The text of the inline value. + text: []const u8, +}; + +/// Provide inline value through a variable lookup. +/// If only a range is specified, the variable name will be extracted from the underlying document. 
+/// An optional variable name can be used to override the extracted name.
+///
+/// @since 3.17.0
+pub const InlineValueVariableLookup = struct {
+    /// The document range for which the inline value applies.
+    /// The range is used to extract the variable name from the underlying document.
+    range: Range,
+    /// If specified the name of the variable to look up.
+    /// field can be undefined, but this possible state is non-critical
+    variableName: ?[]const u8 = null,
+    /// How to perform the lookup.
+    caseSensitiveLookup: bool,
+};
+
+/// Provide an inline value through an expression evaluation.
+/// If only a range is specified, the expression will be extracted from the underlying document.
+/// An optional expression can be used to override the extracted expression.
+///
+/// @since 3.17.0
+pub const InlineValueEvaluatableExpression = struct {
+    /// The document range for which the inline value applies.
+    /// The range is used to extract the evaluatable expression from the underlying document.
+    range: Range,
+    /// If specified the expression overrides the extracted expression.
+    /// field can be undefined, but this possible state is non-critical
+    expression: ?[]const u8 = null,
+};
+
+/// Inline value options used during static registration.
+///
+/// @since 3.17.0
+pub const InlineValueOptions = struct {
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// An inlay hint label part allows for interactive and composite labels
+/// of inlay hints.
+///
+/// @since 3.17.0
+pub const InlayHintLabelPart = struct {
+    /// The value of this label part.
+    value: []const u8,
+    /// The tooltip text when you hover over this label part. Depending on
+    /// the client capability `inlayHint.resolveSupport` clients might resolve
+    /// this property late using the resolve request.
+    /// field can be undefined, but this possible state is non-critical
+    tooltip: ?union(enum) {
+        string: []const u8,
+        MarkupContent: MarkupContent,
+    } = null,
+    /// An optional source code location that represents this
+    /// label part.
+    ///
+    /// The editor will use this location for the hover and for code navigation
+    /// features: This part will become a clickable link that resolves to the
+    /// definition of the symbol at the given location (not necessarily the
+    /// location itself), it shows the hover that shows at the given location,
+    /// and it shows a context menu with further code navigation commands.
+    ///
+    /// Depending on the client capability `inlayHint.resolveSupport` clients
+    /// might resolve this property late using the resolve request.
+    /// field can be undefined, but this possible state is non-critical
+    location: ?Location = null,
+    /// An optional command for this label part.
+    ///
+    /// Depending on the client capability `inlayHint.resolveSupport` clients
+    /// might resolve this property late using the resolve request.
+    /// field can be undefined, but this possible state is non-critical
+    command: ?Command = null,
+};
+
+/// A `MarkupContent` literal represents a string value whose content is interpreted based on its
+/// kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.
+///
+/// If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
+/// See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
+///
+/// Here is an example of how such a string can be constructed using JavaScript / TypeScript:
+/// ```ts
+/// let markdown: MarkdownContent = {
+///     kind: MarkupKind.Markdown,
+///     value: [
+///         '# Header',
+///         'Some text',
+///         '```typescript',
+///         'someCode();',
+///         '```'
+///     ].join('\n')
+/// };
+/// ```
+///
+/// *Please Note* that clients might sanitize the returned markdown. A client could decide to
+/// remove HTML from the markdown to avoid script execution.
+pub const MarkupContent = struct {
+    /// The type of the Markup
+    kind: MarkupKind,
+    /// The content itself
+    value: []const u8,
+};
+
+/// Inlay hint options used during static registration.
+///
+/// @since 3.17.0
+pub const InlayHintOptions = struct {
+    /// The server provides support to resolve additional
+    /// information for an inlay hint item.
+    /// field can be undefined, but this possible state is non-critical
+    resolveProvider: ?bool = null,
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// A full diagnostic report with a set of related documents.
+///
+/// @since 3.17.0
+pub const RelatedFullDocumentDiagnosticReport = struct {
+    /// Diagnostics of related documents. This information is useful
+    /// in programming languages where code in a file A can generate
+    /// diagnostics in a file B which A depends on. An example of
+    /// such a language is C/C++ where macro definitions in a file
+    /// a.cpp can result in errors in a header file b.hpp.
+    ///
+    /// @since 3.17.0
+    /// field can be undefined, but this possible state is non-critical
+    relatedDocuments: ?Map(DocumentUri, union(enum) {
+        FullDocumentDiagnosticReport: FullDocumentDiagnosticReport,
+        UnchangedDocumentDiagnosticReport: UnchangedDocumentDiagnosticReport,
+    }) = null,
+    // Extends FullDocumentDiagnosticReport
+    /// A full document diagnostic report.
+    comptime kind: []const u8 = "full",
+    /// An optional result id. If provided it will
+    /// be sent on the next diagnostic request for the
+    /// same document.
+    /// field can be undefined, but this possible state is non-critical
+    resultId: ?[]const u8 = null,
+    /// The actual items.
+    items: []const Diagnostic,
+};
+
+/// An unchanged diagnostic report with a set of related documents.
+///
+/// @since 3.17.0
+pub const RelatedUnchangedDocumentDiagnosticReport = struct {
+    /// Diagnostics of related documents. This information is useful
+    /// in programming languages where code in a file A can generate
+    /// diagnostics in a file B which A depends on. An example of
+    /// such a language is C/C++ where macro definitions in a file
+    /// a.cpp can result in errors in a header file b.hpp.
+    ///
+    /// @since 3.17.0
+    /// field can be undefined, but this possible state is non-critical
+    relatedDocuments: ?Map(DocumentUri, union(enum) {
+        FullDocumentDiagnosticReport: FullDocumentDiagnosticReport,
+        UnchangedDocumentDiagnosticReport: UnchangedDocumentDiagnosticReport,
+    }) = null,
+    // Extends UnchangedDocumentDiagnosticReport
+    /// A document diagnostic report indicating
+    /// no changes to the last result. A server can
+    /// only return `unchanged` if result ids are
+    /// provided.
+    comptime kind: []const u8 = "unchanged",
+    /// A result id which will be sent on the next
+    /// diagnostic request for the same document.
+    resultId: []const u8,
+};
+
+/// A diagnostic report with a full set of problems.
+/// +/// @since 3.17.0 +pub const FullDocumentDiagnosticReport = struct { + /// A full document diagnostic report. + comptime kind: []const u8 = "full", + /// An optional result id. If provided it will + /// be sent on the next diagnostic request for the + /// same document. + /// field can be undefined, but this possible state is non-critical + resultId: ?[]const u8 = null, + /// The actual items. + items: []const Diagnostic, +}; + +/// A diagnostic report indicating that the last returned +/// report is still accurate. +/// +/// @since 3.17.0 +pub const UnchangedDocumentDiagnosticReport = struct { + /// A document diagnostic report indicating + /// no changes to the last result. A server can + /// only return `unchanged` if result ids are + /// provided. + comptime kind: []const u8 = "unchanged", + /// A result id which will be sent on the next + /// diagnostic request for the same document. + resultId: []const u8, +}; + +/// Diagnostic options. +/// +/// @since 3.17.0 +pub const DiagnosticOptions = struct { + /// An optional identifier under which the diagnostics are + /// managed by the client. + /// field can be undefined, but this possible state is non-critical + identifier: ?[]const u8 = null, + /// Whether the language has inter file dependencies meaning that + /// editing code in one file can result in a different diagnostic + /// set in another file. Inter file dependencies are common for + /// most programming languages and typically uncommon for linters. + interFileDependencies: bool, + /// The server provides support for workspace diagnostics as well. + workspaceDiagnostics: bool, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// A previous result id in a workspace pull request. +/// +/// @since 3.17.0 +pub const PreviousResultId = struct { + /// The URI for which the client knowns a + /// result id. + uri: DocumentUri, + /// The value of the previous result id. + value: []const u8, +}; + +/// A notebook document. +/// +/// @since 3.17.0 +pub const NotebookDocument = struct { + /// The notebook document's uri. + uri: URI, + /// The type of the notebook. + notebookType: []const u8, + /// The version number of this document (it will increase after each + /// change, including undo/redo). + version: i32, + /// Additional metadata stored with the notebook + /// document. + /// + /// Note: should always be an object literal (e.g. LSPObject) + /// field can be undefined, but this possible state is non-critical + metadata: ?LSPObject = null, + /// The cells of a notebook. + cells: []const NotebookCell, +}; + +/// An item to transfer a text document from the client to the +/// server. +pub const TextDocumentItem = struct { + /// The text document's uri. + uri: DocumentUri, + /// The text document's language identifier. + languageId: []const u8, + /// The version number of this document (it will increase after each + /// change, including undo/redo). + version: i32, + /// The content of the opened text document. + text: []const u8, +}; + +/// A versioned notebook document identifier. +/// +/// @since 3.17.0 +pub const VersionedNotebookDocumentIdentifier = struct { + /// The version number of this notebook document. + version: i32, + /// The notebook document's uri. + uri: URI, +}; + +/// A change event for a notebook document. +/// +/// @since 3.17.0 +pub const NotebookDocumentChangeEvent = struct { + /// The changed meta data if any. 
+    ///
+    /// Note: should always be an object literal (e.g. LSPObject)
+    /// field can be undefined, but this possible state is non-critical
+    metadata: ?LSPObject = null,
+    /// Changes to cells
+    /// field can be undefined, but this possible state is non-critical
+    cells: ?struct {
+        /// Changes to the cell structure to add or
+        /// remove cells.
+        /// field can be undefined, but this possible state is non-critical
+        structure: ?struct {
+            /// The change to the cell array.
+            array: NotebookCellArrayChange,
+            /// Additional opened cell text documents.
+            /// field can be undefined, but this possible state is non-critical
+            didOpen: ?[]const TextDocumentItem = null,
+            /// Additional closed cell text documents.
+            /// field can be undefined, but this possible state is non-critical
+            didClose: ?[]const TextDocumentIdentifier = null,
+        } = null,
+        /// Changes to notebook cells properties like its
+        /// kind, execution summary or metadata.
+        /// field can be undefined, but this possible state is non-critical
+        data: ?[]const NotebookCell = null,
+        /// Changes to the text content of notebook cells.
+        /// field can be undefined, but this possible state is non-critical
+        textContent: ?[]const struct {
+            document: VersionedTextDocumentIdentifier,
+            changes: []const TextDocumentContentChangeEvent,
+        } = null,
+    } = null,
+};
+
+/// A literal to identify a notebook document in the client.
+///
+/// @since 3.17.0
+pub const NotebookDocumentIdentifier = struct {
+    /// The notebook document's uri.
+    uri: URI,
+};
+
+/// General parameters to register for a notification or to register a provider.
+pub const Registration = struct {
+    /// The id used to register the request. The id can be used to deregister
+    /// the request again.
+    id: []const u8,
+    /// The method / capability to register for.
+    method: []const u8,
+    /// Options necessary for the registration.
+    /// field can be undefined, but this possible state is non-critical
+    registerOptions: ?LSPAny = null,
+};
+
+/// General parameters to unregister a request or notification.
+pub const Unregistration = struct {
+    /// The id used to unregister the request or notification. Usually an id
+    /// provided during the register request.
+    id: []const u8,
+    /// The method to unregister for.
+    method: []const u8,
+};
+
+/// The initialize parameters
+pub const _InitializeParams = struct {
+    /// The process Id of the parent process that started
+    /// the server.
+    ///
+    /// Is `null` if the process has not been started by another process.
+    /// If the parent process is not alive then the server should exit.
+    processId: ?i32 = null,
+    /// Information about the client
+    ///
+    /// @since 3.15.0
+    /// field can be undefined, but this possible state is non-critical
+    clientInfo: ?struct {
+        /// The name of the client as defined by the client.
+        name: []const u8,
+        /// The client's version as defined by the client.
+        /// field can be undefined, but this possible state is non-critical
+        version: ?[]const u8 = null,
+    } = null,
+    /// The locale the client is currently showing the user interface
+    /// in. This is not necessarily the locale of the operating
+    /// system.
+    ///
+    /// Uses IETF language tags as the value's syntax
+    /// (See https://en.wikipedia.org/wiki/IETF_language_tag)
+    ///
+    /// @since 3.16.0
+    /// field can be undefined, but this possible state is non-critical
+    locale: ?[]const u8 = null,
+    /// The rootPath of the workspace. Is null
+    /// if no folder is open.
+    ///
+    /// @deprecated in favour of rootUri.
+ /// field can be undefined, but this possible state is non-critical + rootPath: ?[]const u8 = null, + /// The rootUri of the workspace. Is null if no + /// folder is open. If both `rootPath` and `rootUri` are set + /// `rootUri` wins. + /// + /// @deprecated in favour of workspaceFolders. + rootUri: ?DocumentUri = null, + /// The capabilities provided by the client (editor or tool) + capabilities: ClientCapabilities, + /// User provided initialization options. + /// field can be undefined, but this possible state is non-critical + initializationOptions: ?LSPAny = null, + /// The initial trace setting. If omitted trace is disabled ('off'). + /// field can be undefined, but this possible state is non-critical + trace: ?TraceValues = null, + // Uses mixin WorkDoneProgressParams + /// An optional token that a server can use to report work done progress. + /// field can be undefined, but this possible state is non-critical + workDoneToken: ?ProgressToken = null, +}; + +pub const WorkspaceFoldersInitializeParams = struct { + /// The workspace folders configured in the client when the server starts. + /// + /// This property is only available if the client supports workspace folders. + /// It can be `null` if the client supports workspace folders but none are + /// configured. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + workspaceFolders: ?[]const WorkspaceFolder = null, +}; + +/// Defines the capabilities provided by a language +/// server. +pub const ServerCapabilities = struct { + /// The position encoding the server picked from the encodings offered + /// by the client via the client capability `general.positionEncodings`. + /// + /// If the client didn't provide any position encodings the only valid + /// value that a server can return is 'utf-16'. + /// + /// If omitted it defaults to 'utf-16'. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + positionEncoding: ?PositionEncodingKind = null, + /// Defines how text documents are synced. Is either a detailed structure + /// defining each notification or for backwards compatibility the + /// TextDocumentSyncKind number. + /// field can be undefined, but this possible state is non-critical + textDocumentSync: ?union(enum) { + TextDocumentSyncOptions: TextDocumentSyncOptions, + TextDocumentSyncKind: TextDocumentSyncKind, + } = null, + /// Defines how notebook documents are synced. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + notebookDocumentSync: ?union(enum) { + NotebookDocumentSyncOptions: NotebookDocumentSyncOptions, + NotebookDocumentSyncRegistrationOptions: NotebookDocumentSyncRegistrationOptions, + } = null, + /// The server provides completion support. + /// field can be undefined, but this possible state is non-critical + completionProvider: ?CompletionOptions = null, + /// The server provides hover support. + /// field can be undefined, but this possible state is non-critical + hoverProvider: ?union(enum) { + bool: bool, + HoverOptions: HoverOptions, + } = null, + /// The server provides signature help support. + /// field can be undefined, but this possible state is non-critical + signatureHelpProvider: ?SignatureHelpOptions = null, + /// The server provides Goto Declaration support. 
+ /// field can be undefined, but this possible state is non-critical + declarationProvider: ?union(enum) { + bool: bool, + DeclarationOptions: DeclarationOptions, + DeclarationRegistrationOptions: DeclarationRegistrationOptions, + } = null, + /// The server provides goto definition support. + /// field can be undefined, but this possible state is non-critical + definitionProvider: ?union(enum) { + bool: bool, + DefinitionOptions: DefinitionOptions, + } = null, + /// The server provides Goto Type Definition support. + /// field can be undefined, but this possible state is non-critical + typeDefinitionProvider: ?union(enum) { + bool: bool, + TypeDefinitionOptions: TypeDefinitionOptions, + TypeDefinitionRegistrationOptions: TypeDefinitionRegistrationOptions, + } = null, + /// The server provides Goto Implementation support. + /// field can be undefined, but this possible state is non-critical + implementationProvider: ?union(enum) { + bool: bool, + ImplementationOptions: ImplementationOptions, + ImplementationRegistrationOptions: ImplementationRegistrationOptions, + } = null, + /// The server provides find references support. + /// field can be undefined, but this possible state is non-critical + referencesProvider: ?union(enum) { + bool: bool, + ReferenceOptions: ReferenceOptions, + } = null, + /// The server provides document highlight support. + /// field can be undefined, but this possible state is non-critical + documentHighlightProvider: ?union(enum) { + bool: bool, + DocumentHighlightOptions: DocumentHighlightOptions, + } = null, + /// The server provides document symbol support. + /// field can be undefined, but this possible state is non-critical + documentSymbolProvider: ?union(enum) { + bool: bool, + DocumentSymbolOptions: DocumentSymbolOptions, + } = null, + /// The server provides code actions. CodeActionOptions may only be + /// specified if the client states that it supports + /// `codeActionLiteralSupport` in its initial `initialize` request. + /// field can be undefined, but this possible state is non-critical + codeActionProvider: ?union(enum) { + bool: bool, + CodeActionOptions: CodeActionOptions, + } = null, + /// The server provides code lens. + /// field can be undefined, but this possible state is non-critical + codeLensProvider: ?CodeLensOptions = null, + /// The server provides document link support. + /// field can be undefined, but this possible state is non-critical + documentLinkProvider: ?DocumentLinkOptions = null, + /// The server provides color provider support. + /// field can be undefined, but this possible state is non-critical + colorProvider: ?union(enum) { + bool: bool, + DocumentColorOptions: DocumentColorOptions, + DocumentColorRegistrationOptions: DocumentColorRegistrationOptions, + } = null, + /// The server provides workspace symbol support. + /// field can be undefined, but this possible state is non-critical + workspaceSymbolProvider: ?union(enum) { + bool: bool, + WorkspaceSymbolOptions: WorkspaceSymbolOptions, + } = null, + /// The server provides document formatting. + /// field can be undefined, but this possible state is non-critical + documentFormattingProvider: ?union(enum) { + bool: bool, + DocumentFormattingOptions: DocumentFormattingOptions, + } = null, + /// The server provides document range formatting. 
+ /// field can be undefined, but this possible state is non-critical + documentRangeFormattingProvider: ?union(enum) { + bool: bool, + DocumentRangeFormattingOptions: DocumentRangeFormattingOptions, + } = null, + /// The server provides document formatting on typing. + /// field can be undefined, but this possible state is non-critical + documentOnTypeFormattingProvider: ?DocumentOnTypeFormattingOptions = null, + /// The server provides rename support. RenameOptions may only be + /// specified if the client states that it supports + /// `prepareSupport` in its initial `initialize` request. + /// field can be undefined, but this possible state is non-critical + renameProvider: ?union(enum) { + bool: bool, + RenameOptions: RenameOptions, + } = null, + /// The server provides folding provider support. + /// field can be undefined, but this possible state is non-critical + foldingRangeProvider: ?union(enum) { + bool: bool, + FoldingRangeOptions: FoldingRangeOptions, + FoldingRangeRegistrationOptions: FoldingRangeRegistrationOptions, + } = null, + /// The server provides selection range support. + /// field can be undefined, but this possible state is non-critical + selectionRangeProvider: ?union(enum) { + bool: bool, + SelectionRangeOptions: SelectionRangeOptions, + SelectionRangeRegistrationOptions: SelectionRangeRegistrationOptions, + } = null, + /// The server provides execute command support. + /// field can be undefined, but this possible state is non-critical + executeCommandProvider: ?ExecuteCommandOptions = null, + /// The server provides call hierarchy support. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + callHierarchyProvider: ?union(enum) { + bool: bool, + CallHierarchyOptions: CallHierarchyOptions, + CallHierarchyRegistrationOptions: CallHierarchyRegistrationOptions, + } = null, + /// The server provides linked editing range support. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + linkedEditingRangeProvider: ?union(enum) { + bool: bool, + LinkedEditingRangeOptions: LinkedEditingRangeOptions, + LinkedEditingRangeRegistrationOptions: LinkedEditingRangeRegistrationOptions, + } = null, + /// The server provides semantic tokens support. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + semanticTokensProvider: ?union(enum) { + SemanticTokensOptions: SemanticTokensOptions, + SemanticTokensRegistrationOptions: SemanticTokensRegistrationOptions, + } = null, + /// The server provides moniker support. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + monikerProvider: ?union(enum) { + bool: bool, + MonikerOptions: MonikerOptions, + MonikerRegistrationOptions: MonikerRegistrationOptions, + } = null, + /// The server provides type hierarchy support. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + typeHierarchyProvider: ?union(enum) { + bool: bool, + TypeHierarchyOptions: TypeHierarchyOptions, + TypeHierarchyRegistrationOptions: TypeHierarchyRegistrationOptions, + } = null, + /// The server provides inline values. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + inlineValueProvider: ?union(enum) { + bool: bool, + InlineValueOptions: InlineValueOptions, + InlineValueRegistrationOptions: InlineValueRegistrationOptions, + } = null, + /// The server provides inlay hints. 
+ /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + inlayHintProvider: ?union(enum) { + bool: bool, + InlayHintOptions: InlayHintOptions, + InlayHintRegistrationOptions: InlayHintRegistrationOptions, + } = null, + /// The server has support for pull model diagnostics. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + diagnosticProvider: ?union(enum) { + DiagnosticOptions: DiagnosticOptions, + DiagnosticRegistrationOptions: DiagnosticRegistrationOptions, + } = null, + /// Workspace specific server capabilities. + /// field can be undefined, but this possible state is non-critical + workspace: ?struct { + /// The server supports workspace folder. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + workspaceFolders: ?WorkspaceFoldersServerCapabilities = null, + /// The server is interested in notifications/requests for operations on files. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + fileOperations: ?FileOperationOptions = null, + } = null, + /// Experimental server capabilities. + /// field can be undefined, but this possible state is non-critical + experimental: ?LSPAny = null, +}; + +/// A text document identifier to denote a specific version of a text document. +pub const VersionedTextDocumentIdentifier = struct { + /// The version number of this document. + version: i32, + // Extends TextDocumentIdentifier + /// The text document's uri. + uri: DocumentUri, +}; + +/// Save options. +pub const SaveOptions = struct { + /// The client is supposed to include the content on save. + /// field can be undefined, but this possible state is non-critical + includeText: ?bool = null, +}; + +/// An event describing a file change. +pub const FileEvent = struct { + /// The file's uri. + uri: DocumentUri, + /// The change type. + type: FileChangeType, +}; + +pub const FileSystemWatcher = struct { + /// The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail. + /// + /// @since 3.17.0 support for relative patterns. + globPattern: GlobPattern, + /// The kind of events of interest. If omitted it defaults + /// to WatchKind.Create | WatchKind.Change | WatchKind.Delete + /// which is 7. + /// field can be undefined, but this possible state is non-critical + kind: ?WatchKind = null, +}; + +/// Represents a diagnostic, such as a compiler error or warning. Diagnostic objects +/// are only valid in the scope of a resource. +pub const Diagnostic = struct { + /// The range at which the message applies + range: Range, + /// The diagnostic's severity. Can be omitted. If omitted it is up to the + /// client to interpret diagnostics as error, warning, info or hint. + /// field can be undefined, but this possible state is non-critical + severity: ?DiagnosticSeverity = null, + /// The diagnostic's code, which usually appear in the user interface. + /// field can be undefined, but this possible state is non-critical + code: ?union(enum) { + integer: i32, + string: []const u8, + } = null, + /// An optional property to describe the error code. + /// Requires the code field (above) to be present/not null. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + codeDescription: ?CodeDescription = null, + /// A human-readable string describing the source of this + /// diagnostic, e.g. 'typescript' or 'super lint'. It usually + /// appears in the user interface. 
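// Illustrative sketch (not part of the patch): a minimal capability advertisement built
// from `ServerCapabilities` above, using the anonymous unions for the provider fields;
// assumes `TextDocumentSyncKind` is the generated enum with an `Incremental` tag.
const capabilities = ServerCapabilities{
    .textDocumentSync = .{ .TextDocumentSyncKind = .Incremental },
    .hoverProvider = .{ .bool = true },
    .definitionProvider = .{ .bool = true },
    .completionProvider = .{ .triggerCharacters = &[_][]const u8{ ".", "@" } },
};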
+    /// field can be undefined, but this possible state is non-critical
+    source: ?[]const u8 = null,
+    /// The diagnostic's message. It usually appears in the user interface
+    message: []const u8,
+    /// Additional metadata about the diagnostic.
+    ///
+    /// @since 3.15.0
+    /// field can be undefined, but this possible state is non-critical
+    tags: ?[]const DiagnosticTag = null,
+    /// An array of related diagnostic information, e.g. when symbol-names within
+    /// a scope collide all definitions can be marked via this property.
+    /// field can be undefined, but this possible state is non-critical
+    relatedInformation: ?[]const DiagnosticRelatedInformation = null,
+    /// A data entry field that is preserved between a `textDocument/publishDiagnostics`
+    /// notification and `textDocument/codeAction` request.
+    ///
+    /// @since 3.16.0
+    /// field can be undefined, but this possible state is non-critical
+    data: ?LSPAny = null,
+};
+
+/// Contains additional information about the context in which a completion request is triggered.
+pub const CompletionContext = struct {
+    /// How the completion was triggered.
+    triggerKind: CompletionTriggerKind,
+    /// The trigger character (a single character) that triggered code completion.
+    /// Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`
+    /// field can be undefined, but this possible state is non-critical
+    triggerCharacter: ?[]const u8 = null,
+};
+
+/// Additional details for a completion item label.
+///
+/// @since 3.17.0
+pub const CompletionItemLabelDetails = struct {
+    /// An optional string which is rendered less prominently directly after {@link CompletionItem.label label},
+    /// without any spacing. Should be used for function signatures and type annotations.
+    /// field can be undefined, but this possible state is non-critical
+    detail: ?[]const u8 = null,
+    /// An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used
+    /// for fully qualified names and file paths.
+    /// field can be undefined, but this possible state is non-critical
+    description: ?[]const u8 = null,
+};
+
+/// A special text edit to provide an insert and a replace operation.
+///
+/// @since 3.16.0
+pub const InsertReplaceEdit = struct {
+    /// The string to be inserted.
+    newText: []const u8,
+    /// The range if the insert is requested
+    insert: Range,
+    /// The range if the replace is requested.
+    replace: Range,
+};
+
+/// Completion options.
+pub const CompletionOptions = struct {
+    /// Most tools trigger a completion request automatically without explicitly requesting
+    /// it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user
+    /// starts to type an identifier. For example if the user types `c` in a JavaScript file
+    /// code completion will automatically pop up and present `console` among others as a
+    /// completion item. Characters that make up identifiers don't need to be listed here.
+    ///
+    /// If code completion should automatically be triggered on characters that are not valid inside
+    /// an identifier (for example `.` in JavaScript) list them in `triggerCharacters`.
+    /// field can be undefined, but this possible state is non-critical
+    triggerCharacters: ?[]const []const u8 = null,
+    /// The list of all possible characters that commit a completion. This field can be used
+    /// if clients don't support individual commit characters per completion item. See
+    /// `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport`
+    ///
+    /// If a server provides both `allCommitCharacters` and commit characters on an individual
+    /// completion item the ones on the completion item win.
+    ///
+    /// @since 3.2.0
+    /// field can be undefined, but this possible state is non-critical
+    allCommitCharacters: ?[]const []const u8 = null,
+    /// The server provides support to resolve additional
+    /// information for a completion item.
+    /// field can be undefined, but this possible state is non-critical
+    resolveProvider: ?bool = null,
+    /// The server supports the following `CompletionItem` specific
+    /// capabilities.
+    ///
+    /// @since 3.17.0
+    /// field can be undefined, but this possible state is non-critical
+    completionItem: ?struct {
+        /// The server has support for completion item label
+        /// details (see also `CompletionItemLabelDetails`) when
+        /// receiving a completion item in a resolve call.
+        ///
+        /// @since 3.17.0
+        /// field can be undefined, but this possible state is non-critical
+        labelDetailsSupport: ?bool = null,
+    } = null,
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// Hover options.
+pub const HoverOptions = struct {
+    // Uses mixin WorkDoneProgressOptions
+    /// field can be undefined, but this possible state is non-critical
+    workDoneProgress: ?bool = null,
+};
+
+/// Additional information about the context in which a signature help request was triggered.
+///
+/// @since 3.15.0
+pub const SignatureHelpContext = struct {
+    /// Action that caused signature help to be triggered.
+    triggerKind: SignatureHelpTriggerKind,
+    /// Character that caused signature help to be triggered.
+    ///
+    /// This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
+    /// field can be undefined, but this possible state is non-critical
+    triggerCharacter: ?[]const u8 = null,
+    /// `true` if signature help was already showing when it was triggered.
+    ///
+    /// Retriggers occur when the signature help is already active and can be caused by actions such as
+    /// typing a trigger character, a cursor move, or document content changes.
+    isRetrigger: bool,
+    /// The currently active `SignatureHelp`.
+    ///
+    /// The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
+    /// the user navigating through available signatures.
+    /// field can be undefined, but this possible state is non-critical
+    activeSignatureHelp: ?SignatureHelp = null,
+};
+
+/// Represents the signature of something callable. A signature
+/// can have a label, like a function-name, a doc-comment, and
+/// a set of parameters.
+pub const SignatureInformation = struct {
+    /// The label of this signature. Will be shown in
+    /// the UI.
+    label: []const u8,
+    /// The human-readable doc-comment of this signature. Will be shown
+    /// in the UI but can be omitted.
+    /// field can be undefined, but this possible state is non-critical
+    documentation: ?union(enum) {
+        string: []const u8,
+        MarkupContent: MarkupContent,
+    } = null,
+    /// The parameters of this signature.
+    /// field can be undefined, but this possible state is non-critical
+    parameters: ?[]const ParameterInformation = null,
+    /// The index of the active parameter.
+    ///
+    /// If provided, this is used in place of `SignatureHelp.activeParameter`.
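// Illustrative sketch (not part of the patch): a minimal `Diagnostic` value, assuming
// `Range`/`Position` are the generated structs with `start`/`end` and `line`/`character`
// fields and that `DiagnosticSeverity` has an `Error` tag; the message and code are made up.
const diag = Diagnostic{
    .range = .{
        .start = .{ .line = 12, .character = 4 },
        .end = .{ .line = 12, .character = 9 },
    },
    .severity = .Error,
    .code = .{ .string = "unused_variable" },
    .source = "zls",
    .message = "local variable is never used",
};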
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + activeParameter: ?u32 = null, +}; + +/// Server Capabilities for a {@link SignatureHelpRequest}. +pub const SignatureHelpOptions = struct { + /// List of characters that trigger signature help automatically. + /// field can be undefined, but this possible state is non-critical + triggerCharacters: ?[]const []const u8 = null, + /// List of characters that re-trigger signature help. + /// + /// These trigger characters are only active when signature help is already showing. All trigger characters + /// are also counted as re-trigger characters. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + retriggerCharacters: ?[]const []const u8 = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Server Capabilities for a {@link DefinitionRequest}. +pub const DefinitionOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Value-object that contains additional information when +/// requesting references. +pub const ReferenceContext = struct { + /// Include the declaration of the current symbol. + includeDeclaration: bool, +}; + +/// Reference options. +pub const ReferenceOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Provider options for a {@link DocumentHighlightRequest}. +pub const DocumentHighlightOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// A base for all symbol information. +pub const BaseSymbolInformation = struct { + /// The name of this symbol. + name: []const u8, + /// The kind of this symbol. + kind: SymbolKind, + /// Tags for this symbol. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + tags: ?[]const SymbolTag = null, + /// The name of the symbol containing this symbol. This information is for + /// user interface purposes (e.g. to render a qualifier in the user interface + /// if necessary). It can't be used to re-infer a hierarchy for the document + /// symbols. + /// field can be undefined, but this possible state is non-critical + containerName: ?[]const u8 = null, +}; + +/// Provider options for a {@link DocumentSymbolRequest}. +pub const DocumentSymbolOptions = struct { + /// A human-readable string that is shown when multiple outlines trees + /// are shown for the same document. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + label: ?[]const u8 = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Contains additional diagnostic information about the context in which +/// a {@link CodeActionProvider.provideCodeActions code action} is run. +pub const CodeActionContext = struct { + /// An array of diagnostics known on the client side overlapping the range provided to the + /// `textDocument/codeAction` request. They are provided so that the server knows which + /// errors are currently presented to the user for the given range. 
There is no guarantee + /// that these accurately reflect the error state of the resource. The primary parameter + /// to compute code actions is the provided range. + diagnostics: []const Diagnostic, + /// Requested kind of actions to return. + /// + /// Actions not of this kind are filtered out by the client before being shown. So servers + /// can omit computing them. + /// field can be undefined, but this possible state is non-critical + only: ?[]const CodeActionKind = null, + /// The reason why code actions were requested. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + triggerKind: ?CodeActionTriggerKind = null, +}; + +/// Provider options for a {@link CodeActionRequest}. +pub const CodeActionOptions = struct { + /// CodeActionKinds that this server may return. + /// + /// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server + /// may list out every specific kind they provide. + /// field can be undefined, but this possible state is non-critical + codeActionKinds: ?[]const CodeActionKind = null, + /// The server provides support to resolve additional + /// information for a code action. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Server capabilities for a {@link WorkspaceSymbolRequest}. +pub const WorkspaceSymbolOptions = struct { + /// The server provides support to resolve additional + /// information for a workspace symbol. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Code Lens provider options of a {@link CodeLensRequest}. +pub const CodeLensOptions = struct { + /// Code lens has a resolve provider as well. + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Provider options for a {@link DocumentLinkRequest}. +pub const DocumentLinkOptions = struct { + /// Document links have a resolve provider as well. + /// field can be undefined, but this possible state is non-critical + resolveProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Value-object describing what options formatting should use. +pub const FormattingOptions = struct { + /// Size of a tab in spaces. + tabSize: u32, + /// Prefer spaces over tabs. + insertSpaces: bool, + /// Trim trailing whitespace on a line. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + trimTrailingWhitespace: ?bool = null, + /// Insert a newline character at the end of the file if one does not exist. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + insertFinalNewline: ?bool = null, + /// Trim all newlines after the final newline at the end of the file. 
+ /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + trimFinalNewlines: ?bool = null, +}; + +/// Provider options for a {@link DocumentFormattingRequest}. +pub const DocumentFormattingOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Provider options for a {@link DocumentRangeFormattingRequest}. +pub const DocumentRangeFormattingOptions = struct { + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// Provider options for a {@link DocumentOnTypeFormattingRequest}. +pub const DocumentOnTypeFormattingOptions = struct { + /// A character on which formatting should be triggered, like `{`. + firstTriggerCharacter: []const u8, + /// More trigger characters. + /// field can be undefined, but this possible state is non-critical + moreTriggerCharacter: ?[]const []const u8 = null, +}; + +/// Provider options for a {@link RenameRequest}. +pub const RenameOptions = struct { + /// Renames should be checked and tested before being executed. + /// + /// @since version 3.12.0 + /// field can be undefined, but this possible state is non-critical + prepareProvider: ?bool = null, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// The server capabilities of a {@link ExecuteCommandRequest}. +pub const ExecuteCommandOptions = struct { + /// The commands to be executed on the server + commands: []const []const u8, + // Uses mixin WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensLegend = struct { + /// The token types a server uses. + tokenTypes: []const []const u8, + /// The token modifiers a server uses. + tokenModifiers: []const []const u8, +}; + +/// A text document identifier to optionally denote a specific version of a text document. +pub const OptionalVersionedTextDocumentIdentifier = struct { + /// The version number of this document. If a versioned text document identifier + /// is sent from the server to the client and the file is not open in the editor + /// (the server has not received an open notification before) the server can send + /// `null` to indicate that the version is unknown and the content on disk is the + /// truth (as specified with document content ownership). + version: ?i32 = null, + // Extends TextDocumentIdentifier + /// The text document's uri. + uri: DocumentUri, +}; + +/// A special text edit with an additional change annotation. +/// +/// @since 3.16.0. +pub const AnnotatedTextEdit = struct { + /// The actual identifier of the change annotation + annotationId: ChangeAnnotationIdentifier, + // Extends TextEdit + /// The range of the text document to be manipulated. To insert + /// text into a document create a range where start === end. + range: Range, + /// The string to be inserted. For delete operations use an + /// empty string. + newText: []const u8, +}; + +/// A generic resource operation. +pub const ResourceOperation = struct { + /// The resource operation kind. + kind: []const u8, + /// An optional annotation identifier describing the operation. 
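// Illustrative sketch (not part of the patch): formatting options a client typically
// sends with `textDocument/formatting` for Zig sources.
const formatting = FormattingOptions{
    .tabSize = 4,
    .insertSpaces = true,
    .insertFinalNewline = true,
};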
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + annotationId: ?ChangeAnnotationIdentifier = null, +}; + +/// Options to create a file. +pub const CreateFileOptions = struct { + /// Overwrite existing file. Overwrite wins over `ignoreIfExists` + /// field can be undefined, but this possible state is non-critical + overwrite: ?bool = null, + /// Ignore if exists. + /// field can be undefined, but this possible state is non-critical + ignoreIfExists: ?bool = null, +}; + +/// Rename file options +pub const RenameFileOptions = struct { + /// Overwrite target if existing. Overwrite wins over `ignoreIfExists` + /// field can be undefined, but this possible state is non-critical + overwrite: ?bool = null, + /// Ignores if target exists. + /// field can be undefined, but this possible state is non-critical + ignoreIfExists: ?bool = null, +}; + +/// Delete file options +pub const DeleteFileOptions = struct { + /// Delete the content recursively if a folder is denoted. + /// field can be undefined, but this possible state is non-critical + recursive: ?bool = null, + /// Ignore the operation if the file doesn't exist. + /// field can be undefined, but this possible state is non-critical + ignoreIfNotExists: ?bool = null, +}; + +/// A pattern to describe in which file operation requests or notifications +/// the server is interested in receiving. +/// +/// @since 3.16.0 +pub const FileOperationPattern = struct { + /// The glob pattern to match. Glob patterns can have the following syntax: + /// - `*` to match one or more characters in a path segment + /// - `?` to match on one character in a path segment + /// - `**` to match any number of path segments, including none + /// - `{}` to group sub patterns into an OR expression. (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files) + /// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) + /// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`) + glob: []const u8, + /// Whether to match files or folders with this pattern. + /// + /// Matches both if undefined. + /// field can be undefined, but this possible state is non-critical + matches: ?FileOperationPatternKind = null, + /// Additional options used during matching. + /// field can be undefined, but this possible state is non-critical + options: ?FileOperationPatternOptions = null, +}; + +/// A full document diagnostic report for a workspace diagnostic result. +/// +/// @since 3.17.0 +pub const WorkspaceFullDocumentDiagnosticReport = struct { + /// The URI for which diagnostic information is reported. + uri: DocumentUri, + /// The version number for which the diagnostics are reported. + /// If the document is not marked as open `null` can be provided. + version: ?i32 = null, + // Extends FullDocumentDiagnosticReport + /// A full document diagnostic report. + comptime kind: []const u8 = "full", + /// An optional result id. If provided it will + /// be sent on the next diagnostic request for the + /// same document. + /// field can be undefined, but this possible state is non-critical + resultId: ?[]const u8 = null, + /// The actual items. + items: []const Diagnostic, +}; + +/// An unchanged document diagnostic report for a workspace diagnostic result. 
+///
+/// @since 3.17.0
+pub const WorkspaceUnchangedDocumentDiagnosticReport = struct {
+    /// The URI for which diagnostic information is reported.
+    uri: DocumentUri,
+    /// The version number for which the diagnostics are reported.
+    /// If the document is not marked as open `null` can be provided.
+    version: ?i32 = null,
+    // Extends UnchangedDocumentDiagnosticReport
+    /// A document diagnostic report indicating
+    /// no changes to the last result. A server can
+    /// only return `unchanged` if result ids are
+    /// provided.
+    comptime kind: []const u8 = "unchanged",
+    /// A result id which will be sent on the next
+    /// diagnostic request for the same document.
+    resultId: []const u8,
+};
+
+/// A notebook cell.
+///
+/// A cell's document URI must be unique across ALL notebook
+/// cells and can therefore be used to uniquely identify a
+/// notebook cell or the cell's text document.
+///
+/// @since 3.17.0
+pub const NotebookCell = struct {
+    /// The cell's kind
+    kind: NotebookCellKind,
+    /// The URI of the cell's text document
+    /// content.
+    document: DocumentUri,
+    /// Additional metadata stored with the cell.
+    ///
+    /// Note: should always be an object literal (e.g. LSPObject)
+    /// field can be undefined, but this possible state is non-critical
+    metadata: ?LSPObject = null,
+    /// Additional execution summary information
+    /// if supported by the client.
+    /// field can be undefined, but this possible state is non-critical
+    executionSummary: ?ExecutionSummary = null,
+};
+
+/// A change describing how to move a `NotebookCell`
+/// array from state S to S'.
+///
+/// @since 3.17.0
+pub const NotebookCellArrayChange = struct {
+    /// The start offset of the cell that changed.
+    start: u32,
+    /// The deleted cells
+    deleteCount: u32,
+    /// The new cells, if any
+    /// field can be undefined, but this possible state is non-critical
+    cells: ?[]const NotebookCell = null,
+};
+
+/// Defines the capabilities provided by the client.
+pub const ClientCapabilities = struct {
+    /// Workspace specific client capabilities.
+    /// field can be undefined, but this possible state is non-critical
+    workspace: ?WorkspaceClientCapabilities = null,
+    /// Text document specific client capabilities.
+    /// field can be undefined, but this possible state is non-critical
+    textDocument: ?TextDocumentClientCapabilities = null,
+    /// Capabilities specific to the notebook document support.
+    ///
+    /// @since 3.17.0
+    /// field can be undefined, but this possible state is non-critical
+    notebookDocument: ?NotebookDocumentClientCapabilities = null,
+    /// Window specific client capabilities.
+    /// field can be undefined, but this possible state is non-critical
+    window: ?WindowClientCapabilities = null,
+    /// General client capabilities.
+    ///
+    /// @since 3.16.0
+    /// field can be undefined, but this possible state is non-critical
+    general: ?GeneralClientCapabilities = null,
+    /// Experimental client capabilities.
+    /// field can be undefined, but this possible state is non-critical
+    experimental: ?LSPAny = null,
+};
+
+pub const TextDocumentSyncOptions = struct {
+    /// Open and close notifications are sent to the server. If omitted open close notification should not
+    /// be sent.
+    /// field can be undefined, but this possible state is non-critical
+    openClose: ?bool = null,
+    /// Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
+    /// and TextDocumentSyncKind.Incremental. If omitted it defaults to TextDocumentSyncKind.None.
+    /// field can be undefined, but this possible state is non-critical
+    change: ?TextDocumentSyncKind = null,
+    /// If present, `willSave` notifications are sent to the server. If omitted the notification should not be
+    /// sent.
+    /// field can be undefined, but this possible state is non-critical
+    willSave: ?bool = null,
+    /// If present, `willSaveWaitUntil` requests are sent to the server. If omitted the request should not be
+    /// sent.
+    /// field can be undefined, but this possible state is non-critical
+    willSaveWaitUntil: ?bool = null,
+    /// If present, save notifications are sent to the server. If omitted the notification should not be
+    /// sent.
+    /// field can be undefined, but this possible state is non-critical
+    save: ?union(enum) {
+        bool: bool,
+        SaveOptions: SaveOptions,
+    } = null,
+};
+
+/// Options specific to a notebook plus its cells
+/// to be synced to the server.
+///
+/// If a selector provides a notebook document
+/// filter but no cell selector all cells of a
+/// matching notebook document will be synced.
+///
+/// If a selector provides no notebook document
+/// filter but only a cell selector all notebook
+/// documents that contain at least one matching
+/// cell will be synced.
+///
+/// @since 3.17.0
+pub const NotebookDocumentSyncOptions = struct {
+    /// The notebooks to be synced
+    notebookSelector: []const union(enum) {
+        literal_0: struct {
+            /// The notebook to be synced. If a string
+            /// value is provided it matches against the
+            /// notebook type. '*' matches every notebook.
+            notebook: union(enum) {
+                string: []const u8,
+                NotebookDocumentFilter: NotebookDocumentFilter,
+            },
+            /// The cells of the matching notebook to be synced.
+            /// field can be undefined, but this possible state is non-critical
+            cells: ?[]const struct {
+                language: []const u8,
+            } = null,
+        },
+        literal_1: struct {
+            /// The notebook to be synced. If a string
+            /// value is provided it matches against the
+            /// notebook type. '*' matches every notebook.
+            /// field can be undefined, but this possible state is non-critical
+            notebook: ?union(enum) {
+                string: []const u8,
+                NotebookDocumentFilter: NotebookDocumentFilter,
+            } = null,
+            /// The cells of the matching notebook to be synced.
+            cells: []const struct {
+                language: []const u8,
+            },
+        },
+    },
+    /// Whether save notifications should be forwarded to
+    /// the server. Will only be honored if mode === `notebook`.
+    /// field can be undefined, but this possible state is non-critical
+    save: ?bool = null,
+};
+
+/// Registration options specific to a notebook.
+///
+/// @since 3.17.0
+pub const NotebookDocumentSyncRegistrationOptions = struct {
+    // Extends NotebookDocumentSyncOptions
+    /// The notebooks to be synced
+    notebookSelector: []const union(enum) {
+        literal_0: struct {
+            /// The notebook to be synced. If a string
+            /// value is provided it matches against the
+            /// notebook type. '*' matches every notebook.
+            notebook: union(enum) {
+                string: []const u8,
+                NotebookDocumentFilter: NotebookDocumentFilter,
+            },
+            /// The cells of the matching notebook to be synced.
+            /// field can be undefined, but this possible state is non-critical
+            cells: ?[]const struct {
+                language: []const u8,
+            } = null,
+        },
+        literal_1: struct {
+            /// The notebook to be synced. If a string
+            /// value is provided it matches against the
+            /// notebook type. '*' matches every notebook.
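// Illustrative sketch (not part of the patch): open/close tracking with incremental
// change events and save notifications that include the full text, using the
// `TextDocumentSyncOptions` struct above; assumes `TextDocumentSyncKind` has an
// `Incremental` tag.
const sync_options = TextDocumentSyncOptions{
    .openClose = true,
    .change = .Incremental,
    .save = .{ .SaveOptions = .{ .includeText = true } },
};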
+ /// field can be undefined, but this possible state is non-critical + notebook: ?union(enum) { + string: []const u8, + NotebookDocumentFilter: NotebookDocumentFilter, + } = null, + /// The cells of the matching notebook to be synced. + cells: []const struct { + language: []const u8, + }, + }, + }, + /// Whether save notification should be forwarded to + /// the server. Will only be honored if mode === `notebook`. + /// field can be undefined, but this possible state is non-critical + save: ?bool = null, + + // Uses mixin StaticRegistrationOptions + /// The id used to register the request. The id can be used to deregister + /// the request again. See also Registration#id. + /// field can be undefined, but this possible state is non-critical + id: ?[]const u8 = null, +}; + +pub const WorkspaceFoldersServerCapabilities = struct { + /// The server has support for workspace folders + /// field can be undefined, but this possible state is non-critical + supported: ?bool = null, + /// Whether the server wants to receive workspace folder + /// change notifications. + /// + /// If a string is provided the string is treated as an ID + /// under which the notification is registered on the client + /// side. The ID can be used to unregister for these events + /// using the `client/unregisterCapability` request. + /// field can be undefined, but this possible state is non-critical + changeNotifications: ?union(enum) { + string: []const u8, + bool: bool, + } = null, +}; + +/// Options for notifications/requests for user operations on files. +/// +/// @since 3.16.0 +pub const FileOperationOptions = struct { + /// The server is interested in receiving didCreateFiles notifications. + /// field can be undefined, but this possible state is non-critical + didCreate: ?FileOperationRegistrationOptions = null, + /// The server is interested in receiving willCreateFiles requests. + /// field can be undefined, but this possible state is non-critical + willCreate: ?FileOperationRegistrationOptions = null, + /// The server is interested in receiving didRenameFiles notifications. + /// field can be undefined, but this possible state is non-critical + didRename: ?FileOperationRegistrationOptions = null, + /// The server is interested in receiving willRenameFiles requests. + /// field can be undefined, but this possible state is non-critical + willRename: ?FileOperationRegistrationOptions = null, + /// The server is interested in receiving didDeleteFiles file notifications. + /// field can be undefined, but this possible state is non-critical + didDelete: ?FileOperationRegistrationOptions = null, + /// The server is interested in receiving willDeleteFiles file requests. + /// field can be undefined, but this possible state is non-critical + willDelete: ?FileOperationRegistrationOptions = null, +}; + +/// Structure to capture a description for an error code. +/// +/// @since 3.16.0 +pub const CodeDescription = struct { + /// An URI to open with more information about the diagnostic error. + href: URI, +}; + +/// Represents a related message and source code location for a diagnostic. This should be +/// used to point to code locations that cause or related to a diagnostics, e.g when duplicating +/// a symbol in a scope. +pub const DiagnosticRelatedInformation = struct { + /// The location of this related diagnostic information. + location: Location, + /// The message of this related diagnostic information. + message: []const u8, +}; + +/// Represents a parameter of a callable-signature. 
A parameter can +/// have a label and a doc-comment. +pub const ParameterInformation = struct { + /// The label of this parameter information. + /// + /// Either a string or an inclusive start and exclusive end offsets within its containing + /// signature label. (see SignatureInformation.label). The offsets are based on a UTF-16 + /// string representation as `Position` and `Range` does. + /// + /// *Note*: a label of type string should be a substring of its containing signature label. + /// Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`. + label: union(enum) { + string: []const u8, + tuple_1: struct { u32, u32 }, + }, + /// The human-readable doc-comment of this parameter. Will be shown + /// in the UI but can be omitted. + /// field can be undefined, but this possible state is non-critical + documentation: ?union(enum) { + string: []const u8, + MarkupContent: MarkupContent, + } = null, +}; + +/// A notebook cell text document filter denotes a cell text +/// document by different properties. +/// +/// @since 3.17.0 +pub const NotebookCellTextDocumentFilter = struct { + /// A filter that matches against the notebook + /// containing the notebook cell. If a string + /// value is provided it matches against the + /// notebook type. '*' matches every notebook. + notebook: union(enum) { + string: []const u8, + NotebookDocumentFilter: NotebookDocumentFilter, + }, + /// A language id like `python`. + /// + /// Will be matched against the language id of the + /// notebook cell document. '*' matches every language. + /// field can be undefined, but this possible state is non-critical + language: ?[]const u8 = null, +}; + +/// Matching options for the file operation pattern. +/// +/// @since 3.16.0 +pub const FileOperationPatternOptions = struct { + /// The pattern should be matched ignoring casing. + /// field can be undefined, but this possible state is non-critical + ignoreCase: ?bool = null, +}; + +pub const ExecutionSummary = struct { + /// A strict monotonically increasing value + /// indicating the execution order of a cell + /// inside a notebook. + executionOrder: u32, + /// Whether the execution was successful or + /// not if known by the client. + /// field can be undefined, but this possible state is non-critical + success: ?bool = null, +}; + +/// Workspace specific client capabilities. +pub const WorkspaceClientCapabilities = struct { + /// The client supports applying batch edits + /// to the workspace by supporting the request + /// 'workspace/applyEdit' + /// field can be undefined, but this possible state is non-critical + applyEdit: ?bool = null, + /// Capabilities specific to `WorkspaceEdit`s. + /// field can be undefined, but this possible state is non-critical + workspaceEdit: ?WorkspaceEditClientCapabilities = null, + /// Capabilities specific to the `workspace/didChangeConfiguration` notification. + /// field can be undefined, but this possible state is non-critical + didChangeConfiguration: ?DidChangeConfigurationClientCapabilities = null, + /// Capabilities specific to the `workspace/didChangeWatchedFiles` notification. + /// field can be undefined, but this possible state is non-critical + didChangeWatchedFiles: ?DidChangeWatchedFilesClientCapabilities = null, + /// Capabilities specific to the `workspace/symbol` request. + /// field can be undefined, but this possible state is non-critical + symbol: ?WorkspaceSymbolClientCapabilities = null, + /// Capabilities specific to the `workspace/executeCommand` request. 
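// Illustrative sketch (not part of the patch): a `ParameterInformation` identified by
// UTF-16 offsets into its containing `SignatureInformation.label` rather than by
// substring; the offsets and documentation text are made up.
const alloc_param = ParameterInformation{
    .label = .{ .tuple_1 = .{ 8, 22 } },
    .documentation = .{ .string = "allocator used for the returned slice" },
};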
+ /// field can be undefined, but this possible state is non-critical + executeCommand: ?ExecuteCommandClientCapabilities = null, + /// The client has support for workspace folders. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + workspaceFolders: ?bool = null, + /// The client supports `workspace/configuration` requests. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + configuration: ?bool = null, + /// Capabilities specific to the semantic token requests scoped to the + /// workspace. + /// + /// @since 3.16.0. + /// field can be undefined, but this possible state is non-critical + semanticTokens: ?SemanticTokensWorkspaceClientCapabilities = null, + /// Capabilities specific to the code lens requests scoped to the + /// workspace. + /// + /// @since 3.16.0. + /// field can be undefined, but this possible state is non-critical + codeLens: ?CodeLensWorkspaceClientCapabilities = null, + /// The client has support for file notifications/requests for user operations on files. + /// + /// Since 3.16.0 + /// field can be undefined, but this possible state is non-critical + fileOperations: ?FileOperationClientCapabilities = null, + /// Capabilities specific to the inline values requests scoped to the + /// workspace. + /// + /// @since 3.17.0. + /// field can be undefined, but this possible state is non-critical + inlineValue: ?InlineValueWorkspaceClientCapabilities = null, + /// Capabilities specific to the inlay hint requests scoped to the + /// workspace. + /// + /// @since 3.17.0. + /// field can be undefined, but this possible state is non-critical + inlayHint: ?InlayHintWorkspaceClientCapabilities = null, + /// Capabilities specific to the diagnostic requests scoped to the + /// workspace. + /// + /// @since 3.17.0. + /// field can be undefined, but this possible state is non-critical + diagnostics: ?DiagnosticWorkspaceClientCapabilities = null, +}; + +/// Text document specific client capabilities. +pub const TextDocumentClientCapabilities = struct { + /// Defines which synchronization capabilities the client supports. + /// field can be undefined, but this possible state is non-critical + synchronization: ?TextDocumentSyncClientCapabilities = null, + /// Capabilities specific to the `textDocument/completion` request. + /// field can be undefined, but this possible state is non-critical + completion: ?CompletionClientCapabilities = null, + /// Capabilities specific to the `textDocument/hover` request. + /// field can be undefined, but this possible state is non-critical + hover: ?HoverClientCapabilities = null, + /// Capabilities specific to the `textDocument/signatureHelp` request. + /// field can be undefined, but this possible state is non-critical + signatureHelp: ?SignatureHelpClientCapabilities = null, + /// Capabilities specific to the `textDocument/declaration` request. + /// + /// @since 3.14.0 + /// field can be undefined, but this possible state is non-critical + declaration: ?DeclarationClientCapabilities = null, + /// Capabilities specific to the `textDocument/definition` request. + /// field can be undefined, but this possible state is non-critical + definition: ?DefinitionClientCapabilities = null, + /// Capabilities specific to the `textDocument/typeDefinition` request. 
+ /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + typeDefinition: ?TypeDefinitionClientCapabilities = null, + /// Capabilities specific to the `textDocument/implementation` request. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + implementation: ?ImplementationClientCapabilities = null, + /// Capabilities specific to the `textDocument/references` request. + /// field can be undefined, but this possible state is non-critical + references: ?ReferenceClientCapabilities = null, + /// Capabilities specific to the `textDocument/documentHighlight` request. + /// field can be undefined, but this possible state is non-critical + documentHighlight: ?DocumentHighlightClientCapabilities = null, + /// Capabilities specific to the `textDocument/documentSymbol` request. + /// field can be undefined, but this possible state is non-critical + documentSymbol: ?DocumentSymbolClientCapabilities = null, + /// Capabilities specific to the `textDocument/codeAction` request. + /// field can be undefined, but this possible state is non-critical + codeAction: ?CodeActionClientCapabilities = null, + /// Capabilities specific to the `textDocument/codeLens` request. + /// field can be undefined, but this possible state is non-critical + codeLens: ?CodeLensClientCapabilities = null, + /// Capabilities specific to the `textDocument/documentLink` request. + /// field can be undefined, but this possible state is non-critical + documentLink: ?DocumentLinkClientCapabilities = null, + /// Capabilities specific to the `textDocument/documentColor` and the + /// `textDocument/colorPresentation` request. + /// + /// @since 3.6.0 + /// field can be undefined, but this possible state is non-critical + colorProvider: ?DocumentColorClientCapabilities = null, + /// Capabilities specific to the `textDocument/formatting` request. + /// field can be undefined, but this possible state is non-critical + formatting: ?DocumentFormattingClientCapabilities = null, + /// Capabilities specific to the `textDocument/rangeFormatting` request. + /// field can be undefined, but this possible state is non-critical + rangeFormatting: ?DocumentRangeFormattingClientCapabilities = null, + /// Capabilities specific to the `textDocument/onTypeFormatting` request. + /// field can be undefined, but this possible state is non-critical + onTypeFormatting: ?DocumentOnTypeFormattingClientCapabilities = null, + /// Capabilities specific to the `textDocument/rename` request. + /// field can be undefined, but this possible state is non-critical + rename: ?RenameClientCapabilities = null, + /// Capabilities specific to the `textDocument/foldingRange` request. + /// + /// @since 3.10.0 + /// field can be undefined, but this possible state is non-critical + foldingRange: ?FoldingRangeClientCapabilities = null, + /// Capabilities specific to the `textDocument/selectionRange` request. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + selectionRange: ?SelectionRangeClientCapabilities = null, + /// Capabilities specific to the `textDocument/publishDiagnostics` notification. + /// field can be undefined, but this possible state is non-critical + publishDiagnostics: ?PublishDiagnosticsClientCapabilities = null, + /// Capabilities specific to the various call hierarchy requests. 
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + callHierarchy: ?CallHierarchyClientCapabilities = null, + /// Capabilities specific to the various semantic token request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + semanticTokens: ?SemanticTokensClientCapabilities = null, + /// Capabilities specific to the `textDocument/linkedEditingRange` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + linkedEditingRange: ?LinkedEditingRangeClientCapabilities = null, + /// Client capabilities specific to the `textDocument/moniker` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + moniker: ?MonikerClientCapabilities = null, + /// Capabilities specific to the various type hierarchy requests. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + typeHierarchy: ?TypeHierarchyClientCapabilities = null, + /// Capabilities specific to the `textDocument/inlineValue` request. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + inlineValue: ?InlineValueClientCapabilities = null, + /// Capabilities specific to the `textDocument/inlayHint` request. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + inlayHint: ?InlayHintClientCapabilities = null, + /// Capabilities specific to the diagnostic pull model. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + diagnostic: ?DiagnosticClientCapabilities = null, +}; + +/// Capabilities specific to the notebook document support. +/// +/// @since 3.17.0 +pub const NotebookDocumentClientCapabilities = struct { + /// Capabilities specific to notebook document synchronization + /// + /// @since 3.17.0 + synchronization: NotebookDocumentSyncClientCapabilities, +}; + +pub const WindowClientCapabilities = struct { + /// It indicates whether the client supports server initiated + /// progress using the `window/workDoneProgress/create` request. + /// + /// The capability also controls Whether client supports handling + /// of progress notifications. If set servers are allowed to report a + /// `workDoneProgress` property in the request specific server + /// capabilities. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + /// Capabilities specific to the showMessage request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + showMessage: ?ShowMessageRequestClientCapabilities = null, + /// Capabilities specific to the showDocument request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + showDocument: ?ShowDocumentClientCapabilities = null, +}; + +/// General client capabilities. +/// +/// @since 3.16.0 +pub const GeneralClientCapabilities = struct { + /// Client capability that signals how the client + /// handles stale requests (e.g. a request + /// for which the client will not process the response + /// anymore since the information is outdated). + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + staleRequestSupport: ?struct { + /// The client will actively cancel the request. 
+ cancel: bool, + /// The list of requests for which the client + /// will retry the request if it receives a + /// response with error code `ContentModified` + retryOnContentModified: []const []const u8, + } = null, + /// Client capabilities specific to regular expressions. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + regularExpressions: ?RegularExpressionsClientCapabilities = null, + /// Client capabilities specific to the client's markdown parser. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + markdown: ?MarkdownClientCapabilities = null, + /// The position encodings supported by the client. Client and server + /// have to agree on the same position encoding to ensure that offsets + /// (e.g. character position in a line) are interpreted the same on both + /// sides. + /// + /// To keep the protocol backwards compatible the following applies: if + /// the value 'utf-16' is missing from the array of position encodings + /// servers can assume that the client supports UTF-16. UTF-16 is + /// therefore a mandatory encoding. + /// + /// If omitted it defaults to ['utf-16']. + /// + /// Implementation considerations: since the conversion from one encoding + /// into another requires the content of the file / line the conversion + /// is best done where the file is read which is usually on the server + /// side. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + positionEncodings: ?[]const PositionEncodingKind = null, +}; + +/// A relative pattern is a helper to construct glob patterns that are matched +/// relatively to a base URI. The common value for a `baseUri` is a workspace +/// folder root, but it can be another absolute URI as well. +/// +/// @since 3.17.0 +pub const RelativePattern = struct { + /// A workspace folder or a base URI to which this pattern will be matched + /// against relatively. + baseUri: union(enum) { + WorkspaceFolder: WorkspaceFolder, + uri: URI, + }, + /// The actual glob pattern; + pattern: Pattern, +}; + +pub const WorkspaceEditClientCapabilities = struct { + /// The client supports versioned document changes in `WorkspaceEdit`s + /// field can be undefined, but this possible state is non-critical + documentChanges: ?bool = null, + /// The resource operations the client supports. Clients should at least + /// support 'create', 'rename' and 'delete' files and folders. + /// + /// @since 3.13.0 + /// field can be undefined, but this possible state is non-critical + resourceOperations: ?[]const ResourceOperationKind = null, + /// The failure handling strategy of a client if applying the workspace edit + /// fails. + /// + /// @since 3.13.0 + /// field can be undefined, but this possible state is non-critical + failureHandling: ?FailureHandlingKind = null, + /// Whether the client normalizes line endings to the client specific + /// setting. + /// If set to `true` the client will normalize line ending characters + /// in a workspace edit to the client-specified new line + /// character. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + normalizesLineEndings: ?bool = null, + /// Whether the client in general supports change annotations on text edits, + /// create file, rename file and delete file changes. 
+ /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + changeAnnotationSupport: ?struct { + /// Whether the client groups edits with equal labels into tree nodes, + /// for instance all edits labelled with "Changes in Strings" would + /// be a tree node. + /// field can be undefined, but this possible state is non-critical + groupsOnLabel: ?bool = null, + } = null, +}; + +pub const DidChangeConfigurationClientCapabilities = struct { + /// Did change configuration notification supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +pub const DidChangeWatchedFilesClientCapabilities = struct { + /// Did change watched files notification supports dynamic registration. Please note + /// that the current protocol doesn't support static configuration for file changes + /// from the server side. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Whether the client has support for {@link RelativePattern relative pattern} + /// or not. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + relativePatternSupport: ?bool = null, +}; + +/// Client capabilities for a {@link WorkspaceSymbolRequest}. +pub const WorkspaceSymbolClientCapabilities = struct { + /// Symbol request supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request. + /// field can be undefined, but this possible state is non-critical + symbolKind: ?struct { + /// The symbol kind values the client supports. When this + /// property exists the client also guarantees that it will + /// handle values outside its set gracefully and falls back + /// to a default value when unknown. + /// + /// If this property is not present the client only supports + /// the symbol kinds from `File` to `Array` as defined in + /// the initial version of the protocol. + /// field can be undefined, but this possible state is non-critical + valueSet: ?[]const SymbolKind = null, + } = null, + /// The client supports tags on `SymbolInformation`. + /// Clients supporting tags have to handle unknown tags gracefully. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + tagSupport: ?struct { + /// The tags supported by the client. + valueSet: []const SymbolTag, + } = null, + /// The client support partial workspace symbols. The client will send the + /// request `workspaceSymbol/resolve` to the server to resolve additional + /// properties. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + resolveSupport: ?struct { + /// The properties that a client can resolve lazily. Usually + /// `location.range` + properties: []const []const u8, + } = null, +}; + +/// The client capabilities of a {@link ExecuteCommandRequest}. +pub const ExecuteCommandClientCapabilities = struct { + /// Execute command supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensWorkspaceClientCapabilities = struct { + /// Whether the client implementation supports a refresh request sent from + /// the server to the client. 
+ /// + /// Note that this event is global and will force the client to refresh all + /// semantic tokens currently shown. It should be used with absolute care + /// and is useful for situation where a server for example detects a project + /// wide change that requires such a calculation. + /// field can be undefined, but this possible state is non-critical + refreshSupport: ?bool = null, +}; + +/// @since 3.16.0 +pub const CodeLensWorkspaceClientCapabilities = struct { + /// Whether the client implementation supports a refresh request sent from the + /// server to the client. + /// + /// Note that this event is global and will force the client to refresh all + /// code lenses currently shown. It should be used with absolute care and is + /// useful for situation where a server for example detect a project wide + /// change that requires such a calculation. + /// field can be undefined, but this possible state is non-critical + refreshSupport: ?bool = null, +}; + +/// Capabilities relating to events from file operations by the user in the client. +/// +/// These events do not come from the file system, they come from user operations +/// like renaming a file in the UI. +/// +/// @since 3.16.0 +pub const FileOperationClientCapabilities = struct { + /// Whether the client supports dynamic registration for file requests/notifications. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client has support for sending didCreateFiles notifications. + /// field can be undefined, but this possible state is non-critical + didCreate: ?bool = null, + /// The client has support for sending willCreateFiles requests. + /// field can be undefined, but this possible state is non-critical + willCreate: ?bool = null, + /// The client has support for sending didRenameFiles notifications. + /// field can be undefined, but this possible state is non-critical + didRename: ?bool = null, + /// The client has support for sending willRenameFiles requests. + /// field can be undefined, but this possible state is non-critical + willRename: ?bool = null, + /// The client has support for sending didDeleteFiles notifications. + /// field can be undefined, but this possible state is non-critical + didDelete: ?bool = null, + /// The client has support for sending willDeleteFiles requests. + /// field can be undefined, but this possible state is non-critical + willDelete: ?bool = null, +}; + +/// Client workspace capabilities specific to inline values. +/// +/// @since 3.17.0 +pub const InlineValueWorkspaceClientCapabilities = struct { + /// Whether the client implementation supports a refresh request sent from the + /// server to the client. + /// + /// Note that this event is global and will force the client to refresh all + /// inline values currently shown. It should be used with absolute care and is + /// useful for situation where a server for example detects a project wide + /// change that requires such a calculation. + /// field can be undefined, but this possible state is non-critical + refreshSupport: ?bool = null, +}; + +/// Client workspace capabilities specific to inlay hints. +/// +/// @since 3.17.0 +pub const InlayHintWorkspaceClientCapabilities = struct { + /// Whether the client implementation supports a refresh request sent from + /// the server to the client. + /// + /// Note that this event is global and will force the client to refresh all + /// inlay hints currently shown. 
It should be used with absolute care and + /// is useful for situation where a server for example detects a project wide + /// change that requires such a calculation. + /// field can be undefined, but this possible state is non-critical + refreshSupport: ?bool = null, +}; + +/// Workspace client capabilities specific to diagnostic pull requests. +/// +/// @since 3.17.0 +pub const DiagnosticWorkspaceClientCapabilities = struct { + /// Whether the client implementation supports a refresh request sent from + /// the server to the client. + /// + /// Note that this event is global and will force the client to refresh all + /// pulled diagnostics currently shown. It should be used with absolute care and + /// is useful for situation where a server for example detects a project wide + /// change that requires such a calculation. + /// field can be undefined, but this possible state is non-critical + refreshSupport: ?bool = null, +}; + +pub const TextDocumentSyncClientCapabilities = struct { + /// Whether text document synchronization supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports sending will save notifications. + /// field can be undefined, but this possible state is non-critical + willSave: ?bool = null, + /// The client supports sending a will save request and + /// waits for a response providing text edits which will + /// be applied to the document before it is saved. + /// field can be undefined, but this possible state is non-critical + willSaveWaitUntil: ?bool = null, + /// The client supports did save notifications. + /// field can be undefined, but this possible state is non-critical + didSave: ?bool = null, +}; + +/// Completion client capabilities +pub const CompletionClientCapabilities = struct { + /// Whether completion supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports the following `CompletionItem` specific + /// capabilities. + /// field can be undefined, but this possible state is non-critical + completionItem: ?struct { + /// Client supports snippets as insert text. + /// + /// A snippet can define tab stops and placeholders with `$1`, `$2` + /// and `${3:foo}`. `$0` defines the final tab stop, it defaults to + /// the end of the snippet. Placeholders with equal identifiers are linked, + /// that is typing in one will update others too. + /// field can be undefined, but this possible state is non-critical + snippetSupport: ?bool = null, + /// Client supports commit characters on a completion item. + /// field can be undefined, but this possible state is non-critical + commitCharactersSupport: ?bool = null, + /// Client supports the following content formats for the documentation + /// property. The order describes the preferred format of the client. + /// field can be undefined, but this possible state is non-critical + documentationFormat: ?[]const MarkupKind = null, + /// Client supports the deprecated property on a completion item. + /// field can be undefined, but this possible state is non-critical + deprecatedSupport: ?bool = null, + /// Client supports the preselect property on a completion item. + /// field can be undefined, but this possible state is non-critical + preselectSupport: ?bool = null, + /// Client supports the tag property on a completion item. Clients supporting + /// tags have to handle unknown tags gracefully. 
Clients especially need to + /// preserve unknown tags when sending a completion item back to the server in + /// a resolve call. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + tagSupport: ?struct { + /// The tags supported by the client. + valueSet: []const CompletionItemTag, + } = null, + /// Client support insert replace edit to control different behavior if a + /// completion item is inserted in the text or should replace text. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + insertReplaceSupport: ?bool = null, + /// Indicates which properties a client can resolve lazily on a completion + /// item. Before version 3.16.0 only the predefined properties `documentation` + /// and `details` could be resolved lazily. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + resolveSupport: ?struct { + /// The properties that a client can resolve lazily. + properties: []const []const u8, + } = null, + /// The client supports the `insertTextMode` property on + /// a completion item to override the whitespace handling mode + /// as defined by the client (see `insertTextMode`). + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + insertTextModeSupport: ?struct { + valueSet: []const InsertTextMode, + } = null, + /// The client has support for completion item label + /// details (see also `CompletionItemLabelDetails`). + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + labelDetailsSupport: ?bool = null, + } = null, + /// field can be undefined, but this possible state is non-critical + completionItemKind: ?struct { + /// The completion item kind values the client supports. When this + /// property exists the client also guarantees that it will + /// handle values outside its set gracefully and falls back + /// to a default value when unknown. + /// + /// If this property is not present the client only supports + /// the completion items kinds from `Text` to `Reference` as defined in + /// the initial version of the protocol. + /// field can be undefined, but this possible state is non-critical + valueSet: ?[]const CompletionItemKind = null, + } = null, + /// Defines how the client handles whitespace and indentation + /// when accepting a completion item that uses multi line + /// text in either `insertText` or `textEdit`. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + insertTextMode: ?InsertTextMode = null, + /// The client supports to send additional context information for a + /// `textDocument/completion` request. + /// field can be undefined, but this possible state is non-critical + contextSupport: ?bool = null, + /// The client supports the following `CompletionList` specific + /// capabilities. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + completionList: ?struct { + /// The client supports the following itemDefaults on + /// a completion list. + /// + /// The value lists the supported property names of the + /// `CompletionList.itemDefaults` object. If omitted + /// no properties are supported. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + itemDefaults: ?[]const []const u8 = null, + } = null, +}; + +pub const HoverClientCapabilities = struct { + /// Whether hover supports dynamic registration. 
+ /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Client supports the following content formats for the content + /// property. The order describes the preferred format of the client. + /// field can be undefined, but this possible state is non-critical + contentFormat: ?[]const MarkupKind = null, +}; + +/// Client Capabilities for a {@link SignatureHelpRequest}. +pub const SignatureHelpClientCapabilities = struct { + /// Whether signature help supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports the following `SignatureInformation` + /// specific properties. + /// field can be undefined, but this possible state is non-critical + signatureInformation: ?struct { + /// Client supports the following content formats for the documentation + /// property. The order describes the preferred format of the client. + /// field can be undefined, but this possible state is non-critical + documentationFormat: ?[]const MarkupKind = null, + /// Client capabilities specific to parameter information. + /// field can be undefined, but this possible state is non-critical + parameterInformation: ?struct { + /// The client supports processing label offsets instead of a + /// simple label string. + /// + /// @since 3.14.0 + /// field can be undefined, but this possible state is non-critical + labelOffsetSupport: ?bool = null, + } = null, + /// The client supports the `activeParameter` property on `SignatureInformation` + /// literal. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + activeParameterSupport: ?bool = null, + } = null, + /// The client supports to send additional context information for a + /// `textDocument/signatureHelp` request. A client that opts into + /// contextSupport will also support the `retriggerCharacters` on + /// `SignatureHelpOptions`. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + contextSupport: ?bool = null, +}; + +/// @since 3.14.0 +pub const DeclarationClientCapabilities = struct { + /// Whether declaration supports dynamic registration. If this is set to `true` + /// the client supports the new `DeclarationRegistrationOptions` return value + /// for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports additional metadata in the form of declaration links. + /// field can be undefined, but this possible state is non-critical + linkSupport: ?bool = null, +}; + +/// Client Capabilities for a {@link DefinitionRequest}. +pub const DefinitionClientCapabilities = struct { + /// Whether definition supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports additional metadata in the form of definition links. + /// + /// @since 3.14.0 + /// field can be undefined, but this possible state is non-critical + linkSupport: ?bool = null, +}; + +/// Since 3.6.0 +pub const TypeDefinitionClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `TypeDefinitionRegistrationOptions` return value + /// for the corresponding server capability as well. 
+ /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports additional metadata in the form of definition links. + /// + /// Since 3.14.0 + /// field can be undefined, but this possible state is non-critical + linkSupport: ?bool = null, +}; + +/// @since 3.6.0 +pub const ImplementationClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `ImplementationRegistrationOptions` return value + /// for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports additional metadata in the form of definition links. + /// + /// @since 3.14.0 + /// field can be undefined, but this possible state is non-critical + linkSupport: ?bool = null, +}; + +/// Client Capabilities for a {@link ReferencesRequest}. +pub const ReferenceClientCapabilities = struct { + /// Whether references supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client Capabilities for a {@link DocumentHighlightRequest}. +pub const DocumentHighlightClientCapabilities = struct { + /// Whether document highlight supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client Capabilities for a {@link DocumentSymbolRequest}. +pub const DocumentSymbolClientCapabilities = struct { + /// Whether document symbol supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Specific capabilities for the `SymbolKind` in the + /// `textDocument/documentSymbol` request. + /// field can be undefined, but this possible state is non-critical + symbolKind: ?struct { + /// The symbol kind values the client supports. When this + /// property exists the client also guarantees that it will + /// handle values outside its set gracefully and falls back + /// to a default value when unknown. + /// + /// If this property is not present the client only supports + /// the symbol kinds from `File` to `Array` as defined in + /// the initial version of the protocol. + /// field can be undefined, but this possible state is non-critical + valueSet: ?[]const SymbolKind = null, + } = null, + /// The client supports hierarchical document symbols. + /// field can be undefined, but this possible state is non-critical + hierarchicalDocumentSymbolSupport: ?bool = null, + /// The client supports tags on `SymbolInformation`. Tags are supported on + /// `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true. + /// Clients supporting tags have to handle unknown tags gracefully. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + tagSupport: ?struct { + /// The tags supported by the client. + valueSet: []const SymbolTag, + } = null, + /// The client supports an additional label presented in the UI when + /// registering a document symbol provider. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + labelSupport: ?bool = null, +}; + +/// The Client Capabilities of a {@link CodeActionRequest}. +pub const CodeActionClientCapabilities = struct { + /// Whether code action supports dynamic registration. 
+ /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client support code action literals of type `CodeAction` as a valid + /// response of the `textDocument/codeAction` request. If the property is not + /// set the request can only return `Command` literals. + /// + /// @since 3.8.0 + /// field can be undefined, but this possible state is non-critical + codeActionLiteralSupport: ?struct { + /// The code action kind is support with the following value + /// set. + codeActionKind: struct { + /// The code action kind values the client supports. When this + /// property exists the client also guarantees that it will + /// handle values outside its set gracefully and falls back + /// to a default value when unknown. + valueSet: []const CodeActionKind, + }, + } = null, + /// Whether code action supports the `isPreferred` property. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + isPreferredSupport: ?bool = null, + /// Whether code action supports the `disabled` property. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + disabledSupport: ?bool = null, + /// Whether code action supports the `data` property which is + /// preserved between a `textDocument/codeAction` and a + /// `codeAction/resolve` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + dataSupport: ?bool = null, + /// Whether the client supports resolving additional code action + /// properties via a separate `codeAction/resolve` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + resolveSupport: ?struct { + /// The properties that a client can resolve lazily. + properties: []const []const u8, + } = null, + /// Whether the client honors the change annotations in + /// text edits and resource operations returned via the + /// `CodeAction#edit` property by for example presenting + /// the workspace edit in the user interface and asking + /// for confirmation. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + honorsChangeAnnotations: ?bool = null, +}; + +/// The client capabilities of a {@link CodeLensRequest}. +pub const CodeLensClientCapabilities = struct { + /// Whether code lens supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// The client capabilities of a {@link DocumentLinkRequest}. +pub const DocumentLinkClientCapabilities = struct { + /// Whether document link supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Whether the client supports the `tooltip` property on `DocumentLink`. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + tooltipSupport: ?bool = null, +}; + +pub const DocumentColorClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `DocumentColorRegistrationOptions` return value + /// for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client capabilities of a {@link DocumentFormattingRequest}. 
+pub const DocumentFormattingClientCapabilities = struct { + /// Whether formatting supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client capabilities of a {@link DocumentRangeFormattingRequest}. +pub const DocumentRangeFormattingClientCapabilities = struct { + /// Whether range formatting supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client capabilities of a {@link DocumentOnTypeFormattingRequest}. +pub const DocumentOnTypeFormattingClientCapabilities = struct { + /// Whether on type formatting supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +pub const RenameClientCapabilities = struct { + /// Whether rename supports dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Client supports testing for validity of rename operations + /// before execution. + /// + /// @since 3.12.0 + /// field can be undefined, but this possible state is non-critical + prepareSupport: ?bool = null, + /// Client supports the default behavior result. + /// + /// The value indicates the default behavior used by the + /// client. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + prepareSupportDefaultBehavior: ?PrepareSupportDefaultBehavior = null, + /// Whether the client honors the change annotations in + /// text edits and resource operations returned via the + /// rename request's workspace edit by for example presenting + /// the workspace edit in the user interface and asking + /// for confirmation. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + honorsChangeAnnotations: ?bool = null, +}; + +pub const FoldingRangeClientCapabilities = struct { + /// Whether implementation supports dynamic registration for folding range + /// providers. If this is set to `true` the client supports the new + /// `FoldingRangeRegistrationOptions` return value for the corresponding + /// server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The maximum number of folding ranges that the client prefers to receive + /// per document. The value serves as a hint, servers are free to follow the + /// limit. + /// field can be undefined, but this possible state is non-critical + rangeLimit: ?u32 = null, + /// If set, the client signals that it only supports folding complete lines. + /// If set, client will ignore specified `startCharacter` and `endCharacter` + /// properties in a FoldingRange. + /// field can be undefined, but this possible state is non-critical + lineFoldingOnly: ?bool = null, + /// Specific options for the folding range kind. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + foldingRangeKind: ?struct { + /// The folding range kind values the client supports. When this + /// property exists the client also guarantees that it will + /// handle values outside its set gracefully and falls back + /// to a default value when unknown. + /// field can be undefined, but this possible state is non-critical + valueSet: ?[]const FoldingRangeKind = null, + } = null, + /// Specific options for the folding range. 
+ /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + foldingRange: ?struct { + /// If set, the client signals that it supports setting collapsedText on + /// folding ranges to display custom labels instead of the default text. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + collapsedText: ?bool = null, + } = null, +}; + +pub const SelectionRangeClientCapabilities = struct { + /// Whether implementation supports dynamic registration for selection range providers. If this is set to `true` + /// the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server + /// capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// The publish diagnostic client capabilities. +pub const PublishDiagnosticsClientCapabilities = struct { + /// Whether the clients accepts diagnostics with related information. + /// field can be undefined, but this possible state is non-critical + relatedInformation: ?bool = null, + /// Client supports the tag property to provide meta data about a diagnostic. + /// Clients supporting tags have to handle unknown tags gracefully. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + tagSupport: ?struct { + /// The tags supported by the client. + valueSet: []const DiagnosticTag, + } = null, + /// Whether the client interprets the version property of the + /// `textDocument/publishDiagnostics` notification's parameter. + /// + /// @since 3.15.0 + /// field can be undefined, but this possible state is non-critical + versionSupport: ?bool = null, + /// Client supports a codeDescription property + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + codeDescriptionSupport: ?bool = null, + /// Whether code action supports the `data` property which is + /// preserved between a `textDocument/publishDiagnostics` and + /// `textDocument/codeAction` request. + /// + /// @since 3.16.0 + /// field can be undefined, but this possible state is non-critical + dataSupport: ?bool = null, +}; + +/// @since 3.16.0 +pub const CallHierarchyClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// @since 3.16.0 +pub const SemanticTokensClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Which requests the client supports and might send to the server + /// depending on the server's capability. Please note that clients might not + /// show semantic tokens or degrade some of the user experience if a range + /// or full request is advertised by the client but not provided by the + /// server. 
If for example the client capability `requests.full` and + /// `request.range` are both set to true but the server only provides a + /// range provider the client might not render a minimap correctly or might + /// even decide to not show any semantic tokens at all. + requests: struct { + /// The client will send the `textDocument/semanticTokens/range` request if + /// the server provides a corresponding handler. + /// field can be undefined, but this possible state is non-critical + range: ?union(enum) { + bool: bool, + literal_1: struct {}, + } = null, + /// The client will send the `textDocument/semanticTokens/full` request if + /// the server provides a corresponding handler. + /// field can be undefined, but this possible state is non-critical + full: ?union(enum) { + bool: bool, + literal_1: struct { + /// The client will send the `textDocument/semanticTokens/full/delta` request if + /// the server provides a corresponding handler. + /// field can be undefined, but this possible state is non-critical + delta: ?bool = null, + }, + } = null, + }, + /// The token types that the client supports. + tokenTypes: []const []const u8, + /// The token modifiers that the client supports. + tokenModifiers: []const []const u8, + /// The token formats the clients supports. + formats: []const TokenFormat, + /// Whether the client supports tokens that can overlap each other. + /// field can be undefined, but this possible state is non-critical + overlappingTokenSupport: ?bool = null, + /// Whether the client supports tokens that can span multiple lines. + /// field can be undefined, but this possible state is non-critical + multilineTokenSupport: ?bool = null, + /// Whether the client allows the server to actively cancel a + /// semantic token request, e.g. supports returning + /// LSPErrorCodes.ServerCancelled. If a server does the client + /// needs to retrigger the request. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + serverCancelSupport: ?bool = null, + /// Whether the client uses semantic tokens to augment existing + /// syntax tokens. If set to `true` client side created syntax + /// tokens and semantic tokens are both used for colorization. If + /// set to `false` the client only uses the returned semantic tokens + /// for colorization. + /// + /// If the value is `undefined` then the client behavior is not + /// specified. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + augmentsSyntaxTokens: ?bool = null, +}; + +/// Client capabilities for the linked editing range request. +/// +/// @since 3.16.0 +pub const LinkedEditingRangeClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client capabilities specific to the moniker request. +/// +/// @since 3.16.0 +pub const MonikerClientCapabilities = struct { + /// Whether moniker supports dynamic registration. If this is set to `true` + /// the client supports the new `MonikerRegistrationOptions` return value + /// for the corresponding server capability as well. 
+ /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// @since 3.17.0 +pub const TypeHierarchyClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Client capabilities specific to inline values. +/// +/// @since 3.17.0 +pub const InlineValueClientCapabilities = struct { + /// Whether implementation supports dynamic registration for inline value providers. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, +}; + +/// Inlay hint client capabilities. +/// +/// @since 3.17.0 +pub const InlayHintClientCapabilities = struct { + /// Whether inlay hints support dynamic registration. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Indicates which properties a client can resolve lazily on an inlay + /// hint. + /// field can be undefined, but this possible state is non-critical + resolveSupport: ?struct { + /// The properties that a client can resolve lazily. + properties: []const []const u8, + } = null, +}; + +/// Client capabilities specific to diagnostic pull requests. +/// +/// @since 3.17.0 +pub const DiagnosticClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is set to `true` + /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// Whether the clients supports related documents for document diagnostic pulls. + /// field can be undefined, but this possible state is non-critical + relatedDocumentSupport: ?bool = null, +}; + +/// Notebook specific client capabilities. +/// +/// @since 3.17.0 +pub const NotebookDocumentSyncClientCapabilities = struct { + /// Whether implementation supports dynamic registration. If this is + /// set to `true` the client supports the new + /// `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` + /// return value for the corresponding server capability as well. + /// field can be undefined, but this possible state is non-critical + dynamicRegistration: ?bool = null, + /// The client supports sending execution summary data per cell. + /// field can be undefined, but this possible state is non-critical + executionSummarySupport: ?bool = null, +}; + +/// Show message request client capabilities +pub const ShowMessageRequestClientCapabilities = struct { + /// Capabilities specific to the `MessageActionItem` type. + /// field can be undefined, but this possible state is non-critical + messageActionItem: ?struct { + /// Whether the client supports additional attributes which + /// are preserved and send back to the server in the + /// request's response. + /// field can be undefined, but this possible state is non-critical + additionalPropertiesSupport: ?bool = null, + } = null, +}; + +/// Client capabilities for the showDocument request. +/// +/// @since 3.16.0 +pub const ShowDocumentClientCapabilities = struct { + /// The client has support for the showDocument + /// request. 
+ support: bool, +}; + +/// Client capabilities specific to regular expressions. +/// +/// @since 3.16.0 +pub const RegularExpressionsClientCapabilities = struct { + /// The engine's name. + engine: []const u8, + /// The engine's version. + /// field can be undefined, but this possible state is non-critical + version: ?[]const u8 = null, +}; + +/// Client capabilities specific to the used markdown parser. +/// +/// @since 3.16.0 +pub const MarkdownClientCapabilities = struct { + /// The name of the parser. + parser: []const u8, + /// The version of the parser. + /// field can be undefined, but this possible state is non-critical + version: ?[]const u8 = null, + /// A list of HTML tags that the client allows / supports in + /// Markdown. + /// + /// @since 3.17.0 + /// field can be undefined, but this possible state is non-critical + allowedTags: ?[]const []const u8 = null, +}; + +pub const notification_metadata = [_]NotificationMetadata{ + // The `workspace/didChangeWorkspaceFolders` notification is sent from the client to the server when the workspace + // folder configuration changes. + .{ + .method = "workspace/didChangeWorkspaceFolders", + .documentation = "The `workspace/didChangeWorkspaceFolders` notification is sent from the client to the server when the workspace\nfolder configuration changes.", + .direction = .client_to_server, + .Params = DidChangeWorkspaceFoldersParams, + .registration = .{ .method = null, .Options = null }, + }, + // The `window/workDoneProgress/cancel` notification is sent from the client to the server to cancel a progress + // initiated on the server side. + .{ + .method = "window/workDoneProgress/cancel", + .documentation = "The `window/workDoneProgress/cancel` notification is sent from the client to the server to cancel a progress\ninitiated on the server side.", + .direction = .client_to_server, + .Params = WorkDoneProgressCancelParams, + .registration = .{ .method = null, .Options = null }, + }, + // The did create files notification is sent from the client to the server when + // files were created from within the client. + // + // @since 3.16.0 + .{ + .method = "workspace/didCreateFiles", + .documentation = "The did create files notification is sent from the client to the server when\nfiles were created from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = CreateFilesParams, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // The did rename files notification is sent from the client to the server when + // files were renamed from within the client. + // + // @since 3.16.0 + .{ + .method = "workspace/didRenameFiles", + .documentation = "The did rename files notification is sent from the client to the server when\nfiles were renamed from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = RenameFilesParams, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // The will delete files request is sent from the client to the server before files are actually + // deleted as long as the deletion is triggered from within the client. 
+ // + // @since 3.16.0 + .{ + .method = "workspace/didDeleteFiles", + .documentation = "The will delete files request is sent from the client to the server before files are actually\ndeleted as long as the deletion is triggered from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = DeleteFilesParams, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // A notification sent when a notebook opens. + // + // @since 3.17.0 + .{ + .method = "notebookDocument/didOpen", + .documentation = "A notification sent when a notebook opens.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = DidOpenNotebookDocumentParams, + .registration = .{ .method = "notebookDocument/sync", .Options = null }, + }, + .{ + .method = "notebookDocument/didChange", + .documentation = null, + .direction = .client_to_server, + .Params = DidChangeNotebookDocumentParams, + .registration = .{ .method = "notebookDocument/sync", .Options = null }, + }, + // A notification sent when a notebook document is saved. + // + // @since 3.17.0 + .{ + .method = "notebookDocument/didSave", + .documentation = "A notification sent when a notebook document is saved.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = DidSaveNotebookDocumentParams, + .registration = .{ .method = "notebookDocument/sync", .Options = null }, + }, + // A notification sent when a notebook closes. + // + // @since 3.17.0 + .{ + .method = "notebookDocument/didClose", + .documentation = "A notification sent when a notebook closes.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = DidCloseNotebookDocumentParams, + .registration = .{ .method = "notebookDocument/sync", .Options = null }, + }, + // The initialized notification is sent from the client to the + // server after the client is fully initialized and the server + // is allowed to send requests from the server to the client. + .{ + .method = "initialized", + .documentation = "The initialized notification is sent from the client to the\nserver after the client is fully initialized and the server\nis allowed to send requests from the server to the client.", + .direction = .client_to_server, + .Params = InitializedParams, + .registration = .{ .method = null, .Options = null }, + }, + // The exit event is sent from the client to the server to + // ask the server to exit its process. + .{ + .method = "exit", + .documentation = "The exit event is sent from the client to the server to\nask the server to exit its process.", + .direction = .client_to_server, + .Params = null, + .registration = .{ .method = null, .Options = null }, + }, + // The configuration change notification is sent from the client to the server + // when the client's configuration has changed. The notification contains + // the changed configuration as defined by the language client. + .{ + .method = "workspace/didChangeConfiguration", + .documentation = "The configuration change notification is sent from the client to the server\nwhen the client's configuration has changed. The notification contains\nthe changed configuration as defined by the language client.", + .direction = .client_to_server, + .Params = DidChangeConfigurationParams, + .registration = .{ .method = null, .Options = DidChangeConfigurationRegistrationOptions }, + }, + // The show message notification is sent from a server to a client to ask + // the client to display a particular message in the user interface. 
+ .{ + .method = "window/showMessage", + .documentation = "The show message notification is sent from a server to a client to ask\nthe client to display a particular message in the user interface.", + .direction = .server_to_client, + .Params = ShowMessageParams, + .registration = .{ .method = null, .Options = null }, + }, + // The log message notification is sent from the server to the client to ask + // the client to log a particular message. + .{ + .method = "window/logMessage", + .documentation = "The log message notification is sent from the server to the client to ask\nthe client to log a particular message.", + .direction = .server_to_client, + .Params = LogMessageParams, + .registration = .{ .method = null, .Options = null }, + }, + // The telemetry event notification is sent from the server to the client to ask + // the client to log telemetry data. + .{ + .method = "telemetry/event", + .documentation = "The telemetry event notification is sent from the server to the client to ask\nthe client to log telemetry data.", + .direction = .server_to_client, + .Params = LSPAny, + .registration = .{ .method = null, .Options = null }, + }, + // The document open notification is sent from the client to the server to signal + // newly opened text documents. The document's truth is now managed by the client + // and the server must not try to read the document's truth using the document's + // uri. Open in this sense means it is managed by the client. It doesn't necessarily + // mean that its content is presented in an editor. An open notification must not + // be sent more than once without a corresponding close notification send before. + // This means open and close notification must be balanced and the max open count + // is one. + .{ + .method = "textDocument/didOpen", + .documentation = "The document open notification is sent from the client to the server to signal\nnewly opened text documents. The document's truth is now managed by the client\nand the server must not try to read the document's truth using the document's\nuri. Open in this sense means it is managed by the client. It doesn't necessarily\nmean that its content is presented in an editor. An open notification must not\nbe sent more than once without a corresponding close notification send before.\nThis means open and close notification must be balanced and the max open count\nis one.", + .direction = .client_to_server, + .Params = DidOpenTextDocumentParams, + .registration = .{ .method = null, .Options = TextDocumentRegistrationOptions }, + }, + // The document change notification is sent from the client to the server to signal + // changes to a text document. + .{ + .method = "textDocument/didChange", + .documentation = "The document change notification is sent from the client to the server to signal\nchanges to a text document.", + .direction = .client_to_server, + .Params = DidChangeTextDocumentParams, + .registration = .{ .method = null, .Options = TextDocumentChangeRegistrationOptions }, + }, + // The document close notification is sent from the client to the server when + // the document got closed in the client. The document's truth now exists where + // the document's uri points to (e.g. if the document's uri is a file uri the + // truth now exists on disk). As with the open notification the close notification + // is about managing the document's content. Receiving a close notification + // doesn't mean that the document was open in an editor before. 
A close + // notification requires a previous open notification to be sent. + .{ + .method = "textDocument/didClose", + .documentation = "The document close notification is sent from the client to the server when\nthe document got closed in the client. The document's truth now exists where\nthe document's uri points to (e.g. if the document's uri is a file uri the\ntruth now exists on disk). As with the open notification the close notification\nis about managing the document's content. Receiving a close notification\ndoesn't mean that the document was open in an editor before. A close\nnotification requires a previous open notification to be sent.", + .direction = .client_to_server, + .Params = DidCloseTextDocumentParams, + .registration = .{ .method = null, .Options = TextDocumentRegistrationOptions }, + }, + // The document save notification is sent from the client to the server when + // the document got saved in the client. + .{ + .method = "textDocument/didSave", + .documentation = "The document save notification is sent from the client to the server when\nthe document got saved in the client.", + .direction = .client_to_server, + .Params = DidSaveTextDocumentParams, + .registration = .{ .method = null, .Options = TextDocumentSaveRegistrationOptions }, + }, + // A document will save notification is sent from the client to the server before + // the document is actually saved. + .{ + .method = "textDocument/willSave", + .documentation = "A document will save notification is sent from the client to the server before\nthe document is actually saved.", + .direction = .client_to_server, + .Params = WillSaveTextDocumentParams, + .registration = .{ .method = null, .Options = TextDocumentRegistrationOptions }, + }, + // The watched files notification is sent from the client to the server when + // the client detects changes to file watched by the language client. + .{ + .method = "workspace/didChangeWatchedFiles", + .documentation = "The watched files notification is sent from the client to the server when\nthe client detects changes to file watched by the language client.", + .direction = .client_to_server, + .Params = DidChangeWatchedFilesParams, + .registration = .{ .method = null, .Options = DidChangeWatchedFilesRegistrationOptions }, + }, + // Diagnostics notification are sent from the server to the client to signal + // results of validation runs. 
+ .{ + .method = "textDocument/publishDiagnostics", + .documentation = "Diagnostics notification are sent from the server to the client to signal\nresults of validation runs.", + .direction = .server_to_client, + .Params = PublishDiagnosticsParams, + .registration = .{ .method = null, .Options = null }, + }, + .{ + .method = "$/setTrace", + .documentation = null, + .direction = .client_to_server, + .Params = SetTraceParams, + .registration = .{ .method = null, .Options = null }, + }, + .{ + .method = "$/logTrace", + .documentation = null, + .direction = .server_to_client, + .Params = LogTraceParams, + .registration = .{ .method = null, .Options = null }, + }, + .{ + .method = "$/cancelRequest", + .documentation = null, + .direction = .bidirectional, + .Params = CancelParams, + .registration = .{ .method = null, .Options = null }, + }, + .{ + .method = "$/progress", + .documentation = null, + .direction = .bidirectional, + .Params = ProgressParams, + .registration = .{ .method = null, .Options = null }, + }, +}; +pub const request_metadata = [_]RequestMetadata{ + // A request to resolve the implementation locations of a symbol at a given text + // document position. The request's parameter is of type [TextDocumentPositionParams] + // (#TextDocumentPositionParams) the response is of type {@link Definition} or a + // Thenable that resolves to such. + .{ + .method = "textDocument/implementation", + .documentation = "A request to resolve the implementation locations of a symbol at a given text\ndocument position. The request's parameter is of type [TextDocumentPositionParams]\n(#TextDocumentPositionParams) the response is of type {@link Definition} or a\nThenable that resolves to such.", + .direction = .client_to_server, + .Params = ImplementationParams, + .Result = ?union(enum) { + Definition: Definition, + array_of_DefinitionLink: []const DefinitionLink, + }, + .PartialResult = union(enum) { + array_of_Location: []const Location, + array_of_DefinitionLink: []const DefinitionLink, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = ImplementationRegistrationOptions }, + }, + // A request to resolve the type definition locations of a symbol at a given text + // document position. The request's parameter is of type [TextDocumentPositionParams] + // (#TextDocumentPositionParams) the response is of type {@link Definition} or a + // Thenable that resolves to such. + .{ + .method = "textDocument/typeDefinition", + .documentation = "A request to resolve the type definition locations of a symbol at a given text\ndocument position. The request's parameter is of type [TextDocumentPositionParams]\n(#TextDocumentPositionParams) the response is of type {@link Definition} or a\nThenable that resolves to such.", + .direction = .client_to_server, + .Params = TypeDefinitionParams, + .Result = ?union(enum) { + Definition: Definition, + array_of_DefinitionLink: []const DefinitionLink, + }, + .PartialResult = union(enum) { + array_of_Location: []const Location, + array_of_DefinitionLink: []const DefinitionLink, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = TypeDefinitionRegistrationOptions }, + }, + // The `workspace/workspaceFolders` is sent from the server to the client to fetch the open workspace folders. 
+ .{ + .method = "workspace/workspaceFolders", + .documentation = "The `workspace/workspaceFolders` is sent from the server to the client to fetch the open workspace folders.", + .direction = .server_to_client, + .Params = null, + .Result = ?[]const WorkspaceFolder, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The 'workspace/configuration' request is sent from the server to the client to fetch a certain + // configuration setting. + // + // This pull model replaces the old push model were the client signaled configuration change via an + // event. If the server still needs to react to configuration changes (since the server caches the + // result of `workspace/configuration` requests) the server should register for an empty configuration + // change event and empty the cache if such an event is received. + .{ + .method = "workspace/configuration", + .documentation = "The 'workspace/configuration' request is sent from the server to the client to fetch a certain\nconfiguration setting.\n\nThis pull model replaces the old push model were the client signaled configuration change via an\nevent. If the server still needs to react to configuration changes (since the server caches the\nresult of `workspace/configuration` requests) the server should register for an empty configuration\nchange event and empty the cache if such an event is received.", + .direction = .server_to_client, + .Params = ConfigurationParams, + .Result = []const LSPAny, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to list all color symbols found in a given text document. The request's + // parameter is of type {@link DocumentColorParams} the + // response is of type {@link ColorInformation ColorInformation[]} or a Thenable + // that resolves to such. + .{ + .method = "textDocument/documentColor", + .documentation = "A request to list all color symbols found in a given text document. The request's\nparameter is of type {@link DocumentColorParams} the\nresponse is of type {@link ColorInformation ColorInformation[]} or a Thenable\nthat resolves to such.", + .direction = .client_to_server, + .Params = DocumentColorParams, + .Result = []const ColorInformation, + .PartialResult = []const ColorInformation, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentColorRegistrationOptions }, + }, + // A request to list all presentation for a color. The request's + // parameter is of type {@link ColorPresentationParams} the + // response is of type {@link ColorInformation ColorInformation[]} or a Thenable + // that resolves to such. + .{ + .method = "textDocument/colorPresentation", + .documentation = "A request to list all presentation for a color. The request's\nparameter is of type {@link ColorPresentationParams} the\nresponse is of type {@link ColorInformation ColorInformation[]} or a Thenable\nthat resolves to such.", + .direction = .client_to_server, + .Params = ColorPresentationParams, + .Result = []const ColorPresentation, + .PartialResult = []const ColorPresentation, + .ErrorData = null, + .registration = .{ + .method = null, + .Options = struct { + // And WorkDoneProgressOptions + /// field can be undefined, but this possible state is non-critical + workDoneProgress: ?bool = null, + // And TextDocumentRegistrationOptions + /// A document selector to identify the scope of the registration. 
If set to null + /// the document selector provided on the client side will be used. + documentSelector: ?DocumentSelector = null, + }, + }, + }, + // A request to provide folding ranges in a document. The request's + // parameter is of type {@link FoldingRangeParams}, the + // response is of type {@link FoldingRangeList} or a Thenable + // that resolves to such. + .{ + .method = "textDocument/foldingRange", + .documentation = "A request to provide folding ranges in a document. The request's\nparameter is of type {@link FoldingRangeParams}, the\nresponse is of type {@link FoldingRangeList} or a Thenable\nthat resolves to such.", + .direction = .client_to_server, + .Params = FoldingRangeParams, + .Result = ?[]const FoldingRange, + .PartialResult = []const FoldingRange, + .ErrorData = null, + .registration = .{ .method = null, .Options = FoldingRangeRegistrationOptions }, + }, + // A request to resolve the type definition locations of a symbol at a given text + // document position. The request's parameter is of type [TextDocumentPositionParams] + // (#TextDocumentPositionParams) the response is of type {@link Declaration} + // or a typed array of {@link DeclarationLink} or a Thenable that resolves + // to such. + .{ + .method = "textDocument/declaration", + .documentation = "A request to resolve the type definition locations of a symbol at a given text\ndocument position. The request's parameter is of type [TextDocumentPositionParams]\n(#TextDocumentPositionParams) the response is of type {@link Declaration}\nor a typed array of {@link DeclarationLink} or a Thenable that resolves\nto such.", + .direction = .client_to_server, + .Params = DeclarationParams, + .Result = ?union(enum) { + Declaration: Declaration, + array_of_DeclarationLink: []const DeclarationLink, + }, + .PartialResult = union(enum) { + array_of_Location: []const Location, + array_of_DeclarationLink: []const DeclarationLink, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = DeclarationRegistrationOptions }, + }, + // A request to provide selection ranges in a document. The request's + // parameter is of type {@link SelectionRangeParams}, the + // response is of type {@link SelectionRange SelectionRange[]} or a Thenable + // that resolves to such. + .{ + .method = "textDocument/selectionRange", + .documentation = "A request to provide selection ranges in a document. The request's\nparameter is of type {@link SelectionRangeParams}, the\nresponse is of type {@link SelectionRange SelectionRange[]} or a Thenable\nthat resolves to such.", + .direction = .client_to_server, + .Params = SelectionRangeParams, + .Result = ?[]const SelectionRange, + .PartialResult = []const SelectionRange, + .ErrorData = null, + .registration = .{ .method = null, .Options = SelectionRangeRegistrationOptions }, + }, + // The `window/workDoneProgress/create` request is sent from the server to the client to initiate progress + // reporting from the server. + .{ + .method = "window/workDoneProgress/create", + .documentation = "The `window/workDoneProgress/create` request is sent from the server to the client to initiate progress\nreporting from the server.", + .direction = .server_to_client, + .Params = WorkDoneProgressCreateParams, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to result a `CallHierarchyItem` in a document at a given position. + // Can be used as an input to an incoming or outgoing call hierarchy. 
+ // + // @since 3.16.0 + .{ + .method = "textDocument/prepareCallHierarchy", + .documentation = "A request to result a `CallHierarchyItem` in a document at a given position.\nCan be used as an input to an incoming or outgoing call hierarchy.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = CallHierarchyPrepareParams, + .Result = ?[]const CallHierarchyItem, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = CallHierarchyRegistrationOptions }, + }, + // A request to resolve the incoming calls for a given `CallHierarchyItem`. + // + // @since 3.16.0 + .{ + .method = "callHierarchy/incomingCalls", + .documentation = "A request to resolve the incoming calls for a given `CallHierarchyItem`.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = CallHierarchyIncomingCallsParams, + .Result = ?[]const CallHierarchyIncomingCall, + .PartialResult = []const CallHierarchyIncomingCall, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to resolve the outgoing calls for a given `CallHierarchyItem`. + // + // @since 3.16.0 + .{ + .method = "callHierarchy/outgoingCalls", + .documentation = "A request to resolve the outgoing calls for a given `CallHierarchyItem`.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = CallHierarchyOutgoingCallsParams, + .Result = ?[]const CallHierarchyOutgoingCall, + .PartialResult = []const CallHierarchyOutgoingCall, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // @since 3.16.0 + .{ + .method = "textDocument/semanticTokens/full", + .documentation = "@since 3.16.0", + .direction = .client_to_server, + .Params = SemanticTokensParams, + .Result = ?SemanticTokens, + .PartialResult = SemanticTokensPartialResult, + .ErrorData = null, + .registration = .{ .method = "textDocument/semanticTokens", .Options = SemanticTokensRegistrationOptions }, + }, + // @since 3.16.0 + .{ + .method = "textDocument/semanticTokens/full/delta", + .documentation = "@since 3.16.0", + .direction = .client_to_server, + .Params = SemanticTokensDeltaParams, + .Result = ?union(enum) { + SemanticTokens: SemanticTokens, + SemanticTokensDelta: SemanticTokensDelta, + }, + .PartialResult = union(enum) { + SemanticTokensPartialResult: SemanticTokensPartialResult, + SemanticTokensDeltaPartialResult: SemanticTokensDeltaPartialResult, + }, + .ErrorData = null, + .registration = .{ .method = "textDocument/semanticTokens", .Options = SemanticTokensRegistrationOptions }, + }, + // @since 3.16.0 + .{ + .method = "textDocument/semanticTokens/range", + .documentation = "@since 3.16.0", + .direction = .client_to_server, + .Params = SemanticTokensRangeParams, + .Result = ?SemanticTokens, + .PartialResult = SemanticTokensPartialResult, + .ErrorData = null, + .registration = .{ .method = "textDocument/semanticTokens", .Options = null }, + }, + // @since 3.16.0 + .{ + .method = "workspace/semanticTokens/refresh", + .documentation = "@since 3.16.0", + .direction = .server_to_client, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to show a document. This request might open an + // external program depending on the value of the URI to open. + // For example a request to open `https://code.visualstudio.com/` + // will very likely open the URI in a WEB browser. 
+ // + // @since 3.16.0 + .{ + .method = "window/showDocument", + .documentation = "A request to show a document. This request might open an\nexternal program depending on the value of the URI to open.\nFor example a request to open `https://code.visualstudio.com/`\nwill very likely open the URI in a WEB browser.\n\n@since 3.16.0", + .direction = .server_to_client, + .Params = ShowDocumentParams, + .Result = ShowDocumentResult, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to provide ranges that can be edited together. + // + // @since 3.16.0 + .{ + .method = "textDocument/linkedEditingRange", + .documentation = "A request to provide ranges that can be edited together.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = LinkedEditingRangeParams, + .Result = ?LinkedEditingRanges, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = LinkedEditingRangeRegistrationOptions }, + }, + // The will create files request is sent from the client to the server before files are actually + // created as long as the creation is triggered from within the client. + // + // @since 3.16.0 + .{ + .method = "workspace/willCreateFiles", + .documentation = "The will create files request is sent from the client to the server before files are actually\ncreated as long as the creation is triggered from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = CreateFilesParams, + .Result = ?WorkspaceEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // The will rename files request is sent from the client to the server before files are actually + // renamed as long as the rename is triggered from within the client. + // + // @since 3.16.0 + .{ + .method = "workspace/willRenameFiles", + .documentation = "The will rename files request is sent from the client to the server before files are actually\nrenamed as long as the rename is triggered from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = RenameFilesParams, + .Result = ?WorkspaceEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // The did delete files notification is sent from the client to the server when + // files were deleted from within the client. + // + // @since 3.16.0 + .{ + .method = "workspace/willDeleteFiles", + .documentation = "The did delete files notification is sent from the client to the server when\nfiles were deleted from within the client.\n\n@since 3.16.0", + .direction = .client_to_server, + .Params = DeleteFilesParams, + .Result = ?WorkspaceEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = FileOperationRegistrationOptions }, + }, + // A request to get the moniker of a symbol at a given text document position. + // The request parameter is of type {@link TextDocumentPositionParams}. + // The response is of type {@link Moniker Moniker[]} or `null`. 
+ .{ + .method = "textDocument/moniker", + .documentation = "A request to get the moniker of a symbol at a given text document position.\nThe request parameter is of type {@link TextDocumentPositionParams}.\nThe response is of type {@link Moniker Moniker[]} or `null`.", + .direction = .client_to_server, + .Params = MonikerParams, + .Result = ?[]const Moniker, + .PartialResult = []const Moniker, + .ErrorData = null, + .registration = .{ .method = null, .Options = MonikerRegistrationOptions }, + }, + // A request to result a `TypeHierarchyItem` in a document at a given position. + // Can be used as an input to a subtypes or supertypes type hierarchy. + // + // @since 3.17.0 + .{ + .method = "textDocument/prepareTypeHierarchy", + .documentation = "A request to result a `TypeHierarchyItem` in a document at a given position.\nCan be used as an input to a subtypes or supertypes type hierarchy.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = TypeHierarchyPrepareParams, + .Result = ?[]const TypeHierarchyItem, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = TypeHierarchyRegistrationOptions }, + }, + // A request to resolve the supertypes for a given `TypeHierarchyItem`. + // + // @since 3.17.0 + .{ + .method = "typeHierarchy/supertypes", + .documentation = "A request to resolve the supertypes for a given `TypeHierarchyItem`.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = TypeHierarchySupertypesParams, + .Result = ?[]const TypeHierarchyItem, + .PartialResult = []const TypeHierarchyItem, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to resolve the subtypes for a given `TypeHierarchyItem`. + // + // @since 3.17.0 + .{ + .method = "typeHierarchy/subtypes", + .documentation = "A request to resolve the subtypes for a given `TypeHierarchyItem`.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = TypeHierarchySubtypesParams, + .Result = ?[]const TypeHierarchyItem, + .PartialResult = []const TypeHierarchyItem, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to provide inline values in a document. The request's parameter is of + // type {@link InlineValueParams}, the response is of type + // {@link InlineValue InlineValue[]} or a Thenable that resolves to such. + // + // @since 3.17.0 + .{ + .method = "textDocument/inlineValue", + .documentation = "A request to provide inline values in a document. The request's parameter is of\ntype {@link InlineValueParams}, the response is of type\n{@link InlineValue InlineValue[]} or a Thenable that resolves to such.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = InlineValueParams, + .Result = ?[]const InlineValue, + .PartialResult = []const InlineValue, + .ErrorData = null, + .registration = .{ .method = null, .Options = InlineValueRegistrationOptions }, + }, + // @since 3.17.0 + .{ + .method = "workspace/inlineValue/refresh", + .documentation = "@since 3.17.0", + .direction = .server_to_client, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to provide inlay hints in a document. The request's parameter is of + // type {@link InlayHintsParams}, the response is of type + // {@link InlayHint InlayHint[]} or a Thenable that resolves to such. 
+ // + // @since 3.17.0 + .{ + .method = "textDocument/inlayHint", + .documentation = "A request to provide inlay hints in a document. The request's parameter is of\ntype {@link InlayHintsParams}, the response is of type\n{@link InlayHint InlayHint[]} or a Thenable that resolves to such.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = InlayHintParams, + .Result = ?[]const InlayHint, + .PartialResult = []const InlayHint, + .ErrorData = null, + .registration = .{ .method = null, .Options = InlayHintRegistrationOptions }, + }, + // A request to resolve additional properties for an inlay hint. + // The request's parameter is of type {@link InlayHint}, the response is + // of type {@link InlayHint} or a Thenable that resolves to such. + // + // @since 3.17.0 + .{ + .method = "inlayHint/resolve", + .documentation = "A request to resolve additional properties for an inlay hint.\nThe request's parameter is of type {@link InlayHint}, the response is\nof type {@link InlayHint} or a Thenable that resolves to such.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = InlayHint, + .Result = InlayHint, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // @since 3.17.0 + .{ + .method = "workspace/inlayHint/refresh", + .documentation = "@since 3.17.0", + .direction = .server_to_client, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The document diagnostic request definition. + // + // @since 3.17.0 + .{ + .method = "textDocument/diagnostic", + .documentation = "The document diagnostic request definition.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = DocumentDiagnosticParams, + .Result = DocumentDiagnosticReport, + .PartialResult = DocumentDiagnosticReportPartialResult, + .ErrorData = DiagnosticServerCancellationData, + .registration = .{ .method = null, .Options = DiagnosticRegistrationOptions }, + }, + // The workspace diagnostic request definition. + // + // @since 3.17.0 + .{ + .method = "workspace/diagnostic", + .documentation = "The workspace diagnostic request definition.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = WorkspaceDiagnosticParams, + .Result = WorkspaceDiagnosticReport, + .PartialResult = WorkspaceDiagnosticReportPartialResult, + .ErrorData = DiagnosticServerCancellationData, + .registration = .{ .method = null, .Options = null }, + }, + // The diagnostic refresh request definition. + // + // @since 3.17.0 + .{ + .method = "workspace/diagnostic/refresh", + .documentation = "The diagnostic refresh request definition.\n\n@since 3.17.0", + .direction = .server_to_client, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The `client/registerCapability` request is sent from the server to the client to register a new capability + // handler on the client side. 
+ .{ + .method = "client/registerCapability", + .documentation = "The `client/registerCapability` request is sent from the server to the client to register a new capability\nhandler on the client side.", + .direction = .server_to_client, + .Params = RegistrationParams, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The `client/unregisterCapability` request is sent from the server to the client to unregister a previously registered capability + // handler on the client side. + .{ + .method = "client/unregisterCapability", + .documentation = "The `client/unregisterCapability` request is sent from the server to the client to unregister a previously registered capability\nhandler on the client side.", + .direction = .server_to_client, + .Params = UnregistrationParams, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The initialize request is sent from the client to the server. + // It is sent once as the request after starting up the server. + // The requests parameter is of type {@link InitializeParams} + // the response if of type {@link InitializeResult} of a Thenable that + // resolves to such. + .{ + .method = "initialize", + .documentation = "The initialize request is sent from the client to the server.\nIt is sent once as the request after starting up the server.\nThe requests parameter is of type {@link InitializeParams}\nthe response if of type {@link InitializeResult} of a Thenable that\nresolves to such.", + .direction = .client_to_server, + .Params = InitializeParams, + .Result = InitializeResult, + .PartialResult = null, + .ErrorData = InitializeError, + .registration = .{ .method = null, .Options = null }, + }, + // A shutdown request is sent from the client to the server. + // It is sent once when the client decides to shutdown the + // server. The only notification that is sent after a shutdown request + // is the exit event. + .{ + .method = "shutdown", + .documentation = "A shutdown request is sent from the client to the server.\nIt is sent once when the client decides to shutdown the\nserver. The only notification that is sent after a shutdown request\nis the exit event.", + .direction = .client_to_server, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // The show message request is sent from the server to the client to show a message + // and a set of options actions to the user. + .{ + .method = "window/showMessageRequest", + .documentation = "The show message request is sent from the server to the client to show a message\nand a set of options actions to the user.", + .direction = .server_to_client, + .Params = ShowMessageRequestParams, + .Result = ?MessageActionItem, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A document will save request is sent from the client to the server before + // the document is actually saved. The request can return an array of TextEdits + // which will be applied to the text document before it is saved. Please note that + // clients might drop results if computing the text edits took too long or if a + // server constantly fails on this request. This is done to keep the save fast and + // reliable. 
+ .{ + .method = "textDocument/willSaveWaitUntil", + .documentation = "A document will save request is sent from the client to the server before\nthe document is actually saved. The request can return an array of TextEdits\nwhich will be applied to the text document before it is saved. Please note that\nclients might drop results if computing the text edits took too long or if a\nserver constantly fails on this request. This is done to keep the save fast and\nreliable.", + .direction = .client_to_server, + .Params = WillSaveTextDocumentParams, + .Result = ?[]const TextEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = TextDocumentRegistrationOptions }, + }, + // Request to request completion at a given text document position. The request's + // parameter is of type {@link TextDocumentPosition} the response + // is of type {@link CompletionItem CompletionItem[]} or {@link CompletionList} + // or a Thenable that resolves to such. + // + // The request can delay the computation of the {@link CompletionItem.detail `detail`} + // and {@link CompletionItem.documentation `documentation`} properties to the `completionItem/resolve` + // request. However, properties that are needed for the initial sorting and filtering, like `sortText`, + // `filterText`, `insertText`, and `textEdit`, must not be changed during resolve. + .{ + .method = "textDocument/completion", + .documentation = "Request to request completion at a given text document position. The request's\nparameter is of type {@link TextDocumentPosition} the response\nis of type {@link CompletionItem CompletionItem[]} or {@link CompletionList}\nor a Thenable that resolves to such.\n\nThe request can delay the computation of the {@link CompletionItem.detail `detail`}\nand {@link CompletionItem.documentation `documentation`} properties to the `completionItem/resolve`\nrequest. However, properties that are needed for the initial sorting and filtering, like `sortText`,\n`filterText`, `insertText`, and `textEdit`, must not be changed during resolve.", + .direction = .client_to_server, + .Params = CompletionParams, + .Result = ?union(enum) { + array_of_CompletionItem: []const CompletionItem, + CompletionList: CompletionList, + }, + .PartialResult = []const CompletionItem, + .ErrorData = null, + .registration = .{ .method = null, .Options = CompletionRegistrationOptions }, + }, + // Request to resolve additional information for a given completion item.The request's + // parameter is of type {@link CompletionItem} the response + // is of type {@link CompletionItem} or a Thenable that resolves to such. + .{ + .method = "completionItem/resolve", + .documentation = "Request to resolve additional information for a given completion item.The request's\nparameter is of type {@link CompletionItem} the response\nis of type {@link CompletionItem} or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = CompletionItem, + .Result = CompletionItem, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // Request to request hover information at a given text document position. The request's + // parameter is of type {@link TextDocumentPosition} the response is of + // type {@link Hover} or a Thenable that resolves to such. + .{ + .method = "textDocument/hover", + .documentation = "Request to request hover information at a given text document position. 
The request's\nparameter is of type {@link TextDocumentPosition} the response is of\ntype {@link Hover} or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = HoverParams, + .Result = ?Hover, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = HoverRegistrationOptions }, + }, + .{ + .method = "textDocument/signatureHelp", + .documentation = null, + .direction = .client_to_server, + .Params = SignatureHelpParams, + .Result = ?SignatureHelp, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = SignatureHelpRegistrationOptions }, + }, + // A request to resolve the definition location of a symbol at a given text + // document position. The request's parameter is of type [TextDocumentPosition] + // (#TextDocumentPosition) the response is of either type {@link Definition} + // or a typed array of {@link DefinitionLink} or a Thenable that resolves + // to such. + .{ + .method = "textDocument/definition", + .documentation = "A request to resolve the definition location of a symbol at a given text\ndocument position. The request's parameter is of type [TextDocumentPosition]\n(#TextDocumentPosition) the response is of either type {@link Definition}\nor a typed array of {@link DefinitionLink} or a Thenable that resolves\nto such.", + .direction = .client_to_server, + .Params = DefinitionParams, + .Result = ?union(enum) { + Definition: Definition, + array_of_DefinitionLink: []const DefinitionLink, + }, + .PartialResult = union(enum) { + array_of_Location: []const Location, + array_of_DefinitionLink: []const DefinitionLink, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = DefinitionRegistrationOptions }, + }, + // A request to resolve project-wide references for the symbol denoted + // by the given text document position. The request's parameter is of + // type {@link ReferenceParams} the response is of type + // {@link Location Location[]} or a Thenable that resolves to such. + .{ + .method = "textDocument/references", + .documentation = "A request to resolve project-wide references for the symbol denoted\nby the given text document position. The request's parameter is of\ntype {@link ReferenceParams} the response is of type\n{@link Location Location[]} or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = ReferenceParams, + .Result = ?[]const Location, + .PartialResult = []const Location, + .ErrorData = null, + .registration = .{ .method = null, .Options = ReferenceRegistrationOptions }, + }, + // Request to resolve a {@link DocumentHighlight} for a given + // text document position. The request's parameter is of type [TextDocumentPosition] + // (#TextDocumentPosition) the request response is of type [DocumentHighlight[]] + // (#DocumentHighlight) or a Thenable that resolves to such. + .{ + .method = "textDocument/documentHighlight", + .documentation = "Request to resolve a {@link DocumentHighlight} for a given\ntext document position. 
The request's parameter is of type [TextDocumentPosition]\n(#TextDocumentPosition) the request response is of type [DocumentHighlight[]]\n(#DocumentHighlight) or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = DocumentHighlightParams, + .Result = ?[]const DocumentHighlight, + .PartialResult = []const DocumentHighlight, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentHighlightRegistrationOptions }, + }, + // A request to list all symbols found in a given text document. The request's + // parameter is of type {@link TextDocumentIdentifier} the + // response is of type {@link SymbolInformation SymbolInformation[]} or a Thenable + // that resolves to such. + .{ + .method = "textDocument/documentSymbol", + .documentation = "A request to list all symbols found in a given text document. The request's\nparameter is of type {@link TextDocumentIdentifier} the\nresponse is of type {@link SymbolInformation SymbolInformation[]} or a Thenable\nthat resolves to such.", + .direction = .client_to_server, + .Params = DocumentSymbolParams, + .Result = ?union(enum) { + array_of_SymbolInformation: []const SymbolInformation, + array_of_DocumentSymbol: []const DocumentSymbol, + }, + .PartialResult = union(enum) { + array_of_SymbolInformation: []const SymbolInformation, + array_of_DocumentSymbol: []const DocumentSymbol, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentSymbolRegistrationOptions }, + }, + // A request to provide commands for the given text document and range. + .{ + .method = "textDocument/codeAction", + .documentation = "A request to provide commands for the given text document and range.", + .direction = .client_to_server, + .Params = CodeActionParams, + .Result = ?[]const union(enum) { + Command: Command, + CodeAction: CodeAction, + }, + .PartialResult = []const union(enum) { + Command: Command, + CodeAction: CodeAction, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = CodeActionRegistrationOptions }, + }, + // Request to resolve additional information for a given code action.The request's + // parameter is of type {@link CodeAction} the response + // is of type {@link CodeAction} or a Thenable that resolves to such. + .{ + .method = "codeAction/resolve", + .documentation = "Request to resolve additional information for a given code action.The request's\nparameter is of type {@link CodeAction} the response\nis of type {@link CodeAction} or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = CodeAction, + .Result = CodeAction, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to list project-wide symbols matching the query string given + // by the {@link WorkspaceSymbolParams}. The response is + // of type {@link SymbolInformation SymbolInformation[]} or a Thenable that + // resolves to such. + // + // @since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients + // need to advertise support for WorkspaceSymbols via the client capability + // `workspace.symbol.resolveSupport`. + // + .{ + .method = "workspace/symbol", + .documentation = "A request to list project-wide symbols matching the query string given\nby the {@link WorkspaceSymbolParams}. The response is\nof type {@link SymbolInformation SymbolInformation[]} or a Thenable that\nresolves to such.\n\n@since 3.17.0 - support for WorkspaceSymbol in the returned data. 
Clients\n need to advertise support for WorkspaceSymbols via the client capability\n `workspace.symbol.resolveSupport`.\n", + .direction = .client_to_server, + .Params = WorkspaceSymbolParams, + .Result = ?union(enum) { + array_of_SymbolInformation: []const SymbolInformation, + array_of_WorkspaceSymbol: []const WorkspaceSymbol, + }, + .PartialResult = union(enum) { + array_of_SymbolInformation: []const SymbolInformation, + array_of_WorkspaceSymbol: []const WorkspaceSymbol, + }, + .ErrorData = null, + .registration = .{ .method = null, .Options = WorkspaceSymbolRegistrationOptions }, + }, + // A request to resolve the range inside the workspace + // symbol's location. + // + // @since 3.17.0 + .{ + .method = "workspaceSymbol/resolve", + .documentation = "A request to resolve the range inside the workspace\nsymbol's location.\n\n@since 3.17.0", + .direction = .client_to_server, + .Params = WorkspaceSymbol, + .Result = WorkspaceSymbol, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to provide code lens for the given text document. + .{ + .method = "textDocument/codeLens", + .documentation = "A request to provide code lens for the given text document.", + .direction = .client_to_server, + .Params = CodeLensParams, + .Result = ?[]const CodeLens, + .PartialResult = []const CodeLens, + .ErrorData = null, + .registration = .{ .method = null, .Options = CodeLensRegistrationOptions }, + }, + // A request to resolve a command for a given code lens. + .{ + .method = "codeLens/resolve", + .documentation = "A request to resolve a command for a given code lens.", + .direction = .client_to_server, + .Params = CodeLens, + .Result = CodeLens, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to refresh all code actions + // + // @since 3.16.0 + .{ + .method = "workspace/codeLens/refresh", + .documentation = "A request to refresh all code actions\n\n@since 3.16.0", + .direction = .server_to_client, + .Params = null, + .Result = ?void, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to provide document links + .{ + .method = "textDocument/documentLink", + .documentation = "A request to provide document links", + .direction = .client_to_server, + .Params = DocumentLinkParams, + .Result = ?[]const DocumentLink, + .PartialResult = []const DocumentLink, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentLinkRegistrationOptions }, + }, + // Request to resolve additional information for a given document link. The request's + // parameter is of type {@link DocumentLink} the response + // is of type {@link DocumentLink} or a Thenable that resolves to such. + .{ + .method = "documentLink/resolve", + .documentation = "Request to resolve additional information for a given document link. The request's\nparameter is of type {@link DocumentLink} the response\nis of type {@link DocumentLink} or a Thenable that resolves to such.", + .direction = .client_to_server, + .Params = DocumentLink, + .Result = DocumentLink, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request to to format a whole document. 
+ .{ + .method = "textDocument/formatting", + .documentation = "A request to to format a whole document.", + .direction = .client_to_server, + .Params = DocumentFormattingParams, + .Result = ?[]const TextEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentFormattingRegistrationOptions }, + }, + // A request to to format a range in a document. + .{ + .method = "textDocument/rangeFormatting", + .documentation = "A request to to format a range in a document.", + .direction = .client_to_server, + .Params = DocumentRangeFormattingParams, + .Result = ?[]const TextEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentRangeFormattingRegistrationOptions }, + }, + // A request to format a document on type. + .{ + .method = "textDocument/onTypeFormatting", + .documentation = "A request to format a document on type.", + .direction = .client_to_server, + .Params = DocumentOnTypeFormattingParams, + .Result = ?[]const TextEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = DocumentOnTypeFormattingRegistrationOptions }, + }, + // A request to rename a symbol. + .{ + .method = "textDocument/rename", + .documentation = "A request to rename a symbol.", + .direction = .client_to_server, + .Params = RenameParams, + .Result = ?WorkspaceEdit, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = RenameRegistrationOptions }, + }, + // A request to test and perform the setup necessary for a rename. + // + // @since 3.16 - support for default behavior + .{ + .method = "textDocument/prepareRename", + .documentation = "A request to test and perform the setup necessary for a rename.\n\n@since 3.16 - support for default behavior", + .direction = .client_to_server, + .Params = PrepareRenameParams, + .Result = ?PrepareRenameResult, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, + // A request send from the client to the server to execute a command. The request might return + // a workspace edit which the client will apply to the workspace. + .{ + .method = "workspace/executeCommand", + .documentation = "A request send from the client to the server to execute a command. The request might return\na workspace edit which the client will apply to the workspace.", + .direction = .client_to_server, + .Params = ExecuteCommandParams, + .Result = ?LSPAny, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = ExecuteCommandRegistrationOptions }, + }, + // A request sent from the server to the client to modified certain resources. 
+ .{ + .method = "workspace/applyEdit", + .documentation = "A request sent from the server to the client to modified certain resources.", + .direction = .server_to_client, + .Params = ApplyWorkspaceEditParams, + .Result = ApplyWorkspaceEditResult, + .PartialResult = null, + .ErrorData = null, + .registration = .{ .method = null, .Options = null }, + }, +}; diff --git a/src/main.zig b/src/main.zig index 03776c4..6aaab47 100644 --- a/src/main.zig +++ b/src/main.zig @@ -7,7 +7,7 @@ const Config = @import("Config.zig"); const configuration = @import("configuration.zig"); const Server = @import("Server.zig"); const setup = @import("setup.zig"); -const readRequestHeader = @import("header.zig").readRequestHeader; +const Header = @import("Header.zig"); const logger = std.log.scoped(.main); @@ -35,20 +35,34 @@ pub fn log( } fn loop(server: *Server) !void { - var reader = std.io.getStdIn().reader(); + const reader = std.io.getStdIn().reader(); + + const std_out = std.io.getStdOut().writer(); + var buffered_writer = std.io.bufferedWriter(std_out); + const writer = buffered_writer.writer(); while (true) { - const headers = readRequestHeader(server.allocator, reader) catch |err| { - logger.err("{s}; exiting!", .{@errorName(err)}); - return; - }; - const buffer = try server.allocator.alloc(u8, headers.content_length); - defer server.allocator.free(buffer); + var arena = std.heap.ArenaAllocator.init(server.allocator); + defer arena.deinit(); - try reader.readNoEof(buffer); + // write server -> client messages + for (server.outgoing_messages.items) |outgoing_message| { + const header = Header{ .content_length = outgoing_message.len }; + try writer.print("{}{s}", .{ header, outgoing_message }); + try buffered_writer.flush(); + } + for (server.outgoing_messages.items) |outgoing_message| { + server.allocator.free(outgoing_message); + } + server.outgoing_messages.clearRetainingCapacity(); - const writer = std.io.getStdOut().writer(); - try server.processJsonRpc(writer, buffer); + // read and handle client -> server message + const header = try Header.parse(arena.allocator(), reader); + + const json_message = try arena.allocator().alloc(u8, header.content_length); + try reader.readNoEof(json_message); + + server.processJsonRpc(&arena, json_message); } } diff --git a/src/offsets.zig b/src/offsets.zig index d9acfc4..7f328c4 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const types = @import("types.zig"); +const types = @import("lsp.zig"); const ast = @import("ast.zig"); const Ast = std.zig.Ast; @@ -263,8 +263,8 @@ pub fn advancePosition(text: []const u8, position: types.Position, from_index: u /// returns the number of code units in `text` pub fn countCodeUnits(text: []const u8, encoding: Encoding) usize { switch (encoding) { - .utf8 => return text.len, - .utf16 => { + .@"utf-8" => return text.len, + .@"utf-16" => { var iter: std.unicode.Utf8Iterator = .{ .bytes = text, .i = 0 }; var utf16_len: usize = 0; @@ -277,15 +277,15 @@ pub fn countCodeUnits(text: []const u8, encoding: Encoding) usize { } return utf16_len; }, - .utf32 => return std.unicode.utf8CountCodepoints(text) catch unreachable, + .@"utf-32" => return std.unicode.utf8CountCodepoints(text) catch unreachable, } } /// returns the number of (utf-8 code units / bytes) that represent `n` code units in `text` pub fn getNCodeUnitByteCount(text: []const u8, n: usize, encoding: Encoding) usize { switch (encoding) { - .utf8 => return n, - .utf16 => { + .@"utf-8" => return n, + .@"utf-16" => { if (n == 0) return 
0; var iter: std.unicode.Utf8Iterator = .{ .bytes = text, .i = 0 }; @@ -300,7 +300,7 @@ pub fn getNCodeUnitByteCount(text: []const u8, n: usize, encoding: Encoding) usi } return iter.i; }, - .utf32 => { + .@"utf-32" => { var i: usize = 0; var count: usize = 0; while (count != n) : (count += 1) { diff --git a/src/references.zig b/src/references.zig index 4323c44..bbdd5e3 100644 --- a/src/references.zig +++ b/src/references.zig @@ -2,7 +2,7 @@ const std = @import("std"); const Ast = std.zig.Ast; const DocumentStore = @import("DocumentStore.zig"); const analysis = @import("analysis.zig"); -const types = @import("types.zig"); +const types = @import("lsp.zig"); const offsets = @import("offsets.zig"); const log = std.log.scoped(.references); const ast = @import("ast.zig"); diff --git a/src/requests.zig b/src/requests.zig deleted file mode 100644 index f0cdb2b..0000000 --- a/src/requests.zig +++ /dev/null @@ -1,324 +0,0 @@ -//! This file contains request types zls handles. -//! Note that the parameter types may be incomplete. -//! We only define what we actually use. - -const std = @import("std"); -const types = @import("types.zig"); - -/// Only check for the field's existence. -const Exists = struct { - exists: bool, -}; - -fn Default(comptime T: type, comptime default_value: T) type { - return struct { - pub const value_type = T; - pub const default = default_value; - value: T, - }; -} - -pub fn ErrorUnwrappedReturnOf(comptime func: anytype) type { - return switch (@typeInfo(@TypeOf(func))) { - .Fn, .BoundFn => |fn_info| switch (@typeInfo(fn_info.return_type.?)) { - .ErrorUnion => |err_union| err_union.payload, - else => |T| return T, - }, - else => unreachable, - }; -} - -fn Transform(comptime Original: type, comptime transform_fn: anytype) type { - return struct { - pub const original_type = Original; - pub const transform = transform_fn; - - value: ErrorUnwrappedReturnOf(transform_fn), - }; -} - -fn fromDynamicTreeInternal(arena: *std.heap.ArenaAllocator, value: std.json.Value, out: anytype) error{ MalformedJson, OutOfMemory }!void { - const T = comptime std.meta.Child(@TypeOf(out)); - - if (comptime std.meta.trait.is(.Struct)(T)) { - if (value != .Object) return error.MalformedJson; - - var err = false; - inline for (std.meta.fields(T)) |field| { - const is_exists = field.type == Exists; - - const is_optional = comptime std.meta.trait.is(.Optional)(field.type); - const actual_type = if (is_optional) std.meta.Child(field.type) else field.type; - - const is_struct = comptime std.meta.trait.is(.Struct)(actual_type); - const is_default = comptime if (is_struct) std.meta.trait.hasDecls(actual_type, .{ "default", "value_type" }) else false; - const is_transform = comptime if (is_struct) std.meta.trait.hasDecls(actual_type, .{ "original_type", "transform" }) else false; - - if (value.Object.get(field.name)) |json_field| { - if (is_exists) { - @field(out, field.name) = Exists{ .exists = true }; - } else if (is_transform) { - var original_value: actual_type.original_type = undefined; - try fromDynamicTreeInternal(arena, json_field, &original_value); - @field(out, field.name) = actual_type{ - .value = actual_type.transform(original_value) catch - return error.MalformedJson, - }; - } else if (is_default) { - try fromDynamicTreeInternal(arena, json_field, &@field(out, field.name).value); - } else if (is_optional) { - if (json_field == .Null) { - @field(out, field.name) = null; - } else { - var actual_value: actual_type = undefined; - try fromDynamicTreeInternal(arena, json_field, &actual_value); - 
@field(out, field.name) = actual_value; - } - } else { - try fromDynamicTreeInternal(arena, json_field, &@field(out, field.name)); - } - } else { - if (is_exists) { - @field(out, field.name) = Exists{ .exists = false }; - } else if (is_optional) { - @field(out, field.name) = null; - } else if (is_default) { - @field(out, field.name) = actual_type{ .value = actual_type.default }; - } else { - err = true; - } - } - } - if (err) return error.MalformedJson; - } else if (comptime (std.meta.trait.isSlice(T) and T != []const u8)) { - if (value != .Array) return error.MalformedJson; - const Child = std.meta.Child(T); - - if (value.Array.items.len == 0) { - out.* = &[0]Child{}; - } else { - var slice = try arena.allocator().alloc(Child, value.Array.items.len); - for (value.Array.items) |arr_item, idx| { - try fromDynamicTreeInternal(arena, arr_item, &slice[idx]); - } - out.* = slice; - } - } else if (T == std.json.Value) { - out.* = value; - } else if (comptime std.meta.trait.is(.Enum)(T)) { - const info = @typeInfo(T).Enum; - const TagType = info.tag_type; - if (value != .Integer) return error.MalformedJson; - out.* = std.meta.intToEnum( - T, - std.math.cast(TagType, value.Integer) orelse return error.MalformedJson, - ) catch return error.MalformedJson; - } else if (comptime std.meta.trait.is(.Int)(T)) { - if (value != .Integer) return error.MalformedJson; - out.* = std.math.cast(T, value.Integer) orelse return error.MalformedJson; - } else switch (T) { - bool => { - if (value != .Bool) return error.MalformedJson; - out.* = value.Bool; - }, - f64 => { - if (value != .Float) return error.MalformedJson; - out.* = value.Float; - }, - []const u8 => { - if (value != .String) return error.MalformedJson; - out.* = value.String; - }, - else => @compileError("Invalid type " ++ @typeName(T)), - } -} - -pub fn fromDynamicTree(arena: *std.heap.ArenaAllocator, comptime T: type, value: std.json.Value) error{ MalformedJson, OutOfMemory }!T { - var out: T = undefined; - try fromDynamicTreeInternal(arena, value, &out); - return out; -} - -const MaybeStringArray = Default([]const []const u8, &.{}); - -pub const Initialize = struct { - pub const ClientCapabilities = struct { - workspace: ?struct { - configuration: Default(bool, false), - didChangeConfiguration: ?struct { - dynamicRegistration: Default(bool, false), // NOTE: Should this be true? 
Seems like this critical feature should be nearly universal - }, - workspaceFolders: Default(bool, false), - }, - textDocument: ?struct { - synchronization: ?struct { - willSave: Default(bool, false), - willSaveWaitUntil: Default(bool, false), - didSave: Default(bool, false), - }, - semanticTokens: Exists, - inlayHint: Exists, - hover: ?struct { - contentFormat: MaybeStringArray, - }, - completion: ?struct { - completionItem: ?struct { - snippetSupport: Default(bool, false), - labelDetailsSupport: Default(bool, false), - documentationFormat: MaybeStringArray, - }, - }, - documentHighlight: Exists, - }, - general: ?struct { - positionEncodings: MaybeStringArray, - }, - }; - - pub const ClientInfo = struct { - name: []const u8, - version: ?[]const u8, - }; - - params: struct { - clientInfo: ?ClientInfo, - capabilities: ClientCapabilities, - workspaceFolders: ?[]const types.WorkspaceFolder, - }, -}; - -pub const WorkspaceFoldersChange = struct { - params: struct { - event: struct { - added: []const types.WorkspaceFolder, - removed: []const types.WorkspaceFolder, - }, - }, -}; - -pub const OpenDocument = struct { - params: struct { - textDocument: struct { - uri: []const u8, - text: []const u8, - }, - }, -}; - -const TextDocumentIdentifier = struct { - uri: []const u8, -}; - -pub const ChangeDocument = struct { - params: struct { - textDocument: TextDocumentIdentifier, - contentChanges: []TextDocumentContentChangeEvent, - }, -}; - -pub const TextDocumentContentChangeEvent = struct { - range: ?types.Range, - text: []const u8, -}; - -const TextDocumentIdentifierRequest = struct { - params: struct { - textDocument: TextDocumentIdentifier, - }, -}; - -pub const SaveDocument = TextDocumentIdentifierRequest; -pub const CloseDocument = TextDocumentIdentifierRequest; -pub const SemanticTokensFull = TextDocumentIdentifierRequest; - -const TextDocumentIdentifierPositionRequest = struct { - params: struct { - textDocument: TextDocumentIdentifier, - position: types.Position, - }, -}; - -pub const SaveReason = enum(u32) { - Manual = 1, - AfterDelay = 2, - FocusOut = 3, -}; - -pub const WillSave = struct { - params: struct { - textDocument: TextDocumentIdentifier, - reason: SaveReason, - }, -}; - -pub const SignatureHelp = struct { - params: struct { - textDocument: TextDocumentIdentifier, - position: types.Position, - context: ?struct { - triggerKind: enum(u32) { - invoked = 1, - trigger_character = 2, - content_change = 3, - }, - triggerCharacter: ?[]const u8, - isRetrigger: bool, - activeSignatureHelp: ?types.SignatureHelp, - }, - }, -}; - -pub const Completion = TextDocumentIdentifierPositionRequest; -pub const GotoDefinition = TextDocumentIdentifierPositionRequest; -pub const GotoDeclaration = TextDocumentIdentifierPositionRequest; -pub const Hover = TextDocumentIdentifierPositionRequest; -pub const DocumentSymbols = TextDocumentIdentifierRequest; -pub const Formatting = TextDocumentIdentifierRequest; -pub const DocumentHighlight = TextDocumentIdentifierPositionRequest; -pub const Rename = struct { - params: struct { - textDocument: TextDocumentIdentifier, - position: types.Position, - newName: []const u8, - }, -}; - -pub const References = struct { - params: struct { - textDocument: TextDocumentIdentifier, - position: types.Position, - context: struct { - includeDeclaration: bool, - }, - }, -}; - -pub const InlayHint = struct { - params: struct { - textDocument: TextDocumentIdentifier, - range: types.Range, - }, -}; - -pub const CodeAction = struct { - params: struct { - textDocument: 
TextDocumentIdentifier, - range: types.Range, - context: struct { - diagnostics: []types.Diagnostic, - }, - }, -}; - -pub const FoldingRange = struct { - params: struct { - textDocument: TextDocumentIdentifier, - }, -}; - -pub const SelectionRange = struct { - params: struct { - textDocument: TextDocumentIdentifier, - positions: []types.Position, - }, -}; diff --git a/src/signature_help.zig b/src/signature_help.zig index f5c7df3..e47e72d 100644 --- a/src/signature_help.zig +++ b/src/signature_help.zig @@ -2,31 +2,29 @@ const std = @import("std"); const analysis = @import("analysis.zig"); const offsets = @import("offsets.zig"); const DocumentStore = @import("DocumentStore.zig"); -const types = @import("types.zig"); +const types = @import("lsp.zig"); const Ast = std.zig.Ast; const Token = std.zig.Token; const identifierFromPosition = @import("Server.zig").identifierFromPosition; const ast = @import("ast.zig"); fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation { - const ParameterInformation = types.SignatureInformation.ParameterInformation; - const tree = handle.tree; const token_starts = tree.tokens.items(.start); const alloc = arena.allocator(); const label = analysis.getFunctionSignature(tree, proto); - const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .Markdown)) orelse ""; + const proto_comments = (try analysis.getDocComments(alloc, tree, fn_node, .markdown)) orelse ""; const arg_idx = if (skip_self_param) blk: { const has_self_param = try analysis.hasSelfParam(arena, document_store, handle, proto); break :blk commas + @boolToInt(has_self_param); } else commas; - var params = std.ArrayListUnmanaged(ParameterInformation){}; + var params = std.ArrayListUnmanaged(types.ParameterInformation){}; var param_it = proto.iterate(&tree); while (ast.nextFnParam(¶m_it)) |param| { const param_comments = if (param.first_doc_comment) |dc| - try analysis.collectDocComments(alloc, tree, dc, .Markdown, false) + try analysis.collectDocComments(alloc, tree, dc, .markdown, false) else ""; @@ -55,13 +53,19 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.Arena } const param_label = tree.source[param_label_start..param_label_end]; try params.append(alloc, .{ - .label = param_label, - .documentation = types.MarkupContent{ .value = param_comments }, + .label = .{ .string = param_label }, + .documentation = .{ .MarkupContent = .{ + .kind = .markdown, + .value = param_comments, + } }, }); } return types.SignatureInformation{ .label = label, - .documentation = types.MarkupContent{ .value = proto_comments }, + .documentation = .{ .MarkupContent = .{ + .kind = .markdown, + .value = proto_comments, + } }, .parameters = params.items, .activeParameter = arg_idx, }; @@ -188,20 +192,18 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl for (data.builtins) |builtin| { if (std.mem.eql(u8, builtin.name, tree.tokenSlice(expr_last_token))) { const param_infos = try alloc.alloc( - types.SignatureInformation.ParameterInformation, + types.ParameterInformation, builtin.arguments.len, ); for (param_infos) |*info, i| { info.* = .{ - .label = builtin.arguments[i], + .label = .{ .string = builtin.arguments[i] }, .documentation = null, }; } return types.SignatureInformation{ .label = builtin.signature, - .documentation = .{ - .value = builtin.documentation, - }, + 
.documentation = .{ .string = builtin.documentation }, .parameters = param_infos, .activeParameter = paren_commas, }; diff --git a/src/tres b/src/tres new file mode 160000 index 0000000..16774b9 --- /dev/null +++ b/src/tres @@ -0,0 +1 @@ +Subproject commit 16774b94efa61757a5302a690837dfb8cf750a11 diff --git a/src/types.zig b/src/types.zig deleted file mode 100644 index 5ad7e62..0000000 --- a/src/types.zig +++ /dev/null @@ -1,536 +0,0 @@ -const std = @import("std"); -const string = []const u8; - -// LSP types -// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/ - -pub const Position = struct { - line: u32, - character: u32, -}; - -pub const Range = struct { - start: Position, - end: Position, -}; - -pub const Location = struct { - uri: string, - range: Range, -}; - -/// Id of a request -pub const RequestId = union(enum) { - String: string, - Integer: i32, -}; - -/// Params of a response (result) -pub const ResponseParams = union(enum) { - SignatureHelp: SignatureHelp, - CompletionList: CompletionList, - Location: Location, - Hover: Hover, - DocumentSymbols: []DocumentSymbol, - SemanticTokensFull: SemanticTokens, - InlayHint: []InlayHint, - TextEdits: []TextEdit, - Locations: []Location, - WorkspaceEdit: WorkspaceEdit, - InitializeResult: InitializeResult, - ConfigurationParams: ConfigurationParams, - RegistrationParams: RegistrationParams, - DocumentHighlight: []DocumentHighlight, - CodeAction: []CodeAction, - ApplyEdit: ApplyWorkspaceEditParams, - FoldingRange: []FoldingRange, - SelectionRange: []*SelectionRange, -}; - -pub const Response = struct { - jsonrpc: string = "2.0", - id: RequestId, - result: ResponseParams, -}; - -pub const Request = struct { - jsonrpc: string = "2.0", - id: RequestId, - method: []const u8, - params: ?ResponseParams, -}; - -pub const ResponseError = struct { - code: i32, - message: string, - data: std.json.Value, -}; - -pub const ErrorCodes = enum(i32) { - // Defined by JSON-RPC - ParseError = -32700, - InvalidRequest = -32600, - MethodNotFound = -32601, - InvalidParams = -32602, - InternalError = -32603, - - // JSON-RPC reserved error codes - ServerNotInitialized = -32002, - UnknownErrorCode = -3200, - - // LSP reserved error codes - RequestFailed = -32803, - ServerCancelled = -32802, - ContentModified = -32801, - RequestCancelled = -32800, -}; - -pub const Notification = struct { - jsonrpc: string = "2.0", - method: string, - params: NotificationParams, -}; - -pub const NotificationParams = union(enum) { - LogMessage: struct { - type: MessageType, - message: string, - }, - PublishDiagnostics: struct { - uri: string, - diagnostics: []Diagnostic, - }, - ShowMessage: struct { - type: MessageType, - message: string, - }, -}; - -/// Type of a debug message -pub const MessageType = enum(i64) { - Error = 1, - Warning = 2, - Info = 3, - Log = 4, - - pub fn jsonStringify(value: MessageType, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; - -pub const DiagnosticSeverity = enum(i64) { - Error = 1, - Warning = 2, - Information = 3, - Hint = 4, - - pub fn jsonStringify(value: DiagnosticSeverity, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; - -pub const DiagnosticRelatedInformation = struct { - location: Location, - message: string, -}; - -pub const Diagnostic = struct { - range: Range, - severity: ?DiagnosticSeverity, - code: ?string, - source: 
?string, - message: string, - relatedInformation: ?[]DiagnosticRelatedInformation = null, -}; - -pub const WorkspaceEdit = struct { - changes: std.StringHashMapUnmanaged(std.ArrayListUnmanaged(TextEdit)), - - pub fn jsonStringify(self: WorkspaceEdit, options: std.json.StringifyOptions, writer: anytype) @TypeOf(writer).Error!void { - try writer.writeAll("{\"changes\": {"); - var it = self.changes.iterator(); - var idx: usize = 0; - while (it.next()) |entry| : (idx += 1) { - if (idx != 0) try writer.writeAll(", "); - - try writer.writeByte('"'); - try writer.writeAll(entry.key_ptr.*); - try writer.writeAll("\":"); - try std.json.stringify(entry.value_ptr.items, options, writer); - } - try writer.writeAll("}}"); - } -}; - -pub const TextEdit = struct { - range: Range, - newText: string, -}; - -pub const MarkupContent = struct { - pub const Kind = enum(u1) { - PlainText = 0, - Markdown = 1, - - pub fn jsonStringify(value: Kind, options: std.json.StringifyOptions, out_stream: anytype) !void { - const str = switch (value) { - .PlainText => "plaintext", - .Markdown => "markdown", - }; - try std.json.stringify(str, options, out_stream); - } - }; - - kind: Kind = .Markdown, - value: string, -}; - -pub const CompletionList = struct { - isIncomplete: bool, - items: []const CompletionItem, -}; - -pub const InsertTextFormat = enum(i64) { - PlainText = 1, - Snippet = 2, - - pub fn jsonStringify(value: InsertTextFormat, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; - -pub const Hover = struct { - contents: MarkupContent, -}; - -pub const SemanticTokens = struct { - data: []const u32, -}; - -pub const CompletionItem = struct { - pub const Kind = enum(i64) { - Text = 1, - Method = 2, - Function = 3, - Constructor = 4, - Field = 5, - Variable = 6, - Class = 7, - Interface = 8, - Module = 9, - Property = 10, - Unit = 11, - Value = 12, - Enum = 13, - Keyword = 14, - Snippet = 15, - Color = 16, - File = 17, - Reference = 18, - Folder = 19, - EnumMember = 20, - Constant = 21, - Struct = 22, - Event = 23, - Operator = 24, - TypeParameter = 25, - - pub fn jsonStringify(value: Kind, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } - }; - - label: string, - labelDetails: ?CompletionItemLabelDetails = null, - kind: Kind, - detail: ?string = null, - - sortText: ?string = null, - filterText: ?string = null, - insertText: ?string = null, - - insertTextFormat: ?InsertTextFormat = .PlainText, - documentation: ?MarkupContent = null, - - // FIXME: i commented this out, because otherwise the vscode client complains about *ranges* - // and breaks code completion entirely - // see: https://github.com/zigtools/zls-vscode/pull/33 - // textEdit: ?TextEdit = null, -}; - -pub const CompletionItemLabelDetails = struct { - detail: ?string, - description: ?string, - sortText: ?string = null, -}; - -pub const DocumentSymbol = struct { - const Kind = enum(u32) { - File = 1, - Module = 2, - Namespace = 3, - Package = 4, - Class = 5, - Method = 6, - Property = 7, - Field = 8, - Constructor = 9, - Enum = 10, - Interface = 11, - Function = 12, - Variable = 13, - Constant = 14, - String = 15, - Number = 16, - Boolean = 17, - Array = 18, - Object = 19, - Key = 20, - Null = 21, - EnumMember = 22, - Struct = 23, - Event = 24, - Operator = 25, - TypeParameter = 26, - - pub fn jsonStringify(value: Kind, options: std.json.StringifyOptions, out_stream: anytype) !void { - 
try std.json.stringify(@enumToInt(value), options, out_stream); - } - }; - - name: string, - detail: ?string = null, - kind: Kind, - deprecated: bool = false, - range: Range, - selectionRange: Range, - children: []const DocumentSymbol = &[_]DocumentSymbol{}, -}; - -pub const WorkspaceFolder = struct { - uri: string, - name: string, -}; - -pub const SignatureInformation = struct { - pub const ParameterInformation = struct { - // TODO Can also send a pair of encoded offsets - label: string, - documentation: ?MarkupContent, - }; - - label: string, - documentation: ?MarkupContent, - parameters: ?[]const ParameterInformation, - activeParameter: ?u32, -}; - -pub const SignatureHelp = struct { - signatures: ?[]const SignatureInformation, - activeSignature: ?u32, - activeParameter: ?u32, -}; - -pub const InlayHint = struct { - position: Position, - label: string, - kind: InlayHintKind, - tooltip: MarkupContent, - paddingLeft: bool, - paddingRight: bool, - - // appends a colon to the label and reduces the output size - pub fn jsonStringify(value: InlayHint, options: std.json.StringifyOptions, writer: anytype) @TypeOf(writer).Error!void { - try writer.writeAll("{\"position\":"); - try std.json.stringify(value.position, options, writer); - try writer.writeAll(",\"label\":\""); - try writer.writeAll(value.label); - try writer.writeAll(":\",\"kind\":"); - try std.json.stringify(value.kind, options, writer); - if (value.tooltip.value.len != 0) { - try writer.writeAll(",\"tooltip\":"); - try std.json.stringify(value.tooltip, options, writer); - } - if (value.paddingLeft) try writer.writeAll(",\"paddingLeft\":true"); - if (value.paddingRight) try writer.writeAll(",\"paddingRight\":true"); - try writer.writeByte('}'); - } -}; - -pub const InlayHintKind = enum(i64) { - Type = 1, - Parameter = 2, - - pub fn jsonStringify(value: InlayHintKind, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; - -pub const CodeActionKind = enum { - Empty, - QuickFix, - Refactor, - RefactorExtract, - RefactorInline, - RefactorRewrite, - Source, - SourceOrganizeImports, - SourceFixAll, - - pub fn jsonStringify(value: CodeActionKind, options: std.json.StringifyOptions, out_stream: anytype) !void { - const name = switch (value) { - .Empty => "", - .QuickFix => "quickfix", - .Refactor => "refactor", - .RefactorExtract => "refactor.extract", - .RefactorInline => "refactor.inline", - .RefactorRewrite => "refactor.rewrite", - .Source => "source", - .SourceOrganizeImports => "source.organizeImports", - .SourceFixAll => "source.fixAll", - }; - try std.json.stringify(name, options, out_stream); - } -}; - -pub const CodeAction = struct { - title: string, - kind: CodeActionKind, - // diagnostics: []Diagnostic, - isPreferred: bool, - edit: WorkspaceEdit, -}; - -pub const ApplyWorkspaceEditParams = struct { - label: string, - edit: WorkspaceEdit, -}; - -pub const PositionEncodingKind = enum { - utf8, - utf16, - utf32, - - pub fn jsonStringify(value: PositionEncodingKind, options: std.json.StringifyOptions, out_stream: anytype) !void { - const str = switch (value) { - .utf8 => "utf-8", - .utf16 => "utf-16", - .utf32 => "utf-32", - }; - try std.json.stringify(str, options, out_stream); - } -}; - -const TextDocumentSyncKind = enum(u32) { - None = 0, - Full = 1, - Incremental = 2, - - pub fn jsonStringify(value: @This(), options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; 
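Editorial aside, not part of this patch: every integer-backed enum in the removed types.zig serializes itself the same way, by overriding jsonStringify and emitting the numeric tag via @enumToInt. The sketch below is a minimal, self-contained illustration of that pattern; `ExampleKind` is a made-up name used only here, standing in for the real enums (MessageType, InlayHintKind, DocumentSymbol.Kind, and so on).

```zig
const std = @import("std");

// Illustrative only: the hand-written pattern the removed types.zig used for
// its integer-backed LSP enums. The override forwards the numeric tag value
// to std.json.stringify instead of letting the field name be emitted.
const ExampleKind = enum(i64) {
    First = 1,
    Second = 2,

    pub fn jsonStringify(value: ExampleKind, options: std.json.StringifyOptions, out_stream: anytype) !void {
        // Emit the enum's integer value, matching what the LSP spec expects.
        try std.json.stringify(@enumToInt(value), options, out_stream);
    }
};

test "integer-backed enum serializes as a number" {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();
    try std.json.stringify(ExampleKind.Second, .{}, buffer.writer());
    try std.testing.expectEqualStrings("2", buffer.items);
}
```

The replacement lsp.zig types lean on tres.stringify/tres.parse instead (as the test changes later in this patch show), which is why these hand-written overrides disappear together with the file.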
- -// Only includes options we set in our initialize result. -const InitializeResult = struct { - capabilities: struct { - positionEncoding: PositionEncodingKind, - signatureHelpProvider: struct { - triggerCharacters: []const string, - retriggerCharacters: []const string, - }, - textDocumentSync: struct { - openClose: bool, - change: TextDocumentSyncKind, - willSave: bool, - willSaveWaitUntil: bool, - save: bool, - }, - renameProvider: bool, - completionProvider: struct { - resolveProvider: bool, - triggerCharacters: []const string, - completionItem: struct { labelDetailsSupport: bool }, - }, - documentHighlightProvider: bool, - hoverProvider: bool, - codeActionProvider: bool, - declarationProvider: bool, - definitionProvider: bool, - typeDefinitionProvider: bool, - implementationProvider: bool, - referencesProvider: bool, - documentSymbolProvider: bool, - colorProvider: bool, - documentFormattingProvider: bool, - documentRangeFormattingProvider: bool, - foldingRangeProvider: bool, - selectionRangeProvider: bool, - workspaceSymbolProvider: bool, - rangeProvider: bool, - documentProvider: bool, - workspace: ?struct { - workspaceFolders: ?struct { - supported: bool, - changeNotifications: bool, - }, - }, - semanticTokensProvider: struct { - full: bool, - range: bool, - legend: struct { - tokenTypes: []const string, - tokenModifiers: []const string, - }, - }, - inlayHintProvider: bool, - }, - serverInfo: struct { - name: string, - version: ?string = null, - }, -}; - -pub const ConfigurationParams = struct { - items: []const ConfigurationItem, - - pub const ConfigurationItem = struct { - section: ?[]const u8, - }; -}; - -pub const RegistrationParams = struct { - registrations: []const Registration, - - pub const Registration = struct { - id: string, - method: string, - - // registerOptions?: LSPAny; - }; -}; - -pub const DocumentHighlightKind = enum(u8) { - Text = 1, - Read = 2, - Write = 3, - - pub fn jsonStringify(value: DocumentHighlightKind, options: std.json.StringifyOptions, out_stream: anytype) !void { - try std.json.stringify(@enumToInt(value), options, out_stream); - } -}; - -pub const DocumentHighlight = struct { - range: Range, - kind: ?DocumentHighlightKind, -}; - -pub const FoldingRange = struct { - startLine: usize, - endLine: usize, -}; - -pub const SelectionRange = struct { - range: Range, - parent: ?*SelectionRange, -}; diff --git a/src/zls.zig b/src/zls.zig index 34290c1..21b1095 100644 --- a/src/zls.zig +++ b/src/zls.zig @@ -2,13 +2,12 @@ // zigbot9001 to take advantage of zls' tools pub const analysis = @import("analysis.zig"); -pub const header = @import("header.zig"); +pub const Header = @import("Header.zig"); pub const offsets = @import("offsets.zig"); -pub const requests = @import("requests.zig"); pub const Config = @import("Config.zig"); pub const Server = @import("Server.zig"); pub const translate_c = @import("translate_c.zig"); -pub const types = @import("types.zig"); +pub const types = @import("lsp.zig"); pub const URI = @import("uri.zig"); pub const DocumentStore = @import("DocumentStore.zig"); pub const ComptimeInterpreter = @import("ComptimeInterpreter.zig"); diff --git a/tests/context.zig b/tests/context.zig index cff28e8..bca2c8b 100644 --- a/tests/context.zig +++ b/tests/context.zig @@ -1,14 +1,21 @@ const std = @import("std"); const zls = @import("zls"); -const headerPkg = zls.header; +const tres = @import("tres"); + +const Header = zls.Header; const Config = zls.Config; const Server = zls.Server; const types = zls.types; -const requests = zls.requests; + 
+/// initialize request taken from Visual Studio Code with the following changes: +/// - removed locale, rootPath, rootUri, trace, workspaceFolders +/// - removed capabilities.workspace.configuration +/// - removed capabilities.workspace.didChangeConfiguration +/// - removed capabilities.textDocument.publishDiagnostics const initialize_msg = - \\{"processId":6896,"clientInfo":{"name":"vscode","version":"1.46.1"},"rootPath":null,"rootUri":null,"capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional"},"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,"workspaceFolders":true},"textDocument":{"publishDiagnostics":{"relatedInformation":true,"versionSupport":false,"tagSupport":{"valueSet":[1,2]},"complexDiagnosticCodeSupport":true},"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true},"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true}},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]}},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":true},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["comment","keyword","number","regexp","operator","namespace","type","struct","class","interface","enum","typeParameter","function","member","macro","variable","parameter","property","label"],"tokenModifiers":["declaration","documentation","static","abstract","deprecated","readonly"]}},"window":{"workDoneP
rogress":true}},"trace":"off","workspaceFolders":[{"uri":"file://./tests", "name":"root"}]} + \\{"processId":0,"clientInfo":{"name":"Visual Studio Code","version":"1.73.1"},"capabilities":{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true,"resourceOperations":["create","rename","delete"],"failureHandling":"textOnlyTransactional","normalizesLineEndings":true,"changeAnnotationSupport":{"groupsOnLabel":true}},"didChangeWatchedFiles":{"dynamicRegistration":true,"relativePatternSupport":true},"symbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"tagSupport":{"valueSet":[1]},"resolveSupport":{"properties":["location.range"]}},"codeLens":{"refreshSupport":true},"executeCommand":{"dynamicRegistration":true},"workspaceFolders":true,"semanticTokens":{"refreshSupport":true},"fileOperations":{"dynamicRegistration":true,"didCreate":true,"didRename":true,"didDelete":true,"willCreate":true,"willRename":true,"willDelete":true},"inlineValue":{"refreshSupport":true},"inlayHint":{"refreshSupport":true},"diagnostics":{"refreshSupport":true}},"textDocument":{"synchronization":{"dynamicRegistration":true,"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":{"dynamicRegistration":true,"contextSupport":true,"completionItem":{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true,"tagSupport":{"valueSet":[1]},"insertReplaceSupport":true,"resolveSupport":{"properties":["documentation","detail","additionalTextEdits"]},"insertTextModeSupport":{"valueSet":[1,2]},"labelDetailsSupport":true},"insertTextMode":2,"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]},"completionList":{"itemDefaults":["commitCharacters","editRange","insertTextFormat","insertTextMode"]}},"hover":{"dynamicRegistration":true,"contentFormat":["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["markdown","plaintext"],"parameterInformation":{"labelOffsetSupport":true},"activeParameterSupport":true},"contextSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"references":{"dynamicRegistration":true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},"hierarchicalDocumentSymbolSupport":true,"tagSupport":{"valueSet":[1]},"labelSupport":true},"codeAction":{"dynamicRegistration":true,"isPreferredSupport":true,"disabledSupport":true,"dataSupport":true,"resolveSupport":{"properties":["edit"]},"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}},"honorsChangeAnnotations":false},"codeLens":{"dynamicRegistration":true},"formatting":{"dynamicRegistration":true},"rangeFormatting":{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},"rename":{"dynamicRegistration":true,"prepareSupport":true,"prepareSupportDefaultBehavior":1,"honorsChangeAnnotations":true},"documentLink":{"dynamicRegistration":true,"tooltipSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true},"colorProvider":{"dynamicRegistration":true},"foldingRange":{"dynamicRegistration":true,"ra
ngeLimit":5000,"lineFoldingOnly":true,"foldingRangeKind":{"valueSet":["comment","imports","region"]},"foldingRange":{"collapsedText":false}},"declaration":{"dynamicRegistration":true,"linkSupport":true},"selectionRange":{"dynamicRegistration":true},"callHierarchy":{"dynamicRegistration":true},"semanticTokens":{"dynamicRegistration":true,"tokenTypes":["namespace","type","class","enum","interface","struct","typeParameter","parameter","variable","property","enumMember","event","function","method","macro","keyword","modifier","comment","string","number","regexp","operator","decorator"],"tokenModifiers":["declaration","definition","readonly","static","deprecated","abstract","async","modification","documentation","defaultLibrary"],"formats":["relative"],"requests":{"range":true,"full":{"delta":true}},"multilineTokenSupport":false,"overlappingTokenSupport":false,"serverCancelSupport":true,"augmentsSyntaxTokens":true},"linkedEditingRange":{"dynamicRegistration":true},"typeHierarchy":{"dynamicRegistration":true},"inlineValue":{"dynamicRegistration":true},"inlayHint":{"dynamicRegistration":true,"resolveSupport":{"properties":["tooltip","textEdits","label.tooltip","label.location","label.command"]}},"diagnostic":{"dynamicRegistration":true,"relatedDocumentSupport":false}},"window":{"showMessage":{"messageActionItem":{"additionalPropertiesSupport":true}},"showDocument":{"support":true},"workDoneProgress":true},"general":{"staleRequestSupport":{"cancel":true,"retryOnContentModified":["textDocument/semanticTokens/full","textDocument/semanticTokens/range","textDocument/semanticTokens/full/delta"]},"regularExpressions":{"engine":"ECMAScript","version":"ES2020"},"markdown":{"parser":"marked","version":"1.1.0"},"positionEncodings":["utf-16"]},"notebookDocument":{"synchronization":{"dynamicRegistration":true,"executionSummarySupport":true}}}} ; const default_config: Config = .{ @@ -23,6 +30,7 @@ const allocator = std.testing.allocator; pub const Context = struct { server: Server, + arena: std.heap.ArenaAllocator, config: *Config, request_id: u32 = 1, @@ -37,11 +45,16 @@ pub const Context = struct { var context: Context = .{ .server = server, + .arena = std.heap.ArenaAllocator.init(allocator), .config = config, }; try context.request("initialize", initialize_msg, null); - try context.request("initialized", "{}", null); + try context.notification("initialized", "{}"); + + // TODO this line shouldn't be needed + context.server.client_capabilities.label_details_support = false; + return context; } @@ -51,6 +64,30 @@ pub const Context = struct { self.request("shutdown", "{}", null) catch {}; self.server.deinit(); + self.arena.deinit(); + } + + pub fn notification( + self: *Context, + method: []const u8, + params: []const u8, + ) !void { + var output = std.ArrayListUnmanaged(u8){}; + defer output.deinit(allocator); + + // create the request + const req = try std.fmt.allocPrint(allocator, + \\{{"jsonrpc":"2.0","method":"{s}","params":{s}}} + , .{ method, params }); + defer allocator.free(req); + + // send the request to the server + self.server.processJsonRpc(&self.arena, req); + + for (self.server.outgoing_messages.items) |outgoing_message| { + self.server.allocator.free(outgoing_message); + } + self.server.outgoing_messages.clearRetainingCapacity(); } pub fn requestAlloc( @@ -58,9 +95,6 @@ pub const Context = struct { method: []const u8, params: []const u8, ) ![]const u8 { - var output = std.ArrayListUnmanaged(u8){}; - defer output.deinit(allocator); - // create the request self.request_id += 1; const req = try 
std.fmt.allocPrint(allocator, @@ -69,20 +103,18 @@ pub const Context = struct { defer allocator.free(req); // send the request to the server - try self.server.processJsonRpc(output.writer(allocator), req); + self.server.processJsonRpc(&self.arena, req); - // get the output from the server - var buffer_stream = std.io.fixedBufferStream(output.items); - const header = try headerPkg.readRequestHeader(allocator, buffer_stream.reader()); - defer header.deinit(allocator); + const messages = self.server.outgoing_messages.items; - var response_bytes = try allocator.alloc(u8, header.content_length); - errdefer allocator.free(response_bytes); + try std.testing.expect(messages.len != 0); - const content_length = try buffer_stream.reader().readAll(response_bytes); - try std.testing.expectEqual(content_length, header.content_length); + for (messages[0..(messages.len - 1)]) |message| { + self.server.allocator.free(message); + } + defer self.server.outgoing_messages.clearRetainingCapacity(); - return response_bytes; + return messages[messages.len - 1]; } pub fn request( @@ -92,7 +124,7 @@ pub const Context = struct { expect: ?[]const u8, ) !void { const response_bytes = try self.requestAlloc(method, params); - defer allocator.free(response_bytes); + defer self.server.allocator.free(response_bytes); const expected = expect orelse return; @@ -118,19 +150,17 @@ pub const Context = struct { // helper pub fn requestDidOpen(self: *Context, uri: []const u8, source: []const u8) !void { - const open_document = requests.OpenDocument{ - .params = .{ - .textDocument = .{ - .uri = uri, - // .languageId = "zig", - // .version = 420, - .text = source, - }, + const open_document = types.DidOpenTextDocumentParams{ + .textDocument = .{ + .uri = uri, + .languageId = "zig", + .version = 420, + .text = source, }, }; - const params = try std.json.stringifyAlloc(allocator, open_document.params, .{}); + const params = try std.json.stringifyAlloc(allocator, open_document, .{}); defer allocator.free(params); - try self.request("textDocument/didOpen", params, null); + try self.notification("textDocument/didOpen", params); } pub fn Response(comptime Result: type) type { @@ -138,35 +168,23 @@ pub const Context = struct { jsonrpc: []const u8, id: types.RequestId, result: Result, - - pub fn deinit(self: @This()) void { - const parse_options = std.json.ParseOptions{ - .allocator = allocator, - .ignore_unknown_fields = true, - }; - std.json.parseFree(@This(), self, parse_options); - } }; } - pub fn requestGetResponse(self: *Context, comptime Result: type, method: []const u8, request_struct: anytype) !Response(Result) { - const params = try std.json.stringifyAlloc(allocator, request_struct.params, .{}); - defer allocator.free(params); + pub fn requestGetResponse(self: *Context, comptime Result: type, method: []const u8, params: anytype) !Response(Result) { + var buffer = std.ArrayListUnmanaged(u8){}; + defer buffer.deinit(allocator); - const response_bytes = try self.requestAlloc(method, params); - defer allocator.free(response_bytes); + try tres.stringify(params, .{}, buffer.writer(allocator)); - const parse_options = std.json.ParseOptions{ - .allocator = allocator, - .ignore_unknown_fields = true, - }; + const response_bytes = try self.requestAlloc(method, buffer.items); + defer self.server.allocator.free(response_bytes); - var token_stream = std.json.TokenStream.init(response_bytes); - const response = try std.json.parse(Response(Result), &token_stream, parse_options); - errdefer std.json.parseFree(Response(Result), response, 
parse_options); + var parser = std.json.Parser.init(self.arena.allocator(), false); + var tree = try parser.parse(try self.arena.allocator().dupe(u8, response_bytes)); // TODO validate jsonrpc and id - return response; + return tres.parse(Response(Result), tree.root, self.arena.allocator()); } }; diff --git a/tests/language_features/cimport.zig b/tests/language_features/cimport.zig index 894d132..92a867f 100644 --- a/tests/language_features/cimport.zig +++ b/tests/language_features/cimport.zig @@ -68,8 +68,8 @@ fn testConvertCInclude(cimport_source: []const u8, expected: []const u8) !void { => {}, else => continue, } - - if(!std.mem.eql(u8, ast.tokenSlice(main_tokens[index]), "@cImport")) continue; + + if (!std.mem.eql(u8, ast.tokenSlice(main_tokens[index]), "@cImport")) continue; break :blk @intCast(Ast.Node.Index, index); } diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig index b5f18b9..9f6ec86 100644 --- a/tests/lsp_features/completion.zig +++ b/tests/lsp_features/completion.zig @@ -8,13 +8,12 @@ const ErrorBuilder = @import("../ErrorBuilder.zig"); const types = zls.types; const offsets = zls.offsets; -const requests = zls.requests; const allocator: std.mem.Allocator = std.testing.allocator; const Completion = struct { label: []const u8, - kind: types.CompletionItem.Kind, + kind: types.CompletionItemKind, detail: ?[]const u8 = null, }; @@ -412,16 +411,13 @@ fn testCompletion(source: []const u8, expected_completions: []const Completion) try ctx.requestDidOpen(test_uri, text); - const request = requests.Completion{ - .params = .{ - .textDocument = .{ .uri = test_uri }, - .position = offsets.indexToPosition(source, cursor_idx, ctx.server.offset_encoding), - }, + const params = types.CompletionParams{ + .textDocument = .{ .uri = test_uri }, + .position = offsets.indexToPosition(source, cursor_idx, ctx.server.offset_encoding), }; - @setEvalBranchQuota(2000); - const response = try ctx.requestGetResponse(?types.CompletionList, "textDocument/completion", request); - defer response.deinit(); + @setEvalBranchQuota(5000); + const response = try ctx.requestGetResponse(?types.CompletionList, "textDocument/completion", params); const completion_list: types.CompletionList = response.result orelse { std.debug.print("Server returned `null` as the result\n", .{}); @@ -462,11 +458,11 @@ fn testCompletion(source: []const u8, expected_completions: []const Completion) unreachable; }; - if (expected_completion.kind != actual_completion.kind) { - try error_builder.msgAtIndex("label '{s}' should be of kind '{s}' but was '{s}'!", cursor_idx, .err, .{ + if (actual_completion.kind == null or expected_completion.kind != actual_completion.kind.?) 
{ + try error_builder.msgAtIndex("label '{s}' should be of kind '{s}' but was '{?s}'!", cursor_idx, .err, .{ label, @tagName(expected_completion.kind), - @tagName(actual_completion.kind), + if (actual_completion.kind) |kind| @tagName(kind) else null, }); return error.InvalidCompletionKind; } @@ -500,9 +496,15 @@ fn extractCompletionLabels(items: anytype) error{ DuplicateCompletionLabel, OutO errdefer set.deinit(allocator); try set.ensureTotalCapacity(allocator, items.len); for (items) |item| { - switch (item.kind) { - .Keyword, .Snippet => continue, - else => {}, + const maybe_kind = switch (@typeInfo(@TypeOf(item.kind))) { + .Optional => item.kind, + else => @as(?@TypeOf(item.kind), item.kind), + }; + if (maybe_kind) |kind| { + switch (kind) { + .Keyword, .Snippet => continue, + else => {}, + } } if (set.fetchPutAssumeCapacity(item.label, {}) != null) return error.DuplicateCompletionLabel; } diff --git a/tests/lsp_features/folding_range.zig b/tests/lsp_features/folding_range.zig index 956d4f1..7db3908 100644 --- a/tests/lsp_features/folding_range.zig +++ b/tests/lsp_features/folding_range.zig @@ -2,10 +2,11 @@ const std = @import("std"); const zls = @import("zls"); const builtin = @import("builtin"); +const tres = @import("tres"); + const Context = @import("../context.zig").Context; const types = zls.types; -const requests = zls.requests; const allocator: std.mem.Allocator = std.testing.allocator; @@ -48,15 +49,16 @@ fn testFoldingRange(source: []const u8, expect: []const u8) !void { try ctx.requestDidOpen(test_uri, source); - const request = requests.FoldingRange{ .params = .{ .textDocument = .{ .uri = test_uri } } }; + const params = types.FoldingRangeParams{ .textDocument = .{ .uri = test_uri } }; - const response = try ctx.requestGetResponse(?[]types.FoldingRange, "textDocument/foldingRange", request); - defer response.deinit(); + const response = try ctx.requestGetResponse(?[]types.FoldingRange, "textDocument/foldingRange", params); var actual = std.ArrayList(u8).init(allocator); defer actual.deinit(); - try std.json.stringify(response.result, .{}, actual.writer()); + try tres.stringify(response.result, .{ + .emit_null_optional_fields = false, + }, actual.writer()); try expectEqualJson(expect, actual.items); } diff --git a/tests/lsp_features/inlay_hints.zig b/tests/lsp_features/inlay_hints.zig index 407e5ab..8660aa8 100644 --- a/tests/lsp_features/inlay_hints.zig +++ b/tests/lsp_features/inlay_hints.zig @@ -8,7 +8,6 @@ const ErrorBuilder = @import("../ErrorBuilder.zig"); const types = zls.types; const offsets = zls.offsets; -const requests = zls.requests; const allocator: std.mem.Allocator = std.testing.allocator; @@ -83,7 +82,7 @@ fn testInlayHints(source: []const u8) !void { const range = types.Range{ .start = types.Position{ .line = 0, .character = 0 }, - .end = offsets.indexToPosition(phr.new_source, phr.new_source.len, .utf16), + .end = offsets.indexToPosition(phr.new_source, phr.new_source.len, .@"utf-16"), }; const InlayHint = struct { @@ -92,15 +91,12 @@ fn testInlayHints(source: []const u8) !void { kind: types.InlayHintKind, }; - const request = requests.InlayHint{ - .params = .{ - .textDocument = .{ .uri = test_uri }, - .range = range, - }, + const params = types.InlayHintParams{ + .textDocument = .{ .uri = test_uri }, + .range = range, }; - const response = try ctx.requestGetResponse(?[]InlayHint, "textDocument/inlayHint", request); - defer response.deinit(); + const response = try ctx.requestGetResponse(?[]InlayHint, "textDocument/inlayHint", params); const hints: 
[]InlayHint = response.result orelse { std.debug.print("Server returned `null` as the result\n", .{}); @@ -124,7 +120,7 @@ fn testInlayHints(source: []const u8) !void { for (hints) |hint| { if (position.line != hint.position.line or position.character != hint.position.character) continue; - const actual_label = hint.label[0 .. hint.label.len - 1]; // exclude : + const actual_label = hint.label[0..hint.label.len]; if (!std.mem.eql(u8, expected_label, actual_label)) { try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", new_loc, .err, .{ expected_label, actual_label }); diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig index a595094..e2632e3 100644 --- a/tests/lsp_features/references.zig +++ b/tests/lsp_features/references.zig @@ -7,7 +7,6 @@ const Context = @import("../context.zig").Context; const ErrorBuilder = @import("../ErrorBuilder.zig"); const types = zls.types; -const requests = zls.requests; const offsets = zls.offsets; const allocator: std.mem.Allocator = std.testing.allocator; @@ -113,16 +112,13 @@ fn testReferences(source: []const u8) !void { const var_name = offsets.locToSlice(source, var_loc); const var_loc_middle = var_loc.start + (var_loc.end - var_loc.start) / 2; - const request = requests.References{ - .params = .{ - .textDocument = .{ .uri = file_uri }, - .position = offsets.indexToPosition(source, var_loc_middle, ctx.server.offset_encoding), - .context = .{ .includeDeclaration = true }, - }, + const params = types.ReferenceParams{ + .textDocument = .{ .uri = file_uri }, + .position = offsets.indexToPosition(source, var_loc_middle, ctx.server.offset_encoding), + .context = .{ .includeDeclaration = true }, }; - const response = try ctx.requestGetResponse(?[]types.Location, "textDocument/references", request); - defer response.deinit(); + const response = try ctx.requestGetResponse(?[]types.Location, "textDocument/references", params); const locations: []types.Location = response.result orelse { std.debug.print("Server returned `null` as the result\n", .{}); diff --git a/tests/lsp_features/selection_range.zig b/tests/lsp_features/selection_range.zig index 0b92d28..95acdf5 100644 --- a/tests/lsp_features/selection_range.zig +++ b/tests/lsp_features/selection_range.zig @@ -38,20 +38,19 @@ fn testSelectionRange(source: []const u8, want: []const []const u8) !void { try ctx.requestDidOpen(test_uri, phr.new_source); - const position = offsets.locToRange(phr.new_source, phr.locations.items(.new)[0], .utf16).start; + const position = offsets.locToRange(phr.new_source, phr.locations.items(.new)[0], .@"utf-16").start; const SelectionRange = struct { range: types.Range, - parent: ?*@This(), + parent: ?*@This() = null, }; - const request = requests.SelectionRange{ .params = .{ + const params = types.SelectionRangeParams{ .textDocument = .{ .uri = test_uri }, .positions = &[_]types.Position{position}, - } }; + }; - const response = try ctx.requestGetResponse(?[]SelectionRange, "textDocument/selectionRange", request); - defer response.deinit(); + const response = try ctx.requestGetResponse(?[]SelectionRange, "textDocument/selectionRange", params); const selectionRanges: []SelectionRange = response.result orelse { std.debug.print("Server returned `null` as the result\n", .{}); @@ -63,7 +62,7 @@ fn testSelectionRange(source: []const u8, want: []const []const u8) !void { var it: ?*SelectionRange = &selectionRanges[0]; while (it) |r| { - const slice = offsets.rangeToSlice(phr.new_source, r.range, .utf16); + const slice = 
offsets.rangeToSlice(phr.new_source, r.range, .@"utf-16"); (try got.addOne()).* = slice; it = r.parent; } diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index ba906a9..459a0ea 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -4,7 +4,7 @@ const builtin = @import("builtin"); const Context = @import("../context.zig").Context; -const requests = zls.requests; +const types = zls.types; const allocator: std.mem.Allocator = std.testing.allocator; @@ -41,21 +41,7 @@ fn testSemanticTokens(source: []const u8, expected: []const u32) !void { var ctx = try Context.init(); defer ctx.deinit(); - const open_document = requests.OpenDocument{ - .params = .{ - .textDocument = .{ - .uri = file_uri, - // .languageId = "zig", - // .version = 420, - .text = source, - }, - }, - }; - - const did_open_method = try std.json.stringifyAlloc(allocator, open_document.params, .{}); - defer allocator.free(did_open_method); - - try ctx.request("textDocument/didOpen", did_open_method, null); + try ctx.requestDidOpen(file_uri, source); const Response = struct { data: []const u32, diff --git a/tests/utility/offsets.zig b/tests/utility/offsets.zig index 2a765a8..5dc189a 100644 --- a/tests/utility/offsets.zig +++ b/tests/utility/offsets.zig @@ -107,13 +107,13 @@ fn testIndexPosition(text: []const u8, index: usize, line: u32, characters: [3]u const position16: types.Position = .{ .line = line, .character = characters[1] }; const position32: types.Position = .{ .line = line, .character = characters[2] }; - try std.testing.expectEqual(position8, offsets.indexToPosition(text, index, .utf8)); - try std.testing.expectEqual(position16, offsets.indexToPosition(text, index, .utf16)); - try std.testing.expectEqual(position32, offsets.indexToPosition(text, index, .utf32)); + try std.testing.expectEqual(position8, offsets.indexToPosition(text, index, .@"utf-8")); + try std.testing.expectEqual(position16, offsets.indexToPosition(text, index, .@"utf-16")); + try std.testing.expectEqual(position32, offsets.indexToPosition(text, index, .@"utf-32")); - try std.testing.expectEqual(index, offsets.positionToIndex(text, position8, .utf8)); - try std.testing.expectEqual(index, offsets.positionToIndex(text, position16, .utf16)); - try std.testing.expectEqual(index, offsets.positionToIndex(text, position32, .utf32)); + try std.testing.expectEqual(index, offsets.positionToIndex(text, position8, .@"utf-8")); + try std.testing.expectEqual(index, offsets.positionToIndex(text, position16, .@"utf-16")); + try std.testing.expectEqual(index, offsets.positionToIndex(text, position32, .@"utf-32")); } fn testTokenToLoc(text: [:0]const u8, token_index: std.zig.Ast.TokenIndex, start: usize, end: usize) !void { @@ -135,7 +135,7 @@ fn testTokenIndexToLoc(text: [:0]const u8, index: usize, start: usize, end: usiz fn testAdvancePosition(text: [:0]const u8, expected_line: u32, expected_character: u32, line: u32, character: u32, from: usize, to: usize) !void { const expected: types.Position = .{ .line = expected_line, .character = expected_character }; - const actual = offsets.advancePosition(text, .{ .line = line, .character = character }, from, to, .utf16); + const actual = offsets.advancePosition(text, .{ .line = line, .character = character }, from, to, .@"utf-16"); try std.testing.expectEqual(expected, actual); } @@ -143,9 +143,9 @@ fn testAdvancePosition(text: [:0]const u8, expected_line: u32, expected_characte fn testConvertPositionEncoding(text: [:0]const u8, line: 
u32, character: u32, new_characters: [3]u32) !void { const position: types.Position = .{ .line = line, .character = character }; - const position8 = offsets.convertPositionEncoding(text, position, .utf8, .utf8); - const position16 = offsets.convertPositionEncoding(text, position, .utf8, .utf16); - const position32 = offsets.convertPositionEncoding(text, position, .utf8, .utf32); + const position8 = offsets.convertPositionEncoding(text, position, .@"utf-8", .@"utf-8"); + const position16 = offsets.convertPositionEncoding(text, position, .@"utf-8", .@"utf-16"); + const position32 = offsets.convertPositionEncoding(text, position, .@"utf-8", .@"utf-32"); try std.testing.expectEqual(line, position8.line); try std.testing.expectEqual(line, position16.line); @@ -157,13 +157,13 @@ fn testConvertPositionEncoding(text: [:0]const u8, line: u32, character: u32, ne } fn testCountCodeUnits(text: []const u8, counts: [3]usize) !void { - try std.testing.expectEqual(counts[0], offsets.countCodeUnits(text, .utf8)); - try std.testing.expectEqual(counts[1], offsets.countCodeUnits(text, .utf16)); - try std.testing.expectEqual(counts[2], offsets.countCodeUnits(text, .utf32)); + try std.testing.expectEqual(counts[0], offsets.countCodeUnits(text, .@"utf-8")); + try std.testing.expectEqual(counts[1], offsets.countCodeUnits(text, .@"utf-16")); + try std.testing.expectEqual(counts[2], offsets.countCodeUnits(text, .@"utf-32")); } fn testGetNCodeUnitByteCount(text: []const u8, n: [3]usize) !void { - try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[0], .utf8)); - try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[1], .utf16)); - try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[2], .utf32)); + try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[0], .@"utf-8")); + try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[1], .@"utf-16")); + try std.testing.expectEqual(n[0], offsets.getNCodeUnitByteCount(text, n[2], .@"utf-32")); } From faee213658609a51e34eaaa616ab5de844ee198e Mon Sep 17 00:00:00 2001 From: Rekai Musuka Date: Wed, 28 Dec 2022 22:59:00 -0400 Subject: [PATCH 05/18] chore: add command for updating master.zig on windows (#859) --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 806e60c..f2520e8 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,13 @@ zig build -Drelease-safe There is a `generate-data.py` in the `src/data` folder, run this file to update data files. It writes to stdout and you can redirect output to a zig file like `master.zig`. By default it generates data file for `master`, but can be configured to generate for a different version by modifying the `zig_version` variable. Files generated by this tool **contains** formatting information. +On PowerShell 5.1 (the default PowerShell on Windows 10 & 11), the following will update `master.zig`. +```pwsh +New-Item -Force .\src\data\master.zig -Value ((python .\src\data\generate-data.py) -split "`r?`n" -join "`n") +``` + +This unwieldy command is necessary in order to guarantee Unix-style line endings and UTF-8 text encoding. + There is also a `generate-data.js` in the `src/data` folder, you'll need to run this inside a Chrome DevTools console and copy the output. Files generated by this tool **does not contain** formatting information.
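For the Unix side, the plain stdout redirect described earlier in this README section is enough, since LF line endings and UTF-8 output are the usual defaults there. This is an editorial sketch rather than a line from the original README; it assumes the same default script location and output file as the PowerShell command above.

```bash
# Regenerate the builtin data for master and overwrite src/data/master.zig.
python src/data/generate-data.py > src/data/master.zig
```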
### Configuration Options From ebe3ba1471edfd46c29ed386d19e9a27b062434c Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Thu, 29 Dec 2022 05:59:19 +0000 Subject: [PATCH 06/18] Memory lifetime fixes (#861) * fix memory lifetime issues * more memory lifetime issue fixes --- src/DocumentStore.zig | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 3165859..ee17f81 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -146,12 +146,11 @@ pub fn openDocument(self: *DocumentStore, uri: Uri, text: []const u8) error{OutO return handle.*; } - const duped_text = try self.allocator.dupeZ(u8, text); - errdefer self.allocator.free(duped_text); - var handle = try self.allocator.create(Handle); errdefer self.allocator.destroy(handle); + const duped_text = try self.allocator.dupeZ(u8, text); + handle.* = try self.createDocument(uri, duped_text, true); errdefer handle.deinit(self.allocator); @@ -580,7 +579,9 @@ fn uriInImports( return false; // consider it checked even if a failure happens - try checked_uris.put(try self.allocator.dupe(u8, source_uri), {}); + + const duped_uri = try self.allocator.dupe(u8, source_uri); + checked_uris.put(duped_uri, {}) catch self.allocator.free(duped_uri); const handle = self.getOrLoadHandle(source_uri) orelse return false; @@ -638,11 +639,16 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro // TODO: Better logic for detecting std or subdirectories? const in_std = std.mem.indexOf(u8, uri, "/std/") != null; if (self.config.zig_exe_path != null and std.mem.endsWith(u8, uri, "/build.zig") and !in_std) { - errdefer |err| log.debug("Failed to load build file {s}: (error: {})", .{ uri, err }); - const duped_uri = try self.allocator.dupe(u8, uri); - var build_file = try self.createBuildFile(duped_uri); - errdefer build_file.deinit(self.allocator); - try self.build_files.putNoClobber(self.allocator, build_file.uri, build_file); + const gop = try self.build_files.getOrPut(self.allocator, uri); + errdefer |err| { + self.build_files.swapRemoveAt(gop.index); + log.debug("Failed to load build file {s}: (error: {})", .{ uri, err }); + } + if(!gop.found_existing) { + const duped_uri = try self.allocator.dupe(u8, uri); + gop.value_ptr.* = try self.createBuildFile(duped_uri); + gop.key_ptr.* = gop.value_ptr.uri; + } handle.is_build_file = true; } else if (self.config.zig_exe_path != null and !std.mem.endsWith(u8, uri, "/builtin.zig") and !in_std) blk: { log.debug("Going to walk down the tree towards: {s}", .{uri}); From aabdb0c6ecb3c9a47feff2c2bfb9be4e95adf723 Mon Sep 17 00:00:00 2001 From: Rekai Musuka Date: Thu, 29 Dec 2022 02:00:32 -0400 Subject: [PATCH 07/18] fix: update master.zig to reflect changes to builtins (#858) --- src/data/master.zig | 50 +++++++++++++++++++-------------------------- 1 file changed, 21 insertions(+), 29 deletions(-) diff --git a/src/data/master.zig b/src/data/master.zig index bc38179..b116201 100644 --- a/src/data/master.zig +++ b/src/data/master.zig @@ -21,16 +21,14 @@ pub const builtins = [_]Builtin{ }, .{ .name = "@addWithOverflow", - .signature = "@addWithOverflow(comptime T: type, a: T, b: T, result: *T) bool", - .snippet = "@addWithOverflow(${1:comptime T: type}, ${2:a: T}, ${3:b: T}, ${4:result: *T})", + .signature = "@addWithOverflow(a: anytype, b: anytype) struct { @TypeOf(a, b), u1 }", + .snippet = "@addWithOverflow(${1:a: anytype}, ${2:b: anytype})", .documentation = - 
\\Performs `result.* = a + b`. If overflow or underflow occurs, stores the overflowed bits in `result` and returns `true`. If no overflow or underflow occurs, returns `false`. + \\Performs `a + b` and returns a tuple with the result and a possible overflow bit. , .arguments = &.{ - "comptime T: type", - "a: T", - "b: T", - "result: *T", + "a: anytype", + "b: anytype", }, }, .{ @@ -1066,16 +1064,14 @@ pub const builtins = [_]Builtin{ }, .{ .name = "@mulWithOverflow", - .signature = "@mulWithOverflow(comptime T: type, a: T, b: T, result: *T) bool", - .snippet = "@mulWithOverflow(${1:comptime T: type}, ${2:a: T}, ${3:b: T}, ${4:result: *T})", + .signature = "@mulWithOverflow(a: anytype, b: anytype) struct { @TypeOf(a, b), u1 }", + .snippet = "@mulWithOverflow(${1:a: anytype}, ${2:b: anytype})", .documentation = - \\Performs `result.* = a * b`. If overflow or underflow occurs, stores the overflowed bits in `result` and returns `true`. If no overflow or underflow occurs, returns `false`. + \\Performs `a * b` and returns a tuple with the result and a possible overflow bit. , .arguments = &.{ - "comptime T: type", - "a: T", - "b: T", - "result: *T", + "a: anytype", + "b: anytype", }, }, .{ @@ -1326,18 +1322,16 @@ pub const builtins = [_]Builtin{ }, .{ .name = "@shlWithOverflow", - .signature = "@shlWithOverflow(comptime T: type, a: T, shift_amt: Log2T, result: *T) bool", - .snippet = "@shlWithOverflow(${1:comptime T: type}, ${2:a: T}, ${3:shift_amt: Log2T}, ${4:result: *T})", + .signature = "@shlWithOverflow(a: anytype, shift_amt: Log2T) struct { @TypeOf(a), u1 }", + .snippet = "@shlWithOverflow(${1:a: anytype}, ${2:shift_amt: Log2T})", .documentation = - \\Performs `result.* = a << b`. If overflow or underflow occurs, stores the overflowed bits in `result` and returns `true`. If no overflow or underflow occurs, returns `false`. + \\Performs `a << b` and returns a tuple with the result and a possible overflow bit. \\ - \\The type of `shift_amt` is an unsigned integer with `log2(@typeInfo(T).Int.bits)` bits. This is because `shift_amt >= @typeInfo(T).Int.bits` is undefined behavior. + \\The type of `shift_amt` is an unsigned integer with `log2(@typeInfo(@TypeOf(a)).Int.bits)` bits. This is because `shift_amt >= @typeInfo(@TypeOf(a)).Int.bits` is undefined behavior. , .arguments = &.{ - "comptime T: type", - "a: T", + "a: anytype", "shift_amt: Log2T", - "result: *T", }, }, .{ @@ -1619,16 +1613,14 @@ pub const builtins = [_]Builtin{ }, .{ .name = "@subWithOverflow", - .signature = "@subWithOverflow(comptime T: type, a: T, b: T, result: *T) bool", - .snippet = "@subWithOverflow(${1:comptime T: type}, ${2:a: T}, ${3:b: T}, ${4:result: *T})", + .signature = "@subWithOverflow(a: anytype, b: anytype) struct { @TypeOf(a, b), u1 }", + .snippet = "@subWithOverflow(${1:a: anytype}, ${2:b: anytype})", .documentation = - \\Performs `result.* = a - b`. If overflow or underflow occurs, stores the overflowed bits in `result` and returns `true`. If no overflow or underflow occurs, returns `false`. + \\Performs `a - b` and returns a tuple with the result and a possible overflow bit. 
, .arguments = &.{ - "comptime T: type", - "a: T", - "b: T", - "result: *T", + "a: anytype", + "b: anytype", }, }, .{ @@ -1787,4 +1779,4 @@ pub const builtins = [_]Builtin{ "Element: type", }, }, -}; +}; \ No newline at end of file From c6d74dbca565105b49655e50c062758e6af99127 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lan=20Cr=C3=ADstoffer?= Date: Thu, 29 Dec 2022 19:36:38 +0100 Subject: [PATCH 08/18] fix: tres not available in a nix environment. (#855) --- build.zig | 6 ++++-- flake.lock | 29 +++++++++++++++++++++++------ flake.nix | 7 +++++-- src/Server.zig | 7 +++---- 4 files changed, 35 insertions(+), 14 deletions(-) diff --git a/build.zig b/build.zig index 710dc65..c4fea91 100644 --- a/build.zig +++ b/build.zig @@ -92,7 +92,9 @@ pub fn build(b: *std.build.Builder) !void { const known_folders_path = b.option([]const u8, "known-folders", "Path to known-folders package (default: " ++ KNOWN_FOLDERS_DEFAULT_PATH ++ ")") orelse KNOWN_FOLDERS_DEFAULT_PATH; exe.addPackage(.{ .name = "known-folders", .source = .{ .path = known_folders_path } }); - exe.addPackage(.{ .name = "tres", .source = .{ .path = "src/tres/tres.zig" } }); + const TRES_DEFAULT_PATH = "src/tres/tres.zig"; + const tres_path = b.option([]const u8, "tres", "Path to tres package (default: " ++ TRES_DEFAULT_PATH ++ ")") orelse TRES_DEFAULT_PATH; + exe.addPackage(.{ .name = "tres", .source = .{ .path = tres_path } }); if (enable_tracy) { const client_cpp = "src/tracy/TracyClient.cpp"; @@ -148,7 +150,7 @@ pub fn build(b: *std.build.Builder) !void { } tests.addPackage(.{ .name = "zls", .source = .{ .path = "src/zls.zig" }, .dependencies = exe.packages.items }); - tests.addPackage(.{ .name = "tres", .source = .{ .path = "src/tres/tres.zig" } }); + tests.addPackage(.{ .name = "tres", .source = .{ .path = tres_path } }); tests.setBuildMode(.Debug); tests.setTarget(target); test_step.dependOn(&tests.step); diff --git a/flake.lock b/flake.lock index 99cfc96..21f8a55 100644 --- a/flake.lock +++ b/flake.lock @@ -68,11 +68,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1670086663, - "narHash": "sha256-hT8C8AQB74tdoCPwz4nlJypLMD7GI2F5q+vn+VE/qQk=", + "lastModified": 1672057183, + "narHash": "sha256-GN7/10DNNvs1FPj9tlZA2qgNdFuYKKuS3qlHTqAxasQ=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "813836d64fa57285d108f0dbf2356457ccd304e3", + "rev": "b139e44d78c36c69bcbb825b20dbfa51e7738347", "type": "github" }, "original": { @@ -88,9 +88,26 @@ "gitignore": "gitignore", "known-folders": "known-folders", "nixpkgs": "nixpkgs", + "tres": "tres", "zig-overlay": "zig-overlay" } }, + "tres": { + "flake": false, + "locked": { + "lastModified": 1672008284, + "narHash": "sha256-AtM9SV56PEud1MfbKDZMU2FlsNrI46PkcFQh3yMcDX0=", + "owner": "ziglibs", + "repo": "tres", + "rev": "16774b94efa61757a5302a690837dfb8cf750a11", + "type": "github" + }, + "original": { + "owner": "ziglibs", + "repo": "tres", + "type": "github" + } + }, "zig-overlay": { "inputs": { "flake-utils": "flake-utils_2", @@ -99,11 +116,11 @@ ] }, "locked": { - "lastModified": 1670113356, - "narHash": "sha256-43aMRMU0OuBin6M2LM+nxVG+whazyHuHnUvu92xoth0=", + "lastModified": 1672142864, + "narHash": "sha256-uXljuSZK8DP5c4o9u+gcF+Yc3dKYH1wsHmDpWcFBVRQ=", "owner": "mitchellh", "repo": "zig-overlay", - "rev": "17352071583eda4be43fa2a312f6e061326374f7", + "rev": "16e9191142d2a13d7870c03e500842321a466a74", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 22b3313..4a44b2e 100644 --- a/flake.nix +++ b/flake.nix @@ -12,9 +12,12 @@ known-folders.url = 
"github:ziglibs/known-folders"; known-folders.flake = false; + + tres.url = "github:ziglibs/tres"; + tres.flake = false; }; - outputs = { self, nixpkgs, zig-overlay, gitignore, flake-utils, known-folders }: + outputs = { self, nixpkgs, zig-overlay, gitignore, flake-utils, known-folders, tres }: let systems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ]; inherit (gitignore.lib) gitignoreSource; @@ -35,7 +38,7 @@ dontInstall = true; buildPhase = '' mkdir -p $out - zig build install -Dcpu=baseline -Drelease-safe=true -Ddata_version=master -Dknown-folders=${known-folders}/known-folders.zig --prefix $out + zig build install -Dcpu=baseline -Drelease-safe=true -Ddata_version=master -Dtres=${tres}/tres.zig -Dknown-folders=${known-folders}/known-folders.zig --prefix $out ''; XDG_CACHE_HOME = ".cache"; }; diff --git a/src/Server.zig b/src/Server.zig index 45610b0..27c6ced 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -24,7 +24,7 @@ const ComptimeInterpreter = @import("ComptimeInterpreter.zig"); const data = @import("data/data.zig"); const snipped_data = @import("data/snippets.zig"); -const tres = @import("tres/tres.zig"); +const tres = @import("tres"); const log = std.log.scoped(.server); @@ -188,7 +188,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) !types.Pub defer tracy_zone.end(); std.debug.assert(server.client_capabilities.supports_publish_diagnostics); - + const tree = handle.tree; var allocator = server.arena.allocator(); @@ -1885,7 +1885,7 @@ fn openDocumentHandler(server: *Server, notification: types.DidOpenTextDocumentP defer tracy_zone.end(); const handle = try server.document_store.openDocument(notification.textDocument.uri, notification.textDocument.text); - + if (server.client_capabilities.supports_publish_diagnostics) { const diagnostics = try server.generateDiagnostics(handle); server.sendNotification("textDocument/publishDiagnostics", diagnostics); @@ -2128,7 +2128,6 @@ fn hoverHandler(server: *Server, request: types.HoverParams) !?types.Hover { else => null, }; - // TODO: Figure out a better solution for comptime interpreter diags if (server.client_capabilities.supports_publish_diagnostics) { const diagnostics = try server.generateDiagnostics(handle.*); From 978e41b8a53569659f3e60b7e74d83c5052a81b2 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Thu, 29 Dec 2022 23:20:12 +0000 Subject: [PATCH 09/18] always use scoped logs instead of default (#864) --- src/DocumentStore.zig | 6 +++--- src/Server.zig | 17 +++++++++-------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index ee17f81..887070f 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -279,7 +279,7 @@ fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void { i += 1; continue; } - std.log.debug("Closing document {s}", .{handle.uri}); + log.debug("Closing document {s}", .{handle.uri}); var kv = self.handles.fetchSwapRemove(handle.uri).?; kv.value.deinit(self.allocator); self.allocator.destroy(kv.value); @@ -313,7 +313,7 @@ fn garbageCollectionCImports(self: *DocumentStore) error{OutOfMemory}!void { .failure => "", .success => |uri| uri, }; - std.log.debug("Destroying cimport {s}", .{message}); + log.debug("Destroying cimport {s}", .{message}); kv.value.deinit(self.allocator); } } @@ -339,7 +339,7 @@ fn garbageCollectionBuildFiles(self: *DocumentStore) error{OutOfMemory}!void { continue; } var kv = 
self.build_files.fetchSwapRemove(hash).?; - std.log.debug("Destroying build file {s}", .{kv.value.uri}); + log.debug("Destroying build file {s}", .{kv.value.uri}); kv.value.deinit(self.allocator); } } diff --git a/src/Server.zig b/src/Server.zig index 27c6ced..d34fab3 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -1475,7 +1475,7 @@ fn kindToSortScore(kind: types.CompletionItemKind) ?[]const u8 { => "6_", else => { - std.log.debug(@typeName(types.CompletionItemKind) ++ "{s} has no sort score specified!", .{@tagName(kind)}); + log.debug(@typeName(types.CompletionItemKind) ++ "{s} has no sort score specified!", .{@tagName(kind)}); return null; }, }; @@ -1551,7 +1551,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In defer tracy_zone.end(); if (request.clientInfo) |clientInfo| { - std.log.info("client is '{s}-{s}'", .{ clientInfo.name, clientInfo.version orelse "" }); + log.info("client is '{s}-{s}'", .{ clientInfo.name, clientInfo.version orelse "" }); if (std.mem.eql(u8, clientInfo.name, "Sublime Text LSP")) blk: { server.config.max_detail_length = 256; @@ -1750,7 +1750,7 @@ fn initializedHandler(server: *Server, notification: types.InitializedParams) !v _ = notification; if (server.status != .initializing) { - std.log.warn("received a initialized notification but the server has not send a initialize request!", .{}); + log.warn("received a initialized notification but the server has not send a initialize request!", .{}); } server.status = .initialized; @@ -1870,10 +1870,11 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory} else => @compileError("Not implemented for " ++ @typeName(ft)), }, }; - log.debug("setting configuration option '{s}' to '{any}'", .{ field.name, new_value }); + // log.debug("setting configuration option '{s}' to '{any}'", .{ field.name, new_value }); @field(server.config, field.name) = new_value; } } + log.debug("{}", .{server.client_capabilities}); configuration.configChanged(server.config, server.allocator, null) catch |err| { log.err("failed to update configuration: {}", .{err}); @@ -2865,18 +2866,18 @@ pub fn processJsonRpc( defer parser.deinit(); var tree = parser.parse(json) catch { - std.log.err("failed to parse message!", .{}); + log.err("failed to parse message!", .{}); return; // maybe panic? }; defer tree.deinit(); const message = Message.fromJsonValueTree(tree) catch { - std.log.err("failed to parse message!", .{}); + log.err("failed to parse message!", .{}); return; // maybe panic? 
}; server.processMessage(message) catch |err| { - std.log.err("got {} while processing message!", .{err}); // TODO include message information + log.err("got {} while processing message!", .{err}); // TODO include message information switch (message) { .RequestMessage => |request| server.sendResponseError(request.id, .{ .code = @errorToInt(err), @@ -2909,7 +2910,7 @@ fn processMessage(server: *Server, message: Message) Error!void { return; } - std.log.warn("received response from client with id '{s}' that has no handler!", .{response.id.string}); + log.warn("received response from client with id '{s}' that has no handler!", .{response.id.string}); return; }, } From c88562ca789dbe3083a35eaf30727002955b8646 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Thu, 29 Dec 2022 23:21:26 +0000 Subject: [PATCH 10/18] optimize `build.zig` discovery (#863) --- src/DocumentStore.zig | 67 ++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 33 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 887070f..7e5d2cd 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -66,7 +66,6 @@ pub const Handle = struct { /// `DocumentStore.build_files` is guaranteed to contain this uri /// uri memory managed by its build_file associated_build_file: ?Uri = null, - is_build_file: bool = false, pub fn deinit(self: *Handle, allocator: std.mem.Allocator) void { self.document_scope.deinit(allocator); @@ -225,7 +224,7 @@ pub fn applySave(self: *DocumentStore, handle: *const Handle) !void { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - if (handle.is_build_file) { + if (isBuildFile(handle.uri)) { const build_file = self.build_files.getPtr(handle.uri).?; const build_config = loadBuildConfiguration(self.allocator, build_file.*, self.config.*) catch |err| { @@ -344,6 +343,19 @@ fn garbageCollectionBuildFiles(self: *DocumentStore) error{OutOfMemory}!void { } } +pub fn isBuildFile(uri: Uri) bool { + return std.mem.endsWith(u8, uri, "/build.zig"); +} + +pub fn isBuiltinFile(uri: Uri) bool { + return std.mem.endsWith(u8, uri, "/builtin.zig"); +} + +pub fn isInStd(uri: Uri) bool { + // TODO: Better logic for detecting std or subdirectories? 
+ return std.mem.indexOf(u8, uri, "/std/") != null; +} + /// looks for a `zls.build.json` file in the build file directory /// has to be freed with `std.json.parseFree` fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: BuildFile) !BuildAssociatedConfig { @@ -545,14 +557,8 @@ fn uriAssociatedWithBuild( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var checked_uris = std.StringHashMap(void).init(self.allocator); - defer { - var it = checked_uris.iterator(); - while (it.next()) |entry| - self.allocator.free(entry.key_ptr.*); - - checked_uris.deinit(); - } + var checked_uris = std.StringHashMapUnmanaged(void){}; + defer checked_uris.deinit(self.allocator); for (build_file.config.packages) |package| { const package_uri = try URI.fromPath(self.allocator, package.path); @@ -562,7 +568,7 @@ fn uriAssociatedWithBuild( return true; } - if (try self.uriInImports(&checked_uris, package_uri, uri)) + if (try self.uriInImports(&checked_uris, build_file, package_uri, uri)) return true; } @@ -571,25 +577,30 @@ fn uriAssociatedWithBuild( fn uriInImports( self: *DocumentStore, - checked_uris: *std.StringHashMap(void), + checked_uris: *std.StringHashMapUnmanaged(void), + build_file: BuildFile, source_uri: Uri, uri: Uri, ) error{OutOfMemory}!bool { if (checked_uris.contains(source_uri)) return false; - // consider it checked even if a failure happens + if (isInStd(source_uri)) return false; - const duped_uri = try self.allocator.dupe(u8, source_uri); - checked_uris.put(duped_uri, {}) catch self.allocator.free(duped_uri); + // consider it checked even if a failure happens + try checked_uris.put(self.allocator, source_uri, {}); const handle = self.getOrLoadHandle(source_uri) orelse return false; + if (handle.associated_build_file) |associated_build_file_uri| { + return std.mem.eql(u8, associated_build_file_uri, build_file.uri); + } + for (handle.import_uris.items) |import_uri| { if (std.mem.eql(u8, uri, import_uri)) return true; - if (self.uriInImports(checked_uris, import_uri, uri) catch false) + if (try self.uriInImports(checked_uris, build_file, import_uri, uri)) return true; } @@ -626,7 +637,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro defer { if (handle.associated_build_file) |build_file_uri| { log.debug("Opened document `{s}` with build file `{s}`", .{ handle.uri, build_file_uri }); - } else if (handle.is_build_file) { + } else if (isBuildFile(handle.uri)) { log.debug("Opened document `{s}` (build file)", .{handle.uri}); } else { log.debug("Opened document `{s}`", .{handle.uri}); @@ -636,9 +647,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro handle.import_uris = try self.collectImportUris(handle); handle.cimports = try self.collectCIncludes(handle); - // TODO: Better logic for detecting std or subdirectories? 
- const in_std = std.mem.indexOf(u8, uri, "/std/") != null; - if (self.config.zig_exe_path != null and std.mem.endsWith(u8, uri, "/build.zig") and !in_std) { + if (self.config.zig_exe_path != null and isBuildFile(handle.uri) and !isInStd(handle.uri)) { const gop = try self.build_files.getOrPut(self.allocator, uri); errdefer |err| { self.build_files.swapRemoveAt(gop.index); @@ -649,9 +658,9 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro gop.value_ptr.* = try self.createBuildFile(duped_uri); gop.key_ptr.* = gop.value_ptr.uri; } - handle.is_build_file = true; - } else if (self.config.zig_exe_path != null and !std.mem.endsWith(u8, uri, "/builtin.zig") and !in_std) blk: { - log.debug("Going to walk down the tree towards: {s}", .{uri}); + } else if (self.config.zig_exe_path != null and !isBuiltinFile(handle.uri) and !isInStd(handle.uri)) blk: { + // log.debug("Going to walk down the tree towards: {s}", .{uri}); + // walk down the tree towards the uri. When we hit build.zig files // determine if the uri we're interested in is involved with the build. // This ensures that _relevant_ build.zig files higher in the @@ -659,12 +668,11 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro const path = URI.parse(self.allocator, uri) catch break :blk; defer self.allocator.free(path); - var prev_build_file: ?Uri = null; var build_it = try BuildDotZigIterator.init(self.allocator, path); while (try build_it.next()) |build_path| { defer self.allocator.free(build_path); - log.debug("found build path: {s}", .{build_path}); + // log.debug("found build path: {s}", .{build_path}); const build_file_uri = try URI.fromPath(self.allocator, build_path); const gop = self.build_files.getOrPut(self.allocator, build_file_uri) catch |err| { @@ -682,14 +690,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro if (try self.uriAssociatedWithBuild(gop.value_ptr.*, uri)) { handle.associated_build_file = gop.key_ptr.*; break; - } else { - prev_build_file = gop.key_ptr.*; - } - } - - // if there was no direct imports found, use the closest build file if possible - if (handle.associated_build_file == null) { - if (prev_build_file) |build_file_uri| { + } else if (handle.associated_build_file == null) { handle.associated_build_file = build_file_uri; } } From f473088b649a23976ec888284780fbaf123ab14f Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 30 Dec 2022 23:42:28 +0000 Subject: [PATCH 11/18] fix crashes found through fuzzing (#866) --- src/Server.zig | 18 +++++++++--------- src/analysis.zig | 2 ++ src/ast.zig | 13 +++++++++---- src/inlay_hints.zig | 2 +- src/semantic_tokens.zig | 2 +- tests/lsp_features/folding_range.zig | 2 +- 6 files changed, 23 insertions(+), 16 deletions(-) diff --git a/src/Server.zig b/src/Server.zig index d34fab3..5bc705c 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -2486,14 +2486,14 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]t .@"if", .if_simple => { const if_full = ast.ifFull(handle.tree, node); - const start_tok_1 = handle.tree.lastToken(if_full.ast.cond_expr); - const end_tok_1 = handle.tree.lastToken(if_full.ast.then_expr); + const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr); + const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); if (if_full.ast.else_expr == 0) 
continue; const start_tok_2 = if_full.else_token; - const end_tok_2 = handle.tree.lastToken(if_full.ast.else_expr); + const end_tok_2 = ast.lastToken(handle.tree, if_full.ast.else_expr); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); }, @@ -2507,14 +2507,14 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]t => { const loop_full = ast.whileAst(handle.tree, node).?; - const start_tok_1 = handle.tree.lastToken(loop_full.ast.cond_expr); - const end_tok_1 = handle.tree.lastToken(loop_full.ast.then_expr); + const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr); + const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding); if (loop_full.ast.else_expr == 0) continue; const start_tok_2 = loop_full.else_token; - const end_tok_2 = handle.tree.lastToken(loop_full.ast.else_expr); + const end_tok_2 = ast.lastToken(handle.tree, loop_full.ast.else_expr); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding); }, @@ -2552,7 +2552,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]t break :decl_node_blk; const list_start_tok: Ast.TokenIndex = fn_proto.lparen; - const list_end_tok: Ast.TokenIndex = handle.tree.lastToken(fn_proto.ast.proto_node); + const list_end_tok: Ast.TokenIndex = ast.lastToken(handle.tree, fn_proto.ast.proto_node); if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk; try ranges.ensureUnusedCapacity(1 + fn_proto.ast.params.len); // best guess, doesn't include anytype params @@ -2579,7 +2579,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]t // .grouped_expression, => { const start_tok = handle.tree.firstToken(node); - const end_tok = handle.tree.lastToken(node); + const end_tok = ast.lastToken(handle.tree, node); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive, server.offset_encoding); }, @@ -2626,7 +2626,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) !?[]t } const start_tok = handle.tree.firstToken(node); - const end_tok = handle.tree.lastToken(node); + const end_tok = ast.lastToken(handle.tree, node); _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive, server.offset_encoding); }, } diff --git a/src/analysis.zig b/src/analysis.zig index 7b142f3..c314930 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1466,6 +1466,8 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con if (params.len != 1) return null; + if(node_tags[params[0]] != .string_literal) return null; + const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); return import_str[1 .. 
import_str.len - 1]; } diff --git a/src/ast.zig b/src/ast.zig index 3705147..48db2f4 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -539,10 +539,15 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex { .container_decl_arg_trailing, .switch_comma, => { - const members = tree.extraData(datas[n].rhs, Node.SubRange); - std.debug.assert(members.end - members.start > 0); - end_offset += 2; // for the comma + rbrace - n = tree.extra_data[members.end - 1]; // last parameter + if(datas[n].rhs != 0) { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + std.debug.assert(members.end - members.start > 0); + end_offset += 2; // for the comma + rbrace + n = tree.extra_data[members.end - 1]; // last parameter + } else { + end_offset += 1; + n = datas[n].lhs; + } }, .array_init_dot, .struct_init_dot, diff --git a/src/inlay_hints.zig b/src/inlay_hints.zig index 1bc2747..b1351ee 100644 --- a/src/inlay_hints.zig +++ b/src/inlay_hints.zig @@ -22,7 +22,7 @@ pub const inlay_hints_max_inline_children = 12; /// checks whether node is inside the range fn isNodeInRange(tree: Ast, node: Ast.Node.Index, range: types.Range) bool { - const endLocation = tree.tokenLocation(0, tree.lastToken(node)); + const endLocation = tree.tokenLocation(0, ast.lastToken(tree, node)); if (endLocation.line < range.start.line) return false; const beginLocation = tree.tokenLocation(0, tree.firstToken(node)); diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 9e2cf0b..788cd50 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -289,7 +289,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr const token_tags = tree.tokens.items(.tag); const node_data = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - if (node == 0 or node > node_data.len) return; + if (node == 0 or node >= node_data.len) return; var allocator = builder.arena.allocator(); diff --git a/tests/lsp_features/folding_range.zig b/tests/lsp_features/folding_range.zig index 7db3908..472ed4d 100644 --- a/tests/lsp_features/folding_range.zig +++ b/tests/lsp_features/folding_range.zig @@ -34,7 +34,7 @@ test "foldingRange - #801" { \\ }; \\} , - \\[] + \\[{"startLine":1,"endLine":4},{"startLine":0,"endLine":5}] ); } From 94ec3a0a86257d06bbfc8d5ab799480f19a34b1f Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 30 Dec 2022 23:42:53 +0000 Subject: [PATCH 12/18] Debugging utilities (#860) * add debug printing for Ast and DocumentScope * add optional failing allocator --- build.zig | 12 +++++ src/analysis.zig | 24 --------- src/debug.zig | 136 +++++++++++++++++++++++++++++++++++++++++++++++ src/main.zig | 8 ++- src/zls.zig | 1 + 5 files changed, 155 insertions(+), 26 deletions(-) create mode 100644 src/debug.zig diff --git a/build.zig b/build.zig index c4fea91..52477c7 100644 --- a/build.zig +++ b/build.zig @@ -49,6 +49,18 @@ pub fn build(b: *std.build.Builder) !void { "enable_tracy_callstack", b.option(bool, "enable_tracy_callstack", "Enable callstack graphs.") orelse false, ); + + exe_options.addOption( + bool, + "enable_failing_allocator", + b.option(bool, "enable_failing_allocator", "Whether to use a randomly failing allocator.") orelse false, + ); + + exe_options.addOption( + u32, + "enable_failing_allocator_likelihood", + b.option(u32, "enable_failing_allocator_likelihood", "The chance that an allocation will fail is `1/likelihood`") orelse 256, + ); const version = v: { const version_string = b.fmt("{d}.{d}.{d}", 
.{ zls_version.major, zls_version.minor, zls_version.patch }); diff --git a/src/analysis.zig b/src/analysis.zig index c314930..ee34216 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2389,30 +2389,6 @@ pub const DocumentScope = struct { error_completions: CompletionSet, enum_completions: CompletionSet, - pub fn debugPrint(self: DocumentScope) void { - for (self.scopes.items) |scope| { - log.debug( - \\-------------------------- - \\Scope {}, loc: [{d}, {d}) - \\ {d} usingnamespaces - \\Decls: - , .{ - scope.data, - scope.loc.start, - scope.loc.end, - scope.uses.len, - }); - - var decl_it = scope.decls.iterator(); - var idx: usize = 0; - while (decl_it.next()) |_| : (idx += 1) { - if (idx != 0) log.debug(", ", .{}); - } - // log.debug("{s}", .{name_decl.key}); - log.debug("\n--------------------------\n", .{}); - } - } - pub fn deinit(self: *DocumentScope, allocator: std.mem.Allocator) void { for (self.scopes.items) |*scope| { scope.deinit(allocator); diff --git a/src/debug.zig b/src/debug.zig new file mode 100644 index 0000000..02a1b38 --- /dev/null +++ b/src/debug.zig @@ -0,0 +1,136 @@ +const std = @import("std"); + +const analysis = @import("analysis.zig"); +const offsets = @import("offsets.zig"); + +pub fn printTree(tree: std.zig.Ast) void { + if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!"); + + std.debug.print( + \\ + \\nodes tag lhs rhs token + \\----------------------------------------------- + \\ + , .{}); + var i: usize = 0; + while (i < tree.nodes.len) : (i += 1) { + std.debug.print(" {d:<3} {s:<20} {d:<3} {d:<3} {d:<3} {s}\n", .{ + i, + @tagName(tree.nodes.items(.tag)[i]), + tree.nodes.items(.data)[i].lhs, + tree.nodes.items(.data)[i].rhs, + tree.nodes.items(.main_token)[i], + offsets.tokenToSlice(tree, tree.nodes.items(.main_token)[i]), + }); + } + + std.debug.print( + \\ + \\tokens tag start + \\---------------------------------- + \\ + , .{}); + i = 0; + while (i < tree.tokens.len) : (i += 1) { + std.debug.print(" {d:<3} {s:<20} {d:<}\n", .{ + i, + @tagName(tree.tokens.items(.tag)[i]), + tree.tokens.items(.start)[i], + }); + } +} + +pub fn printDocumentScope(doc_scope: analysis.DocumentScope) void { + if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!"); + + for (doc_scope.scopes.items) |scope, i| { + if (i != 0) std.debug.print("\n\n", .{}); + std.debug.print( + \\[{d}, {d}] {} + \\usingnamespaces: {d} + \\Decls: + \\ + , .{ + scope.loc.start, + scope.loc.end, + scope.data, + scope.uses.items.len, + }); + + var decl_it = scope.decls.iterator(); + var idx: usize = 0; + while (decl_it.next()) |entry| : (idx += 1) { + std.debug.print(" {s:<8} {}\n", .{ entry.key_ptr.*, entry.value_ptr.* }); + } + } +} + +pub const FailingAllocator = struct { + internal_allocator: std.mem.Allocator, + random: std.rand.DefaultPrng, + likelihood: u32, + + /// the chance that an allocation will fail is `1/likelihood` + /// `likelihood == 0` means that every allocation will fail + /// `likelihood == std.math.intMax(u32)` means that no allocation will be forced to fail + pub fn init(internal_allocator: std.mem.Allocator, likelihood: u32) FailingAllocator { + var seed = std.mem.zeroes([8]u8); + std.os.getrandom(&seed) catch {}; + + return FailingAllocator{ + .internal_allocator = internal_allocator, + .random = std.rand.DefaultPrng.init(@bitCast(u64, seed)), + .likelihood = likelihood, + }; + } + + pub fn allocator(self: *FailingAllocator) std.mem.Allocator { + return .{ + .ptr = self, + .vtable = &.{ 
+ .alloc = alloc, + .resize = resize, + .free = free, + }, + }; + } + + fn alloc( + ctx: *anyopaque, + len: usize, + log2_ptr_align: u8, + return_address: usize, + ) ?[*]u8 { + const self = @ptrCast(*FailingAllocator, @alignCast(@alignOf(FailingAllocator), ctx)); + if (shouldFail(self)) return null; + return self.internal_allocator.rawAlloc(len, log2_ptr_align, return_address); + } + + fn resize( + ctx: *anyopaque, + old_mem: []u8, + log2_old_align: u8, + new_len: usize, + ra: usize, + ) bool { + const self = @ptrCast(*FailingAllocator, @alignCast(@alignOf(FailingAllocator), ctx)); + if (!self.internal_allocator.rawResize(old_mem, log2_old_align, new_len, ra)) + return false; + return true; + } + + fn free( + ctx: *anyopaque, + old_mem: []u8, + log2_old_align: u8, + ra: usize, + ) void { + const self = @ptrCast(*FailingAllocator, @alignCast(@alignOf(FailingAllocator), ctx)); + self.internal_allocator.rawFree(old_mem, log2_old_align, ra); + } + + fn shouldFail(self: *FailingAllocator) bool { + if (self.likelihood == std.math.maxInt(u32)) return false; + return 0 == self.random.random().intRangeAtMostBiased(u32, 0, self.likelihood); + } +}; diff --git a/src/main.zig b/src/main.zig index 6aaab47..29122ec 100644 --- a/src/main.zig +++ b/src/main.zig @@ -8,6 +8,7 @@ const configuration = @import("configuration.zig"); const Server = @import("Server.zig"); const setup = @import("setup.zig"); const Header = @import("Header.zig"); +const debug = @import("debug.zig"); const logger = std.log.scoped(.main); @@ -263,9 +264,12 @@ const stack_frames = switch (zig_builtin.mode) { pub fn main() !void { var gpa_state = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = stack_frames }){}; defer _ = gpa_state.deinit(); - var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{}; - const allocator: std.mem.Allocator = if (tracy.enable_allocation) tracy_state.allocator() else gpa_state.allocator(); + var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{}; + const inner_allocator: std.mem.Allocator = if (tracy.enable_allocation) tracy_state.allocator() else gpa_state.allocator(); + + var failing_allocator_state = if(build_options.enable_failing_allocator) debug.FailingAllocator.init(inner_allocator, build_options.enable_failing_allocator_likelihood) else void{}; + const allocator: std.mem.Allocator = if(build_options.enable_failing_allocator) failing_allocator_state.allocator() else inner_allocator; var config = ConfigWithPath{ .config = undefined, diff --git a/src/zls.zig b/src/zls.zig index 21b1095..fb362d0 100644 --- a/src/zls.zig +++ b/src/zls.zig @@ -3,6 +3,7 @@ pub const analysis = @import("analysis.zig"); pub const Header = @import("Header.zig"); +pub const debug = @import("debug.zig"); pub const offsets = @import("offsets.zig"); pub const Config = @import("Config.zig"); pub const Server = @import("Server.zig"); From d86d05d969c6872f2fa0786a23ffe5e682985e6a Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 30 Dec 2022 23:43:40 +0000 Subject: [PATCH 13/18] fix ast-check with single error (#865) --- src/Server.zig | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/Server.zig b/src/Server.zig index 5bc705c..ccf51d4 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -453,6 +453,12 @@ fn getAstCheckDiagnostics( }; } } + + if (last_diagnostic) |*diagnostic| { + diagnostic.relatedInformation = try last_related_diagnostics.toOwnedSlice(allocator); + try 
diagnostics.append(allocator, diagnostic.*); + last_diagnostic = null; + } } /// caller owns returned memory. From 417bf9bd0a48a3f49560ed68d07f30236ad21b0b Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 30 Dec 2022 23:45:31 +0000 Subject: [PATCH 14/18] add vscode config generation to `zig build gen` (#862) * add vscode config generation to `zig build gen` * correctly handle removing configs in config_gen.zig * update log messages in config_gen.zig --- build.zig | 5 + src/config_gen/config_gen.zig | 183 +++++++++++++++++------------ src/data/generate-vscode-config.js | 45 ------- src/tres | 2 +- 4 files changed, 115 insertions(+), 120 deletions(-) delete mode 100644 src/data/generate-vscode-config.js diff --git a/build.zig b/build.zig index 52477c7..d96dd1f 100644 --- a/build.zig +++ b/build.zig @@ -133,6 +133,7 @@ pub fn build(b: *std.build.Builder) !void { exe.install(); const gen_exe = b.addExecutable("zls_gen", "src/config_gen/config_gen.zig"); + gen_exe.addPackage(.{ .name = "tres", .source = .{ .path = "src/tres/tres.zig" } }); const gen_cmd = gen_exe.run(); gen_cmd.addArgs(&.{ @@ -141,6 +142,10 @@ pub fn build(b: *std.build.Builder) !void { b.fmt("{s}/README.md", .{b.build_root}), }); + if (b.option([]const u8, "vscode-config-path", "Output path to vscode-config")) |path| { + gen_cmd.addArg(b.pathFromRoot(path)); + } + const gen_step = b.step("gen", "Regenerate config files"); gen_step.dependOn(&gen_cmd.step); diff --git a/src/config_gen/config_gen.zig b/src/config_gen/config_gen.zig index 0952df1..73f9d4d 100644 --- a/src/config_gen/config_gen.zig +++ b/src/config_gen/config_gen.zig @@ -1,4 +1,6 @@ const std = @import("std"); +const builtin = @import("builtin"); +const tres = @import("tres"); const ConfigOption = struct { /// Name of config option @@ -10,7 +12,7 @@ const ConfigOption = struct { /// used in Config.zig as the default initializer default: []const u8, /// If set, this option can be configured through `zls --config` - /// currently unused but could laer be used to automatically generate queries for setup.zig + /// currently unused but could later be used to automatically generate queries for setup.zig setup_question: ?[]const u8, }; @@ -46,9 +48,7 @@ fn zigTypeToTypescript(ty: []const u8) ![]const u8 { fn generateConfigFile(allocator: std.mem.Allocator, config: Config, path: []const u8) !void { _ = allocator; - const config_file = try std.fs.openFileAbsolute(path, .{ - .mode = .write_only, - }); + const config_file = try std.fs.createFileAbsolute(path, .{}); defer config_file.close(); var buff_out = std.io.bufferedWriter(config_file.writer()); @@ -69,10 +69,10 @@ fn generateConfigFile(allocator: std.mem.Allocator, config: Config, path: []cons \\{s}: {s} = {s}, \\ , .{ - std.mem.trim(u8, option.description, " \t\n\r"), - std.mem.trim(u8, option.name, " \t\n\r"), - std.mem.trim(u8, option.type, " \t\n\r"), - std.mem.trim(u8, option.default, " \t\n\r"), + std.mem.trim(u8, option.description, &std.ascii.whitespace), + std.mem.trim(u8, option.name, &std.ascii.whitespace), + std.mem.trim(u8, option.type, &std.ascii.whitespace), + std.mem.trim(u8, option.default, &std.ascii.whitespace), }); } @@ -114,7 +114,7 @@ fn generateSchemaFile(allocator: std.mem.Allocator, config: Config, path: []cons \\ "properties": ); - try serializeObjectMap(properties, .{ + try tres.stringify(properties, .{ .whitespace = .{ .indent_level = 1, }, @@ -122,26 +122,26 @@ fn generateSchemaFile(allocator: std.mem.Allocator, config: Config, path: []cons _ 
= try buff_out.write("\n}\n"); try buff_out.flush(); + try schema_file.setEndPos(try schema_file.getPos()); } fn updateREADMEFile(allocator: std.mem.Allocator, config: Config, path: []const u8) !void { var readme_file = try std.fs.openFileAbsolute(path, .{ .mode = .read_write }); defer readme_file.close(); - var readme = std.ArrayListUnmanaged(u8){ - .items = try readme_file.readToEndAlloc(allocator, std.math.maxInt(usize)), - }; - defer readme.deinit(allocator); + var readme = try readme_file.readToEndAlloc(allocator, std.math.maxInt(usize)); + defer allocator.free(readme); const start_indicator = ""; const end_indicator = ""; - const start = start_indicator.len + (std.mem.indexOf(u8, readme.items, start_indicator) orelse return error.SectionNotFound); - const end = std.mem.indexOfPos(u8, readme.items, start, end_indicator) orelse return error.SectionNotFound; + const start = start_indicator.len + (std.mem.indexOf(u8, readme, start_indicator) orelse return error.SectionNotFound); + const end = std.mem.indexOfPos(u8, readme, start, end_indicator) orelse return error.SectionNotFound; - var new_readme = std.ArrayListUnmanaged(u8){}; - defer new_readme.deinit(allocator); - var writer = new_readme.writer(allocator); + try readme_file.seekTo(0); + var writer = readme_file.writer(); + + try writer.writeAll(readme[0..start]); try writer.writeAll( \\ @@ -155,29 +155,95 @@ fn updateREADMEFile(allocator: std.mem.Allocator, config: Config, path: []const \\| `{s}` | `{s}` | `{s}` | {s} | \\ , .{ - std.mem.trim(u8, option.name, " \t\n\r"), - std.mem.trim(u8, option.type, " \t\n\r"), - std.mem.trim(u8, option.default, " \t\n\r"), - std.mem.trim(u8, option.description, " \t\n\r"), + std.mem.trim(u8, option.name, &std.ascii.whitespace), + std.mem.trim(u8, option.type, &std.ascii.whitespace), + std.mem.trim(u8, option.default, &std.ascii.whitespace), + std.mem.trim(u8, option.description, &std.ascii.whitespace), }); } - try readme.replaceRange(allocator, start, end - start, new_readme.items); + try writer.writeAll(readme[end..]); - try readme_file.seekTo(0); - try readme_file.writeAll(readme.items); + try readme_file.setEndPos(try readme_file.getPos()); +} + +const ConfigurationProperty = struct { + scope: []const u8 = "resource", + type: []const u8, + description: []const u8, + @"enum": ?[]const []const u8 = null, + format: ?[]const u8 = null, + default: ?std.json.Value = null, +}; + +fn generateVSCodeConfigFile(allocator: std.mem.Allocator, config: Config, path: []const u8) !void { + var config_file = try std.fs.createFileAbsolute(path, .{}); + defer config_file.close(); + + const predefined_configurations: usize = 3; + var configuration: std.StringArrayHashMapUnmanaged(ConfigurationProperty) = .{}; + try configuration.ensureTotalCapacity(allocator, predefined_configurations + @intCast(u32, config.options.len)); + defer { + for (configuration.keys()[predefined_configurations..]) |name| allocator.free(name); + configuration.deinit(allocator); + } + + configuration.putAssumeCapacityNoClobber("trace.server", .{ + .scope = "window", + .type = "string", + .@"enum" = &.{"off", "message", "verbose"}, + .description = "Traces the communication between VS Code and the language server.", + .default = .{.String = "off"}, + }); + configuration.putAssumeCapacityNoClobber("check_for_update", .{ + .type = "boolean", + .description = "Whether to automatically check for new updates", + .default = .{.Bool = true}, + }); + configuration.putAssumeCapacityNoClobber("path", .{ + .type = "string", + .description = "Path to 
`zls` executable. Example: `C:/zls/zig-cache/bin/zls.exe`.", + .format = "path", + .default = null, + }); + + for (config.options) |option| { + const name = try std.fmt.allocPrint(allocator, "zls.{s}", .{option.name}); + + var parser = std.json.Parser.init(allocator, false); + const default = (try parser.parse(option.default)).root; + + configuration.putAssumeCapacityNoClobber(name, .{ + .type = try zigTypeToTypescript(option.type), + .description = option.description, + .format = if (std.mem.indexOf(u8, option.name, "path") != null) "path" else null, + .default = if(default == .Null) null else default, + }); + } + + var buffered_writer = std.io.bufferedWriter(config_file.writer()); + var writer = buffered_writer.writer(); + + try tres.stringify(configuration, .{ + .whitespace = .{}, + .emit_null_optional_fields = false, + }, writer); + + try buffered_writer.flush(); } pub fn main() !void { - var arg_it = std.process.args(); + var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){}; + var gpa = general_purpose_allocator.allocator(); + + var arg_it = try std.process.argsWithAllocator(gpa); + defer arg_it.deinit(); _ = arg_it.next() orelse @panic(""); const config_path = arg_it.next() orelse @panic("first argument must be path to Config.zig"); const schema_path = arg_it.next() orelse @panic("second argument must be path to schema.json"); const readme_path = arg_it.next() orelse @panic("third argument must be path to README.md"); - - var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){}; - var gpa = general_purpose_allocator.allocator(); + const maybe_vscode_config_path = arg_it.next(); const parse_options = std.json.ParseOptions{ .allocator = gpa, @@ -190,50 +256,19 @@ pub fn main() !void { try generateSchemaFile(gpa, config, schema_path); try updateREADMEFile(gpa, config, readme_path); - std.log.warn( - \\ If you have added a new configuration option and it should be configuration through the config wizard, then edit src/setup.zig - , .{}); + if (maybe_vscode_config_path) |vscode_config_path| { + try generateVSCodeConfigFile(gpa, config, vscode_config_path); + } - std.log.info( - \\ Changing configuration options may also require editing the `package.json` from zls-vscode at https://github.com/zigtools/zls-vscode/blob/master/package.json - , .{}); -} - -fn serializeObjectMap( - value: anytype, - options: std.json.StringifyOptions, - out_stream: anytype, -) @TypeOf(out_stream).Error!void { - try out_stream.writeByte('{'); - var field_output = false; - var child_options = options; - if (child_options.whitespace) |*child_whitespace| { - child_whitespace.indent_level += 1; - } - var it = value.iterator(); - while (it.next()) |entry| { - if (!field_output) { - field_output = true; - } else { - try out_stream.writeByte(','); - } - if (child_options.whitespace) |child_whitespace| { - try child_whitespace.outputIndent(out_stream); - } - - try std.json.stringify(entry.key_ptr.*, options, out_stream); - try out_stream.writeByte(':'); - if (child_options.whitespace) |child_whitespace| { - if (child_whitespace.separator) { - try out_stream.writeByte(' '); - } - } - try std.json.stringify(entry.value_ptr.*, child_options, out_stream); - } - if (field_output) { - if (options.whitespace) |whitespace| { - try whitespace.outputIndent(out_stream); - } - } - try out_stream.writeByte('}'); + if (builtin.os.tag == .windows) { + std.log.warn("Running on windows may result in CRLF and LF mismatch", .{}); + } + + try std.io.getStdOut().writeAll( + \\If you have added a new 
configuration option and it should be configuration through the config wizard, then edit `src/setup.zig` + \\ + \\Changing configuration options may also require editing the `package.json` from zls-vscode at https://github.com/zigtools/zls-vscode/blob/master/package.json + \\You can use `zig build gen -Dvscode-config-path=/path/to/output/file.json` to generate the new configuration properties which you can then copy into `package.json` + \\ + ); } diff --git a/src/data/generate-vscode-config.js b/src/data/generate-vscode-config.js deleted file mode 100644 index 599566d..0000000 --- a/src/data/generate-vscode-config.js +++ /dev/null @@ -1,45 +0,0 @@ -// Run with node - -const fs = require("fs"); -const path = require("path"); - -const sourceOfTruth = fs.readFileSync(path.join(__dirname, "..", "Config.zig")); - -const lines = sourceOfTruth.toString().split("\n"); - -function mapType(type) { - switch (type) { - case "?[]const u8": - return "string"; - - case "bool": - return "boolean"; - - case "usize": - return "integer"; - - default: - throw new Error("unknown type!"); - } -} - -let comment = null; -for (const line of lines) { - if (line.startsWith("///")) { - if (comment === null) comment = line.slice(3).trim(); - else comment += line.slice(3); - } else if (comment) { - const name = line.split(":")[0].trim(); - const type = line.split(":")[1].split("=")[0].trim(); - const defaultValue = line.split(":")[1].split("=")[1].trim().replace(",",""); - - console.log(`"zls.${name}": ${JSON.stringify({ - "scope": "resource", - "type": mapType(type), - "description": comment, - "default": JSON.parse(defaultValue) - })},`); - - comment = null; - } -} diff --git a/src/tres b/src/tres index 16774b9..fb23d64 160000 --- a/src/tres +++ b/src/tres @@ -1 +1 @@ -Subproject commit 16774b94efa61757a5302a690837dfb8cf750a11 +Subproject commit fb23d644500ae5b93dd71b5a8406d0c83e8e4fbe From 3449269fd3567328480eb5cdcb24793d4df781b5 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Sat, 31 Dec 2022 06:45:45 +0000 Subject: [PATCH 15/18] Add a replay feature to zls (#857) * add config options for `zls --replay` * implement `zls --replay` * remove carriage return from zls replay files * add missing arguments for Server.init in tests --- .gitignore | 1 + README.md | 3 + schema.json | 15 +++ src/Config.zig | 9 ++ src/Header.zig | 28 +++-- src/Server.zig | 44 ++++++-- src/config_gen/config.json | 21 ++++ src/main.zig | 219 ++++++++++++++++++++++++++----------- tests/context.zig | 2 +- 9 files changed, 250 insertions(+), 92 deletions(-) diff --git a/.gitignore b/.gitignore index 468f835..96e5b2b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ zig-* debug release +*.zlsreplay diff --git a/README.md b/README.md index f2520e8..b0b5f01 100644 --- a/README.md +++ b/README.md @@ -86,6 +86,9 @@ The following options are currently available. | `include_at_in_builtins` | `bool` | `false` | Whether the @ sign should be part of the completion of builtins | | `skip_std_references` | `bool` | `false` | When true, skips searching for references in std. Improves lookup speed for functions in user's code. 
Renaming and go-to-definition will continue to work as is | | `max_detail_length` | `usize` | `1048576` | The detail field of completions is truncated to be no longer than this (in bytes) | +| `record_session` | `bool` | `false` | When true, zls will record all request is receives and write in into `record_session_path`, so that they can replayed with `zls replay` | +| `record_session_path` | `?[]const u8` | `null` | Output file path when `record_session` is set. The recommended file extension *.zlsreplay | +| `replay_session_path` | `?[]const u8` | `null` | Used when calling `zls replay` for specifying the replay file. If no extra argument is given `record_session_path` is used as the default path. | | `builtin_path` | `?[]const u8` | `null` | Path to 'builtin;' useful for debugging, automatically set if let null | | `zig_lib_path` | `?[]const u8` | `null` | Zig library path, e.g. `/path/to/zig/lib/zig`, used to analyze std library imports | | `zig_exe_path` | `?[]const u8` | `null` | Zig executable path, e.g. `/path/to/zig/zig`, used to run the custom build runner. If `null`, zig is looked up in `PATH`. Will be used to infer the zig standard library path if none is provided | diff --git a/schema.json b/schema.json index ce0c552..665c5de 100644 --- a/schema.json +++ b/schema.json @@ -89,6 +89,21 @@ "type": "integer", "default": "1048576" }, + "record_session": { + "description": "When true, zls will record all request is receives and write in into `record_session_path`, so that they can replayed with `zls replay`", + "type": "boolean", + "default": "false" + }, + "record_session_path": { + "description": "Output file path when `record_session` is set. The recommended file extension *.zlsreplay", + "type": "string", + "default": "null" + }, + "replay_session_path": { + "description": "Used when calling `zls replay` for specifying the replay file. If no extra argument is given `record_session_path` is used as the default path.", + "type": "string", + "default": "null" + }, "builtin_path": { "description": "Path to 'builtin;' useful for debugging, automatically set if let null", "type": "string", diff --git a/src/Config.zig b/src/Config.zig index 55af114..9ce599b 100644 --- a/src/Config.zig +++ b/src/Config.zig @@ -55,6 +55,15 @@ skip_std_references: bool = false, /// The detail field of completions is truncated to be no longer than this (in bytes) max_detail_length: usize = 1048576, +/// When true, zls will record all request is receives and write in into `record_session_path`, so that they can replayed with `zls replay` +record_session: bool = false, + +/// Output file path when `record_session` is set. The recommended file extension *.zlsreplay +record_session_path: ?[]const u8 = null, + +/// Used when calling `zls replay` for specifying the replay file. If no extra argument is given `record_session_path` is used as the default path. +replay_session_path: ?[]const u8 = null, + /// Path to 'builtin;' useful for debugging, automatically set if let null builtin_path: ?[]const u8 = null, diff --git a/src/Header.zig b/src/Header.zig index d0b7d6e..669948b 100644 --- a/src/Header.zig +++ b/src/Header.zig @@ -12,7 +12,7 @@ pub fn deinit(self: @This(), allocator: std.mem.Allocator) void { } // Caller owns returned memory. 
-pub fn parse(allocator: std.mem.Allocator, reader: anytype) !Header { +pub fn parse(allocator: std.mem.Allocator, include_carriage_return: bool, reader: anytype) !Header { var r = Header{ .content_length = undefined, .content_type = null, @@ -23,11 +23,15 @@ pub fn parse(allocator: std.mem.Allocator, reader: anytype) !Header { while (true) { const header = try reader.readUntilDelimiterAlloc(allocator, '\n', 0x100); defer allocator.free(header); - if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn; - if (header.len == 1) break; + if (include_carriage_return) { + if (header.len == 0 or header[header.len - 1] != '\r') return error.MissingCarriageReturn; + if (header.len == 1) break; + } else { + if (header.len == 0) break; + } const header_name = header[0 .. std.mem.indexOf(u8, header, ": ") orelse return error.MissingColon]; - const header_value = header[header_name.len + 2 .. header.len - 1]; + const header_value = header[header_name.len + 2 .. header.len - @boolToInt(include_carriage_return)]; if (std.mem.eql(u8, header_name, "Content-Length")) { if (header_value.len == 0) return error.MissingHeaderValue; r.content_length = std.fmt.parseInt(usize, header_value, 10) catch return error.InvalidContentLength; @@ -43,17 +47,11 @@ pub fn parse(allocator: std.mem.Allocator, reader: anytype) !Header { return r; } -pub fn format( - header: Header, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - _ = options; - std.debug.assert(unused_fmt_string.len == 0); - try writer.print("Content-Length: {}\r\n", .{header.content_length}); +pub fn write(header: Header, include_carriage_return: bool, writer: anytype) @TypeOf(writer).Error!void { + const seperator: []const u8 = if (include_carriage_return) "\r\n" else "\n"; + try writer.print("Content-Length: {}{s}", .{header.content_length, seperator}); if (header.content_type) |content_type| { - try writer.print("Content-Type: {s}\r\n", .{content_type}); + try writer.print("Content-Type: {s}{s}", .{content_type, seperator}); } - try writer.writeAll("\r\n"); + try writer.writeAll(seperator); } diff --git a/src/Server.zig b/src/Server.zig index ccf51d4..04353e0 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -37,6 +37,8 @@ document_store: DocumentStore = undefined, builtin_completions: std.ArrayListUnmanaged(types.CompletionItem), client_capabilities: ClientCapabilities = .{}, outgoing_messages: std.ArrayListUnmanaged([]const u8) = .{}, +recording_enabled: bool, +replay_enabled: bool, offset_encoding: offsets.Encoding = .@"utf-16", status: enum { /// the server has not received a `initialize` request @@ -176,7 +178,13 @@ fn sendInternal( try server.outgoing_messages.append(server.allocator, message); } -fn showMessage(server: *Server, message_type: types.MessageType, message: []const u8) void { +fn showMessage( + server: *Server, + message_type: types.MessageType, + comptime fmt: []const u8, + args: anytype, +) void { + const message = std.fmt.allocPrint(server.arena.allocator(), fmt, args) catch return; server.sendNotification("window/showMessage", types.ShowMessageParams{ .type = message_type, .message = message, @@ -1661,19 +1669,20 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In const zig_exe_version = std.SemanticVersion.parse(env.version) catch break :blk; if (zig_builtin.zig_version.order(zig_exe_version) == .gt) { - const version_mismatch_message = try std.fmt.allocPrint( - server.arena.allocator(), 
- "ZLS was built with Zig {}, but your Zig version is {s}. Update Zig to avoid unexpected behavior.", - .{ zig_builtin.zig_version, env.version }, - ); - server.showMessage(.Warning, version_mismatch_message); + server.showMessage(.Warning, + \\ZLS was built with Zig {}, but your Zig version is {s}. Update Zig to avoid unexpected behavior. + , .{ zig_builtin.zig_version, env.version }); } } else { - server.showMessage( - .Warning, + server.showMessage(.Warning, \\ZLS failed to find Zig. Please add Zig to your PATH or set the zig_exe_path config option in your zls.json. - , - ); + , .{}); + } + + if (server.recording_enabled) { + server.showMessage(.Info, + \\This zls session is being recorded to {?s}. + , .{server.config.record_session_path}); } return .{ @@ -1810,6 +1819,11 @@ fn registerCapability(server: *Server, method: []const u8) !void { } fn requestConfiguration(server: *Server) !void { + if (server.recording_enabled) { + log.info("workspace/configuration are disabled during a recording session!", .{}); + return; + } + const configuration_items = comptime confi: { var comp_confi: [std.meta.fields(Config).len]types.ConfigurationItem = undefined; inline for (std.meta.fields(Config)) |field, index| { @@ -1831,6 +1845,10 @@ fn requestConfiguration(server: *Server) !void { } fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory}!void { + if (server.replay_enabled) { + log.info("workspace/configuration are disabled during a replay!", .{}); + return; + } log.info("Setting configuration...", .{}); // NOTE: Does this work with other editors? @@ -3028,6 +3046,8 @@ pub fn init( allocator: std.mem.Allocator, config: *Config, config_path: ?[]const u8, + recording_enabled: bool, + replay_enabled: bool, ) !Server { // TODO replace global with something like an Analyser struct // which contains using_trail & resolve_trail and place it inside Server @@ -3068,6 +3088,8 @@ pub fn init( .allocator = allocator, .document_store = document_store, .builtin_completions = builtin_completions, + .recording_enabled = recording_enabled, + .replay_enabled = replay_enabled, .status = .uninitialized, }; } diff --git a/src/config_gen/config.json b/src/config_gen/config.json index e637338..1a8a78c 100644 --- a/src/config_gen/config.json +++ b/src/config_gen/config.json @@ -119,6 +119,27 @@ "default": "1048576", "setup_question": null }, + { + "name": "record_session", + "description": "When true, zls will record all request is receives and write in into `record_session_path`, so that they can replayed with `zls replay`", + "type": "bool", + "default": "false", + "setup_question": null + }, + { + "name": "record_session_path", + "description": "Output file path when `record_session` is set. The recommended file extension *.zlsreplay", + "type": "?[]const u8", + "default": "null", + "setup_question": null + }, + { + "name": "replay_session_path", + "description": "Used when calling `zls replay` for specifying the replay file. 
If no extra argument is given `record_session_path` is used as the default path.", + "type": "?[]const u8", + "default": "null", + "setup_question": null + }, { "name": "builtin_path", "description": "Path to 'builtin;' useful for debugging, automatically set if let null", diff --git a/src/main.zig b/src/main.zig index 29122ec..4bb1075 100644 --- a/src/main.zig +++ b/src/main.zig @@ -35,10 +35,17 @@ pub fn log( std.debug.print(format ++ "\n", args); } -fn loop(server: *Server) !void { - const reader = std.io.getStdIn().reader(); - +fn loop( + server: *Server, + record_file: ?std.fs.File, + replay_file: ?std.fs.File, +) !void { + const std_in = std.io.getStdIn().reader(); const std_out = std.io.getStdOut().writer(); + + var buffered_reader = std.io.bufferedReader(if (replay_file) |file| file.reader() else std_in); + const reader = buffered_reader.reader(); + var buffered_writer = std.io.bufferedWriter(std_out); const writer = buffered_writer.writer(); @@ -49,24 +56,96 @@ fn loop(server: *Server) !void { // write server -> client messages for (server.outgoing_messages.items) |outgoing_message| { const header = Header{ .content_length = outgoing_message.len }; - try writer.print("{}{s}", .{ header, outgoing_message }); - try buffered_writer.flush(); + try header.write(true, writer); + try writer.writeAll(outgoing_message); } + try buffered_writer.flush(); for (server.outgoing_messages.items) |outgoing_message| { server.allocator.free(outgoing_message); } server.outgoing_messages.clearRetainingCapacity(); // read and handle client -> server message - const header = try Header.parse(arena.allocator(), reader); + const header = try Header.parse(arena.allocator(), replay_file == null, reader); const json_message = try arena.allocator().alloc(u8, header.content_length); try reader.readNoEof(json_message); + if (record_file) |file| { + try header.write(false, file.writer()); + try file.writeAll(json_message); + } + server.processJsonRpc(&arena, json_message); } } +fn getRecordFile(config: Config) ?std.fs.File { + if (!config.record_session) return null; + + if (config.record_session_path) |record_path| { + if (std.fs.createFileAbsolute(record_path, .{})) |file| { + std.debug.print("recording to {s}\n", .{record_path}); + return file; + } else |err| { + std.log.err("failed to create record file at {s}: {}", .{ record_path, err }); + return null; + } + } else { + std.log.err("`record_session` is set but `record_session_path` is unspecified", .{}); + return null; + } +} + +fn getReplayFile(config: Config) ?std.fs.File { + const replay_path = config.replay_session_path orelse config.record_session_path orelse return null; + + if (std.fs.openFileAbsolute(replay_path, .{})) |file| { + std.debug.print("replaying from {s}\n", .{replay_path}); + return file; + } else |err| { + std.log.err("failed to open replay file at {s}: {}", .{ replay_path, err }); + return null; + } +} + +/// when recording we add a message that saves the current configuration in the replay +/// when replaying we read this message and replace the current config +fn updateConfig( + allocator: std.mem.Allocator, + config: *Config, + record_file: ?std.fs.File, + replay_file: ?std.fs.File, +) !void { + if (record_file) |file| { + var cfg = config.*; + cfg.record_session = false; + cfg.record_session_path = null; + cfg.replay_session_path = null; + + var buffer = std.ArrayListUnmanaged(u8){}; + defer buffer.deinit(allocator); + + try std.json.stringify(cfg, .{}, buffer.writer(allocator)); + const header = Header{ .content_length = 
buffer.items.len }; + try header.write(false, file.writer()); + try file.writeAll(buffer.items); + } + + if (replay_file) |file| { + const header = try Header.parse(allocator, false, file.reader()); + defer header.deinit(allocator); + const json_message = try allocator.alloc(u8, header.content_length); + defer allocator.free(json_message); + try file.reader().readNoEof(json_message); + + var token_stream = std.json.TokenStream.init(json_message); + const new_config = try std.json.parse(Config, &token_stream, .{ .allocator = allocator }); + std.json.parseFree(Config, config.*, .{ .allocator = allocator }); + config.* = new_config; + } +} + const ConfigWithPath = struct { config: Config, config_path: ?[]const u8, @@ -75,44 +154,28 @@ const ConfigWithPath = struct { fn getConfig( allocator: std.mem.Allocator, config_path: ?[]const u8, - /// If true, and the provided config_path is non-null, frees - /// the aforementioned path, in the case that it is - /// not returned. - free_old_config_path: bool, ) !ConfigWithPath { if (config_path) |path| { - if (configuration.loadFromFile(allocator, path)) |conf| { - return ConfigWithPath{ - .config = conf, - .config_path = path, - }; + if (configuration.loadFromFile(allocator, path)) |config| { + return ConfigWithPath{ .config = config, .config_path = path }; } std.debug.print( \\Could not open configuration file '{s}' \\Falling back to a lookup in the local and global configuration folders \\ , .{path}); - if (free_old_config_path) { - allocator.free(path); - } } if (try known_folders.getPath(allocator, .local_configuration)) |path| { - if (configuration.loadFromFolder(allocator, path)) |conf| { - return ConfigWithPath{ - .config = conf, - .config_path = path, - }; + if (configuration.loadFromFolder(allocator, path)) |config| { + return ConfigWithPath{ .config = config, .config_path = path }; } allocator.free(path); } if (try known_folders.getPath(allocator, .global_configuration)) |path| { - if (configuration.loadFromFolder(allocator, path)) |conf| { - return ConfigWithPath{ - .config = conf, - .config_path = path, - }; + if (configuration.loadFromFolder(allocator, path)) |config| { + return ConfigWithPath{ .config = config, .config_path = path }; } allocator.free(path); } @@ -123,22 +186,33 @@ fn getConfig( }; } -const ParseArgsResult = enum { proceed, exit }; -fn parseArgs( - allocator: std.mem.Allocator, - config: *ConfigWithPath, -) !ParseArgsResult { +const ParseArgsResult = struct { + action: enum { proceed, exit }, + config_path: ?[]const u8, + replay_enabled: bool, + replay_session_path: ?[]const u8, +}; + +fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { + var result = ParseArgsResult{ + .action = .exit, + .config_path = null, + .replay_enabled = false, + .replay_session_path = null, + }; + const ArgId = enum { help, version, config, + replay, @"enable-debug-log", @"show-config-path", @"config-path", }; const arg_id_map = std.ComptimeStringMap(ArgId, comptime blk: { const fields = @typeInfo(ArgId).Enum.fields; - const KV = std.meta.Tuple(&.{ []const u8, ArgId }); + const KV = struct { []const u8, ArgId }; var pairs: [fields.len]KV = undefined; for (pairs) |*pair, i| pair.* = .{ fields[i].name, @intToEnum(ArgId, fields[i].value) }; break :blk pairs[0..]; @@ -155,10 +229,11 @@ fn parseArgs( var cmd_infos: InfoMap = InfoMap.init(.{ .help = "Prints this message.", .version = "Prints the compiler version with which the server was compiled.", + .config = "Run the ZLS configuration wizard.", + .replay = "Replay a previous recorded zls 
session", .@"enable-debug-log" = "Enables debug logs.", .@"config-path" = "Specify the path to a configuration file specifying LSP behaviour.", .@"show-config-path" = "Prints the path to the configuration file to stdout", - .config = "Run the ZLS configuration wizard.", }); var info_it = cmd_infos.iterator(); while (info_it.next()) |entry| { @@ -174,9 +249,6 @@ fn parseArgs( // Makes behavior of enabling debug more logging consistent regardless of argument order. var specified = std.enums.EnumArray(ArgId, bool).initFill(false); - var config_path: ?[]const u8 = null; - errdefer if (config_path) |path| allocator.free(path); - const stdout = std.io.getStdOut().writer(); const stderr = std.io.getStdErr().writer(); @@ -184,60 +256,61 @@ fn parseArgs( if (!std.mem.startsWith(u8, tok, "--") or tok.len == 2) { try stderr.print("{s}\n", .{help_message}); try stderr.print("Unexpected positional argument '{s}'.\n", .{tok}); - return .exit; + return result; } const argname = tok["--".len..]; const id = arg_id_map.get(argname) orelse { try stderr.print("{s}\n", .{help_message}); try stderr.print("Unrecognized argument '{s}'.\n", .{argname}); - return .exit; + return result; }; if (specified.get(id)) { try stderr.print("{s}\n", .{help_message}); try stderr.print("Duplicate argument '{s}'.\n", .{argname}); - return .exit; + return result; } specified.set(id, true); switch (id) { - .help => {}, - .version => {}, - .@"enable-debug-log" => {}, - .config => {}, - .@"show-config-path" => {}, + .help, .version, .@"enable-debug-log", .config, .@"show-config-path" => {}, .@"config-path" => { const path = args_it.next() orelse { try stderr.print("Expected configuration file path after --config-path argument.\n", .{}); - return .exit; + return result; }; - config.config_path = try allocator.dupe(u8, path); + result.config_path = try allocator.dupe(u8, path); + }, + .replay => { + result.replay_enabled = true; + const path = args_it.next() orelse break; + result.replay_session_path = try allocator.dupe(u8, path); }, } } if (specified.get(.help)) { try stderr.print("{s}\n", .{help_message}); - return .exit; + return result; } if (specified.get(.version)) { - try std.io.getStdOut().writeAll(build_options.version ++ "\n"); - return .exit; + try stdout.writeAll(build_options.version ++ "\n"); + return result; } if (specified.get(.config)) { try setup.wizard(allocator); - return .exit; + return result; } if (specified.get(.@"enable-debug-log")) { actual_log_level = .debug; logger.info("Enabled debug logging.\n", .{}); } if (specified.get(.@"config-path")) { - std.debug.assert(config.config_path != null); + std.debug.assert(result.config_path != null); } if (specified.get(.@"show-config-path")) { - const new_config = try getConfig(allocator, config.config_path, true); + const new_config = try getConfig(allocator, result.config_path); defer if (new_config.config_path) |path| allocator.free(path); defer std.json.parseFree(Config, new_config.config, .{ .allocator = allocator }); @@ -250,10 +323,11 @@ fn parseArgs( } else { logger.err("Failed to find zls.json!\n", .{}); } - return .exit; + return result; } - return .proceed; + result.action = .proceed; + return result; } const stack_frames = switch (zig_builtin.mode) { @@ -271,30 +345,45 @@ pub fn main() !void { var failing_allocator_state = if(build_options.enable_failing_allocator) debug.FailingAllocator.init(inner_allocator, build_options.enable_failing_allocator_likelihood) else void{}; const allocator: std.mem.Allocator = if(build_options.enable_failing_allocator) 
failing_allocator_state.allocator() else inner_allocator; - var config = ConfigWithPath{ - .config = undefined, - .config_path = null, - }; - defer if (config.config_path) |path| allocator.free(path); - - switch (try parseArgs(allocator, &config)) { + const result = try parseArgs(allocator); + defer if (result.config_path) |path| allocator.free(path); + defer if (result.replay_session_path) |path| allocator.free(path); + switch (result.action) { .proceed => {}, .exit => return, } - config = try getConfig(allocator, config.config_path, true); + var config = try getConfig(allocator, result.config_path); defer std.json.parseFree(Config, config.config, .{ .allocator = allocator }); + defer if (config.config_path) |path| allocator.free(path); + + if (result.replay_enabled and config.config.replay_session_path == null and config.config.record_session_path == null) { + logger.err("No replay file specified", .{}); + return; + } if (config.config_path == null) { logger.info("No config file zls.json found.", .{}); } + const record_file = if (!result.replay_enabled) getRecordFile(config.config) else null; + defer if (record_file) |file| file.close(); + + const replay_file = if (result.replay_enabled) getReplayFile(config.config) else null; + defer if (replay_file) |file| file.close(); + + std.debug.assert(record_file == null or replay_file == null); + + try updateConfig(allocator, &config.config, record_file, replay_file); + var server = try Server.init( allocator, &config.config, config.config_path, + record_file != null, + replay_file != null, ); defer server.deinit(); - try loop(&server); + try loop(&server, record_file, replay_file); } diff --git a/tests/context.zig b/tests/context.zig index bca2c8b..4292b55 100644 --- a/tests/context.zig +++ b/tests/context.zig @@ -40,7 +40,7 @@ pub const Context = struct { config.* = default_config; - var server = try Server.init(allocator, config, null); + var server = try Server.init(allocator, config, null, false, false); errdefer server.deinit(); var context: Context = .{ From b95d5095af89a3484c9124f83b7bacd2e5e38fd1 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Mon, 2 Jan 2023 09:02:28 +0000 Subject: [PATCH 16/18] enable all capabilities by default (#877) --- README.md | 8 ++++---- schema.json | 8 ++++---- src/Config.zig | 8 ++++---- src/config_gen/config.json | 8 ++++---- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index b0b5f01..e1f9e4e 100644 --- a/README.md +++ b/README.md @@ -69,12 +69,12 @@ The following options are currently available. | Option | Type | Default value | What it Does | | --- | --- | --- | --- | -| `enable_snippets` | `bool` | `false` | Enables snippet completions when the client also supports them | +| `enable_snippets` | `bool` | `true` | Enables snippet completions when the client also supports them | | `enable_ast_check_diagnostics` | `bool` | `true` | Whether to enable ast-check diagnostics | -| `enable_autofix` | `bool` | `false` | Whether to automatically fix errors on save. Currently supports adding and removing discards. | -| `enable_import_embedfile_argument_completions` | `bool` | `false` | Whether to enable import/embedFile argument completions | +| `enable_autofix` | `bool` | `true` | Whether to automatically fix errors on save. Currently supports adding and removing discards. 
| +| `enable_import_embedfile_argument_completions` | `bool` | `true` | Whether to enable import/embedFile argument completions | | `enable_semantic_tokens` | `bool` | `true` | Enables semantic token support when the client also supports it | -| `enable_inlay_hints` | `bool` | `false` | Enables inlay hint support when the client also supports it | +| `enable_inlay_hints` | `bool` | `true` | Enables inlay hint support when the client also supports it | | `inlay_hints_show_builtin` | `bool` | `true` | Enable inlay hints for builtin functions | | `inlay_hints_exclude_single_argument` | `bool` | `true` | Don't show inlay hints for single argument calls | | `inlay_hints_hide_redundant_param_names` | `bool` | `false` | Hides inlay hints when parameter name matches the identifier (e.g. foo: foo) | diff --git a/schema.json b/schema.json index 665c5de..5675491 100644 --- a/schema.json +++ b/schema.json @@ -7,7 +7,7 @@ "enable_snippets": { "description": "Enables snippet completions when the client also supports them", "type": "boolean", - "default": "false" + "default": "true" }, "enable_ast_check_diagnostics": { "description": "Whether to enable ast-check diagnostics", @@ -17,12 +17,12 @@ "enable_autofix": { "description": "Whether to automatically fix errors on save. Currently supports adding and removing discards.", "type": "boolean", - "default": "false" + "default": "true" }, "enable_import_embedfile_argument_completions": { "description": "Whether to enable import/embedFile argument completions", "type": "boolean", - "default": "false" + "default": "true" }, "enable_semantic_tokens": { "description": "Enables semantic token support when the client also supports it", @@ -32,7 +32,7 @@ "enable_inlay_hints": { "description": "Enables inlay hint support when the client also supports it", "type": "boolean", - "default": "false" + "default": "true" }, "inlay_hints_show_builtin": { "description": "Enable inlay hints for builtin functions", diff --git a/src/Config.zig b/src/Config.zig index 9ce599b..a01861c 100644 --- a/src/Config.zig +++ b/src/Config.zig @@ -5,22 +5,22 @@ //! GENERATED BY src/config_gen/config_gen.zig /// Enables snippet completions when the client also supports them -enable_snippets: bool = false, +enable_snippets: bool = true, /// Whether to enable ast-check diagnostics enable_ast_check_diagnostics: bool = true, /// Whether to automatically fix errors on save. Currently supports adding and removing discards. -enable_autofix: bool = false, +enable_autofix: bool = true, /// Whether to enable import/embedFile argument completions -enable_import_embedfile_argument_completions: bool = false, +enable_import_embedfile_argument_completions: bool = true, /// Enables semantic token support when the client also supports it enable_semantic_tokens: bool = true, /// Enables inlay hint support when the client also supports it -enable_inlay_hints: bool = false, +enable_inlay_hints: bool = true, /// Enable inlay hints for builtin functions inlay_hints_show_builtin: bool = true, diff --git a/src/config_gen/config.json b/src/config_gen/config.json index 1a8a78c..ed21934 100644 --- a/src/config_gen/config.json +++ b/src/config_gen/config.json @@ -4,7 +4,7 @@ "name": "enable_snippets", "description": "Enables snippet completions when the client also supports them", "type": "bool", - "default": "false", + "default": "true", "setup_question": "Do you want to enable snippets?" }, { @@ -18,14 +18,14 @@ "name": "enable_autofix", "description": "Whether to automatically fix errors on save. 
Currently supports adding and removing discards.", "type": "bool", - "default": "false", + "default": "true", "setup_question": "Do you want to zls to automatically try to fix errors on save? (supports adding & removing discards)" }, { "name": "enable_import_embedfile_argument_completions", "description": "Whether to enable import/embedFile argument completions", "type": "bool", - "default": "false", + "default": "true", "setup_question": "Do you want to enable @import/@embedFile argument path completion?" }, { @@ -39,7 +39,7 @@ "name": "enable_inlay_hints", "description": "Enables inlay hint support when the client also supports it", "type": "bool", - "default": "false", + "default": "true", "setup_question": "Do you want to enable inlay hints?" }, { From 9badc745c5ea395ab85abde71b408d908990ba0d Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Mon, 2 Jan 2023 18:54:13 +0000 Subject: [PATCH 17/18] remove setup wizard (#878) * remove setup wizard * add back findZig function --- README.md | 7 +- src/config_gen/config.json | 67 ++++------ src/config_gen/config_gen.zig | 3 - src/configuration.zig | 37 +++++- src/main.zig | 41 +++--- src/setup.zig | 230 ---------------------------------- 6 files changed, 81 insertions(+), 304 deletions(-) delete mode 100644 src/setup.zig diff --git a/README.md b/README.md index e1f9e4e..88e1f06 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,6 @@ Building `zls` is very easy. You will need [a build of Zig master](https://zigla git clone --recurse-submodules https://github.com/zigtools/zls cd zls zig build -Drelease-safe -./zig-out/bin/zls --config # Configure ZLS ``` #### Build Options @@ -59,8 +58,10 @@ There is also a `generate-data.js` in the `src/data` folder, you'll need to run ### Configuration Options -You can configure zls by running `zls --config` or manually creating your own `zls.json` configuration file. -zls will look for a zls.json configuration file in multiple locations with the following priority: +You can configure zls by editing your `zls.json` configuration file. +Running `zls --show-config-path` will a path to an already existing `zls.json` or a path to the local configuration folder instead. + +zls will look for a `zls.json` configuration file in multiple locations with the following priority: - In the local configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders/blob/master/RESOURCES.md#folder-list)) - In the global configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders/blob/master/RESOURCES.md#folder-list)) diff --git a/src/config_gen/config.json b/src/config_gen/config.json index ed21934..f032d33 100644 --- a/src/config_gen/config.json +++ b/src/config_gen/config.json @@ -5,175 +5,150 @@ "description": "Enables snippet completions when the client also supports them", "type": "bool", "default": "true", - "setup_question": "Do you want to enable snippets?" }, { "name": "enable_ast_check_diagnostics", "description": "Whether to enable ast-check diagnostics", "type": "bool", - "default": "true", - "setup_question": "Do you want to enable ast-check diagnostics?" + "default": "true" }, { "name": "enable_autofix", "description": "Whether to automatically fix errors on save. Currently supports adding and removing discards.", "type": "bool", "default": "true", - "setup_question": "Do you want to zls to automatically try to fix errors on save? 
(supports adding & removing discards)" }, { "name": "enable_import_embedfile_argument_completions", "description": "Whether to enable import/embedFile argument completions", "type": "bool", "default": "true", - "setup_question": "Do you want to enable @import/@embedFile argument path completion?" }, { "name": "enable_semantic_tokens", "description": "Enables semantic token support when the client also supports it", "type": "bool", - "default": "true", - "setup_question": "Do you want to enable semantic highlighting?" + "default": "true" }, { "name": "enable_inlay_hints", "description": "Enables inlay hint support when the client also supports it", "type": "bool", "default": "true", - "setup_question": "Do you want to enable inlay hints?" }, { "name": "inlay_hints_show_builtin", "description": "Enable inlay hints for builtin functions", "type": "bool", - "default": "true", - "setup_question": null + "default": "true" }, { "name": "inlay_hints_exclude_single_argument", "description": "Don't show inlay hints for single argument calls", "type": "bool", - "default": "true", - "setup_question": null + "default": "true" }, { "name": "inlay_hints_hide_redundant_param_names", "description": "Hides inlay hints when parameter name matches the identifier (e.g. foo: foo)", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "inlay_hints_hide_redundant_param_names_last_token", "description": "Hides inlay hints when parameter name matches the last token of a parameter node (e.g. foo: bar.foo, foo: &foo)", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "operator_completions", "description": "Enables `*` and `?` operators in completion lists", "type": "bool", - "default": "true", - "setup_question": "Do you want to enable .* and .? completions?" + "default": "true" }, { "name": "warn_style", "description": "Enables warnings for style guideline mismatches", "type": "bool", - "default": "false", - "setup_question": "Do you want to enable style warnings?" + "default": "false" }, { "name": "highlight_global_var_declarations", "description": "Whether to highlight global var declarations", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "use_comptime_interpreter", "description": "Whether to use the comptime interpreter", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "include_at_in_builtins", "description": "Whether the @ sign should be part of the completion of builtins", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "skip_std_references", "description": "When true, skips searching for references in std. Improves lookup speed for functions in user's code. 
Renaming and go-to-definition will continue to work as is", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "max_detail_length", "description": "The detail field of completions is truncated to be no longer than this (in bytes)", "type": "usize", - "default": "1048576", - "setup_question": null + "default": "1048576" }, { "name": "record_session", "description": "When true, zls will record all request is receives and write in into `record_session_path`, so that they can replayed with `zls replay`", "type": "bool", - "default": "false", - "setup_question": null + "default": "false" }, { "name": "record_session_path", "description": "Output file path when `record_session` is set. The recommended file extension *.zlsreplay", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "replay_session_path", "description": "Used when calling `zls replay` for specifying the replay file. If no extra argument is given `record_session_path` is used as the default path.", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "builtin_path", "description": "Path to 'builtin;' useful for debugging, automatically set if let null", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "zig_lib_path", "description": "Zig library path, e.g. `/path/to/zig/lib/zig`, used to analyze std library imports", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "zig_exe_path", "description": "Zig executable path, e.g. `/path/to/zig/zig`, used to run the custom build runner. If `null`, zig is looked up in `PATH`. Will be used to infer the zig standard library path if none is provided", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "build_runner_path", "description": "Path to the `build_runner.zig` file provided by zls. null is equivalent to `${executable_directory}/build_runner.zig`", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" }, { "name": "global_cache_path", "description": "Path to a directroy that will be used as zig's cache. 
null is equivalent to `${KnownFloders.Cache}/zls`", "type": "?[]const u8", - "default": "null", - "setup_question": null + "default": "null" } ] } \ No newline at end of file diff --git a/src/config_gen/config_gen.zig b/src/config_gen/config_gen.zig index 73f9d4d..bb3249d 100644 --- a/src/config_gen/config_gen.zig +++ b/src/config_gen/config_gen.zig @@ -11,9 +11,6 @@ const ConfigOption = struct { type: []const u8, /// used in Config.zig as the default initializer default: []const u8, - /// If set, this option can be configured through `zls --config` - /// currently unused but could later be used to automatically generate queries for setup.zig - setup_question: ?[]const u8, }; const Config = struct { diff --git a/src/configuration.zig b/src/configuration.zig index 8006941..ebab2e8 100644 --- a/src/configuration.zig +++ b/src/configuration.zig @@ -1,6 +1,6 @@ const std = @import("std"); +const builtin = @import("builtin"); -const setup = @import("setup.zig"); const tracy = @import("tracy.zig"); const known_folders = @import("known-folders"); @@ -65,7 +65,7 @@ pub fn configChanged(config: *Config, allocator: std.mem.Allocator, builtin_crea logger.debug("zig path `{s}` is not absolute, will look in path", .{exe_path}); allocator.free(exe_path); } - config.zig_exe_path = try setup.findZig(allocator); + config.zig_exe_path = try findZig(allocator); } if (config.zig_exe_path) |exe_path| blk: { @@ -208,3 +208,36 @@ fn getConfigurationType() type { config_info.Struct.decls = &.{}; return @Type(config_info); } + +pub fn findZig(allocator: std.mem.Allocator) !?[]const u8 { + const env_path = std.process.getEnvVarOwned(allocator, "PATH") catch |err| switch (err) { + error.EnvironmentVariableNotFound => { + return null; + }, + else => return err, + }; + defer allocator.free(env_path); + + const exe_extension = builtin.target.exeFileExt(); + const zig_exe = try std.fmt.allocPrint(allocator, "zig{s}", .{exe_extension}); + defer allocator.free(zig_exe); + + var it = std.mem.tokenize(u8, env_path, &[_]u8{std.fs.path.delimiter}); + while (it.next()) |path| { + if (builtin.os.tag == .windows) { + if (std.mem.indexOfScalar(u8, path, '/') != null) continue; + } + const full_path = try std.fs.path.join(allocator, &[_][]const u8{ path, zig_exe }); + defer allocator.free(full_path); + + if (!std.fs.path.isAbsolute(full_path)) continue; + + const file = std.fs.openFileAbsolute(full_path, .{}) catch continue; + defer file.close(); + const stat = file.stat() catch continue; + if (stat.kind == .Directory) continue; + + return try allocator.dupe(u8, full_path); + } + return null; +} \ No newline at end of file diff --git a/src/main.zig b/src/main.zig index 4bb1075..e4f3407 100644 --- a/src/main.zig +++ b/src/main.zig @@ -6,7 +6,6 @@ const known_folders = @import("known-folders"); const Config = @import("Config.zig"); const configuration = @import("configuration.zig"); const Server = @import("Server.zig"); -const setup = @import("setup.zig"); const Header = @import("Header.zig"); const debug = @import("debug.zig"); @@ -204,7 +203,6 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { const ArgId = enum { help, version, - config, replay, @"enable-debug-log", @"show-config-path", @@ -229,7 +227,6 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { var cmd_infos: InfoMap = InfoMap.init(.{ .help = "Prints this message.", .version = "Prints the compiler version with which the server was compiled.", - .config = "Run the ZLS configuration wizard.", .replay = "Replay a previous recorded zls session", 
.@"enable-debug-log" = "Enables debug logs.", .@"config-path" = "Specify the path to a configuration file specifying LSP behaviour.", @@ -274,7 +271,11 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { specified.set(id, true); switch (id) { - .help, .version, .@"enable-debug-log", .config, .@"show-config-path" => {}, + .help, + .version, + .@"enable-debug-log", + .@"show-config-path", + => {}, .@"config-path" => { const path = args_it.next() orelse { try stderr.print("Expected configuration file path after --config-path argument.\n", .{}); @@ -298,10 +299,6 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { try stdout.writeAll(build_options.version ++ "\n"); return result; } - if (specified.get(.config)) { - try setup.wizard(allocator); - return result; - } if (specified.get(.@"enable-debug-log")) { actual_log_level = .debug; logger.info("Enabled debug logging.\n", .{}); @@ -314,15 +311,19 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult { defer if (new_config.config_path) |path| allocator.free(path); defer std.json.parseFree(Config, new_config.config, .{ .allocator = allocator }); - if (new_config.config_path) |path| { - const full_path = try std.fs.path.resolve(allocator, &.{ path, "zls.json" }); - defer allocator.free(full_path); - - try stdout.writeAll(full_path); - try stdout.writeByte('\n'); - } else { - logger.err("Failed to find zls.json!\n", .{}); - } + const full_path = if (new_config.config_path) |path| blk: { + break :blk try std.fs.path.resolve(allocator, &.{ path, "zls.json" }); + } else blk: { + const local_config_path = try known_folders.getPath(allocator, .local_configuration) orelse { + logger.err("failed to find local configuration folder", .{}); + return result; + }; + defer allocator.free(local_config_path); + break :blk try std.fs.path.resolve(allocator, &.{ local_config_path, "zls.json" }); + }; + defer allocator.free(full_path); + try stdout.writeAll(full_path); + try stdout.writeByte('\n'); return result; } @@ -341,9 +342,9 @@ pub fn main() !void { var tracy_state = if (tracy.enable_allocation) tracy.tracyAllocator(gpa_state.allocator()) else void{}; const inner_allocator: std.mem.Allocator = if (tracy.enable_allocation) tracy_state.allocator() else gpa_state.allocator(); - - var failing_allocator_state = if(build_options.enable_failing_allocator) debug.FailingAllocator.init(inner_allocator, build_options.enable_failing_allocator_likelihood) else void{}; - const allocator: std.mem.Allocator = if(build_options.enable_failing_allocator) failing_allocator_state.allocator() else inner_allocator; + + var failing_allocator_state = if (build_options.enable_failing_allocator) debug.FailingAllocator.init(inner_allocator, build_options.enable_failing_allocator_likelihood) else void{}; + const allocator: std.mem.Allocator = if (build_options.enable_failing_allocator) failing_allocator_state.allocator() else inner_allocator; const result = try parseArgs(allocator); defer if (result.config_path) |path| allocator.free(path); diff --git a/src/setup.zig b/src/setup.zig deleted file mode 100644 index 6085a03..0000000 --- a/src/setup.zig +++ /dev/null @@ -1,230 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const known_folders = @import("known-folders"); - -/// Caller must free memory. -pub fn askString(allocator: std.mem.Allocator, prompt: []const u8, max_size: usize) ![]u8 { - const in = std.io.getStdIn().reader(); - const out = std.io.getStdOut().writer(); - - try out.print("? 
{s}", .{prompt}); - - const result = try in.readUntilDelimiterAlloc(allocator, '\n', max_size); - return if (std.mem.endsWith(u8, result, "\r")) result[0..(result.len - 1)] else result; -} - -/// Caller must free memory. Max size is recommended to be a high value, like 512. -pub fn askDirPath(allocator: std.mem.Allocator, prompt: []const u8, max_size: usize) ![]u8 { - const out = std.io.getStdOut().writer(); - - while (true) { - const path = try askString(allocator, prompt, max_size); - if (!std.fs.path.isAbsolute(path)) { - try out.writeAll("Error: Invalid directory, please try again.\n\n"); - allocator.free(path); - continue; - } - - var dir = std.fs.cwd().openDir(path, std.fs.Dir.OpenDirOptions{}) catch { - try out.writeAll("Error: Invalid directory, please try again.\n\n"); - allocator.free(path); - continue; - }; - - dir.close(); - return path; - } -} - -pub fn askBool(prompt: []const u8) !bool { - const in = std.io.getStdIn().reader(); - const out = std.io.getStdOut().writer(); - - var buffer: [1]u8 = undefined; - - while (true) { - try out.print("? {s} (y/n) > ", .{prompt}); - - const read = in.read(&buffer) catch continue; - try in.skipUntilDelimiterOrEof('\n'); - - if (read == 0) return error.EndOfStream; - - switch (buffer[0]) { - 'y' => return true, - 'n' => return false, - else => continue, - } - } -} - -pub fn askSelectOne(prompt: []const u8, comptime Options: type) !Options { - const in = std.io.getStdIn().reader(); - const out = std.io.getStdOut().writer(); - - try out.print("? {s} (select one)\n\n", .{prompt}); - - comptime var max_size: usize = 0; - inline for (@typeInfo(Options).Enum.fields) |option| { - try out.print(" - {s}\n", .{option.name}); - if (option.name.len > max_size) max_size = option.name.len; - } - - while (true) { - var buffer: [max_size + 1]u8 = undefined; - - try out.writeAll("\n> "); - - var result = (in.readUntilDelimiterOrEof(&buffer, '\n') catch { - try in.skipUntilDelimiterOrEof('\n'); - try out.writeAll("Error: Invalid option, please try again.\n"); - continue; - }) orelse return error.EndOfStream; - result = if (std.mem.endsWith(u8, result, "\r")) result[0..(result.len - 1)] else result; - - inline for (@typeInfo(Options).Enum.fields) |option| - if (std.ascii.eqlIgnoreCase(option.name, result)) - return @intToEnum(Options, option.value); - - try out.writeAll("Error: Invalid option, please try again.\n"); - } -} - -pub fn wizard(allocator: std.mem.Allocator) !void { - @setEvalBranchQuota(2500); - const stdout = std.io.getStdOut().writer(); - - try stdout.writeAll( - \\Welcome to the ZLS configuration wizard! 
- \\ * - \\ |\ - \\ /* \ - \\ | *\ - \\ _/_*___|_ x - \\ | @ @ / - \\ @ \ / - \\ \__-/ / - \\ - \\ - ); - - var local_path = known_folders.getPath(allocator, .local_configuration) catch null; - var global_path = known_folders.getPath(allocator, .global_configuration) catch null; - defer if (local_path) |d| allocator.free(d); - defer if (global_path) |d| allocator.free(d); - - const can_access_global = blk: { - std.fs.accessAbsolute(global_path orelse break :blk false, .{}) catch break :blk false; - break :blk true; - }; - - if (global_path == null and local_path == null) { - try stdout.writeAll("Could not open a global or local config directory.\n"); - return; - } - var config_path: []const u8 = undefined; - if (can_access_global and try askBool("Should this configuration be system-wide?")) { - config_path = global_path.?; - } else { - if (local_path) |p| { - config_path = p; - } else { - try stdout.writeAll("Could not find a local config directory.\n"); - return; - } - } - var dir = std.fs.cwd().openDir(config_path, .{}) catch |err| { - try stdout.print("Could not open {s}: {}.\n", .{ config_path, err }); - return; - }; - defer dir.close(); - var file = dir.createFile("zls.json", .{}) catch |err| { - try stdout.print("Could not create {s}/zls.json: {}.\n", .{ config_path, err }); - return; - }; - defer file.close(); - const out = file.writer(); - - var zig_exe_path = try findZig(allocator); - defer if (zig_exe_path) |p| allocator.free(p); - - if (zig_exe_path) |path| { - try stdout.print("Found zig executable '{s}' in PATH.\n", .{path}); - } else { - try stdout.writeAll("Could not find 'zig' in PATH\n"); - zig_exe_path = try askString(allocator, if (builtin.os.tag == .windows) - \\What is the path to the 'zig' executable you would like to use? - \\Note that due to a bug in zig (https://github.com/ziglang/zig/issues/6044), - \\your zig directory cannot contain the '/' character. - else - "What is the path to the 'zig' executable you would like to use?", std.fs.MAX_PATH_BYTES); - } - - const snippets = try askBool("Do you want to enable snippets?"); - const ast_check = try askBool("Do you want to enable ast-check diagnostics?"); - const autofix = try askBool("Do you want to zls to automatically try to fix errors on save? (supports adding & removing discards)"); - const ief_apc = try askBool("Do you want to enable @import/@embedFile argument path completion?"); - const style = try askBool("Do you want to enable style warnings?"); - const semantic_tokens = try askBool("Do you want to enable semantic highlighting?"); - const inlay_hints = try askBool("Do you want to enable inlay hints?"); - const operator_completions = try askBool("Do you want to enable .* and .? completions?"); - - std.debug.print("Writing config to {s}/zls.json ... ", .{config_path}); - - try std.json.stringify(.{ - .@"$schema" = "https://raw.githubusercontent.com/zigtools/zls/master/schema.json", - .zig_exe_path = zig_exe_path, - .enable_snippets = snippets, - .enable_ast_check_diagnostics = ast_check, - .enable_autofix = autofix, - .enable_import_embedfile_argument_completions = ief_apc, - .warn_style = style, - .enable_semantic_tokens = semantic_tokens, - .enable_inlay_hints = inlay_hints, - .operator_completions = operator_completions, - }, .{ - .whitespace = .{}, - }, out); - - try stdout.writeAll( - \\successful. - \\ - \\You can find information on how to setup zls for your editor on zigtools.github.io/install-zls/ - \\ - \\Thank you for choosing ZLS! 
- \\ - ); -} - -pub fn findZig(allocator: std.mem.Allocator) !?[]const u8 { - const env_path = std.process.getEnvVarOwned(allocator, "PATH") catch |err| switch (err) { - error.EnvironmentVariableNotFound => { - return null; - }, - else => return err, - }; - defer allocator.free(env_path); - - const exe_extension = builtin.target.exeFileExt(); - const zig_exe = try std.fmt.allocPrint(allocator, "zig{s}", .{exe_extension}); - defer allocator.free(zig_exe); - - var it = std.mem.tokenize(u8, env_path, &[_]u8{std.fs.path.delimiter}); - while (it.next()) |path| { - if (builtin.os.tag == .windows) { - if (std.mem.indexOfScalar(u8, path, '/') != null) continue; - } - const full_path = try std.fs.path.join(allocator, &[_][]const u8{ path, zig_exe }); - defer allocator.free(full_path); - - if (!std.fs.path.isAbsolute(full_path)) continue; - - const file = std.fs.openFileAbsolute(full_path, .{}) catch continue; - defer file.close(); - const stat = file.stat() catch continue; - if (stat.kind == .Directory) continue; - - return try allocator.dupe(u8, full_path); - } - return null; -} From c718e12d16cabb422671bc0b2472565f5e84ad13 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Mon, 2 Jan 2023 19:59:01 +0000 Subject: [PATCH 18/18] Autofix improvements (#879) * improve autofix stability and client support * run zig fmt --- src/DocumentStore.zig | 4 +- src/Header.zig | 4 +- src/Server.zig | 104 ++++++++++++++++++++-------------- src/analysis.zig | 2 +- src/ast.zig | 2 +- src/config_gen/config_gen.zig | 8 +-- src/data/master.zig | 2 +- 7 files changed, 71 insertions(+), 55 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 7e5d2cd..d78d809 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -653,14 +653,14 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]u8, open: bool) erro self.build_files.swapRemoveAt(gop.index); log.debug("Failed to load build file {s}: (error: {})", .{ uri, err }); } - if(!gop.found_existing) { + if (!gop.found_existing) { const duped_uri = try self.allocator.dupe(u8, uri); gop.value_ptr.* = try self.createBuildFile(duped_uri); gop.key_ptr.* = gop.value_ptr.uri; } } else if (self.config.zig_exe_path != null and !isBuiltinFile(handle.uri) and !isInStd(handle.uri)) blk: { // log.debug("Going to walk down the tree towards: {s}", .{uri}); - + // walk down the tree towards the uri. When we hit build.zig files // determine if the uri we're interested in is involved with the build. 
// This ensures that _relevant_ build.zig files higher in the diff --git a/src/Header.zig b/src/Header.zig index 669948b..16b6600 100644 --- a/src/Header.zig +++ b/src/Header.zig @@ -49,9 +49,9 @@ pub fn parse(allocator: std.mem.Allocator, include_carriage_return: bool, reader pub fn write(header: Header, include_carriage_return: bool, writer: anytype) @TypeOf(writer).Error!void { const seperator: []const u8 = if (include_carriage_return) "\r\n" else "\n"; - try writer.print("Content-Length: {}{s}", .{header.content_length, seperator}); + try writer.print("Content-Length: {}{s}", .{ header.content_length, seperator }); if (header.content_type) |content_type| { - try writer.print("Content-Type: {s}{s}", .{content_type, seperator}); + try writer.print("Content-Type: {s}{s}", .{ content_type, seperator }); } try writer.writeAll(seperator); } diff --git a/src/Server.zig b/src/Server.zig index 04353e0..5f750c4 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -53,13 +53,13 @@ status: enum { // Code was based off of https://github.com/andersfr/zig-lsp/blob/master/server.zig -const ClientCapabilities = struct { +const ClientCapabilities = packed struct { supports_snippets: bool = false, - supports_semantic_tokens: bool = false, - supports_inlay_hints: bool = false, + supports_apply_edits: bool = false, supports_will_save: bool = false, supports_will_save_wait_until: bool = false, supports_publish_diagnostics: bool = false, + supports_code_action_fixall: bool = false, hover_supports_md: bool = false, completion_doc_supports_md: bool = false, label_details_support: bool = false, @@ -469,8 +469,26 @@ fn getAstCheckDiagnostics( } } +fn getAutofixMode(server: *Server) enum { + on_save, + will_save_wait_until, + fixall, + none, +} { + if (!server.config.enable_autofix) return .none; + if (server.client_capabilities.supports_code_action_fixall) return .fixall; + if (server.client_capabilities.supports_apply_edits) { + if (server.client_capabilities.supports_will_save_wait_until) return .will_save_wait_until; + return .on_save; + } + return .none; +} + /// caller owns returned memory. 
fn autofix(server: *Server, allocator: std.mem.Allocator, handle: *const DocumentStore.Handle) !std.ArrayListUnmanaged(types.TextEdit) { + if (!server.config.enable_ast_check_diagnostics) return .{}; + + if (handle.tree.errors.len != 0) return .{}; var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){}; try getAstCheckDiagnostics(server, handle.*, &diagnostics); @@ -1564,11 +1582,16 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); + var skip_set_fixall = false; + if (request.clientInfo) |clientInfo| { log.info("client is '{s}-{s}'", .{ clientInfo.name, clientInfo.version orelse "" }); if (std.mem.eql(u8, clientInfo.name, "Sublime Text LSP")) blk: { server.config.max_detail_length = 256; + // TODO investigate why fixall doesn't work in sublime text + server.client_capabilities.supports_code_action_fixall = false; + skip_set_fixall = true; const version_str = clientInfo.version orelse break :blk; const version = std.SemanticVersion.parse(version_str) catch break :blk; @@ -1577,6 +1600,9 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In if (version.major == 0) { server.config.include_at_in_builtins = true; } + } else if (std.mem.eql(u8, clientInfo.name, "Visual Studio Code")) { + server.client_capabilities.supports_code_action_fixall = true; + skip_set_fixall = true; } } @@ -1604,8 +1630,6 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In } if (request.capabilities.textDocument) |textDocument| { - server.client_capabilities.supports_semantic_tokens = textDocument.semanticTokens != null; - server.client_capabilities.supports_inlay_hints = textDocument.inlayHint != null; server.client_capabilities.supports_publish_diagnostics = textDocument.publishDiagnostics != null; if (textDocument.hover) |hover| { if (hover.contentFormat) |content_format| { @@ -1635,6 +1659,14 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In server.client_capabilities.supports_will_save = synchronization.willSave orelse false; server.client_capabilities.supports_will_save_wait_until = synchronization.willSaveWaitUntil orelse false; } + if (textDocument.codeAction) |codeaction| { + if (codeaction.codeActionLiteralSupport) |literlSupport| { + if (!skip_set_fixall) { + const fixall = std.mem.indexOfScalar(types.CodeActionKind, literlSupport.codeActionKind.valueSet, .@"source.fixAll") != null; + server.client_capabilities.supports_code_action_fixall = fixall; + } + } + } } // NOTE: everything is initialized, we got the client capabilities @@ -1646,6 +1678,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) !types.In } if (request.capabilities.workspace) |workspace| { + server.client_capabilities.supports_apply_edits = workspace.applyEdit orelse false; server.client_capabilities.supports_configuration = workspace.configuration orelse false; if (workspace.didChangeConfiguration) |did_change| { if (did_change.dynamicRegistration orelse false) { @@ -1943,25 +1976,21 @@ fn saveDocumentHandler(server: *Server, notification: types.DidSaveTextDocumentP const handle = server.document_store.getHandle(uri) orelse return; try server.document_store.applySave(handle); - if (handle.tree.errors.len != 0) return; - if (!server.config.enable_ast_check_diagnostics) return; - if (!server.config.enable_autofix) return; - if (server.client_capabilities.supports_will_save) return; - if 
(server.client_capabilities.supports_will_save_wait_until) return; + if (server.getAutofixMode() == .on_save) { + var text_edits = try server.autofix(allocator, handle); - var text_edits = try server.autofix(allocator, handle); + var workspace_edit = types.WorkspaceEdit{ .changes = .{} }; + try workspace_edit.changes.?.putNoClobber(allocator, uri, try text_edits.toOwnedSlice(allocator)); - var workspace_edit = types.WorkspaceEdit{ .changes = .{} }; - try workspace_edit.changes.?.putNoClobber(allocator, uri, try text_edits.toOwnedSlice(allocator)); - - server.sendRequest( - .{ .string = "apply_edit" }, - "workspace/applyEdit", - types.ApplyWorkspaceEditParams{ - .label = "autofix", - .edit = workspace_edit, - }, - ); + server.sendRequest( + .{ .string = "apply_edit" }, + "workspace/applyEdit", + types.ApplyWorkspaceEditParams{ + .label = "autofix", + .edit = workspace_edit, + }, + ); + } } fn closeDocumentHandler(server: *Server, notification: types.DidCloseTextDocumentParams) error{}!void { @@ -1971,27 +2000,15 @@ fn closeDocumentHandler(server: *Server, notification: types.DidCloseTextDocumen server.document_store.closeDocument(notification.textDocument.uri); } -fn willSaveHandler(server: *Server, request: types.WillSaveTextDocumentParams) !?[]types.TextEdit { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - if (server.client_capabilities.supports_will_save_wait_until) return null; - return try willSaveWaitUntilHandler(server, request); -} - fn willSaveWaitUntilHandler(server: *Server, request: types.WillSaveTextDocumentParams) !?[]types.TextEdit { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); const allocator = server.arena.allocator(); - if (!server.config.enable_ast_check_diagnostics) return null; - if (!server.config.enable_autofix) return null; + if (server.getAutofixMode() != .will_save_wait_until) return null; - const uri = request.textDocument.uri; - - const handle = server.document_store.getHandle(uri) orelse return null; - if (handle.tree.errors.len != 0) return null; + const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; var text_edits = try server.autofix(allocator, handle); @@ -2422,15 +2439,15 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) !?[]types .offset_encoding = server.offset_encoding, }; - var actions = std.ArrayListUnmanaged(types.CodeAction){}; - - for (request.context.diagnostics) |diagnostic| { - try builder.generateCodeAction(diagnostic, &actions); + // as of right now, only ast-check errors may get a code action + var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){}; + if (server.config.enable_ast_check_diagnostics and handle.tree.errors.len == 0) { + try getAstCheckDiagnostics(server, handle.*, &diagnostics); } - for (actions.items) |*action| { - // TODO query whether SourceFixAll is supported by the server - if (action.kind.? 
== .@"source.fixAll") action.kind = .quickfix; + var actions = std.ArrayListUnmanaged(types.CodeAction){}; + for (diagnostics.items) |diagnostic| { + try builder.generateCodeAction(diagnostic, &actions); } return actions.items; @@ -2981,7 +2998,6 @@ fn processMessage(server: *Server, message: Message) Error!void { .{ "textDocument/didChange", changeDocumentHandler }, .{ "textDocument/didSave", saveDocumentHandler }, .{ "textDocument/didClose", closeDocumentHandler }, - .{ "textDocument/willSave", willSaveHandler }, .{ "textDocument/willSaveWaitUntil", willSaveWaitUntilHandler }, .{ "textDocument/semanticTokens/full", semanticTokensFullHandler }, .{ "textDocument/inlayHint", inlayHintHandler }, diff --git a/src/analysis.zig b/src/analysis.zig index ee34216..7c1879f 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1466,7 +1466,7 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con if (params.len != 1) return null; - if(node_tags[params[0]] != .string_literal) return null; + if (node_tags[params[0]] != .string_literal) return null; const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); return import_str[1 .. import_str.len - 1]; diff --git a/src/ast.zig b/src/ast.zig index 48db2f4..5c7f5ce 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -539,7 +539,7 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex { .container_decl_arg_trailing, .switch_comma, => { - if(datas[n].rhs != 0) { + if (datas[n].rhs != 0) { const members = tree.extraData(datas[n].rhs, Node.SubRange); std.debug.assert(members.end - members.start > 0); end_offset += 2; // for the comma + rbrace diff --git a/src/config_gen/config_gen.zig b/src/config_gen/config_gen.zig index bb3249d..8939274 100644 --- a/src/config_gen/config_gen.zig +++ b/src/config_gen/config_gen.zig @@ -188,14 +188,14 @@ fn generateVSCodeConfigFile(allocator: std.mem.Allocator, config: Config, path: configuration.putAssumeCapacityNoClobber("trace.server", .{ .scope = "window", .type = "string", - .@"enum" = &.{"off", "message", "verbose"}, + .@"enum" = &.{ "off", "message", "verbose" }, .description = "Traces the communication between VS Code and the language server.", - .default = .{.String = "off"}, + .default = .{ .String = "off" }, }); configuration.putAssumeCapacityNoClobber("check_for_update", .{ .type = "boolean", .description = "Whether to automatically check for new updates", - .default = .{.Bool = true}, + .default = .{ .Bool = true }, }); configuration.putAssumeCapacityNoClobber("path", .{ .type = "string", @@ -214,7 +214,7 @@ fn generateVSCodeConfigFile(allocator: std.mem.Allocator, config: Config, path: .type = try zigTypeToTypescript(option.type), .description = option.description, .format = if (std.mem.indexOf(u8, option.name, "path") != null) "path" else null, - .default = if(default == .Null) null else default, + .default = if (default == .Null) null else default, }); } diff --git a/src/data/master.zig b/src/data/master.zig index b116201..8896e78 100644 --- a/src/data/master.zig +++ b/src/data/master.zig @@ -1779,4 +1779,4 @@ pub const builtins = [_]Builtin{ "Element: type", }, }, -}; \ No newline at end of file +};