Merge pull request #673 from Techatrix/code-actions
Implement Code actions with autofix
This commit is contained in:
commit
e776a5c8e3
@ -103,6 +103,7 @@ The following options are currently available.
|
||||
| --- | --- | --- | --- |
|
||||
| `enable_snippets` | `bool` | `false` | Enables snippet completions when the client also supports them. |
|
||||
| `enable_ast_check_diagnostics` | `bool` | `true`| Whether to enable ast-check diagnostics |
|
||||
| `enable_autofix` | `bool` | `false`| Whether to automatically fix errors on save. Currently supports adding and removing discards. |
|
||||
| `enable_import_embedfile_argument_completions` | `bool` | `false` | Whether to enable import/embedFile argument completions |
|
||||
| `zig_lib_path` | `?[]const u8` | `null` | zig library path, e.g. `/path/to/zig/lib/zig`, used to analyze std library imports. |
|
||||
| `zig_exe_path` | `?[]const u8` | `null` | zig executable path, e.g. `/path/to/zig/zig`, used to run the custom build runner. If `null`, zig is looked up in `PATH`. Will be used to infer the zig standard library path if none is provided. |
|
||||
|
@ -14,6 +14,11 @@
|
||||
"type": "boolean",
|
||||
"default": "true"
|
||||
},
|
||||
"enable_autofix": {
|
||||
"description": "Whether to automatically fix errors on save. Currently supports adding and removing discards.",
|
||||
"type": "boolean",
|
||||
"default": "false"
|
||||
},
|
||||
"enable_import_embedfile_argument_completions": {
|
||||
"description": "Whether to enable import/embedFile argument completions",
|
||||
"type": "boolean",
|
||||
|
@ -16,6 +16,10 @@ enable_snippets: bool = false,
|
||||
/// Whether to enable ast-check diagnostics
|
||||
enable_ast_check_diagnostics: bool = true,
|
||||
|
||||
/// Whether to automatically fix errors on save.
|
||||
/// Currently supports adding and removing discards.
|
||||
enable_autofix: bool = false,
|
||||
|
||||
/// Whether to enable import/embedFile argument completions (NOTE: these are triggered manually as updating the autotrigger characters may cause issues)
|
||||
enable_import_embedfile_argument_completions: bool = false,
|
||||
|
||||
|
308
src/Server.zig
308
src/Server.zig
@ -12,6 +12,7 @@ const references = @import("references.zig");
|
||||
const offsets = @import("offsets.zig");
|
||||
const semantic_tokens = @import("semantic_tokens.zig");
|
||||
const inlay_hints = @import("inlay_hints.zig");
|
||||
const code_actions = @import("code_actions.zig");
|
||||
const shared = @import("shared.zig");
|
||||
const Ast = std.zig.Ast;
|
||||
const tracy = @import("tracy.zig");
|
||||
@ -141,7 +142,7 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
|
||||
});
|
||||
}
|
||||
|
||||
fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
|
||||
fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.Handle) !void {
|
||||
const tracy_zone = tracy.trace(@src());
|
||||
defer tracy_zone.end();
|
||||
|
||||
@ -165,88 +166,8 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
|
||||
});
|
||||
}
|
||||
|
||||
if (server.config.enable_ast_check_diagnostics and tree.errors.len == 0) diag: {
|
||||
if (server.config.zig_exe_path) |zig_exe_path| {
|
||||
var process = std.ChildProcess.init(&[_][]const u8{ zig_exe_path, "ast-check", "--color", "off" }, server.allocator);
|
||||
process.stdin_behavior = .Pipe;
|
||||
process.stderr_behavior = .Pipe;
|
||||
|
||||
process.spawn() catch |err| {
|
||||
log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
|
||||
break :diag;
|
||||
};
|
||||
try process.stdin.?.writeAll(handle.document.text);
|
||||
process.stdin.?.close();
|
||||
|
||||
process.stdin = null;
|
||||
|
||||
const stderr_bytes = try process.stderr.?.reader().readAllAlloc(server.allocator, std.math.maxInt(usize));
|
||||
defer server.allocator.free(stderr_bytes);
|
||||
|
||||
switch (try process.wait()) {
|
||||
.Exited => {
|
||||
// NOTE: I believe that with color off it's one diag per line; is this correct?
|
||||
var line_iterator = std.mem.split(u8, stderr_bytes, "\n");
|
||||
|
||||
while (line_iterator.next()) |line| lin: {
|
||||
var pos_and_diag_iterator = std.mem.split(u8, line, ":");
|
||||
const maybe_first = pos_and_diag_iterator.next();
|
||||
if (maybe_first) |first| {
|
||||
if (first.len <= 1) break :lin;
|
||||
} else break;
|
||||
|
||||
const utf8_position = types.Position{
|
||||
.line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
|
||||
.character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
|
||||
};
|
||||
|
||||
// zig uses utf-8 encoding for character offsets
|
||||
const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
|
||||
const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);
|
||||
|
||||
const msg = pos_and_diag_iterator.rest()[1..];
|
||||
|
||||
if (std.mem.startsWith(u8, msg, "error: ")) {
|
||||
try diagnostics.append(allocator, .{
|
||||
.range = range,
|
||||
.severity = .Error,
|
||||
.code = "ast_check",
|
||||
.source = "zls",
|
||||
.message = try server.arena.allocator().dupe(u8, msg["error: ".len..]),
|
||||
});
|
||||
} else if (std.mem.startsWith(u8, msg, "note: ")) {
|
||||
var latestDiag = &diagnostics.items[diagnostics.items.len - 1];
|
||||
|
||||
var fresh = if (latestDiag.relatedInformation.len == 0)
|
||||
try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1)
|
||||
else
|
||||
try server.arena.allocator().realloc(@ptrCast([]types.DiagnosticRelatedInformation, latestDiag.relatedInformation), latestDiag.relatedInformation.len + 1);
|
||||
|
||||
const location = types.Location{
|
||||
.uri = handle.uri(),
|
||||
.range = range,
|
||||
};
|
||||
|
||||
fresh[fresh.len - 1] = .{
|
||||
.location = location,
|
||||
.message = try server.arena.allocator().dupe(u8, msg["note: ".len..]),
|
||||
};
|
||||
|
||||
latestDiag.relatedInformation = fresh;
|
||||
} else {
|
||||
try diagnostics.append(allocator, .{
|
||||
.range = range,
|
||||
.severity = .Error,
|
||||
.code = "ast_check",
|
||||
.source = "zls",
|
||||
.message = try server.arena.allocator().dupe(u8, msg),
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
if (server.config.enable_ast_check_diagnostics and tree.errors.len == 0) {
|
||||
try getAstCheckDiagnostics(server, handle, &diagnostics);
|
||||
}
|
||||
|
||||
if (server.config.warn_style) {
|
||||
@ -351,6 +272,98 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
|
||||
});
|
||||
}
|
||||
|
||||
fn getAstCheckDiagnostics(
|
||||
server: *Server,
|
||||
handle: *DocumentStore.Handle,
|
||||
diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
|
||||
) !void {
|
||||
var allocator = server.arena.allocator();
|
||||
|
||||
const zig_exe_path = server.config.zig_exe_path orelse return;
|
||||
|
||||
var process = std.ChildProcess.init(&[_][]const u8{ zig_exe_path, "ast-check", "--color", "off" }, server.allocator);
|
||||
process.stdin_behavior = .Pipe;
|
||||
process.stderr_behavior = .Pipe;
|
||||
|
||||
process.spawn() catch |err| {
|
||||
log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
|
||||
return;
|
||||
};
|
||||
try process.stdin.?.writeAll(handle.document.text);
|
||||
process.stdin.?.close();
|
||||
|
||||
process.stdin = null;
|
||||
|
||||
const stderr_bytes = try process.stderr.?.reader().readAllAlloc(server.allocator, std.math.maxInt(usize));
|
||||
defer server.allocator.free(stderr_bytes);
|
||||
|
||||
const term = process.wait() catch |err| {
|
||||
log.warn("Failed to await zig ast-check process, error: {}", .{err});
|
||||
return;
|
||||
};
|
||||
|
||||
if (term != .Exited) return;
|
||||
|
||||
// NOTE: I believe that with color off it's one diag per line; is this correct?
|
||||
var line_iterator = std.mem.split(u8, stderr_bytes, "\n");
|
||||
|
||||
while (line_iterator.next()) |line| lin: {
|
||||
var pos_and_diag_iterator = std.mem.split(u8, line, ":");
|
||||
const maybe_first = pos_and_diag_iterator.next();
|
||||
if (maybe_first) |first| {
|
||||
if (first.len <= 1) break :lin;
|
||||
} else break;
|
||||
|
||||
const utf8_position = types.Position{
|
||||
.line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
|
||||
.character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
|
||||
};
|
||||
|
||||
// zig uses utf-8 encoding for character offsets
|
||||
const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
|
||||
const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);
|
||||
|
||||
const msg = pos_and_diag_iterator.rest()[1..];
|
||||
|
||||
if (std.mem.startsWith(u8, msg, "error: ")) {
|
||||
try diagnostics.append(allocator, .{
|
||||
.range = range,
|
||||
.severity = .Error,
|
||||
.code = "ast_check",
|
||||
.source = "zls",
|
||||
.message = try server.arena.allocator().dupe(u8, msg["error: ".len..]),
|
||||
});
|
||||
} else if (std.mem.startsWith(u8, msg, "note: ")) {
|
||||
var latestDiag = &diagnostics.items[diagnostics.items.len - 1];
|
||||
|
||||
var fresh = if (latestDiag.relatedInformation) |related_information|
|
||||
try server.arena.allocator().realloc(@ptrCast([]types.DiagnosticRelatedInformation, related_information), related_information.len + 1)
|
||||
else
|
||||
try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1);
|
||||
|
||||
const location = types.Location{
|
||||
.uri = handle.uri(),
|
||||
.range = range,
|
||||
};
|
||||
|
||||
fresh[fresh.len - 1] = .{
|
||||
.location = location,
|
||||
.message = try server.arena.allocator().dupe(u8, msg["note: ".len..]),
|
||||
};
|
||||
|
||||
latestDiag.relatedInformation = fresh;
|
||||
} else {
|
||||
try diagnostics.append(allocator, .{
|
||||
.range = range,
|
||||
.severity = .Error,
|
||||
.code = "ast_check",
|
||||
.source = "zls",
|
||||
.message = try server.arena.allocator().dupe(u8, msg),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn typeToCompletion(
|
||||
server: *Server,
|
||||
list: *std.ArrayListUnmanaged(types.CompletionItem),
|
||||
@ -718,16 +731,14 @@ fn hoverSymbol(
|
||||
return try respondGeneric(writer, id, null_result_response);
|
||||
}
|
||||
},
|
||||
.param_decl => |param| def: {
|
||||
.param_payload => |pay| def: {
|
||||
const param = pay.param;
|
||||
if (param.first_doc_comment) |doc_comments| {
|
||||
doc_str = try analysis.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, hover_kind, false);
|
||||
}
|
||||
|
||||
const first_token = param.first_doc_comment orelse
|
||||
param.comptime_noalias orelse
|
||||
param.name_token orelse
|
||||
tree.firstToken(param.type_expr); // extern fn
|
||||
const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
|
||||
const first_token = ast.paramFirstToken(tree, param);
|
||||
const last_token = ast.paramLastToken(tree, param);
|
||||
|
||||
const start = offsets.tokenToIndex(tree, first_token);
|
||||
const end = offsets.tokenToLoc(tree, last_token).end;
|
||||
@ -1040,7 +1051,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
|
||||
false,
|
||||
context.parent_is_type_val,
|
||||
),
|
||||
.param_decl => |param| {
|
||||
.param_payload => |pay| {
|
||||
const param = pay.param;
|
||||
const doc_kind: types.MarkupContent.Kind = if (context.server.client_capabilities.completion_doc_supports_md) .Markdown else .PlainText;
|
||||
const doc = if (param.first_doc_comment) |doc_comments|
|
||||
types.MarkupContent{
|
||||
@ -1050,11 +1062,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
|
||||
else
|
||||
null;
|
||||
|
||||
const first_token = param.first_doc_comment orelse
|
||||
param.comptime_noalias orelse
|
||||
param.name_token orelse
|
||||
tree.firstToken(param.type_expr);
|
||||
const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
|
||||
const first_token = ast.paramFirstToken(tree, param);
|
||||
const last_token = ast.paramLastToken(tree, param);
|
||||
|
||||
try context.completions.append(allocator, .{
|
||||
.label = tree.tokenSlice(param.name_token.?),
|
||||
@ -1496,23 +1505,23 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
|
||||
const tracy_zone = tracy.trace(@src());
|
||||
defer tracy_zone.end();
|
||||
|
||||
if(req.params.capabilities.general) |general| {
|
||||
if (req.params.capabilities.general) |general| {
|
||||
var supports_utf8 = false;
|
||||
var supports_utf16 = false;
|
||||
var supports_utf32 = false;
|
||||
for(general.positionEncodings.value) |encoding| {
|
||||
for (general.positionEncodings.value) |encoding| {
|
||||
if (std.mem.eql(u8, encoding, "utf-8")) {
|
||||
supports_utf8 = true;
|
||||
} else if(std.mem.eql(u8, encoding, "utf-16")) {
|
||||
} else if (std.mem.eql(u8, encoding, "utf-16")) {
|
||||
supports_utf16 = true;
|
||||
} else if(std.mem.eql(u8, encoding, "utf-32")) {
|
||||
} else if (std.mem.eql(u8, encoding, "utf-32")) {
|
||||
supports_utf32 = true;
|
||||
}
|
||||
}
|
||||
|
||||
if(supports_utf8) {
|
||||
if (supports_utf8) {
|
||||
server.offset_encoding = .utf8;
|
||||
} else if(supports_utf32) {
|
||||
} else if (supports_utf32) {
|
||||
server.offset_encoding = .utf32;
|
||||
} else {
|
||||
server.offset_encoding = .utf16;
|
||||
@ -1561,7 +1570,7 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
|
||||
.completionProvider = .{ .resolveProvider = false, .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" }, .completionItem = .{ .labelDetailsSupport = true } },
|
||||
.documentHighlightProvider = true,
|
||||
.hoverProvider = true,
|
||||
.codeActionProvider = false,
|
||||
.codeActionProvider = true,
|
||||
.declarationProvider = true,
|
||||
.definitionProvider = true,
|
||||
.typeDefinitionProvider = true,
|
||||
@ -1694,7 +1703,7 @@ fn openDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, re
|
||||
defer tracy_zone.end();
|
||||
|
||||
const handle = try server.document_store.openDocument(req.params.textDocument.uri, req.params.textDocument.text);
|
||||
try server.publishDiagnostics(writer, handle.*);
|
||||
try server.publishDiagnostics(writer, handle);
|
||||
|
||||
if (server.client_capabilities.supports_semantic_tokens) {
|
||||
const request: requests.SemanticTokensFull = .{ .params = .{ .textDocument = .{ .uri = req.params.textDocument.uri } } };
|
||||
@ -1714,21 +1723,68 @@ fn changeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId,
|
||||
};
|
||||
|
||||
try server.document_store.applyChanges(handle, req.params.contentChanges.Array, server.offset_encoding);
|
||||
try server.publishDiagnostics(writer, handle.*);
|
||||
try server.publishDiagnostics(writer, handle);
|
||||
}
|
||||
|
||||
fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) error{OutOfMemory}!void {
|
||||
fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) !void {
|
||||
const tracy_zone = tracy.trace(@src());
|
||||
defer tracy_zone.end();
|
||||
|
||||
_ = id;
|
||||
_ = writer;
|
||||
const allocator = server.arena.allocator();
|
||||
const uri = req.params.textDocument.uri;
|
||||
|
||||
const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
|
||||
log.warn("Trying to save non existent document {s}", .{req.params.textDocument.uri});
|
||||
const handle = server.document_store.getHandle(uri) orelse {
|
||||
log.warn("Trying to save non existent document {s}", .{uri});
|
||||
return;
|
||||
};
|
||||
try server.document_store.applySave(handle);
|
||||
|
||||
if (handle.tree.errors.len != 0) return;
|
||||
if (!server.config.enable_ast_check_diagnostics) return;
|
||||
if (!server.config.enable_autofix) return;
|
||||
|
||||
var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
|
||||
try getAstCheckDiagnostics(server, handle, &diagnostics);
|
||||
|
||||
var builder = code_actions.Builder{
|
||||
.arena = &server.arena,
|
||||
.document_store = &server.document_store,
|
||||
.handle = handle,
|
||||
.offset_encoding = server.offset_encoding,
|
||||
};
|
||||
|
||||
var actions = std.ArrayListUnmanaged(types.CodeAction){};
|
||||
for (diagnostics.items) |diagnostic| {
|
||||
try builder.generateCodeAction(diagnostic, &actions);
|
||||
}
|
||||
|
||||
var text_edits = std.ArrayListUnmanaged(types.TextEdit){};
|
||||
for (actions.items) |action| {
|
||||
if (action.kind != .SourceFixAll) continue;
|
||||
|
||||
if (action.edit.changes.size != 1) continue;
|
||||
const edits = action.edit.changes.get(uri) orelse continue;
|
||||
|
||||
try text_edits.appendSlice(allocator, edits.items);
|
||||
}
|
||||
|
||||
var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
|
||||
try workspace_edit.changes.putNoClobber(allocator, uri, text_edits);
|
||||
|
||||
// NOTE: stage1 moment
|
||||
const params = types.ResponseParams{
|
||||
.ApplyEdit = types.ApplyWorkspaceEditParams{
|
||||
.label = "autofix",
|
||||
.edit = workspace_edit,
|
||||
},
|
||||
};
|
||||
|
||||
try send(writer, allocator, types.Request{
|
||||
.id = .{ .String = "apply_edit" },
|
||||
.method = "workspace/applyEdit",
|
||||
.params = params,
|
||||
});
|
||||
}
|
||||
|
||||
fn closeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CloseDocument) error{}!void {
|
||||
@ -2249,6 +2305,38 @@ fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req:
|
||||
return try respondGeneric(writer, id, null_result_response);
|
||||
}
|
||||
|
||||
fn codeActionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CodeAction) !void {
|
||||
const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
|
||||
log.warn("Trying to get code actions of non existent document {s}", .{req.params.textDocument.uri});
|
||||
return try respondGeneric(writer, id, null_result_response);
|
||||
};
|
||||
|
||||
const allocator = server.arena.allocator();
|
||||
|
||||
var builder = code_actions.Builder{
|
||||
.arena = &server.arena,
|
||||
.document_store = &server.document_store,
|
||||
.handle = handle,
|
||||
.offset_encoding = server.offset_encoding,
|
||||
};
|
||||
|
||||
var actions = std.ArrayListUnmanaged(types.CodeAction){};
|
||||
|
||||
for (req.params.context.diagnostics) |diagnostic| {
|
||||
try builder.generateCodeAction(diagnostic, &actions);
|
||||
}
|
||||
|
||||
for (actions.items) |*action| {
|
||||
// TODO query whether SourceFixAll is supported by the server
|
||||
if (action.kind == .SourceFixAll) action.kind = .QuickFix;
|
||||
}
|
||||
|
||||
return try send(writer, allocator, types.Response{
|
||||
.id = id,
|
||||
.result = .{ .CodeAction = actions.items },
|
||||
});
|
||||
}
|
||||
|
||||
// Needed for the hack seen below.
|
||||
fn extractErr(val: anytype) anyerror {
|
||||
val catch |e| return e;
|
||||
@ -2276,6 +2364,8 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
|
||||
|
||||
if (id == .String and std.mem.startsWith(u8, id.String, "register"))
|
||||
return;
|
||||
if (id == .String and std.mem.startsWith(u8, id.String, "apply_edit"))
|
||||
return;
|
||||
if (id == .String and std.mem.eql(u8, id.String, "i_haz_configuration")) {
|
||||
log.info("Setting configuration...", .{});
|
||||
|
||||
@ -2360,6 +2450,7 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
|
||||
.{ "textDocument/rename", requests.Rename, renameHandler },
|
||||
.{ "textDocument/references", requests.References, referencesHandler },
|
||||
.{ "textDocument/documentHighlight", requests.DocumentHighlight, documentHighlightHandler },
|
||||
.{ "textDocument/codeAction", requests.CodeAction, codeActionHandler },
|
||||
.{ "workspace/didChangeConfiguration", std.json.Value, didChangeConfigurationHandler },
|
||||
};
|
||||
|
||||
@ -2400,7 +2491,6 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
|
||||
// needs a response) or false if the method is a notification (in which
|
||||
// case it should be silently ignored)
|
||||
const unimplemented_map = std.ComptimeStringMap(bool, .{
|
||||
.{ "textDocument/codeAction", true },
|
||||
.{ "textDocument/codeLens", true },
|
||||
.{ "textDocument/documentLink", true },
|
||||
.{ "textDocument/rangeFormatting", true },
|
||||
|
@ -1844,7 +1844,10 @@ pub const Declaration = union(enum) {
|
||||
/// Index of the ast node
|
||||
ast_node: Ast.Node.Index,
|
||||
/// Function parameter
|
||||
param_decl: Ast.full.FnProto.Param,
|
||||
param_payload: struct {
|
||||
param: Ast.full.FnProto.Param,
|
||||
func: Ast.Node.Index,
|
||||
},
|
||||
pointer_payload: struct {
|
||||
name: Ast.TokenIndex,
|
||||
condition: Ast.Node.Index,
|
||||
@ -1870,7 +1873,7 @@ pub const DeclWithHandle = struct {
|
||||
const tree = self.handle.tree;
|
||||
return switch (self.decl.*) {
|
||||
.ast_node => |n| getDeclNameToken(tree, n).?,
|
||||
.param_decl => |p| p.name_token.?,
|
||||
.param_payload => |pp| pp.param.name_token.?,
|
||||
.pointer_payload => |pp| pp.name,
|
||||
.array_payload => |ap| ap.identifier,
|
||||
.array_index => |ai| ai,
|
||||
@ -1897,7 +1900,8 @@ pub const DeclWithHandle = struct {
|
||||
.{ .node = node, .handle = self.handle },
|
||||
bound_type_params,
|
||||
),
|
||||
.param_decl => |param_decl| {
|
||||
.param_payload => |pay| {
|
||||
const param_decl = pay.param;
|
||||
if (isMetaType(self.handle.tree, param_decl.type_expr)) {
|
||||
var bound_param_it = bound_type_params.iterator();
|
||||
while (bound_param_it.next()) |entry| {
|
||||
@ -2555,7 +2559,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
if (try scopes.items[scope_idx].decls.fetchPut(
|
||||
allocator,
|
||||
tree.tokenSlice(name_token),
|
||||
.{ .param_decl = param },
|
||||
.{ .param_payload = .{ .param = param, .func = node_idx } },
|
||||
)) |existing| {
|
||||
_ = existing;
|
||||
// TODO record a redefinition error
|
||||
|
11
src/ast.zig
11
src/ast.zig
@ -896,6 +896,17 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn paramFirstToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
|
||||
return param.first_doc_comment orelse
|
||||
param.comptime_noalias orelse
|
||||
param.name_token orelse
|
||||
tree.firstToken(param.type_expr);
|
||||
}
|
||||
|
||||
pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
|
||||
return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
|
||||
}
|
||||
|
||||
pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.container_field => tree.containerField(node),
|
||||
|
485
src/code_actions.zig
Normal file
485
src/code_actions.zig
Normal file
@ -0,0 +1,485 @@
|
||||
const std = @import("std");
|
||||
const Ast = std.zig.Ast;
|
||||
|
||||
const DocumentStore = @import("DocumentStore.zig");
|
||||
const analysis = @import("analysis.zig");
|
||||
const ast = @import("ast.zig");
|
||||
|
||||
const types = @import("types.zig");
|
||||
const requests = @import("requests.zig");
|
||||
const offsets = @import("offsets.zig");
|
||||
|
||||
pub const Builder = struct {
|
||||
arena: *std.heap.ArenaAllocator,
|
||||
document_store: *DocumentStore,
|
||||
handle: *DocumentStore.Handle,
|
||||
offset_encoding: offsets.Encoding,
|
||||
|
||||
pub fn generateCodeAction(
|
||||
builder: *Builder,
|
||||
diagnostic: types.Diagnostic,
|
||||
actions: *std.ArrayListUnmanaged(types.CodeAction),
|
||||
) error{OutOfMemory}!void {
|
||||
const kind = DiagnosticKind.parse(diagnostic.message) orelse return;
|
||||
|
||||
const loc = offsets.rangeToLoc(builder.text(), diagnostic.range, builder.offset_encoding);
|
||||
|
||||
switch (kind) {
|
||||
.unused => |id| switch (id) {
|
||||
.@"function parameter" => try handleUnusedFunctionParameter(builder, actions, loc),
|
||||
.@"local constant" => try handleUnusedVariableOrConstant(builder, actions, loc),
|
||||
.@"local variable" => try handleUnusedVariableOrConstant(builder, actions, loc),
|
||||
.@"loop index capture" => try handleUnusedIndexCapture(builder, actions, loc),
|
||||
.@"capture" => try handleUnusedCapture(builder, actions, loc),
|
||||
},
|
||||
.pointless_discard => try handlePointlessDiscard(builder, actions, loc),
|
||||
.omit_discard => |id| switch (id) {
|
||||
.@"index capture" => try handleUnusedIndexCapture(builder, actions, loc),
|
||||
.@"error capture" => try handleUnusedCapture(builder, actions, loc),
|
||||
},
|
||||
.unreachable_code => {
|
||||
// TODO
|
||||
// autofix: comment out code
|
||||
// fix: remove code
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn createTextEditLoc(self: *Builder, loc: offsets.Loc, new_text: []const u8) types.TextEdit {
|
||||
const range = offsets.locToRange(self.text(), loc, self.offset_encoding);
|
||||
return types.TextEdit{ .range = range, .newText = new_text };
|
||||
}
|
||||
|
||||
pub fn createTextEditPos(self: *Builder, index: usize, new_text: []const u8) types.TextEdit {
|
||||
const position = offsets.indexToPosition(self.text(), index, self.offset_encoding);
|
||||
return types.TextEdit{ .range = .{ .start = position, .end = position }, .newText = new_text };
|
||||
}
|
||||
|
||||
pub fn createWorkspaceEdit(self: *Builder, edits: []const types.TextEdit) error{OutOfMemory}!types.WorkspaceEdit {
|
||||
var text_edits = std.ArrayListUnmanaged(types.TextEdit){};
|
||||
try text_edits.appendSlice(self.arena.allocator(), edits);
|
||||
|
||||
var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
|
||||
try workspace_edit.changes.putNoClobber(self.arena.allocator(), self.handle.uri(), text_edits);
|
||||
|
||||
return workspace_edit;
|
||||
}
|
||||
|
||||
fn text(self: *Builder) []const u8 {
|
||||
return self.handle.document.text;
|
||||
}
|
||||
};
|
||||
|
||||
fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
|
||||
const identifier_name = offsets.locToSlice(builder.text(), loc);
|
||||
|
||||
const tree = builder.handle.tree;
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const node_tokens = tree.nodes.items(.main_token);
|
||||
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const decl = (try analysis.lookupSymbolGlobal(
|
||||
builder.document_store,
|
||||
builder.arena,
|
||||
builder.handle,
|
||||
identifier_name,
|
||||
loc.start,
|
||||
)) orelse return;
|
||||
|
||||
const payload = switch (decl.decl.*) {
|
||||
.param_payload => |pay| pay,
|
||||
else => return,
|
||||
};
|
||||
|
||||
std.debug.assert(node_tags[payload.func] == .fn_decl);
|
||||
|
||||
const block = node_datas[payload.func].rhs;
|
||||
|
||||
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[node_tokens[payload.func]]).len;
|
||||
const new_text = try createDiscardText(builder.arena.allocator(), identifier_name, indent + 4);
|
||||
|
||||
const index = token_starts[node_tokens[block]] + 1;
|
||||
|
||||
const action1 = types.CodeAction{
|
||||
.title = "discard function parameter",
|
||||
.kind = .SourceFixAll,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}),
|
||||
};
|
||||
|
||||
const param_loc = .{
|
||||
.start = offsets.tokenToIndex(tree, ast.paramFirstToken(tree, payload.param)),
|
||||
.end = offsets.tokenToLoc(tree, ast.paramLastToken(tree, payload.param)).end,
|
||||
};
|
||||
|
||||
// TODO fix formatting
|
||||
// TODO remove trailing comma on last parameter
|
||||
const action2 = types.CodeAction{
|
||||
.title = "remove function parameter",
|
||||
.kind = .QuickFix,
|
||||
.isPreferred = false,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(param_loc, "")}),
|
||||
};
|
||||
|
||||
try actions.appendSlice(builder.arena.allocator(), &.{ action1, action2 });
|
||||
}
|
||||
|
||||
fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
|
||||
const identifier_name = offsets.locToSlice(builder.text(), loc);
|
||||
|
||||
const tree = builder.handle.tree;
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const decl = (try analysis.lookupSymbolGlobal(
|
||||
builder.document_store,
|
||||
builder.arena,
|
||||
builder.handle,
|
||||
identifier_name,
|
||||
loc.start,
|
||||
)) orelse return;
|
||||
|
||||
const node = switch (decl.decl.*) {
|
||||
.ast_node => |node| node,
|
||||
else => return,
|
||||
};
|
||||
|
||||
const first_token = tree.firstToken(node);
|
||||
const last_token = ast.lastToken(tree, node) + 1;
|
||||
|
||||
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[first_token]).len;
|
||||
|
||||
if (token_tags[last_token] != .semicolon) return;
|
||||
|
||||
const new_text = try createDiscardText(builder.arena.allocator(), identifier_name, indent);
|
||||
|
||||
const index = token_starts[last_token] + 1;
|
||||
|
||||
try actions.append(builder.arena.allocator(), .{
|
||||
.title = "discard value",
|
||||
.kind = .SourceFixAll,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}),
|
||||
});
|
||||
}
|
||||
|
||||
fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
|
||||
const capture_locs = getCaptureLoc(builder.text(), loc, true) orelse return;
|
||||
|
||||
// TODO support discarding without modifying the capture
|
||||
// by adding a discard in the block scope
|
||||
const is_value_discarded = std.mem.eql(u8, offsets.locToSlice(builder.text(), capture_locs.value), "_");
|
||||
if (is_value_discarded) {
|
||||
// |_, i| ->
|
||||
// TODO fix formatting
|
||||
try actions.append(builder.arena.allocator(), .{
|
||||
.title = "remove capture",
|
||||
.kind = .QuickFix,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}),
|
||||
});
|
||||
} else {
|
||||
// |v, i| -> |v|
|
||||
// |v, _| -> |v|
|
||||
try actions.append(builder.arena.allocator(), .{
|
||||
.title = "remove index capture",
|
||||
.kind = .QuickFix,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(
|
||||
.{ .start = capture_locs.value.end, .end = capture_locs.loc.end - 1 },
|
||||
"",
|
||||
)}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
|
||||
const capture_locs = getCaptureLoc(builder.text(), loc, false) orelse return;
|
||||
|
||||
// TODO support discarding without modifying the capture
|
||||
// by adding a discard in the block scope
|
||||
if (capture_locs.index != null) {
|
||||
// |v, i| -> |_, i|
|
||||
try actions.append(builder.arena.allocator(), .{
|
||||
.title = "discard capture",
|
||||
.kind = .QuickFix,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.value, "_")}),
|
||||
});
|
||||
} else {
|
||||
// |v| ->
|
||||
// TODO fix formatting
|
||||
try actions.append(builder.arena.allocator(), .{
|
||||
.title = "remove capture",
|
||||
.kind = .QuickFix,
|
||||
.isPreferred = true,
|
||||
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Offers a fix-all action that deletes a pointless discard statement
/// (`_ = x;` for an identifier that is actually used) at `loc`.
fn handlePointlessDiscard(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
    const discard_loc = getDiscardLoc(builder.text(), loc) orelse return;

    const delete_edit = builder.createTextEditLoc(discard_loc, "");
    try actions.append(builder.arena.allocator(), .{
        .title = "remove pointless discard",
        .kind = .SourceFixAll,
        .isPreferred = true,
        .edit = try builder.createWorkspaceEdit(&.{delete_edit}),
    });
}
|
||||
|
||||
/// Builds the discard statement `\n{indent}_ = identifier_name;`.
/// Caller owns the returned slice and must free it with `allocator`.
fn createDiscardText(allocator: std.mem.Allocator, identifier_name: []const u8, indent: usize) ![]const u8 {
    const total_len = 1 + indent + "_ = ;".len + identifier_name.len;
    const buffer = try allocator.alloc(u8, total_len);
    errdefer allocator.free(buffer);

    buffer[0] = '\n';
    var offset: usize = 1;
    // indentation so the discard lines up with the declaration above it
    while (offset < 1 + indent) : (offset += 1) buffer[offset] = ' ';
    for ("_ = ") |char| {
        buffer[offset] = char;
        offset += 1;
    }
    for (identifier_name) |char| {
        buffer[offset] = char;
        offset += 1;
    }
    buffer[offset] = ';';

    return buffer;
}
|
||||
|
||||
/// Classification of an ast-check diagnostic message, used to decide which
/// code action applies. Built by `parse` from the raw message text.
const DiagnosticKind = union(enum) {
    unused: IdCat,
    pointless_discard: IdCat,
    omit_discard: DiscardCat,
    unreachable_code,

    /// Kind of identifier a diagnostic refers to. Variant names are the
    /// exact phrases that appear in the diagnostic messages; `parse`
    /// matches them verbatim via `parseEnum`.
    const IdCat = enum {
        @"function parameter",
        @"local constant",
        @"local variable",
        @"loop index capture",
        @"capture",
    };

    /// Kind of capture whose discard should be omitted. Variant names are
    /// the exact phrases from the diagnostic messages.
    const DiscardCat = enum {
        // "discard of index capture; omit it instead"
        @"index capture",
        // "discard of error capture; omit it instead"
        @"error capture",
    };

    /// Parses a raw diagnostic message into a DiagnosticKind, or returns
    /// null when the message does not match any recognized form.
    pub fn parse(diagnostic_message: []const u8) ?DiagnosticKind {
        const msg = diagnostic_message;

        if (std.mem.startsWith(u8, msg, "unused ")) {
            return DiagnosticKind{
                .unused = parseEnum(IdCat, msg["unused ".len..]) orelse return null,
            };
        } else if (std.mem.startsWith(u8, msg, "pointless discard of ")) {
            return DiagnosticKind{
                .pointless_discard = parseEnum(IdCat, msg["pointless discard of ".len..]) orelse return null,
            };
        } else if (std.mem.startsWith(u8, msg, "discard of ")) {
            return DiagnosticKind{
                .omit_discard = parseEnum(DiscardCat, msg["discard of ".len..]) orelse return null,
            };
        }
        return null;
    }

    /// Returns the first variant of `T` whose name is a prefix of
    /// `message` (the message may carry a trailing identifier, e.g.
    /// "local variable 'x'"), or null when none matches.
    fn parseEnum(comptime T: type, message: []const u8) ?T {
        inline for (std.meta.fields(T)) |field| {
            if (std.mem.startsWith(u8, message, field.name)) {
                // is there a better way to achieve this?
                return @intToEnum(T, field.value);
            }
        }

        return null;
    }
};
|
||||
|
||||
/// takes the location of an identifier which is part of a discard `_ = location_here;`
/// and returns the location from '_' until ';' or null on failure
fn getDiscardLoc(text: []const u8, loc: offsets.Loc) ?offsets.Loc {
    // check that loc points to a valid identifier
    for (offsets.locToSlice(text, loc)) |c| {
        if (!isSymbolChar(c)) return null;
    }

    // check if the identifier is followed by a semicolon
    // (the original comment said "colon" but ';' is what is scanned for)
    const semicolon_position = found: {
        var i = loc.end;
        while (i < text.len) : (i += 1) {
            switch (text[i]) {
                ' ' => continue,
                ';' => break :found i + 1,
                else => return null,
            }
        }
        return null;
    };

    // an identifier at the very start of the file cannot be preceded by `_ =`;
    // bail out before `loc.start - 1` underflows
    if (loc.start == 0) return null;

    // check if the identifier is preceded by an equal sign and then an underscore
    var i: usize = loc.start - 1;
    var found_equal_sign = false;
    const underscore_position = found: {
        while (true) : (i -= 1) {
            if (i == 0) return null;
            switch (text[i]) {
                ' ' => {},
                '=' => {
                    // exactly one '=' is allowed between '_' and the identifier
                    if (found_equal_sign) return null;
                    found_equal_sign = true;
                },
                '_' => if (found_equal_sign) break :found i else return null,
                else => return null,
            }
        }
    };

    // move backwards until we find a newline so leading indentation
    // is removed together with the statement
    i = underscore_position - 1;
    const start_position = found: {
        while (true) : (i -= 1) {
            if (i == 0) break :found underscore_position;
            switch (text[i]) {
                ' ' => {},
                '\n' => break :found i,
                else => break :found underscore_position,
            }
        }
    };

    return offsets.Loc{
        .start = start_position,
        .end = semicolon_position,
    };
}
|
||||
|
||||
/// Locations inside a capture payload `|value, index|`,
/// as computed by `getCaptureLoc`.
const CaptureLocs = struct {
    // span from the opening '|' to just past the closing '|'
    loc: offsets.Loc,
    // span of the value identifier
    value: offsets.Loc,
    // span of the index identifier, or null when there is no index capture
    index: ?offsets.Loc,
};
|
||||
|
||||
/// takes the location of an identifier which is part of a payload `|value, index|`
/// and returns the location from '|' until '|' or null on failure
/// use `is_index_payload` to indicate whether `loc` points to `value` or `index`
/// NOTE(review): all scans below assume single-space/`|`/`,` separators on
/// one line; multi-line payloads or other whitespace are rejected via null.
fn getCaptureLoc(text: []const u8, loc: offsets.Loc, is_index_payload: bool) ?CaptureLocs {
    // when loc is the index, walk left past the comma to find the end of the value
    const value_end = if (!is_index_payload) loc.end else found: {
        // move back until we find a comma
        const comma_position = found_comma: {
            var i = loc.start - 1;
            while (i != 0) : (i -= 1) {
                switch (text[i]) {
                    ' ' => continue,
                    ',' => break :found_comma i,
                    else => return null,
                }
            } else return null;
        };

        // trim space
        var i = comma_position - 1;
        while (i != 0) : (i -= 1) {
            switch (text[i]) {
                ' ' => continue,
                else => {
                    // first non-space must belong to the value identifier
                    if (!isSymbolChar(text[i])) return null;
                    break :found i + 1;
                },
            }
        } else return null;
    };

    // when loc is the index, walk left from value_end to the start of the value
    const value_start = if (!is_index_payload) loc.start else found: {
        // move back until we find a non identifier character
        var i = value_end - 1;
        while (i != 0) : (i -= 1) {
            if (isSymbolChar(text[i])) continue;
            switch (text[i]) {
                // '*' allows pointer captures like `|*v|`
                ' ', '|', '*' => break :found i + 1,
                else => return null,
            }
        } else return null;
    };

    var index: ?offsets.Loc = null;

    if (is_index_payload) {
        index = loc;
    } else blk: {
        // loc is the value: scan right for an optional `, index` part;
        // any break :blk leaves index as null (no index capture found)
        // move forward until we find a comma
        const comma_position = found_comma: {
            var i = value_end;
            while (i < text.len) : (i += 1) {
                switch (text[i]) {
                    ' ' => continue,
                    ',' => break :found_comma i,
                    else => break :blk,
                }
            }
            break :blk;
        };

        // trim space
        const index_start = found_start: {
            var i = comma_position + 1;
            while (i < text.len) : (i += 1) {
                switch (text[i]) {
                    ' ' => continue,
                    else => {
                        if (!isSymbolChar(text[i])) break :blk;
                        break :found_start i;
                    },
                }
            }
            break :blk;
        };

        // move forward until we find a non identifier character
        var i = index_start + 1;
        while (i < text.len) : (i += 1) {
            if (isSymbolChar(text[i])) continue;
            index = offsets.Loc{
                .start = index_start,
                .end = i,
            };
            break;
        }
    }

    // opening pipe: first '|' left of the value (only spaces may intervene)
    const start_pipe_position = found: {
        var i = value_start - 1;
        while (i != 0) : (i -= 1) {
            switch (text[i]) {
                ' ' => continue,
                '|' => break :found i,
                else => return null,
            }
        } else return null;
    };

    // closing pipe: first '|' right of the index (or value when no index)
    const end_pipe_position = found: {
        var i: usize = if (index) |index_loc| index_loc.end else value_end;
        while (i < text.len) : (i += 1) {
            switch (text[i]) {
                ' ' => continue,
                '|' => break :found i + 1,
                else => return null,
            }
        } else return null;
    };

    return CaptureLocs{
        .loc = .{ .start = start_pipe_position, .end = end_pipe_position },
        .value = .{ .start = value_start, .end = value_end },
        .index = index,
    };
}
|
||||
|
||||
/// Returns true when `char` may appear in a Zig identifier
/// (ASCII letter, digit, or underscore).
fn isSymbolChar(char: u8) bool {
    return char == '_' or std.ascii.isAlNum(char);
}
|
@ -516,8 +516,9 @@ pub fn symbolReferences(
|
||||
try imports.resize(arena.allocator(), 0);
|
||||
}
|
||||
},
|
||||
.param_decl => |param| blk: {
|
||||
.param_payload => |pay| blk: {
|
||||
// Rename the param tok.
|
||||
const param = pay.param;
|
||||
for (curr_handle.document_scope.scopes.items) |scope| {
|
||||
if (scope.data != .function) continue;
|
||||
|
||||
|
@ -274,11 +274,22 @@ pub const InlayHint = struct {
|
||||
},
|
||||
};
|
||||
|
||||
/// Params of a `textDocument/codeAction` request.
pub const CodeAction = struct {
    params: struct {
        textDocument: TextDocumentIdentifier,
        // the document range the code actions are requested for
        range: types.Range,
        context: struct {
            // diagnostics the client knows about that overlap `range`
            diagnostics: []types.Diagnostic,
        },
    },
};
|
||||
|
||||
pub const Configuration = struct {
|
||||
params: struct {
|
||||
settings: struct {
|
||||
enable_snippets: ?bool,
|
||||
enable_ast_check_diagnostics: ?bool,
|
||||
enable_autofix: ?bool,
|
||||
enable_import_embedfile_argument_completions: ?bool,
|
||||
zig_lib_path: ?[]const u8,
|
||||
zig_exe_path: ?[]const u8,
|
||||
|
@ -417,7 +417,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
|
||||
name,
|
||||
tree.tokens.items(.start)[main_token],
|
||||
)) |child| {
|
||||
if (child.decl.* == .param_decl) {
|
||||
if (child.decl.* == .param_payload) {
|
||||
return try writeToken(builder, main_token, .parameter);
|
||||
}
|
||||
var bound_type_params = analysis.BoundTypeParams{};
|
||||
@ -716,8 +716,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
|
||||
try writeToken(builder, main_token, .keyword);
|
||||
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
|
||||
},
|
||||
.number_literal,
|
||||
=> {
|
||||
.number_literal => {
|
||||
try writeToken(builder, main_token, .number);
|
||||
},
|
||||
.enum_literal => {
|
||||
|
@ -171,6 +171,7 @@ pub fn wizard(allocator: std.mem.Allocator) !void {
|
||||
const editor = try askSelectOne("Which code editor do you use?", enum { VSCode, Sublime, Kate, Neovim, Vim8, Emacs, Doom, Spacemacs, Helix, Other });
|
||||
const snippets = try askBool("Do you want to enable snippets?");
|
||||
const ast_check = try askBool("Do you want to enable ast-check diagnostics?");
|
||||
const autofix = try askBool("Do you want to zls to automatically try to fix errors on save? (supports adding & removing discards)");
|
||||
const ief_apc = try askBool("Do you want to enable @import/@embedFile argument path completion?");
|
||||
const style = try askBool("Do you want to enable style warnings?");
|
||||
const semantic_tokens = try askBool("Do you want to enable semantic highlighting?");
|
||||
@ -193,6 +194,7 @@ pub fn wizard(allocator: std.mem.Allocator) !void {
|
||||
.zig_exe_path = zig_exe_path,
|
||||
.enable_snippets = snippets,
|
||||
.enable_ast_check_diagnostics = ast_check,
|
||||
.enable_autofix = autofix,
|
||||
.enable_import_embedfile_argument_completions = ief_apc,
|
||||
.warn_style = style,
|
||||
.enable_semantic_tokens = semantic_tokens,
|
||||
|
@ -47,6 +47,8 @@ pub const ResponseParams = union(enum) {
|
||||
ConfigurationParams: ConfigurationParams,
|
||||
RegistrationParams: RegistrationParams,
|
||||
DocumentHighlight: []DocumentHighlight,
|
||||
CodeAction: []CodeAction,
|
||||
ApplyEdit: ApplyWorkspaceEditParams,
|
||||
};
|
||||
|
||||
/// JSONRPC notifications
|
||||
@ -115,11 +117,11 @@ pub const DiagnosticRelatedInformation = struct {
|
||||
|
||||
pub const Diagnostic = struct {
|
||||
range: Range,
|
||||
severity: DiagnosticSeverity,
|
||||
code: string,
|
||||
source: string,
|
||||
severity: ?DiagnosticSeverity,
|
||||
code: ?string,
|
||||
source: ?string,
|
||||
message: string,
|
||||
relatedInformation: []const DiagnosticRelatedInformation = &.{},
|
||||
relatedInformation: ?[]const DiagnosticRelatedInformation = null,
|
||||
};
|
||||
|
||||
pub const TextDocument = struct {
|
||||
@ -372,6 +374,46 @@ pub const InlayHintKind = enum(i64) {
|
||||
}
|
||||
};
|
||||
|
||||
/// LSP CodeActionKind. Serialized as the protocol's dotted string
/// identifiers (see `jsonStringify`), not as variant names.
pub const CodeActionKind = enum {
    Empty,
    QuickFix,
    Refactor,
    RefactorExtract,
    RefactorInline,
    RefactorRewrite,
    Source,
    SourceOrganizeImports,
    SourceFixAll,

    /// Writes the LSP string form of the kind, e.g. `.QuickFix` -> "quickfix".
    pub fn jsonStringify(value: CodeActionKind, options: std.json.StringifyOptions, out_stream: anytype) !void {
        const name = switch (value) {
            .Empty => "",
            .QuickFix => "quickfix",
            .Refactor => "refactor",
            .RefactorExtract => "refactor.extract",
            .RefactorInline => "refactor.inline",
            .RefactorRewrite => "refactor.rewrite",
            .Source => "source",
            .SourceOrganizeImports => "source.organizeImports",
            .SourceFixAll => "source.fixAll",
        };
        try std.json.stringify(name, options, out_stream);
    }
};
|
||||
|
||||
/// A single code action returned for a `textDocument/codeAction` request.
pub const CodeAction = struct {
    title: string,
    kind: CodeActionKind,
    // diagnostics: []Diagnostic,
    isPreferred: bool,
    // the workspace edit the client applies when the action is invoked
    edit: WorkspaceEdit,
};
|
||||
|
||||
/// Params of a server-initiated `workspace/applyEdit` request.
pub const ApplyWorkspaceEditParams = struct {
    // short label shown to the user describing the edit (e.g. in undo UI)
    label: string,
    edit: WorkspaceEdit,
};
||||
|
||||
pub const PositionEncodingKind = enum {
|
||||
utf8,
|
||||
utf16,
|
||||
|
Loading…
Reference in New Issue
Block a user