Merge branch 'master' into dev

Techatrix 2022-09-26 18:41:07 +02:00
commit 8edaa7f506
15 changed files with 902 additions and 226 deletions

View File

@ -103,6 +103,7 @@ The following options are currently available.
| --- | --- | --- | --- |
| `enable_snippets` | `bool` | `false` | Enables snippet completions when the client also supports them. |
| `enable_ast_check_diagnostics` | `bool` | `true` | Whether to enable ast-check diagnostics. |
| `enable_autofix` | `bool` | `false` | Whether to automatically fix errors on save. Currently supports adding and removing discards. |
| `enable_import_embedfile_argument_completions` | `bool` | `false` | Whether to enable import/embedFile argument completions. |
| `zig_lib_path` | `?[]const u8` | `null` | zig library path, e.g. `/path/to/zig/lib/zig`, used to analyze std library imports. |
| `zig_exe_path` | `?[]const u8` | `null` | zig executable path, e.g. `/path/to/zig/zig`, used to run the custom build runner. If `null`, zig is looked up in `PATH`. Will be used to infer the zig standard library path if none is provided. |
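
For reference, a minimal `zls.json` sketch that opts into the new autofix behaviour (field names follow the schema below; the values shown are purely illustrative):

```json
{
    "enable_ast_check_diagnostics": true,
    "enable_autofix": true,
    "zig_exe_path": "/path/to/zig/zig"
}
```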

View File

@ -14,6 +14,11 @@
"type": "boolean",
"default": "true"
},
"enable_autofix": {
"description": "Whether to automatically fix errors on save. Currently supports adding and removing discards.",
"type": "boolean",
"default": "false"
},
"enable_import_embedfile_argument_completions": {
"description": "Whether to enable import/embedFile argument completions",
"type": "boolean",

View File

@ -16,6 +16,10 @@ enable_snippets: bool = false,
/// Whether to enable ast-check diagnostics
enable_ast_check_diagnostics: bool = true,
/// Whether to automatically fix errors on save.
/// Currently supports adding and removing discards.
enable_autofix: bool = false,
/// Whether to enable import/embedFile argument completions (NOTE: these are triggered manually as updating the autotrigger characters may cause issues)
enable_import_embedfile_argument_completions: bool = false,

View File

@ -12,13 +12,16 @@ const references = @import("references.zig");
const offsets = @import("offsets.zig");
const semantic_tokens = @import("semantic_tokens.zig");
const inlay_hints = @import("inlay_hints.zig");
const code_actions = @import("code_actions.zig");
const shared = @import("shared.zig");
const Ast = std.zig.Ast;
const tracy = @import("tracy.zig");
const uri_utils = @import("uri.zig");
const data = @import("data/data.zig");
const diff = @import("diff.zig");
const data = @import("data/data.zig");
const snipped_data = @import("data/snippets.zig");
const log = std.log.scoped(.server);
// Server fields
@ -141,7 +144,7 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
});
}
fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.Handle) !void {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
@ -165,88 +168,8 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
});
}
if (server.config.enable_ast_check_diagnostics and tree.errors.len == 0) diag: {
if (server.config.zig_exe_path) |zig_exe_path| {
var process = std.ChildProcess.init(&[_][]const u8{ zig_exe_path, "ast-check", "--color", "off" }, server.allocator);
process.stdin_behavior = .Pipe;
process.stderr_behavior = .Pipe;
process.spawn() catch |err| {
log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
break :diag;
};
try process.stdin.?.writeAll(handle.document.text);
process.stdin.?.close();
process.stdin = null;
const stderr_bytes = try process.stderr.?.reader().readAllAlloc(server.allocator, std.math.maxInt(usize));
defer server.allocator.free(stderr_bytes);
switch (try process.wait()) {
.Exited => {
// NOTE: I believe that with color off it's one diag per line; is this correct?
var line_iterator = std.mem.split(u8, stderr_bytes, "\n");
while (line_iterator.next()) |line| lin: {
var pos_and_diag_iterator = std.mem.split(u8, line, ":");
const maybe_first = pos_and_diag_iterator.next();
if (maybe_first) |first| {
if (first.len <= 1) break :lin;
} else break;
const utf8_position = types.Position{
.line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
.character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
};
// zig uses utf-8 encoding for character offsets
const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);
const msg = pos_and_diag_iterator.rest()[1..];
if (std.mem.startsWith(u8, msg, "error: ")) {
try diagnostics.append(allocator, .{
.range = range,
.severity = .Error,
.code = "ast_check",
.source = "zls",
.message = try server.arena.allocator().dupe(u8, msg["error: ".len..]),
});
} else if (std.mem.startsWith(u8, msg, "note: ")) {
var latestDiag = &diagnostics.items[diagnostics.items.len - 1];
var fresh = if (latestDiag.relatedInformation.len == 0)
try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1)
else
try server.arena.allocator().realloc(@ptrCast([]types.DiagnosticRelatedInformation, latestDiag.relatedInformation), latestDiag.relatedInformation.len + 1);
const location = types.Location{
.uri = handle.uri(),
.range = range,
};
fresh[fresh.len - 1] = .{
.location = location,
.message = try server.arena.allocator().dupe(u8, msg["note: ".len..]),
};
latestDiag.relatedInformation = fresh;
} else {
try diagnostics.append(allocator, .{
.range = range,
.severity = .Error,
.code = "ast_check",
.source = "zls",
.message = try server.arena.allocator().dupe(u8, msg),
});
}
}
},
else => {},
}
}
if (server.config.enable_ast_check_diagnostics and tree.errors.len == 0) {
try getAstCheckDiagnostics(server, handle, &diagnostics);
}
if (server.config.warn_style) {
@ -351,6 +274,98 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
});
}
fn getAstCheckDiagnostics(
server: *Server,
handle: *DocumentStore.Handle,
diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
) !void {
var allocator = server.arena.allocator();
const zig_exe_path = server.config.zig_exe_path orelse return;
var process = std.ChildProcess.init(&[_][]const u8{ zig_exe_path, "ast-check", "--color", "off" }, server.allocator);
process.stdin_behavior = .Pipe;
process.stderr_behavior = .Pipe;
process.spawn() catch |err| {
log.warn("Failed to spawn zig ast-check process, error: {}", .{err});
return;
};
try process.stdin.?.writeAll(handle.document.text);
process.stdin.?.close();
process.stdin = null;
const stderr_bytes = try process.stderr.?.reader().readAllAlloc(server.allocator, std.math.maxInt(usize));
defer server.allocator.free(stderr_bytes);
const term = process.wait() catch |err| {
log.warn("Failed to await zig ast-check process, error: {}", .{err});
return;
};
if (term != .Exited) return;
// NOTE: I believe that with color off it's one diag per line; is this correct?
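// Each line is assumed to look like "<file>:<line>:<column>: error|note: <message>", which is what the parsing below relies on.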
var line_iterator = std.mem.split(u8, stderr_bytes, "\n");
while (line_iterator.next()) |line| lin: {
var pos_and_diag_iterator = std.mem.split(u8, line, ":");
const maybe_first = pos_and_diag_iterator.next();
if (maybe_first) |first| {
if (first.len <= 1) break :lin;
} else break;
const utf8_position = types.Position{
.line = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
.character = (try std.fmt.parseInt(u32, pos_and_diag_iterator.next().?, 10)) - 1,
};
// zig uses utf-8 encoding for character offsets
const position = offsets.convertPositionEncoding(handle.document.text, utf8_position, .utf8, server.offset_encoding);
const range = offsets.tokenPositionToRange(handle.document.text, position, server.offset_encoding);
const msg = pos_and_diag_iterator.rest()[1..];
if (std.mem.startsWith(u8, msg, "error: ")) {
try diagnostics.append(allocator, .{
.range = range,
.severity = .Error,
.code = "ast_check",
.source = "zls",
.message = try server.arena.allocator().dupe(u8, msg["error: ".len..]),
});
} else if (std.mem.startsWith(u8, msg, "note: ")) {
var latestDiag = &diagnostics.items[diagnostics.items.len - 1];
var fresh = if (latestDiag.relatedInformation) |related_information|
try server.arena.allocator().realloc(@ptrCast([]types.DiagnosticRelatedInformation, related_information), related_information.len + 1)
else
try server.arena.allocator().alloc(types.DiagnosticRelatedInformation, 1);
const location = types.Location{
.uri = handle.uri(),
.range = range,
};
fresh[fresh.len - 1] = .{
.location = location,
.message = try server.arena.allocator().dupe(u8, msg["note: ".len..]),
};
latestDiag.relatedInformation = fresh;
} else {
try diagnostics.append(allocator, .{
.range = range,
.severity = .Error,
.code = "ast_check",
.source = "zls",
.message = try server.arena.allocator().dupe(u8, msg),
});
}
}
}
fn typeToCompletion(
server: *Server,
list: *std.ArrayListUnmanaged(types.CompletionItem),
@ -705,16 +720,14 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
break :def analysis.nodeToString(tree, node) orelse return null;
}
},
.param_decl => |param| def: {
.param_payload => |pay| def: {
const param = pay.param;
if (param.first_doc_comment) |doc_comments| {
doc_str = try analysis.collectDocComments(server.arena.allocator(), handle.tree, doc_comments, hover_kind, false);
}
const first_token = param.first_doc_comment orelse
param.comptime_noalias orelse
param.name_token orelse
tree.firstToken(param.type_expr); // extern fn
const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
const first_token = ast.paramFirstToken(tree, param);
const last_token = ast.paramLastToken(tree, param);
const start = offsets.tokenToIndex(tree, first_token);
const end = offsets.tokenToLoc(tree, last_token).end;
@ -987,7 +1000,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
false,
context.parent_is_type_val,
),
.param_decl => |param| {
.param_payload => |pay| {
const param = pay.param;
const doc_kind: types.MarkupContent.Kind = if (context.server.client_capabilities.completion_doc_supports_md) .Markdown else .PlainText;
const doc = if (param.first_doc_comment) |doc_comments|
types.MarkupContent{
@ -997,11 +1011,8 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
else
null;
const first_token = param.first_doc_comment orelse
param.comptime_noalias orelse
param.name_token orelse
tree.firstToken(param.type_expr);
const last_token = param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
const first_token = ast.paramFirstToken(tree, param);
const last_token = ast.paramLastToken(tree, param);
try context.completions.append(allocator, .{
.label = tree.tokenSlice(param.name_token.?),
@ -1075,6 +1086,30 @@ fn completeLabel(
return completions.toOwnedSlice(server.arena.allocator());
}
fn populateSnippedCompletions(
allocator: std.mem.Allocator,
completions: *std.ArrayListUnmanaged(types.CompletionItem),
snippets: []const snipped_data.Snipped,
config: Config,
start_with: ?[]const u8,
) error{OutOfMemory}!void {
try completions.ensureUnusedCapacity(allocator, snippets.len);
for (snippets) |snipped| {
if (start_with) |needle| {
if (!std.mem.startsWith(u8, snipped.label, needle)) continue;
}
completions.appendAssumeCapacity(.{
.label = snipped.label,
.kind = snipped.kind,
.detail = if (config.enable_snippets) snipped.text else null,
.insertText = if (config.enable_snippets) snipped.text else null,
.insertTextFormat = if (config.enable_snippets and snipped.text != null) .Snippet else .PlainText,
});
}
}
fn completeBuiltin(server: *Server) ![]types.CompletionItem {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
@ -1099,7 +1134,7 @@ fn completeBuiltin(server: *Server) ![]types.CompletionItem {
},
});
}
server.builtin_completions = completions;
return completions.items;
}
@ -1116,6 +1151,9 @@ fn completeGlobal(server: *Server, pos_index: usize, handle: *DocumentStore.Hand
.orig_handle = handle,
};
try analysis.iterateSymbolsGlobal(&server.document_store, &server.arena, handle, pos_index, declToCompletion, context);
try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.generic, server.config.*, null);
try sortCompletionItems(completions.items, server.arena.allocator());
truncateCompletions(completions.items, server.config.max_detail_length);
if (server.client_capabilities.label_details_support) {
for (completions.items) |*item| {
@ -1331,7 +1369,7 @@ fn kindToSortScore(kind: types.CompletionItem.Kind) ?[]const u8 {
.Field => "3_",
.Function => "4_",
.Keyword, .EnumMember => "5_",
.Keyword, .Snippet, .EnumMember => "5_",
.Class,
.Interface,
@ -1499,7 +1537,7 @@ fn initializeHandler(server: *Server, writer: anytype, id: types.RequestId, req:
.completionProvider = .{ .resolveProvider = false, .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" }, .completionItem = .{ .labelDetailsSupport = true } },
.documentHighlightProvider = true,
.hoverProvider = true,
.codeActionProvider = false,
.codeActionProvider = true,
.declarationProvider = true,
.definitionProvider = true,
.typeDefinitionProvider = true,
@ -1632,7 +1670,7 @@ fn openDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, re
defer tracy_zone.end();
const handle = try server.document_store.openDocument(req.params.textDocument.uri, req.params.textDocument.text);
try server.publishDiagnostics(writer, handle.*);
try server.publishDiagnostics(writer, handle);
if (server.client_capabilities.supports_semantic_tokens) {
const request: requests.SemanticTokensFull = .{ .params = .{ .textDocument = .{ .uri = req.params.textDocument.uri } } };
@ -1652,21 +1690,68 @@ fn changeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId,
};
try server.document_store.applyChanges(handle, req.params.contentChanges.Array, server.offset_encoding);
try server.publishDiagnostics(writer, handle.*);
try server.publishDiagnostics(writer, handle);
}
fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) error{OutOfMemory}!void {
fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) !void {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
_ = id;
_ = writer;
const allocator = server.arena.allocator();
const uri = req.params.textDocument.uri;
const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
log.warn("Trying to save non existent document {s}", .{req.params.textDocument.uri});
const handle = server.document_store.getHandle(uri) orelse {
log.warn("Trying to save non existent document {s}", .{uri});
return;
};
try server.document_store.applySave(handle);
if (handle.tree.errors.len != 0) return;
if (!server.config.enable_ast_check_diagnostics) return;
if (!server.config.enable_autofix) return;
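// autofix flow: collect ast-check diagnostics for the saved document, turn them into code actions,
// gather every SourceFixAll edit for this URI, and ask the client to apply them via workspace/applyEdit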
var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
try getAstCheckDiagnostics(server, handle, &diagnostics);
var builder = code_actions.Builder{
.arena = &server.arena,
.document_store = &server.document_store,
.handle = handle,
.offset_encoding = server.offset_encoding,
};
var actions = std.ArrayListUnmanaged(types.CodeAction){};
for (diagnostics.items) |diagnostic| {
try builder.generateCodeAction(diagnostic, &actions);
}
var text_edits = std.ArrayListUnmanaged(types.TextEdit){};
for (actions.items) |action| {
if (action.kind != .SourceFixAll) continue;
if (action.edit.changes.size != 1) continue;
const edits = action.edit.changes.get(uri) orelse continue;
try text_edits.appendSlice(allocator, edits.items);
}
var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
try workspace_edit.changes.putNoClobber(allocator, uri, text_edits);
// NOTE: stage1 moment
const params = types.ResponseParams{
.ApplyEdit = types.ApplyWorkspaceEditParams{
.label = "autofix",
.edit = workspace_edit,
},
};
try send(writer, allocator, types.Request{
.id = .{ .String = "apply_edit" },
.method = "workspace/applyEdit",
.params = params,
});
}
fn closeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CloseDocument) error{}!void {
@ -1708,8 +1793,17 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
return try respondGeneric(writer, id, no_completions_response);
};
if (req.params.position.character == 0)
return try respondGeneric(writer, id, no_completions_response);
if (req.params.position.character == 0) {
var completions = std.ArrayListUnmanaged(types.CompletionItem){};
try populateSnippedCompletions(server.arena.allocator(), &completions, &snipped_data.top_level_decl_data, server.config.*, null);
return try send(writer, server.arena.allocator(), types.Response{
.id = id,
.result = .{
.CompletionList = .{ .isIncomplete = false, .items = completions.items },
},
});
}
const source_index = offsets.positionToIndex(handle.document.text, req.params.position, server.offset_encoding);
const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.document, source_index);
@ -2150,6 +2244,38 @@ fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req:
return try respondGeneric(writer, id, null_result_response);
}
fn codeActionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CodeAction) !void {
const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
log.warn("Trying to get code actions of non existent document {s}", .{req.params.textDocument.uri});
return try respondGeneric(writer, id, null_result_response);
};
const allocator = server.arena.allocator();
var builder = code_actions.Builder{
.arena = &server.arena,
.document_store = &server.document_store,
.handle = handle,
.offset_encoding = server.offset_encoding,
};
var actions = std.ArrayListUnmanaged(types.CodeAction){};
for (req.params.context.diagnostics) |diagnostic| {
try builder.generateCodeAction(diagnostic, &actions);
}
for (actions.items) |*action| {
// TODO query whether SourceFixAll is supported by the server
if (action.kind == .SourceFixAll) action.kind = .QuickFix;
}
return try send(writer, allocator, types.Response{
.id = id,
.result = .{ .CodeAction = actions.items },
});
}
// Needed for the hack seen below.
fn extractErr(val: anytype) anyerror {
val catch |e| return e;
@ -2177,6 +2303,8 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
if (id == .String and std.mem.startsWith(u8, id.String, "register"))
return;
if (id == .String and std.mem.startsWith(u8, id.String, "apply_edit"))
return;
if (id == .String and std.mem.eql(u8, id.String, "i_haz_configuration")) {
log.info("Setting configuration...", .{});
@ -2261,6 +2389,7 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
.{ "textDocument/rename", requests.Rename, renameHandler },
.{ "textDocument/references", requests.References, referencesHandler },
.{ "textDocument/documentHighlight", requests.DocumentHighlight, documentHighlightHandler },
.{ "textDocument/codeAction", requests.CodeAction, codeActionHandler },
.{ "workspace/didChangeConfiguration", std.json.Value, didChangeConfigurationHandler },
};
@ -2301,7 +2430,6 @@ pub fn processJsonRpc(server: *Server, writer: anytype, json: []const u8) !void
// needs a response) or false if the method is a notification (in which
// case it should be silently ignored)
const unimplemented_map = std.ComptimeStringMap(bool, .{
.{ "textDocument/codeAction", true },
.{ "textDocument/codeLens", true },
.{ "textDocument/documentLink", true },
.{ "textDocument/rangeFormatting", true },

View File

@ -1844,7 +1844,10 @@ pub const Declaration = union(enum) {
/// Index of the ast node
ast_node: Ast.Node.Index,
/// Function parameter
param_decl: Ast.full.FnProto.Param,
param_payload: struct {
param: Ast.full.FnProto.Param,
func: Ast.Node.Index,
},
pointer_payload: struct {
name: Ast.TokenIndex,
condition: Ast.Node.Index,
@ -1870,7 +1873,7 @@ pub const DeclWithHandle = struct {
const tree = self.handle.tree;
return switch (self.decl.*) {
.ast_node => |n| getDeclNameToken(tree, n).?,
.param_decl => |p| p.name_token.?,
.param_payload => |pp| pp.param.name_token.?,
.pointer_payload => |pp| pp.name,
.array_payload => |ap| ap.identifier,
.array_index => |ai| ai,
@ -1897,7 +1900,8 @@ pub const DeclWithHandle = struct {
.{ .node = node, .handle = self.handle },
bound_type_params,
),
.param_decl => |param_decl| {
.param_payload => |pay| {
const param_decl = pay.param;
if (isMetaType(self.handle.tree, param_decl.type_expr)) {
var bound_param_it = bound_type_params.iterator();
while (bound_param_it.next()) |entry| {
@ -2555,7 +2559,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
if (try scopes.items[scope_idx].decls.fetchPut(
allocator,
tree.tokenSlice(name_token),
.{ .param_decl = param },
.{ .param_payload = .{ .param = param, .func = node_idx } },
)) |existing| {
_ = existing;
// TODO record a redefinition error

View File

@ -896,6 +896,17 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
};
}
pub fn paramFirstToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
return param.first_doc_comment orelse
param.comptime_noalias orelse
param.name_token orelse
tree.firstToken(param.type_expr);
}
pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
}
pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
return switch (tree.nodes.items(.tag)[node]) {
.container_field => tree.containerField(node),

485
src/code_actions.zig Normal file
View File

@ -0,0 +1,485 @@
const std = @import("std");
const Ast = std.zig.Ast;
const DocumentStore = @import("DocumentStore.zig");
const analysis = @import("analysis.zig");
const ast = @import("ast.zig");
const types = @import("types.zig");
const requests = @import("requests.zig");
const offsets = @import("offsets.zig");
pub const Builder = struct {
arena: *std.heap.ArenaAllocator,
document_store: *DocumentStore,
handle: *DocumentStore.Handle,
offset_encoding: offsets.Encoding,
pub fn generateCodeAction(
builder: *Builder,
diagnostic: types.Diagnostic,
actions: *std.ArrayListUnmanaged(types.CodeAction),
) error{OutOfMemory}!void {
const kind = DiagnosticKind.parse(diagnostic.message) orelse return;
const loc = offsets.rangeToLoc(builder.text(), diagnostic.range, builder.offset_encoding);
switch (kind) {
.unused => |id| switch (id) {
.@"function parameter" => try handleUnusedFunctionParameter(builder, actions, loc),
.@"local constant" => try handleUnusedVariableOrConstant(builder, actions, loc),
.@"local variable" => try handleUnusedVariableOrConstant(builder, actions, loc),
.@"loop index capture" => try handleUnusedIndexCapture(builder, actions, loc),
.@"capture" => try handleUnusedCapture(builder, actions, loc),
},
.pointless_discard => try handlePointlessDiscard(builder, actions, loc),
.omit_discard => |id| switch (id) {
.@"index capture" => try handleUnusedIndexCapture(builder, actions, loc),
.@"error capture" => try handleUnusedCapture(builder, actions, loc),
},
.unreachable_code => {
// TODO
// autofix: comment out code
// fix: remove code
},
}
}
pub fn createTextEditLoc(self: *Builder, loc: offsets.Loc, new_text: []const u8) types.TextEdit {
const range = offsets.locToRange(self.text(), loc, self.offset_encoding);
return types.TextEdit{ .range = range, .newText = new_text };
}
pub fn createTextEditPos(self: *Builder, index: usize, new_text: []const u8) types.TextEdit {
const position = offsets.indexToPosition(self.text(), index, self.offset_encoding);
return types.TextEdit{ .range = .{ .start = position, .end = position }, .newText = new_text };
}
pub fn createWorkspaceEdit(self: *Builder, edits: []const types.TextEdit) error{OutOfMemory}!types.WorkspaceEdit {
var text_edits = std.ArrayListUnmanaged(types.TextEdit){};
try text_edits.appendSlice(self.arena.allocator(), edits);
var workspace_edit = types.WorkspaceEdit{ .changes = .{} };
try workspace_edit.changes.putNoClobber(self.arena.allocator(), self.handle.uri(), text_edits);
return workspace_edit;
}
fn text(self: *Builder) []const u8 {
return self.handle.document.text;
}
};
fn handleUnusedFunctionParameter(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const identifier_name = offsets.locToSlice(builder.text(), loc);
const tree = builder.handle.tree;
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const node_tokens = tree.nodes.items(.main_token);
const token_starts = tree.tokens.items(.start);
const decl = (try analysis.lookupSymbolGlobal(
builder.document_store,
builder.arena,
builder.handle,
identifier_name,
loc.start,
)) orelse return;
const payload = switch (decl.decl.*) {
.param_payload => |pay| pay,
else => return,
};
std.debug.assert(node_tags[payload.func] == .fn_decl);
const block = node_datas[payload.func].rhs;
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[node_tokens[payload.func]]).len;
const new_text = try createDiscardText(builder.arena.allocator(), identifier_name, indent + 4);
const index = token_starts[node_tokens[block]] + 1;
const action1 = types.CodeAction{
.title = "discard function parameter",
.kind = .SourceFixAll,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}),
};
const param_loc = .{
.start = offsets.tokenToIndex(tree, ast.paramFirstToken(tree, payload.param)),
.end = offsets.tokenToLoc(tree, ast.paramLastToken(tree, payload.param)).end,
};
// TODO fix formatting
// TODO remove trailing comma on last parameter
const action2 = types.CodeAction{
.title = "remove function parameter",
.kind = .QuickFix,
.isPreferred = false,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(param_loc, "")}),
};
try actions.appendSlice(builder.arena.allocator(), &.{ action1, action2 });
}
fn handleUnusedVariableOrConstant(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const identifier_name = offsets.locToSlice(builder.text(), loc);
const tree = builder.handle.tree;
const token_tags = tree.tokens.items(.tag);
const token_starts = tree.tokens.items(.start);
const decl = (try analysis.lookupSymbolGlobal(
builder.document_store,
builder.arena,
builder.handle,
identifier_name,
loc.start,
)) orelse return;
const node = switch (decl.decl.*) {
.ast_node => |node| node,
else => return,
};
const first_token = tree.firstToken(node);
const last_token = ast.lastToken(tree, node) + 1;
const indent = offsets.lineSliceUntilIndex(builder.text(), token_starts[first_token]).len;
if (token_tags[last_token] != .semicolon) return;
const new_text = try createDiscardText(builder.arena.allocator(), identifier_name, indent);
const index = token_starts[last_token] + 1;
try actions.append(builder.arena.allocator(), .{
.title = "discard value",
.kind = .SourceFixAll,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditPos(index, new_text)}),
});
}
fn handleUnusedIndexCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const capture_locs = getCaptureLoc(builder.text(), loc, true) orelse return;
// TODO support discarding without modifying the capture
// by adding a discard in the block scope
const is_value_discarded = std.mem.eql(u8, offsets.locToSlice(builder.text(), capture_locs.value), "_");
if (is_value_discarded) {
// |_, i| ->
// TODO fix formatting
try actions.append(builder.arena.allocator(), .{
.title = "remove capture",
.kind = .QuickFix,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}),
});
} else {
// |v, i| -> |v|
// |v, _| -> |v|
try actions.append(builder.arena.allocator(), .{
.title = "remove index capture",
.kind = .QuickFix,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(
.{ .start = capture_locs.value.end, .end = capture_locs.loc.end - 1 },
"",
)}),
});
}
}
fn handleUnusedCapture(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const capture_locs = getCaptureLoc(builder.text(), loc, false) orelse return;
// TODO support discarding without modifying the capture
// by adding a discard in the block scope
if (capture_locs.index != null) {
// |v, i| -> |_, i|
try actions.append(builder.arena.allocator(), .{
.title = "discard capture",
.kind = .QuickFix,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.value, "_")}),
});
} else {
// |v| ->
// TODO fix formatting
try actions.append(builder.arena.allocator(), .{
.title = "remove capture",
.kind = .QuickFix,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{builder.createTextEditLoc(capture_locs.loc, "")}),
});
}
}
fn handlePointlessDiscard(builder: *Builder, actions: *std.ArrayListUnmanaged(types.CodeAction), loc: offsets.Loc) !void {
const edit_loc = getDiscardLoc(builder.text(), loc) orelse return;
try actions.append(builder.arena.allocator(), .{
.title = "remove pointless discard",
.kind = .SourceFixAll,
.isPreferred = true,
.edit = try builder.createWorkspaceEdit(&.{
builder.createTextEditLoc(edit_loc, ""),
}),
});
}
// returns a discard string `\n{indent}_ = identifier_name;`
fn createDiscardText(allocator: std.mem.Allocator, identifier_name: []const u8, indent: usize) ![]const u8 {
const new_text_len = 1 + indent + "_ = ;".len + identifier_name.len;
var new_text = try std.ArrayListUnmanaged(u8).initCapacity(allocator, new_text_len);
errdefer new_text.deinit(allocator);
new_text.appendAssumeCapacity('\n');
new_text.appendNTimesAssumeCapacity(' ', indent);
new_text.appendSliceAssumeCapacity("_ = ");
new_text.appendSliceAssumeCapacity(identifier_name);
new_text.appendAssumeCapacity(';');
return new_text.toOwnedSlice(allocator);
}
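// Illustration (not part of this commit; names are hypothetical): with `enable_autofix`,
// the SourceFixAll actions built above rewrite unused identifiers on save roughly like this:
//
//     fn frobnicate(a: i32, b: i32) i32 {
//         _ = b; // discard inserted right after the opening brace for the unused parameter
//         const doubled = a * 2; // unused local constant
//         _ = doubled; // discard inserted after the declaration's semicolon
//         return a;
//     }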
const DiagnosticKind = union(enum) {
unused: IdCat,
pointless_discard: IdCat,
omit_discard: DiscardCat,
unreachable_code,
const IdCat = enum {
@"function parameter",
@"local constant",
@"local variable",
@"loop index capture",
@"capture",
};
const DiscardCat = enum {
// "discard of index capture; omit it instead"
@"index capture",
// "discard of error capture; omit it instead"
@"error capture",
};
pub fn parse(diagnostic_message: []const u8) ?DiagnosticKind {
const msg = diagnostic_message;
if (std.mem.startsWith(u8, msg, "unused ")) {
return DiagnosticKind{
.unused = parseEnum(IdCat, msg["unused ".len..]) orelse return null,
};
} else if (std.mem.startsWith(u8, msg, "pointless discard of ")) {
return DiagnosticKind{
.pointless_discard = parseEnum(IdCat, msg["pointless discard of ".len..]) orelse return null,
};
} else if (std.mem.startsWith(u8, msg, "discard of ")) {
return DiagnosticKind{
.omit_discard = parseEnum(DiscardCat, msg["discard of ".len..]) orelse return null,
};
}
return null;
}
fn parseEnum(comptime T: type, message: []const u8) ?T {
inline for (std.meta.fields(T)) |field| {
if (std.mem.startsWith(u8, message, field.name)) {
// is there a better way to achieve this?
return @intToEnum(T, field.value);
}
}
return null;
}
};
/// takes the location of an identifier which is part of a discard `_ = location_here;`
/// and returns the location from '_' until ';' or null on failure
fn getDiscardLoc(text: []const u8, loc: offsets.Loc) ?offsets.Loc {
// check if the loc points to a valid identifier
for (offsets.locToSlice(text, loc)) |c| {
if (!isSymbolChar(c)) return null;
}
// check if the identifier is followed by a semicolon
const colon_position = found: {
var i = loc.end;
while (i < text.len) : (i += 1) {
switch (text[i]) {
' ' => continue,
';' => break :found i + 1,
else => return null,
}
}
return null;
};
// check if the identifier is preceded by an equal sign and then an underscore
var i: usize = loc.start - 1;
var found_equal_sign = false;
const underscore_position = found: {
while (true) : (i -= 1) {
if (i == 0) return null;
switch (text[i]) {
' ' => {},
'=' => {
if (found_equal_sign) return null;
found_equal_sign = true;
},
'_' => if (found_equal_sign) break :found i else return null,
else => return null,
}
}
};
// move backwards until we find a newline
i = underscore_position - 1;
const start_position = found: {
while (true) : (i -= 1) {
if (i == 0) break :found underscore_position;
switch (text[i]) {
' ' => {},
'\n' => break :found i,
else => break :found underscore_position,
}
}
};
return offsets.Loc{
.start = start_position,
.end = colon_position,
};
}
const CaptureLocs = struct {
loc: offsets.Loc,
value: offsets.Loc,
index: ?offsets.Loc,
};
/// takes the location of an identifier which is part of a payload `|value, index|`
/// and returns the location from '|' until '|' or null on failure
/// use `is_index_payload` to indicate whether `loc` points to `value` or `index`
fn getCaptureLoc(text: []const u8, loc: offsets.Loc, is_index_payload: bool) ?CaptureLocs {
const value_end = if (!is_index_payload) loc.end else found: {
// move back until we find a comma
const comma_position = found_comma: {
var i = loc.start - 1;
while (i != 0) : (i -= 1) {
switch (text[i]) {
' ' => continue,
',' => break :found_comma i,
else => return null,
}
} else return null;
};
// trim space
var i = comma_position - 1;
while (i != 0) : (i -= 1) {
switch (text[i]) {
' ' => continue,
else => {
if (!isSymbolChar(text[i])) return null;
break :found i + 1;
},
}
} else return null;
};
const value_start = if (!is_index_payload) loc.start else found: {
// move back until we find a non identifier character
var i = value_end - 1;
while (i != 0) : (i -= 1) {
if (isSymbolChar(text[i])) continue;
switch (text[i]) {
' ', '|', '*' => break :found i + 1,
else => return null,
}
} else return null;
};
var index: ?offsets.Loc = null;
if (is_index_payload) {
index = loc;
} else blk: {
// move forward until we find a comma
const comma_position = found_comma: {
var i = value_end;
while (i < text.len) : (i += 1) {
switch (text[i]) {
' ' => continue,
',' => break :found_comma i,
else => break :blk,
}
}
break :blk;
};
// trim space
const index_start = found_start: {
var i = comma_position + 1;
while (i < text.len) : (i += 1) {
switch (text[i]) {
' ' => continue,
else => {
if (!isSymbolChar(text[i])) break :blk;
break :found_start i;
},
}
}
break :blk;
};
// move forward until we find a non identifier character
var i = index_start + 1;
while (i < text.len) : (i += 1) {
if (isSymbolChar(text[i])) continue;
index = offsets.Loc{
.start = index_start,
.end = i,
};
break;
}
}
const start_pipe_position = found: {
var i = value_start - 1;
while (i != 0) : (i -= 1) {
switch (text[i]) {
' ' => continue,
'|' => break :found i,
else => return null,
}
} else return null;
};
const end_pipe_position = found: {
var i: usize = if (index) |index_loc| index_loc.end else value_end;
while (i < text.len) : (i += 1) {
switch (text[i]) {
' ' => continue,
'|' => break :found i + 1,
else => return null,
}
} else return null;
};
return CaptureLocs{
.loc = .{ .start = start_pipe_position, .end = end_pipe_position },
.value = .{ .start = value_start, .end = value_end },
.index = index,
};
}
fn isSymbolChar(char: u8) bool {
return std.ascii.isAlNum(char) or char == '_';
}

89
src/data/snippets.zig Normal file
View File

@ -0,0 +1,89 @@
const types = @import("../types.zig");
pub const Snipped = struct {
label: []const u8,
kind: types.CompletionItem.Kind,
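/// optional LSP snippet body; placeholders like `${1:name}` and `$0` are tab stops and are only sent as insert text when `enable_snippets` is on (see `populateSnippedCompletions`)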
text: ?[]const u8 = null,
};
pub const top_level_decl_data = [_]Snipped{
.{ .label = "fn", .kind = .Snippet, .text = "fn ${1:name}($2) ${3:!void} {$0}" },
.{ .label = "pub fn", .kind = .Snippet, .text = "pub fn ${1:name}($2) ${3:!void} {$0}" },
.{ .label = "struct", .kind = .Snippet, .text = "const $1 = struct {$0};" },
.{ .label = "error set", .kind = .Snippet, .text = "const ${1:Error} = error {$0};" },
.{ .label = "enum", .kind = .Snippet, .text = "const $1 = enum {$0};" },
.{ .label = "union", .kind = .Snippet, .text = "const $1 = union {$0};" },
.{ .label = "union tagged", .kind = .Snippet, .text = "const $1 = union(${2:enum}) {$0};" },
.{ .label = "test", .kind = .Snippet, .text = "test \"$1\" {$0}" },
.{ .label = "main", .kind = .Snippet, .text = "pub fn main() !void {$0}" },
};
pub const generic = [_]Snipped{
// keywords
.{ .label = "align", .kind = .Keyword },
.{ .label = "allowzero", .kind = .Keyword },
.{ .label = "and", .kind = .Keyword },
.{ .label = "anyframe", .kind = .Keyword },
.{ .label = "anytype", .kind = .Keyword },
.{ .label = "asm", .kind = .Keyword },
.{ .label = "async", .kind = .Keyword },
.{ .label = "await", .kind = .Keyword },
.{ .label = "break", .kind = .Keyword },
.{ .label = "callconv", .kind = .Keyword, .text = "callconv($0)" },
.{ .label = "catch", .kind = .Keyword },
.{ .label = "comptime", .kind = .Keyword },
.{ .label = "const", .kind = .Keyword },
.{ .label = "continue", .kind = .Keyword },
.{ .label = "defer", .kind = .Keyword },
.{ .label = "else", .kind = .Keyword, .text = "else {$0}" },
.{ .label = "enum", .kind = .Keyword, .text = "enum {$0}" },
.{ .label = "errdefer", .kind = .Keyword },
.{ .label = "error", .kind = .Keyword },
.{ .label = "export", .kind = .Keyword },
.{ .label = "extern", .kind = .Keyword },
.{ .label = "fn", .kind = .Keyword, .text = "fn ${1:name}($2) ${3:!void} {$0}" },
.{ .label = "for", .kind = .Keyword, .text = "for ($1) {$0}" },
.{ .label = "if", .kind = .Keyword, .text = "if ($1) {$0}" },
.{ .label = "inline", .kind = .Keyword },
.{ .label = "noalias", .kind = .Keyword },
.{ .label = "nosuspend", .kind = .Keyword },
.{ .label = "noinline", .kind = .Keyword },
.{ .label = "opaque", .kind = .Keyword },
.{ .label = "or", .kind = .Keyword },
.{ .label = "orelse", .kind = .Keyword },
.{ .label = "packed", .kind = .Keyword },
.{ .label = "pub", .kind = .Keyword },
.{ .label = "resume", .kind = .Keyword },
.{ .label = "return", .kind = .Keyword },
.{ .label = "linksection", .kind = .Keyword },
.{ .label = "struct", .kind = .Keyword, .text = "struct {$0};" },
.{ .label = "suspend", .kind = .Keyword },
.{ .label = "switch", .kind = .Keyword, .text = "switch ($1) {$0}" },
.{ .label = "test", .kind = .Keyword, .text = "test \"$1\" {$0}" },
.{ .label = "threadlocal", .kind = .Keyword },
.{ .label = "try", .kind = .Keyword },
.{ .label = "union", .kind = .Keyword },
.{ .label = "unreachable", .kind = .Keyword },
.{ .label = "usingnamespace", .kind = .Keyword },
.{ .label = "var", .kind = .Keyword },
.{ .label = "volatile", .kind = .Keyword },
.{ .label = "while", .kind = .Keyword, .text = "while ($1) {$0}" },
// keyword snippets
.{ .label = "asmv", .kind = .Snippet, .text = "asm volatile (${1:input}, ${0:input})" },
.{ .label = "pub fn", .kind = .Snippet, .text = "pub fn ${1:name}($2) ${3:!void} {$0}" },
.{ .label = "forv", .kind = .Snippet, .text = "for ($1) |${2:value}| {$0}" },
.{ .label = "fori", .kind = .Snippet, .text = "for ($1) |_, ${2:i}| {$0}" },
.{ .label = "forvi", .kind = .Snippet, .text = "for ($1) |${2:value},${3:i}| {$0}" },
.{ .label = "if else", .kind = .Snippet, .text = "if ($1) {$2} else {$0}" },
.{ .label = "catch switch", .kind = .Snippet, .text = "catch |${1:err}| switch(${1:err}) {$0};" },
// snippets
.{ .label = "main", .kind = .Snippet, .text = "pub fn main() !void {$0}" },
.{ .label = "todo", .kind = .Snippet, .text = "std.debug.todo(\"$0\");" },
.{ .label = "print", .kind = .Snippet, .text = "std.debug.print(\"$1\", .{$0});" },
.{ .label = "log err", .kind = .Snippet, .text = "std.log.err(\"$1\", .{$0});" },
.{ .label = "log warn", .kind = .Snippet, .text = "std.log.warn(\"$1\", .{$0});" },
.{ .label = "log info", .kind = .Snippet, .text = "std.log.info(\"$1\", .{$0});" },
.{ .label = "log debug", .kind = .Snippet, .text = "std.log.debug(\"$1\", .{$0});" },
};

View File

@ -516,8 +516,9 @@ pub fn symbolReferences(
try imports.resize(arena.allocator(), 0);
}
},
.param_decl => |param| blk: {
.param_payload => |pay| blk: {
// Rename the param tok.
const param = pay.param;
for (curr_handle.document_scope.scopes.items) |scope| {
if (scope.data != .function) continue;

View File

@ -274,11 +274,22 @@ pub const InlayHint = struct {
},
};
pub const CodeAction = struct {
params: struct {
textDocument: TextDocumentIdentifier,
range: types.Range,
context: struct {
diagnostics: []types.Diagnostic,
},
},
};
pub const Configuration = struct {
params: struct {
settings: struct {
enable_snippets: ?bool,
enable_ast_check_diagnostics: ?bool,
enable_autofix: ?bool,
enable_import_embedfile_argument_completions: ?bool,
zig_lib_path: ?[]const u8,
zig_exe_path: ?[]const u8,

View File

@ -417,7 +417,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
name,
tree.tokens.items(.start)[main_token],
)) |child| {
if (child.decl.* == .param_decl) {
if (child.decl.* == .param_payload) {
return try writeToken(builder, main_token, .parameter);
}
var bound_type_params = analysis.BoundTypeParams{};
@ -716,8 +716,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
try writeToken(builder, main_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
},
.number_literal,
=> {
.number_literal => {
try writeToken(builder, main_token, .number);
},
.enum_literal => {

View File

@ -171,6 +171,7 @@ pub fn wizard(allocator: std.mem.Allocator) !void {
const editor = try askSelectOne("Which code editor do you use?", enum { VSCode, Sublime, Kate, Neovim, Vim8, Emacs, Doom, Spacemacs, Helix, Other });
const snippets = try askBool("Do you want to enable snippets?");
const ast_check = try askBool("Do you want to enable ast-check diagnostics?");
const autofix = try askBool("Do you want zls to automatically try to fix errors on save? (supports adding & removing discards)");
const ief_apc = try askBool("Do you want to enable @import/@embedFile argument path completion?");
const style = try askBool("Do you want to enable style warnings?");
const semantic_tokens = try askBool("Do you want to enable semantic highlighting?");
@ -193,6 +194,7 @@ pub fn wizard(allocator: std.mem.Allocator) !void {
.zig_exe_path = zig_exe_path,
.enable_snippets = snippets,
.enable_ast_check_diagnostics = ast_check,
.enable_autofix = autofix,
.enable_import_embedfile_argument_completions = ief_apc,
.warn_style = style,
.enable_semantic_tokens = semantic_tokens,

View File

@ -47,6 +47,8 @@ pub const ResponseParams = union(enum) {
ConfigurationParams: ConfigurationParams,
RegistrationParams: RegistrationParams,
DocumentHighlight: []DocumentHighlight,
CodeAction: []CodeAction,
ApplyEdit: ApplyWorkspaceEditParams,
};
/// JSONRPC notifications
@ -115,11 +117,11 @@ pub const DiagnosticRelatedInformation = struct {
pub const Diagnostic = struct {
range: Range,
severity: DiagnosticSeverity,
code: string,
source: string,
severity: ?DiagnosticSeverity,
code: ?string,
source: ?string,
message: string,
relatedInformation: []const DiagnosticRelatedInformation = &.{},
relatedInformation: ?[]const DiagnosticRelatedInformation = null,
};
pub const TextDocument = struct {
@ -372,6 +374,46 @@ pub const InlayHintKind = enum(i64) {
}
};
pub const CodeActionKind = enum {
Empty,
QuickFix,
Refactor,
RefactorExtract,
RefactorInline,
RefactorRewrite,
Source,
SourceOrganizeImports,
SourceFixAll,
pub fn jsonStringify(value: CodeActionKind, options: std.json.StringifyOptions, out_stream: anytype) !void {
const name = switch (value) {
.Empty => "",
.QuickFix => "quickfix",
.Refactor => "refactor",
.RefactorExtract => "refactor.extract",
.RefactorInline => "refactor.inline",
.RefactorRewrite => "refactor.rewrite",
.Source => "source",
.SourceOrganizeImports => "source.organizeImports",
.SourceFixAll => "source.fixAll",
};
try std.json.stringify(name, options, out_stream);
}
};
pub const CodeAction = struct {
title: string,
kind: CodeActionKind,
// diagnostics: []Diagnostic,
isPreferred: bool,
edit: WorkspaceEdit,
};
pub const ApplyWorkspaceEditParams = struct {
label: string,
edit: WorkspaceEdit,
};
pub const PositionEncodingKind = enum {
utf8,
utf16,

View File

@ -1,105 +0,0 @@
const std = @import("std");
const Context = @import("context.zig").Context;
const allocator = std.testing.allocator;
test "Request completion in an empty file" {
var ctx = try Context.init();
defer ctx.deinit();
try ctx.request("textDocument/didOpen",
\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":""}}
, null);
try ctx.request("textDocument/completion",
\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":0,"character":0}}
, null);
}
test "Request completion with no trailing whitespace" {
var ctx = try Context.init();
defer ctx.deinit();
try ctx.request("textDocument/didOpen",
\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const std = @import(\"std\");\nc"}}
, null);
try ctx.request("textDocument/completion",
\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
,
\\{"isIncomplete":false,"items":[{"label":"std","labelDetails":{"detail":"","description":"@import(\"std\")","sortText":null},"kind":21,"detail":"std","sortText":"1_std","filterText":null,"insertText":"std","insertTextFormat":1,"documentation":null}]}
);
}
test "Encoded space in file name and usingnamespace on non-existing symbol" {
var ctx = try Context.init();
defer ctx.deinit();
try ctx.request("textDocument/didOpen",
\\{"textDocument":{"uri":"file:///%20test.zig","languageId":"zig","version":420,"text":"usingnamespace a.b;\nb."}}
, null);
try ctx.request("textDocument/completion",
\\{"textDocument":{"uri":"file:///%20test.zig"}, "position":{"line":1,"character":2}}
,
\\{"isIncomplete":false,"items":[]}
);
}
test "Self-referential definition" {
var ctx = try Context.init();
defer ctx.deinit();
try ctx.request("textDocument/didOpen",
\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const h = h(0);\nc"}}
, null);
try ctx.request("textDocument/completion",
\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
,
\\{"isIncomplete":false,"items":[{"label":"h","labelDetails":{"detail":"","description":"h(0)","sortText":null},"kind":21,"detail":"h","sortText":"1_h","filterText":null,"insertText":"h","insertTextFormat":1,"documentation":null}]}
);
}
// This test as written depends on the configuration in the *host* machines zls.json, if `enable_snippets` is true then
// the insert text is "w()" if it is false it is "w"
//
// test "Missing return type" {
// var server = try Server.start(initialize_msg, null);
// defer server.shutdown();
// try server.request("textDocument/didOpen",
// \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"fn w() {}\nc"}}
// , null);
// try server.request("textDocument/completion",
// \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
// ,
// \\{"isIncomplete":false,"items":[{"label":"w","kind":3,"textEdit":null,"filterText":null,"insertText":"w","insertTextFormat":1,"detail":"fn","documentation":null}]}
// );
// }
test "Pointer and optional deref" {
var ctx = try Context.init();
defer ctx.deinit();
try ctx.request("textDocument/didOpen",
\\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"var value: ?struct { data: i32 = 5 } = null;const ptr = &value;\nconst a = ptr.*.?."}}
, null);
try ctx.request("textDocument/completion",
\\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":18}}
,
\\{"isIncomplete":false,"items":[{"label":"data","labelDetails":{"detail":"","description":"i32 ","sortText":null},"kind":5,"detail":"data","sortText":"3_data","filterText":null,"insertText":"data","insertTextFormat":1,"documentation":null}]}
);
}
// not fixed yet!
// test "Self-referential import" {
// var ctx = try Context.init();
// defer ctx.deinit();
//
// try ctx.request("textDocument/didOpen",
// \\{"textDocument":{"uri":"file:///test.zig","languageId":"zig","version":420,"text":"const a = @import(\"test.zig\").a;\nc"}}
// , null);
// try ctx.request("textDocument/completion",
// \\{"textDocument":{"uri":"file:///test.zig"}, "position":{"line":1,"character":1}}
// ,
// \\{"isIncomplete":false,"items":[]}
// );
// }

View File

@ -1,6 +1,5 @@
comptime {
_ = @import("helper.zig");
_ = @import("sessions.zig");
_ = @import("utility/offsets.zig");
_ = @import("utility/position_context.zig");