Merge pull request #702 from Techatrix/document-store-refactor
DocumentStore refactor
commit 7c54ded487
File diff suppressed because it is too large
src/Server.zig (137 changed lines)
@@ -151,7 +151,7 @@ fn showMessage(server: *Server, writer: anytype, message_type: types.MessageType
     });
 }
 
-fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.Handle) !void {
+fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Handle) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
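The handler signature changes that follow all apply one idea: handles are now passed by value or behind `*const`, so server code can read a document but can no longer mutate DocumentStore-owned state through it. A minimal, self-contained sketch of that property (the `Handle` here is a stand-in, not zls's real type):

const std = @import("std");

// Stand-in for DocumentStore.Handle; the real type lives in src/DocumentStore.zig.
const Handle = struct {
    uri: []const u8,
    version: u32,
};

// Read-only access: a `*const Handle` (or a by-value copy) can be inspected,
// but assigning through it is a compile error, so ownership stays with the store.
fn describe(handle: *const Handle) void {
    std.debug.print("{s} (version {d})\n", .{ handle.uri, handle.version });
    // handle.version += 1; // error: cannot assign to constant
}

pub fn main() void {
    var handle = Handle{ .uri = "file:///example.zig", .version = 3 };
    describe(&handle);
}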
@@ -175,17 +175,19 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.H
         });
     }
 
-    for (handle.cimports) |cimport| {
-        if (cimport.result != .failure) continue;
-        const stderr = std.mem.trim(u8, cimport.result.failure, " ");
+    for (handle.cimports.items(.hash)) |hash, i| {
+        const result = server.document_store.cimports.get(hash) orelse continue;
+        if (result != .failure) continue;
+        const stderr = std.mem.trim(u8, result.failure, " ");
 
         var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
         _ = pos_and_diag_iterator.next(); // skip file path
         _ = pos_and_diag_iterator.next(); // skip line
         _ = pos_and_diag_iterator.next(); // skip character
 
+        const node = handle.cimports.items(.node)[i];
         try diagnostics.append(allocator, .{
-            .range = offsets.nodeToRange(handle.tree, cimport.node, server.offset_encoding),
+            .range = offsets.nodeToRange(handle.tree, node, server.offset_encoding),
             .severity = .Error,
             .code = "cImport",
             .source = "zls",
@@ -269,7 +271,27 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.H
             }
         }
     }
 
+    for (handle.cimports.items(.hash)) |hash, i| {
+        const result = server.document_store.cimports.get(hash) orelse continue;
+        if (result != .failure) continue;
+        const stderr = std.mem.trim(u8, result.failure, " ");
+
+        var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
+        _ = pos_and_diag_iterator.next(); // skip file path
+        _ = pos_and_diag_iterator.next(); // skip line
+        _ = pos_and_diag_iterator.next(); // skip character
+
+        const node = handle.cimports.items(.node)[i];
+        try diagnostics.append(allocator, .{
+            .range = offsets.nodeToRange(handle.tree, node, server.offset_encoding),
+            .severity = .Error,
+            .code = "cImport",
+            .source = "zls",
+            .message = try allocator.dupe(u8, pos_and_diag_iterator.rest()),
+        });
+    }
+
     if (server.config.highlight_global_var_declarations) {
         const main_tokens = tree.nodes.items(.main_token);
         const tags = tree.tokens.items(.tag);
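The per-document `@cImport` data is now stored column-wise and translated results are looked up in the store by hash. A small self-contained sketch of the `std.MultiArrayList` field-slice iteration the new loop uses; the `CImport` row type and values are made up, and the `for (…) |x, i|` capture matches the Zig 0.10-era syntax used throughout this diff:

const std = @import("std");

// Hypothetical row type; zls keeps a hash plus the AST node of each @cImport.
const CImport = struct {
    hash: u64,
    node: u32,
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var cimports = std.MultiArrayList(CImport){};
    defer cimports.deinit(allocator);

    try cimports.append(allocator, .{ .hash = 0xdeadbeef, .node = 7 });
    try cimports.append(allocator, .{ .hash = 0xcafebabe, .node = 42 });

    // items(.hash) yields a contiguous slice of just that column; the index
    // is then reused to read the matching entry of another column.
    for (cimports.items(.hash)) |hash, i| {
        const node = cimports.items(.node)[i];
        std.debug.print("hash=0x{x} node={d}\n", .{ hash, node });
    }
}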
@@ -312,7 +334,7 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: *DocumentStore.H
 
 fn getAstCheckDiagnostics(
     server: *Server,
-    handle: *DocumentStore.Handle,
+    handle: DocumentStore.Handle,
     diagnostics: *std.ArrayListUnmanaged(types.Diagnostic),
 ) !void {
     var allocator = server.arena.allocator();
@@ -406,7 +428,7 @@ fn typeToCompletion(
     server: *Server,
     list: *std.ArrayListUnmanaged(types.CompletionItem),
     field_access: analysis.FieldAccessReturn,
-    orig_handle: *DocumentStore.Handle,
+    orig_handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!void {
     var allocator = server.arena.allocator();
 
@@ -468,7 +490,7 @@ fn nodeToCompletion(
     list: *std.ArrayListUnmanaged(types.CompletionItem),
     node_handle: analysis.NodeWithHandle,
     unwrapped: ?analysis.TypeWithHandle,
-    orig_handle: *DocumentStore.Handle,
+    orig_handle: *const DocumentStore.Handle,
     is_type_val: bool,
     parent_is_type_val: ?bool,
 ) error{OutOfMemory}!void {
@@ -838,7 +860,7 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
     };
 }
 
-fn getLabelGlobal(pos_index: usize, handle: *DocumentStore.Handle) error{OutOfMemory}!?analysis.DeclWithHandle {
+fn getLabelGlobal(pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?analysis.DeclWithHandle {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -851,7 +873,7 @@ fn getLabelGlobal(pos_index: usize, handle: *DocumentStore.Handle) error{OutOfMe
 fn getSymbolGlobal(
     server: *Server,
     pos_index: usize,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?analysis.DeclWithHandle {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
@@ -865,7 +887,7 @@ fn getSymbolGlobal(
 fn gotoDefinitionLabel(
     server: *Server,
     pos_index: usize,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?types.Location {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
@@ -877,7 +899,7 @@ fn gotoDefinitionLabel(
 fn gotoDefinitionGlobal(
     server: *Server,
     pos_index: usize,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     resolve_alias: bool,
 ) error{OutOfMemory}!?types.Location {
     const tracy_zone = tracy.trace(@src());
@@ -887,7 +909,7 @@ fn gotoDefinitionGlobal(
     return try server.gotoDefinitionSymbol(decl, resolve_alias);
 }
 
-fn hoverDefinitionLabel(server: *Server, pos_index: usize, handle: *DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
+fn hoverDefinitionLabel(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
    const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -895,7 +917,7 @@ fn hoverDefinitionLabel(server: *Server, pos_index: usize, handle: *DocumentStor
     return try server.hoverSymbol(decl);
 }
 
-fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
+fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -919,7 +941,7 @@ fn hoverDefinitionBuiltin(server: *Server, pos_index: usize, handle: *DocumentSt
     return null;
 }
 
-fn hoverDefinitionGlobal(server: *Server, pos_index: usize, handle: *DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
+fn hoverDefinitionGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}!?types.Hover {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -929,7 +951,7 @@ fn hoverDefinitionGlobal(server: *Server, pos_index: usize, handle: *DocumentSto
 
 fn getSymbolFieldAccess(
     server: *Server,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     source_index: usize,
     loc: offsets.Loc,
 ) !?analysis.DeclWithHandle {
@@ -961,7 +983,7 @@ fn getSymbolFieldAccess(
 
 fn gotoDefinitionFieldAccess(
     server: *Server,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     source_index: usize,
     loc: offsets.Loc,
     resolve_alias: bool,
@@ -975,7 +997,7 @@ fn gotoDefinitionFieldAccess(
 
 fn hoverDefinitionFieldAccess(
     server: *Server,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     source_index: usize,
     loc: offsets.Loc,
 ) error{OutOfMemory}!?types.Hover {
@@ -989,16 +1011,13 @@ fn hoverDefinitionFieldAccess(
 fn gotoDefinitionString(
     server: *Server,
     pos_index: usize,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?types.Location {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
     const import_str = analysis.getImportStr(handle.tree, 0, pos_index) orelse return null;
-    const uri = server.document_store.uriFromImportStr(server.arena.allocator(), handle.*, import_str) catch |err| switch (err) {
-        error.UriBadScheme => return null,
-        error.OutOfMemory => |e| return e,
-    };
+    const uri = try server.document_store.uriFromImportStr(server.arena.allocator(), handle.*, import_str);
 
     return types.Location{
         .uri = uri orelse return null,
@@ -1012,7 +1031,7 @@ fn gotoDefinitionString(
 const DeclToCompletionContext = struct {
     server: *Server,
     completions: *std.ArrayListUnmanaged(types.CompletionItem),
-    orig_handle: *DocumentStore.Handle,
+    orig_handle: *const DocumentStore.Handle,
     parent_is_type_val: ?bool = null,
 };
 
@@ -1101,7 +1120,7 @@ fn declToCompletion(context: DeclToCompletionContext, decl_handle: analysis.Decl
 fn completeLabel(
     server: *Server,
     pos_index: usize,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 ) ![]types.CompletionItem {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
@@ -1142,7 +1161,7 @@ fn populateSnippedCompletions(
     }
 }
 
-fn completeGlobal(server: *Server, pos_index: usize, handle: *DocumentStore.Handle) ![]types.CompletionItem {
+fn completeGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) ![]types.CompletionItem {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -1165,7 +1184,7 @@ fn completeGlobal(server: *Server, pos_index: usize, handle: *DocumentStore.Hand
     return completions.toOwnedSlice(server.arena.allocator());
 }
 
-fn completeFieldAccess(server: *Server, handle: *DocumentStore.Handle, source_index: usize, loc: offsets.Loc) !?[]types.CompletionItem {
+fn completeFieldAccess(server: *Server, handle: *const DocumentStore.Handle, source_index: usize, loc: offsets.Loc) !?[]types.CompletionItem {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -1352,11 +1371,11 @@ fn formatDetailledLabel(item: *types.CompletionItem, alloc: std.mem.Allocator) !
     // logger.info("labelDetails: {s} :: {s}", .{item.labelDetails.?.detail, item.labelDetails.?.description});
 }
 
-fn completeError(server: *Server, handle: *DocumentStore.Handle) ![]types.CompletionItem {
+fn completeError(server: *Server, handle: *const DocumentStore.Handle) ![]types.CompletionItem {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    return try server.document_store.errorCompletionItems(&server.arena, handle);
+    return try server.document_store.errorCompletionItems(server.arena.allocator(), handle.*);
 }
 
 fn kindToSortScore(kind: types.CompletionItem.Kind) ?[]const u8 {
@@ -1387,16 +1406,16 @@ fn kindToSortScore(kind: types.CompletionItem.Kind) ?[]const u8 {
     };
 }
 
-fn completeDot(server: *Server, handle: *DocumentStore.Handle) ![]types.CompletionItem {
+fn completeDot(server: *Server, handle: *const DocumentStore.Handle) ![]types.CompletionItem {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    var completions = try server.document_store.enumCompletionItems(&server.arena, handle);
+    var completions = try server.document_store.enumCompletionItems(server.arena.allocator(), handle.*);
 
     return completions;
 }
 
-fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, handle: *DocumentStore.Handle, completing: []const u8, is_import: bool) ![]types.CompletionItem {
+fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, store: *const DocumentStore, handle: *const DocumentStore.Handle, completing: []const u8, is_import: bool) ![]types.CompletionItem {
     var subpath_present = false;
     var completions = std.ArrayListUnmanaged(types.CompletionItem){};
 
@@ -1436,10 +1455,11 @@ fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, handle: *Docume
     }
 
     if (!subpath_present and is_import) {
-        if (handle.associated_build_file) |bf| {
-            try completions.ensureUnusedCapacity(allocator, bf.config.packages.len);
+        if (handle.associated_build_file) |uri| {
+            const build_file = store.build_files.get(uri).?;
+            try completions.ensureUnusedCapacity(allocator, build_file.config.packages.len);
 
-            for (bf.config.packages) |pkg| {
+            for (build_file.config.packages) |pkg| {
                 completions.appendAssumeCapacity(.{
                     .label = pkg.name,
                     .kind = .Module,
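`associated_build_file` is now the URI of the build file rather than a pointer into the store, and it is resolved through `store.build_files` on demand. A rough sketch of that key-based indirection with plain std containers (types and URIs below are placeholders):

const std = @import("std");

const BuildFile = struct {
    package_count: usize,
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // Store side: build files keyed by URI.
    var build_files = std.StringHashMapUnmanaged(BuildFile){};
    defer build_files.deinit(allocator);
    try build_files.put(allocator, "file:///proj/build.zig", .{ .package_count = 2 });

    // Handle side: only the key is remembered, not a pointer that could dangle
    // when the store's containers reallocate.
    const associated_build_file: ?[]const u8 = "file:///proj/build.zig";

    if (associated_build_file) |uri| {
        const build_file = build_files.get(uri).?;
        std.debug.print("{s} has {d} packages\n", .{ uri, build_file.package_count });
    }
}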
@@ -1451,7 +1471,7 @@ fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, handle: *Docume
     return completions.toOwnedSlice(allocator);
 }
 
-fn documentSymbol(server: *Server, writer: anytype, id: types.RequestId, handle: *DocumentStore.Handle) !void {
+fn documentSymbol(server: *Server, writer: anytype, id: types.RequestId, handle: *const DocumentStore.Handle) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -1741,13 +1761,12 @@ fn changeDocumentHandler(server: *Server, writer: anytype, id: types.RequestId,
 
     _ = id;
 
-    const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.debug("Trying to change non existent document {s}", .{req.params.textDocument.uri});
-        return;
-    };
+    const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse return;
 
-    try server.document_store.applyChanges(handle, req.params.contentChanges, server.offset_encoding);
-    try server.publishDiagnostics(writer, handle);
+    const new_text = try diff.applyTextEdits(server.allocator, handle.text, req.params.contentChanges, server.offset_encoding);
+
+    try server.document_store.refreshDocument(handle.uri, new_text);
+    try server.publishDiagnostics(writer, handle.*);
 }
 
 fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.SaveDocument) !void {
@@ -1758,10 +1777,7 @@ fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, re
     const allocator = server.arena.allocator();
     const uri = req.params.textDocument.uri;
 
-    const handle = server.document_store.getHandle(uri) orelse {
-        log.warn("Trying to save non existent document {s}", .{uri});
-        return;
-    };
+    const handle = server.document_store.getHandle(uri) orelse return;
     try server.document_store.applySave(handle);
 
     if (handle.tree.errors.len != 0) return;
@@ -1769,7 +1785,7 @@ fn saveDocumentHandler(server: *Server, writer: anytype, id: types.RequestId, re
     if (!server.config.enable_autofix) return;
 
     var diagnostics = std.ArrayListUnmanaged(types.Diagnostic){};
-    try getAstCheckDiagnostics(server, handle, &diagnostics);
+    try getAstCheckDiagnostics(server, handle.*, &diagnostics);
 
     var builder = code_actions.Builder{
         .arena = &server.arena,
@@ -1827,7 +1843,6 @@ fn semanticTokensFullHandler(server: *Server, writer: anytype, id: types.Request
     if (!server.config.enable_semantic_tokens) return try respondGeneric(writer, id, no_semantic_tokens_response);
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get semantic tokens of non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, no_semantic_tokens_response);
     };
 
@@ -1844,7 +1859,6 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
     defer tracy_zone.end();
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to complete in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, no_completions_response);
     };
 
@@ -1875,7 +1889,7 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 
             const completing = offsets.locToSlice(handle.tree.source, loc);
             const is_import = pos_context == .import_string_literal;
-            break :blk try completeFileSystemStringLiteral(server.arena.allocator(), handle, completing, is_import);
+            break :blk try completeFileSystemStringLiteral(server.arena.allocator(), &server.document_store, handle, completing, is_import);
         },
         else => null,
     };
@@ -1915,7 +1929,6 @@ fn signatureHelpHandler(server: *Server, writer: anytype, id: types.RequestId, r
 
     const getSignatureInfo = @import("signature_help.zig").getSignatureInfo;
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get signature help in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, no_signatures_response);
     };
 
@@ -1949,7 +1962,6 @@ fn gotoHandler(server: *Server, writer: anytype, id: types.RequestId, req: reque
     defer tracy_zone.end();
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to go to definition in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, null_result_response);
     };
 
@@ -1993,7 +2005,6 @@ fn hoverHandler(server: *Server, writer: anytype, id: types.RequestId, req: requ
     defer tracy_zone.end();
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get hover in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, null_result_response);
     };
 
@@ -2023,7 +2034,6 @@ fn documentSymbolsHandler(server: *Server, writer: anytype, id: types.RequestId,
     defer tracy_zone.end();
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get document symbols in non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, null_result_response);
     };
     try server.documentSymbol(writer, id, handle);
@@ -2035,7 +2045,6 @@ fn formattingHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 
     if (server.config.zig_exe_path) |zig_exe_path| {
         const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-            log.warn("Trying to got to definition in non existent document {s}", .{req.params.textDocument.uri});
             return try respondGeneric(writer, id, null_result_response);
         };
 
@@ -2161,14 +2170,6 @@ const GeneralReferencesRequest = union(enum) {
             .highlight => |highlight| highlight.params.position,
         };
     }
-
-    pub fn name(self: @This()) []const u8 {
-        return switch (self) {
-            .rename => "rename",
-            .references => "references",
-            .highlight => "highlight references",
-        };
-    }
 };
 
 fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestId, req: GeneralReferencesRequest) !void {
@@ -2178,7 +2179,6 @@ fn generalReferencesHandler(server: *Server, writer: anytype, id: types.RequestI
     const allocator = server.arena.allocator();
 
     const handle = server.document_store.getHandle(req.uri()) orelse {
-        log.warn("Trying to get {s} in non existent document {s}", .{ req.name(), req.uri() });
         return try respondGeneric(writer, id, null_result_response);
     };
 
@@ -2260,7 +2260,6 @@ fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req:
     if (!server.config.enable_inlay_hints) return try respondGeneric(writer, id, null_result_response);
 
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get inlay hint of non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, null_result_response);
     };
 
@@ -2309,7 +2308,6 @@ fn inlayHintHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 
 fn codeActionHandler(server: *Server, writer: anytype, id: types.RequestId, req: requests.CodeAction) !void {
     const handle = server.document_store.getHandle(req.params.textDocument.uri) orelse {
-        log.warn("Trying to get code actions of non existent document {s}", .{req.params.textDocument.uri});
         return try respondGeneric(writer, id, null_result_response);
     };
 
@@ -2572,7 +2570,10 @@ pub fn init(
 
     try config.configChanged(allocator, config_path);
 
-    var document_store = try DocumentStore.init(allocator, config);
+    var document_store = DocumentStore{
+        .allocator = allocator,
+        .config = config,
+    };
     errdefer document_store.deinit();
 
     var builtin_completions = try std.ArrayListUnmanaged(types.CompletionItem).initCapacity(allocator, data.builtins.len);
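`DocumentStore` is now built with a struct literal: its container fields default to empty, so only the allocator and config need to be supplied, and `errdefer document_store.deinit()` still cleans up if a later step of `Server.init` fails. A generic sketch of that defaulted-fields-plus-errdefer pattern (the `Store` type is illustrative, not the real DocumentStore):

const std = @import("std");

const Store = struct {
    allocator: std.mem.Allocator,
    // Containers default to empty, so no init() constructor is needed.
    entries: std.ArrayListUnmanaged(u32) = .{},

    fn deinit(self: *Store) void {
        self.entries.deinit(self.allocator);
    }
};

fn mightFail(fail: bool) !void {
    if (fail) return error.Oops;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var store = Store{ .allocator = allocator };
    errdefer store.deinit(); // runs only if a later step below returns an error

    try store.entries.append(allocator, 1);
    try mightFail(false);

    store.deinit();
}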
src/analysis.zig
@@ -47,7 +47,7 @@ pub fn getDocComments(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.In
 }
 
 /// Get the first doc comment of a declaration.
-pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: Ast.TokenIndex) ?Ast.TokenIndex {
+pub fn getDocCommentTokenIndex(tokens: []const std.zig.Token.Tag, base_token: Ast.TokenIndex) ?Ast.TokenIndex {
     var idx = base_token;
     if (idx == 0) return null;
     idx -= 1;
@@ -181,7 +181,7 @@ pub fn getFunctionSnippet(allocator: std.mem.Allocator, tree: Ast, func: Ast.ful
     return buffer.toOwnedSlice(allocator);
 }
 
-pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentStore, handle: *DocumentStore.Handle, func: Ast.full.FnProto) !bool {
+pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentStore, handle: *const DocumentStore.Handle, func: Ast.full.FnProto) !bool {
     // Non-decl prototypes cannot have a self parameter.
     if (func.name_token == null) return false;
     if (func.ast.params.len == 0) return false;
@@ -431,7 +431,7 @@ fn findReturnStatement(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Inde
     return findReturnStatementInternal(tree, fn_decl, body, &already_found);
 }
 
-pub fn resolveReturnType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, fn_decl: Ast.full.FnProto, handle: *DocumentStore.Handle, bound_type_params: *BoundTypeParams, fn_body: ?Ast.Node.Index) !?TypeWithHandle {
+pub fn resolveReturnType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, fn_decl: Ast.full.FnProto, handle: *const DocumentStore.Handle, bound_type_params: *BoundTypeParams, fn_body: ?Ast.Node.Index) !?TypeWithHandle {
     const tree = handle.tree;
     if (isTypeFunction(tree, fn_decl) and fn_body != null) {
         // If this is a type function and it only contains a single return statement that returns
@@ -912,18 +912,16 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
             if (node_tags[import_param] != .string_literal) return null;
 
             const import_str = tree.tokenSlice(main_tokens[import_param]);
-            const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| {
-                log.debug("Error {} while processing import {s}", .{ err, import_str });
-                return null;
-            }) orelse return null;
+            const import_uri = (try store.uriFromImportStr(arena.allocator(), handle.*, import_str[1 .. import_str.len - 1])) orelse return null;
+
+            const new_handle = store.getOrLoadHandle(import_uri) orelse return null;
 
             // reference to node '0' which is root
             return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
         } else if (std.mem.eql(u8, call_name, "@cImport")) {
-            const new_handle = (store.resolveCImport(handle, node) catch |err| {
-                log.debug("Error {} while processing cImport", .{err}); // TODO improve
-                return null;
-            }) orelse return null;
+            const cimport_uri = (try store.resolveCImport(handle.*, node)) orelse return null;
+
+            const new_handle = store.getOrLoadHandle(cimport_uri) orelse return null;
 
             // reference to node '0' which is root
             return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
@@ -974,7 +972,7 @@ pub const Type = struct {
 
 pub const TypeWithHandle = struct {
     type: Type,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 
     pub fn typeVal(node_handle: NodeWithHandle) TypeWithHandle {
         return .{
@@ -1088,7 +1086,6 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator,
 }
 
 /// Collects all `@import`'s we can find into a slice of import paths (without quotes).
-/// Caller owns returned memory.
 pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}!std.ArrayListUnmanaged([]const u8) {
     var imports = std.ArrayListUnmanaged([]const u8){};
     errdefer {
@@ -1148,7 +1145,7 @@ pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: Ast) error{OutOfM
 
 pub const NodeWithHandle = struct {
     node: Ast.Node.Index,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 };
 
 pub const FieldAccessReturn = struct {
@@ -1156,7 +1153,7 @@ pub const FieldAccessReturn = struct {
     unwrapped: ?TypeWithHandle = null,
 };
 
-pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, tokenizer: *std.zig.Tokenizer) !?FieldAccessReturn {
+pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, source_index: usize, tokenizer: *std.zig.Tokenizer) !?FieldAccessReturn {
     var current_type = TypeWithHandle.typeVal(.{
         .node = undefined,
         .handle = handle,
@@ -1868,7 +1865,7 @@ pub const Declaration = union(enum) {
 
 pub const DeclWithHandle = struct {
     decl: *Declaration,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 
     pub fn nameToken(self: DeclWithHandle) Ast.TokenIndex {
         const tree = self.handle.tree;
@@ -2000,7 +1997,7 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope {
     } else null;
 }
 
-fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
+fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *const DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
     const container = container_handle.node;
     const handle = container_handle.handle;
 
@@ -2067,12 +2064,12 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
     }
 }
 
-pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool) error{OutOfMemory}!void {
+pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *const DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool) error{OutOfMemory}!void {
     var use_trail = std.ArrayList(Ast.Node.Index).init(arena.allocator());
     return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail);
 }
 
-pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
+pub fn iterateLabels(handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
     for (handle.document_scope.scopes.items) |scope| {
         if (source_index >= scope.loc.start and source_index < scope.loc.end) {
             var decl_it = scope.decls.iterator();
@@ -2088,7 +2085,7 @@ pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptim
     }
 }
 
-fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
+fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(Ast.Node.Index)) error{OutOfMemory}!void {
     for (handle.document_scope.scopes.items) |scope| {
         if (source_index >= scope.loc.start and source_index <= scope.loc.end) {
             var decl_it = scope.decls.iterator();
@@ -2129,7 +2126,7 @@ fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAll
     }
 }
 
-pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
+pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
     var use_trail = std.ArrayList(Ast.Node.Index).init(arena.allocator());
     return try iterateSymbolsGlobalInternal(store, arena, handle, source_index, callback, context, &use_trail);
 }
@@ -2154,7 +2151,7 @@ pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) As
     return handle.document_scope.scopes.items[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?;
 }
 
-pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) TypeWithHandle {
+pub fn innermostContainer(handle: *const DocumentStore.Handle, source_index: usize) TypeWithHandle {
     var current = handle.document_scope.scopes.items[0].data.container;
     if (handle.document_scope.scopes.items.len == 1) return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
 
@@ -2170,7 +2167,7 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty
     return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
 }
 
-fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const Ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle {
+fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const Ast.Node.Index, symbol: []const u8, handle: *const DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle {
     // If we were asked to resolve this symbol before,
     // it is self-referential and we cannot resolve it.
     if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null)
@@ -2202,7 +2199,7 @@ fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []co
     return null;
 }
 
-pub fn lookupLabel(handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
+pub fn lookupLabel(handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
     for (handle.document_scope.scopes.items) |scope| {
         if (source_index >= scope.loc.start and source_index < scope.loc.end) {
             if (scope.decls.getEntry(symbol)) |candidate| {
@@ -2221,7 +2218,7 @@ pub fn lookupLabel(handle: *DocumentStore.Handle, symbol: []const u8, source_ind
     return null;
 }
 
-pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
+pub fn lookupSymbolGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, symbol: []const u8, source_index: usize) error{OutOfMemory}!?DeclWithHandle {
     const innermost_scope_idx = innermostBlockScopeIndex(handle.*, source_index);
 
     var curr = innermost_scope_idx;
src/code_actions.zig
@@ -12,7 +12,7 @@ const offsets = @import("offsets.zig");
 pub const Builder = struct {
     arena: *std.heap.ArenaAllocator,
     document_store: *DocumentStore,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     offset_encoding: offsets.Encoding,
 
     pub fn generateCodeAction(
src/diff.zig (36 changed lines)
@@ -1,5 +1,7 @@
 const std = @import("std");
 const types = @import("types.zig");
+const requests = @import("requests.zig");
+const offsets = @import("offsets.zig");
 
 pub const Error = error{ OutOfMemory, InvalidRange };
 
@@ -350,3 +352,37 @@ fn char_pos_to_range(
         .end = result_end_pos.?,
     };
 }
+
+// Caller owns returned memory.
+pub fn applyTextEdits(
+    allocator: std.mem.Allocator,
+    text: []const u8,
+    content_changes: []const requests.TextDocumentContentChangeEvent,
+    encoding: offsets.Encoding,
+) ![:0]const u8 {
+    var last_full_text_change: ?usize = null;
+    var i: usize = content_changes.len;
+    while (i > 0) {
+        i -= 1;
+        if (content_changes[i].range == null) {
+            last_full_text_change = i;
+        }
+    }
+
+    var text_array = std.ArrayListUnmanaged(u8){};
+    errdefer text_array.deinit(allocator);
+
+    try text_array.appendSlice(allocator, if (last_full_text_change) |index| content_changes[index].text else text);
+
+    // don't even bother applying changes before a full text change
+    const changes = content_changes[if (last_full_text_change) |index| index + 1 else 0..];
+
+    for (changes) |item| {
+        const range = item.range.?; // every element is guaranteed to have `range` set
+
+        const loc = offsets.rangeToLoc(text_array.items, range, encoding);
+        try text_array.replaceRange(allocator, loc.start, loc.end - loc.start, item.text);
+    }
+
+    return try text_array.toOwnedSliceSentinel(allocator, 0);
+}
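`applyTextEdits` keeps the document as a growable byte buffer and splices each ranged change in with `replaceRange`, ignoring anything that precedes the last full-document change. A stripped-down sketch of the splice step using plain byte offsets instead of LSP positions (the `Change` struct is invented for the example):

const std = @import("std");

// Simplified stand-in for an LSP content change: a byte range plus new text.
const Change = struct {
    start: usize,
    end: usize,
    text: []const u8,
};

fn applyChanges(allocator: std.mem.Allocator, text: []const u8, changes: []const Change) ![:0]const u8 {
    var buffer = std.ArrayListUnmanaged(u8){};
    errdefer buffer.deinit(allocator);

    try buffer.appendSlice(allocator, text);

    // Each change is applied against the buffer as it looks *after* the
    // previous change, which is how LSP orders incremental edits.
    for (changes) |change| {
        try buffer.replaceRange(allocator, change.start, change.end - change.start, change.text);
    }

    return try buffer.toOwnedSliceSentinel(allocator, 0);
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const result = try applyChanges(allocator, "const x = 1;", &.{
        .{ .start = 6, .end = 7, .text = "answer" },
        .{ .start = 15, .end = 16, .text = "42" },
    });
    defer allocator.free(result);

    std.debug.print("{s}\n", .{result}); // const answer = 42;
}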
src/inlay_hints.zig
@@ -34,7 +34,7 @@ fn isNodeInRange(tree: Ast, node: Ast.Node.Index, range: types.Range) bool {
 const Builder = struct {
     allocator: std.mem.Allocator,
     config: *const Config,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     hints: std.ArrayListUnmanaged(types.InlayHint),
     hover_kind: types.MarkupContent.Kind,
     encoding: offsets.Encoding,
@@ -691,7 +691,7 @@ pub fn writeRangeInlayHint(
     arena: *std.heap.ArenaAllocator,
     config: Config,
     store: *DocumentStore,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     range: types.Range,
     hover_kind: types.MarkupContent.Kind,
     encoding: offsets.Encoding,
src/references.zig
@@ -70,7 +70,7 @@ const Builder = struct {
         };
     }
 
-    pub fn add(self: *Builder, handle: *DocumentStore.Handle, token_index: Ast.TokenIndex) !void {
+    pub fn add(self: *Builder, handle: *const DocumentStore.Handle, token_index: Ast.TokenIndex) !void {
         try self.locations.append(self.arena.allocator(), .{
             .uri = handle.uri,
             .range = offsets.tokenToRange(handle.tree, token_index, self.encoding),
@@ -81,7 +81,7 @@ const Builder = struct {
 fn symbolReferencesInternal(
     builder: *Builder,
     node: Ast.Node.Index,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!void {
     const tree = handle.tree;
 
@@ -483,41 +483,19 @@ pub fn symbolReferences(
             if (decl_handle.decl.* != .ast_node) return builder.locations;
             if (!workspace) return builder.locations;
 
-            var imports = std.ArrayListUnmanaged(*DocumentStore.Handle){};
-
-            var handle_it = store.handles.iterator();
-            while (handle_it.next()) |entry| {
-                if (skip_std_references and std.mem.indexOf(u8, entry.key_ptr.*, "std") != null) {
-                    if (!include_decl or entry.value_ptr.* != curr_handle)
+            for (store.handles.values()) |handle| {
+                if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) {
+                    if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri))
                         continue;
                 }
 
-                // Check entry's transitive imports
-                try imports.append(arena.allocator(), entry.value_ptr.*);
-                var i: usize = 0;
-                blk: while (i < imports.items.len) : (i += 1) {
-                    const import = imports.items[i];
-                    for (import.imports_used.items) |uri| {
-                        const h = store.getHandle(uri) orelse break;
+                var dependencies = std.ArrayListUnmanaged([]const u8){};
+                try store.collectDependencies(store.allocator, handle.*, &dependencies);
 
-                        if (h == curr_handle) {
-                            // entry does import curr_handle
-                            try symbolReferencesInternal(&builder, 0, entry.value_ptr.*);
-                            break :blk;
-                        }
-
-                        select: {
-                            for (imports.items) |item| {
-                                if (item == h) {
-                                    // already checked this import
-                                    break :select;
-                                }
-                            }
-                            try imports.append(arena.allocator(), h);
-                        }
-                    }
+                for (dependencies.items) |uri| {
+                    const hdl = store.getHandle(uri) orelse continue;
+                    try symbolReferencesInternal(&builder, 0, hdl);
                 }
-                    try imports.resize(arena.allocator(), 0);
             }
         },
         .param_payload => |pay| blk: {
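The reference search no longer maintains a work list of transitively imported handles; it asks the store for each handle's dependency URIs and scans that flat list. A toy version of that per-handle dependency check (handles, URIs and the dependency lists are fabricated):

const std = @import("std");

const FakeHandle = struct {
    uri: []const u8,
    dependencies: []const []const u8,
};

// Returns true if `handle` lists `target_uri` among its dependencies.
fn referencesDocument(handle: FakeHandle, target_uri: []const u8) bool {
    for (handle.dependencies) |uri| {
        if (std.mem.eql(u8, uri, target_uri)) return true;
    }
    return false;
}

pub fn main() void {
    const handles = [_]FakeHandle{
        .{ .uri = "file:///main.zig", .dependencies = &.{"file:///util.zig"} },
        .{ .uri = "file:///lonely.zig", .dependencies = &.{} },
    };

    for (handles) |handle| {
        if (referencesDocument(handle, "file:///util.zig")) {
            std.debug.print("{s} depends on util.zig\n", .{handle.uri});
        }
    }
}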
src/semantic_tokens.zig
@@ -53,13 +53,13 @@ pub const TokenModifiers = packed struct {
 const Builder = struct {
     arena: *std.heap.ArenaAllocator,
     store: *DocumentStore,
-    handle: *DocumentStore.Handle,
+    handle: *const DocumentStore.Handle,
     previous_position: usize = 0,
     previous_token: ?Ast.TokenIndex = null,
     arr: std.ArrayListUnmanaged(u32),
     encoding: offsets.Encoding,
 
-    fn init(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) Builder {
+    fn init(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *const DocumentStore.Handle, encoding: offsets.Encoding) Builder {
         return Builder{
             .arena = arena,
             .store = store,
@@ -223,7 +223,7 @@ fn writeDocComments(builder: *Builder, tree: Ast, doc: Ast.TokenIndex) !void {
     }
 }
 
-fn fieldTokenType(container_decl: Ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType {
+fn fieldTokenType(container_decl: Ast.Node.Index, handle: *const DocumentStore.Handle) ?TokenType {
     const main_token = handle.tree.nodes.items(.main_token)[container_decl];
     if (main_token > handle.tree.tokens.len) return null;
     return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) {
@@ -1023,7 +1023,12 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
 }
 
 // TODO Range version, edit version.
-pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
+pub fn writeAllSemanticTokens(
+    arena: *std.heap.ArenaAllocator,
+    store: *DocumentStore,
+    handle: *const DocumentStore.Handle,
+    encoding: offsets.Encoding,
+) ![]u32 {
     var builder = Builder.init(arena, store, handle, encoding);
 
     // reverse the ast from the root declarations
src/signature_help.zig
@@ -8,7 +8,7 @@ const Token = std.zig.Token;
 const identifierFromPosition = @import("Server.zig").identifierFromPosition;
 const ast = @import("ast.zig");
 
-fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, commas: u32, skip_self_param: bool, handle: *DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
+fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, commas: u32, skip_self_param: bool, handle: *const DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
     const ParameterInformation = types.SignatureInformation.ParameterInformation;
 
     const tree = handle.tree;
@@ -67,7 +67,7 @@ fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.Arena
     };
 }
 
-pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
+pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *const DocumentStore.Handle, absolute_index: usize, comptime data: type) !?types.SignatureInformation {
     const innermost_block = analysis.innermostBlockScope(handle.*, absolute_index);
     const tree = handle.tree;
     const token_tags = tree.tokens.items(.tag);