implement textDocument/semanticTokens/range
parent ce9ae21cf4
commit 0f3319315f
src/Server.zig
@@ -2009,7 +2009,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
         .semanticTokensProvider = .{
             .SemanticTokensOptions = .{
                 .full = .{ .bool = true },
-                .range = .{ .bool = false },
+                .range = .{ .bool = true },
                 .legend = .{
                     .tokenTypes = comptime block: {
                         const tokTypeFields = std.meta.fields(semantic_tokens.TokenType);
@@ -2286,9 +2286,19 @@ fn semanticTokensFullHandler(server: *Server, request: types.SemanticTokensParam
 
     const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
 
-    const token_array = try semantic_tokens.writeAllSemanticTokens(server.arena.allocator(), &server.document_store, handle, server.offset_encoding);
-
-    return .{ .data = token_array };
+    return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, null, server.offset_encoding);
+}
+
+fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRangeParams) Error!?types.SemanticTokens {
+    const tracy_zone = tracy.trace(@src());
+    defer tracy_zone.end();
+
+    if (!server.config.enable_semantic_tokens) return null;
+
+    const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
+    const loc = offsets.rangeToLoc(handle.tree.source, request.range, server.offset_encoding);
+
+    return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.document_store, handle, loc, server.offset_encoding);
 }
 
 pub fn completionHandler(server: *Server, request: types.CompletionParams) Error!?types.CompletionList {
@@ -3110,6 +3120,7 @@ pub fn processMessage(server: *Server, message: Message) Error!void {
         .{ "textDocument/didClose", closeDocumentHandler },
         .{ "textDocument/willSaveWaitUntil", willSaveWaitUntilHandler },
         .{ "textDocument/semanticTokens/full", semanticTokensFullHandler },
+        .{ "textDocument/semanticTokens/range", semanticTokensRangeHandler },
         .{ "textDocument/inlayHint", inlayHintHandler },
         .{ "textDocument/completion", completionHandler },
         .{ "textDocument/signatureHelp", signatureHelpHandler },
src/semantic_tokens.zig
@@ -5,6 +5,7 @@ const DocumentStore = @import("DocumentStore.zig");
 const analysis = @import("analysis.zig");
 const Ast = std.zig.Ast;
 const ast = @import("ast.zig");
+const types = @import("lsp.zig");
 
 pub const TokenType = enum(u32) {
     type,
@@ -1030,19 +1031,24 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type
     }
 }
 
-// TODO Range version, edit version.
-pub fn writeAllSemanticTokens(
+/// If `loc` is `null`, semantic tokens will be computed for the entire source range
+/// Otherwise only tokens in the give source range will be returned
+/// TODO edit version.
+pub fn writeSemanticTokens(
     arena: std.mem.Allocator,
     store: *DocumentStore,
     handle: *const DocumentStore.Handle,
+    loc: ?offsets.Loc,
     encoding: offsets.Encoding,
-) ![]u32 {
+) !types.SemanticTokens {
     var builder = Builder.init(arena, store, handle, encoding);
 
+    const nodes = if (loc) |l| try ast.nodesAtLoc(arena, handle.tree, l) else handle.tree.rootDecls();
+
     // reverse the ast from the root declarations
-    for (handle.tree.rootDecls()) |child| {
+    for (nodes) |child| {
         try writeNodeTokens(&builder, child);
     }
     try builder.finish();
-    return builder.toOwnedSlice();
+    return .{ .data = try builder.toOwnedSlice() };
 }
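
Taken together, the change leaves writeSemanticTokens with two call shapes: pass null to tokenize the whole document (the full handler), or pass a byte-offset Loc derived from the client's Range (the new range handler). The following is a minimal sketch of how a caller chooses between them, not part of the commit; the wrapper name collectTokens is hypothetical, and the import paths (offsets.zig, lsp.zig, DocumentStore.zig, semantic_tokens.zig) are assumed to match the ones already used in the diff above.

const std = @import("std");
const offsets = @import("offsets.zig");
const types = @import("lsp.zig");
const DocumentStore = @import("DocumentStore.zig");
const semantic_tokens = @import("semantic_tokens.zig");

/// Sketch only: select the full-document or range-limited form of the
/// reworked writeSemanticTokens. The parameters mirror what the handlers
/// in the diff already have in scope.
fn collectTokens(
    arena: std.mem.Allocator,
    store: *DocumentStore,
    handle: *const DocumentStore.Handle,
    range: ?types.Range,
    encoding: offsets.Encoding,
) !types.SemanticTokens {
    // A null range means "whole document": writeSemanticTokens then walks
    // handle.tree.rootDecls() instead of the nodes from ast.nodesAtLoc().
    const loc: ?offsets.Loc = if (range) |r|
        // Convert the LSP line/character Range into a byte-offset Loc first.
        offsets.rangeToLoc(handle.tree.source, r, encoding)
    else
        null;
    return semantic_tokens.writeSemanticTokens(arena, store, handle, loc, encoding);
}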
|