make semantic token configuration an enum
parent e1d90a1a1b
commit 61b42ca63a
@@ -66,7 +66,7 @@ The following options are currently available.
 | `enable_ast_check_diagnostics` | `bool` | `true` | Whether to enable ast-check diagnostics |
 | `enable_autofix` | `bool` | `true` | Whether to automatically fix errors on save. Currently supports adding and removing discards. |
 | `enable_import_embedfile_argument_completions` | `bool` | `true` | Whether to enable import/embedFile argument completions |
-| `enable_semantic_tokens` | `bool` | `true` | Enables semantic token support when the client also supports it |
+| `semantic_tokens` | `enum` | `.full` | Set level of semantic tokens. Partial only includes information that requires semantic analysis. |
 | `enable_inlay_hints` | `bool` | `true` | Enables inlay hint support when the client also supports it |
 | `inlay_hints_show_builtin` | `bool` | `true` | Enable inlay hints for builtin functions |
 | `inlay_hints_exclude_single_argument` | `bool` | `true` | Don't show inlay hints for single argument calls |

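This first hunk appears to be the options table in the README: the user-facing setting changes from the boolean `enable_semantic_tokens` to the three-valued `semantic_tokens` enum, defaulting to `.full`. For configurations that still carry the old boolean, the natural mapping is `true` to `full` and `false` to `none`. The sketch below is not part of the commit; the helper name is invented for illustration.

const std = @import("std");

// Hypothetical migration helper, not part of this commit: maps the removed
// `enable_semantic_tokens` boolean onto the new `semantic_tokens` enum.
const SemanticTokensMode = enum { none, partial, full };

fn fromLegacyBool(enable_semantic_tokens: bool) SemanticTokensMode {
    return if (enable_semantic_tokens) .full else .none;
}

test "legacy boolean maps onto the new enum" {
    try std.testing.expectEqual(SemanticTokensMode.full, fromLegacyBool(true));
    try std.testing.expectEqual(SemanticTokensMode.none, fromLegacyBool(false));
}
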
schema.json
@@ -24,10 +24,15 @@
             "type": "boolean",
             "default": "true"
         },
-        "enable_semantic_tokens": {
-            "description": "Enables semantic token support when the client also supports it",
-            "type": "boolean",
-            "default": "true"
+        "semantic_tokens": {
+            "description": "Set level of semantic tokens. Partial only includes information that requires semantic analysis.",
+            "type": "string",
+            "enum": [
+                "none",
+                "partial",
+                "full"
+            ],
+            "default": "full"
         },
         "enable_inlay_hints": {
            "description": "Enables inlay hint support when the client also supports it",

@@ -16,8 +16,14 @@ enable_autofix: bool = true,
 /// Whether to enable import/embedFile argument completions
 enable_import_embedfile_argument_completions: bool = true,
 
-/// Enables semantic token support when the client also supports it
-enable_semantic_tokens: bool = true,
+/// Set level of semantic tokens. Partial only includes information that requires semantic analysis.
+semantic_tokens: enum {
+    none,
+    partial,
+    full,
+
+    pub const tres_string_enum = true;
+} = .full,
 
 /// Enables inlay hint support when the client also supports it
 enable_inlay_hints: bool = true,

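This hunk appears to be Config.zig, the config struct: the field becomes an inline enum whose `pub const tres_string_enum = true;` declaration presumably tells the tres (de)serializer to treat the value as its tag name ("full") rather than as an integer. The snippet below only illustrates how a serializer can branch on such a marker declaration; it is not tres's implementation, and the function name is invented.

const std = @import("std");

// Illustration only: emit an enum as a quoted tag name when it carries a
// `tres_string_enum` marker declaration. Not tres's actual code.
fn writeStringEnum(value: anytype, writer: anytype) !void {
    const T = @TypeOf(value);
    if (comptime @hasDecl(T, "tres_string_enum")) {
        try writer.print("\"{s}\"", .{@tagName(value)});
    } else {
        @compileError("this sketch only handles enums with the marker");
    }
}

test "marker enums serialize as strings" {
    const Mode = enum {
        none,
        partial,
        full,

        pub const tres_string_enum = true;
    };
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try writeStringEnum(Mode.full, buf.writer());
    try std.testing.expectEqualStrings("\"full\"", buf.items);
}
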
@@ -989,6 +989,19 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory}
                         break :blk @field(server.config, field.name);
                     },
                 },
+                .Enum => switch (value) {
+                    .String => |s| blk: {
+                        const trimmed = std.mem.trim(u8, s, " ");
+                        break :blk std.meta.stringToEnum(field.type, trimmed) orelse inner: {
+                            log.warn("Ignoring new value for \"zls.{s}\": the given new value is invalid", .{field.name});
+                            break :inner @field(server.config, field.name);
+                        };
+                    },
+                    else => blk: {
+                        log.warn("Ignoring new value for \"zls.{s}\": the given new value has an invalid type", .{field.name});
+                        break :blk @field(server.config, field.name);
+                    },
+                },
                 else => @compileError("Not implemented for " ++ @typeName(ft)),
             },
         };

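The new `.Enum` branch in `handleConfiguration` trims the incoming string and uses `std.meta.stringToEnum` to map it onto the field's enum type, keeping the previous value (with a warning) when the string names no member or the JSON value is not a string at all. A small standalone test of the two std calls the branch relies on:

const std = @import("std");

test "how the .Enum branch resolves string settings" {
    const SemanticTokensMode = enum { none, partial, full };

    // std.meta.stringToEnum returns null for anything that is not a member
    // name, which is what lets the handler fall back to the old value.
    try std.testing.expectEqual(@as(?SemanticTokensMode, .partial), std.meta.stringToEnum(SemanticTokensMode, "partial"));
    try std.testing.expectEqual(@as(?SemanticTokensMode, null), std.meta.stringToEnum(SemanticTokensMode, "everything"));

    // Surrounding spaces are stripped first, mirroring std.mem.trim in the handler.
    const trimmed = std.mem.trim(u8, " full ", " ");
    try std.testing.expectEqual(@as(?SemanticTokensMode, .full), std.meta.stringToEnum(SemanticTokensMode, trimmed));
}
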
@@ -1075,20 +1088,34 @@ fn willSaveWaitUntilHandler(server: *Server, request: types.WillSaveTextDocument
 }
 
 fn semanticTokensFullHandler(server: *Server, request: types.SemanticTokensParams) Error!?types.SemanticTokens {
-    if (!server.config.enable_semantic_tokens) return null;
+    if (server.config.semantic_tokens == .none) return null;
 
     const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
 
-    return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, null, server.offset_encoding);
+    return try semantic_tokens.writeSemanticTokens(
+        server.arena.allocator(),
+        &server.analyser,
+        handle,
+        null,
+        server.offset_encoding,
+        server.config.semantic_tokens == .partial,
+    );
 }
 
 fn semanticTokensRangeHandler(server: *Server, request: types.SemanticTokensRangeParams) Error!?types.SemanticTokens {
-    if (!server.config.enable_semantic_tokens) return null;
+    if (server.config.semantic_tokens == .none) return null;
 
     const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
     const loc = offsets.rangeToLoc(handle.tree.source, request.range, server.offset_encoding);
 
-    return try semantic_tokens.writeSemanticTokens(server.arena.allocator(), &server.analyser, handle, loc, server.offset_encoding);
+    return try semantic_tokens.writeSemanticTokens(
+        server.arena.allocator(),
+        &server.analyser,
+        handle,
+        loc,
+        server.offset_encoding,
+        server.config.semantic_tokens == .partial,
+    );
 }
 
 pub fn completionHandler(server: *Server, request: types.CompletionParams) Error!?types.CompletionList {

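With the enum in place, both handlers reduce to two decisions: return `null` (no tokens) for `.none`, and otherwise pass `semantic_tokens == .partial` as the new `limited` argument of `writeSemanticTokens`. A condensed sketch of that mapping; the helper is hypothetical and only restates the two `if`/argument changes above:

const std = @import("std");

const SemanticTokensMode = enum { none, partial, full };

/// Hypothetical condensation of the handler logic: null means "answer the
/// request with no tokens", otherwise the bool is the `limited` flag that is
/// forwarded to writeSemanticTokens.
fn limitedFlagFor(mode: SemanticTokensMode) ?bool {
    return switch (mode) {
        .none => null,
        .partial => true,
        .full => false,
    };
}

test "per-mode handler decisions" {
    try std.testing.expectEqual(@as(?bool, null), limitedFlagFor(.none));
    try std.testing.expectEqual(@as(?bool, true), limitedFlagFor(.partial));
    try std.testing.expectEqual(@as(?bool, false), limitedFlagFor(.full));
}
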
@@ -25,10 +25,15 @@
             "default": "true"
         },
         {
-            "name": "enable_semantic_tokens",
-            "description": "Enables semantic token support when the client also supports it",
-            "type": "bool",
-            "default": "true"
+            "name": "semantic_tokens",
+            "description": "Set level of semantic tokens. Partial only includes information that requires semantic analysis.",
+            "type": "enum",
+            "enum": [
+                "none",
+                "partial",
+                "full"
+            ],
+            "default": "full"
         },
         {
             "name": "enable_inlay_hints",

@@ -39,14 +39,17 @@ const ConfigOption = struct {
         _ = options;
         if (fmt.len != 0) return std.fmt.invalidFmtError(fmt, ConfigOption);
         if (config.@"enum") |enum_members| {
-            try writer.writeAll("enum {");
-            if (enum_members.len > 1) try writer.writeByte(' ');
-            for (enum_members, 0..) |member_name, i| {
-                if (i != 0) try writer.writeAll(", ");
+            try writer.writeAll("enum {\n    ");
+            for (enum_members) |member_name| {
                 try writer.writeAll(member_name);
+                try writer.writeAll(",\n    ");
             }
-            if (enum_members.len > 1) try writer.writeByte(' ');
-            try writer.writeByte('}');
+            std.debug.assert(enum_members.len > 1);
+            try writer.writeAll(
+                \\
+                \\    pub const tres_string_enum = true;
+                \\}
+            );
             return;
         }
         try writer.writeAll(config.type);

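The generator's format helper is rewritten so that enum options are emitted as a multi-line enum body ending in the `tres_string_enum` marker, i.e. the shape of the `semantic_tokens` field in the Config.zig hunk above. Below is a standalone re-creation of that emit loop and the text it should produce; the four-space indentation is an assumption based on the generated field, and the leftover trailing whitespace would presumably be cleaned up when the generated file is formatted.

const std = @import("std");

// Standalone re-creation of the enum emission for illustration; the real code
// lives in ConfigOption's format method shown in the hunk above.
fn emitEnum(writer: anytype, members: []const []const u8) !void {
    std.debug.assert(members.len > 1);
    try writer.writeAll("enum {\n    ");
    for (members) |member_name| {
        try writer.writeAll(member_name);
        try writer.writeAll(",\n    ");
    }
    try writer.writeAll(
        \\
        \\    pub const tres_string_enum = true;
        \\}
    );
}

test "emitted enum text for the semantic_tokens option" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try emitEnum(buf.writer(), &.{ "none", "partial", "full" });

    // Note the four trailing spaces before the marker line, left over from the
    // member loop; a later formatting pass would normally remove them.
    const expected = "enum {\n" ++
        "    none,\n" ++
        "    partial,\n" ++
        "    full,\n" ++
        "    \n" ++
        "    pub const tres_string_enum = true;\n" ++
        "}";
    try std.testing.expectEqualStrings(expected, buf.items);
}
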
@@ -47,8 +47,21 @@ const Builder = struct {
     previous_token: ?Ast.TokenIndex = null,
     token_buffer: std.ArrayListUnmanaged(u32) = .{},
     encoding: offsets.Encoding,
+    limited: bool,
 
     fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
+        switch (token_type) {
+            .type,
+            .parameter,
+            .variable,
+            .enumMember,
+            .field,
+            .errorTag,
+            .function,
+            .label,
+            => {},
+            else => if (self.limited) return,
+        }
         const tree = self.handle.tree;
         const starts = tree.tokens.items(.start);

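This is where `limited` takes effect: `Builder.add` (and `addDirect` in the next hunk) returns early for any token type outside the listed set, so partial mode only reports classifications that genuinely require semantic analysis (types, parameters, variables, enum members, fields, error tags, functions, labels) and leaves keywords, operators, literals and the like to the client's own syntax highlighting. The same gate as a standalone predicate; the `TokenType` subset below is trimmed to what the diff shows plus two invented syntactic members.

const std = @import("std");

// Reduced stand-in for zls's TokenType: the first eight members are the ones
// the diff whitelists; `keyword` and `comment` stand in for everything else.
const TokenType = enum {
    type,
    parameter,
    variable,
    enumMember,
    field,
    errorTag,
    function,
    label,
    keyword,
    comment,
};

/// Mirrors the early-return switch added to Builder.add/addDirect:
/// true means the token is emitted given the current `limited` flag.
fn shouldEmit(token_type: TokenType, limited: bool) bool {
    return switch (token_type) {
        .type, .parameter, .variable, .enumMember, .field, .errorTag, .function, .label => true,
        else => !limited,
    };
}

test "partial mode drops purely syntactic tokens" {
    try std.testing.expect(shouldEmit(.variable, true));
    try std.testing.expect(!shouldEmit(.keyword, true));
    try std.testing.expect(shouldEmit(.keyword, false));
    try std.testing.expect(shouldEmit(.comment, false));
}
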
@@ -159,6 +172,18 @@ const Builder = struct {
 
     fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void {
         if (start < self.previous_source_index) return;
+        switch (tok_type) {
+            .type,
+            .parameter,
+            .variable,
+            .enumMember,
+            .field,
+            .errorTag,
+            .function,
+            .label,
+            => {},
+            else => if (self.limited) return,
+        }
 
         const text = self.handle.tree.source[self.previous_source_index..start];
         const delta = offsets.indexToPosition(text, text.len, self.encoding);

@@ -1011,12 +1036,14 @@ pub fn writeSemanticTokens(
     handle: *const DocumentStore.Handle,
     loc: ?offsets.Loc,
     encoding: offsets.Encoding,
+    limited: bool,
 ) error{OutOfMemory}!types.SemanticTokens {
     var builder = Builder{
         .arena = arena,
         .analyser = analyser,
         .handle = handle,
         .encoding = encoding,
+        .limited = limited,
     };
 
     const nodes = if (loc) |l| try ast.nodesAtLoc(arena, handle.tree, l) else handle.tree.rootDecls();

@@ -3,6 +3,7 @@ const zig_builtin = @import("builtin");
 const build_options = @import("build_options");
 const tracy = @import("tracy.zig");
 const known_folders = @import("known-folders");
+const tres = @import("tres");
 const Config = @import("Config.zig");
 const configuration = @import("configuration.zig");
 const Server = @import("Server.zig");

@@ -129,7 +130,7 @@ fn updateConfig(
     var buffer = std.ArrayListUnmanaged(u8){};
     defer buffer.deinit(allocator);
 
-    try std.json.stringify(cfg, .{}, buffer.writer(allocator));
+    try tres.stringify(cfg, .{}, buffer.writer(allocator));
     const header = Header{ .content_length = buffer.items.len };
     try header.write(false, file.writer());
     try file.writeAll(buffer.items);

@@ -20,7 +20,7 @@ const initialize_msg
 
 const default_config: Config = .{
     .enable_ast_check_diagnostics = false,
-    .enable_semantic_tokens = true,
+    .semantic_tokens = .full,
     .enable_inlay_hints = true,
     .inlay_hints_exclude_single_argument = false,
     .inlay_hints_show_builtin = true,