Removed no_snippets build option.
Instead, we now read a configuration file, zls.json, in the same folder as the executable. Options are defined in the struct config.zig and must have default values. The only option right now is enable_snippets and it defaults to true.
This commit is contained in:
parent
01b2c2e328
commit
35b4913b3e
@ -23,12 +23,6 @@ pub fn build(b: *std.build.Builder) !void {
|
|||||||
data_version,
|
data_version,
|
||||||
);
|
);
|
||||||
|
|
||||||
exe.addBuildOption(
|
|
||||||
bool,
|
|
||||||
"no_snippets",
|
|
||||||
b.option(bool, "no_snippets", "Use regular completion without snippets. Useful for compatibility.") orelse false,
|
|
||||||
);
|
|
||||||
|
|
||||||
exe.addBuildOption(
|
exe.addBuildOption(
|
||||||
bool,
|
bool,
|
||||||
"allocation_info",
|
"allocation_info",
|
||||||
|
4
src/config.zig
Normal file
4
src/config.zig
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
// Configuration options for zls.
|
||||||
|
|
||||||
|
/// Whether to enable snippet completions
|
||||||
|
enable_snippets: bool = true,
|
83
src/main.zig
83
src/main.zig
@ -1,6 +1,7 @@
|
|||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const build_options = @import("build_options");
|
const build_options = @import("build_options");
|
||||||
|
|
||||||
|
const Config = @import("config.zig");
|
||||||
const Uri = @import("uri.zig");
|
const Uri = @import("uri.zig");
|
||||||
const data = @import("data/" ++ build_options.data_version ++ ".zig");
|
const data = @import("data/" ++ build_options.data_version ++ ".zig");
|
||||||
const types = @import("types.zig");
|
const types = @import("types.zig");
|
||||||
@ -133,7 +134,7 @@ fn astLocationToRange(loc: std.zig.ast.Tree.Location) types.Range {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn publishDiagnostics(document: *types.TextDocument) !void {
|
fn publishDiagnostics(document: *types.TextDocument, config: Config) !void {
|
||||||
const tree = try std.zig.parse(allocator, document.text);
|
const tree = try std.zig.parse(allocator, document.text);
|
||||||
defer tree.deinit();
|
defer tree.deinit();
|
||||||
|
|
||||||
@ -221,7 +222,7 @@ fn publishDiagnostics(document: *types.TextDocument) !void {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn completeGlobal(id: i64, document: *types.TextDocument) !void {
|
fn completeGlobal(id: i64, document: *types.TextDocument, config: Config) !void {
|
||||||
// The tree uses its own arena, so we just pass our main allocator.
|
// The tree uses its own arena, so we just pass our main allocator.
|
||||||
var tree = try std.zig.parse(allocator, document.text);
|
var tree = try std.zig.parse(allocator, document.text);
|
||||||
|
|
||||||
@ -297,16 +298,18 @@ fn completeGlobal(id: i64, document: *types.TextDocument) !void {
|
|||||||
// Compute builtin completions at comptime.
|
// Compute builtin completions at comptime.
|
||||||
const builtin_completions = block: {
|
const builtin_completions = block: {
|
||||||
@setEvalBranchQuota(3_500);
|
@setEvalBranchQuota(3_500);
|
||||||
var temp: [data.builtins.len]types.CompletionItem = undefined;
|
const CompletionList = [data.builtins.len]types.CompletionItem;
|
||||||
|
var with_snippets: CompletionList = undefined;
|
||||||
|
var without_snippets: CompletionList = undefined;
|
||||||
|
|
||||||
for (data.builtins) |builtin, i| {
|
for (data.builtins) |builtin, i| {
|
||||||
var cutoff = std.mem.indexOf(u8, builtin, "(") orelse builtin.len;
|
const cutoff = std.mem.indexOf(u8, builtin, "(") orelse builtin.len;
|
||||||
temp[i] = .{
|
|
||||||
|
const base_completion = types.CompletionItem{
|
||||||
.label = builtin[0..cutoff],
|
.label = builtin[0..cutoff],
|
||||||
.kind = .Function,
|
.kind = .Function,
|
||||||
|
|
||||||
.filterText = builtin[1..cutoff],
|
.filterText = builtin[1..cutoff],
|
||||||
.insertText = builtin[1..],
|
|
||||||
.detail = data.builtin_details[i],
|
.detail = data.builtin_details[i],
|
||||||
.documentation = .{
|
.documentation = .{
|
||||||
.kind = .Markdown,
|
.kind = .Markdown,
|
||||||
@ -314,11 +317,17 @@ const builtin_completions = block: {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!build_options.no_snippets)
|
with_snippets[i] = base_completion;
|
||||||
temp[i].insertTextFormat = .Snippet;
|
with_snippets[i].insertText = builtin[1..];
|
||||||
|
with_snippets[i].insertTextFormat = .Snippet;
|
||||||
|
|
||||||
|
without_snippets[i] = base_completion;
|
||||||
|
without_snippets[i].insertText = builtin[1..cutoff];
|
||||||
}
|
}
|
||||||
|
|
||||||
break :block temp;
|
break :block [2]CompletionList {
|
||||||
|
without_snippets, with_snippets
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const PositionContext = enum {
|
const PositionContext = enum {
|
||||||
@ -430,7 +439,7 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionCo
|
|||||||
return context;
|
return context;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn processJsonRpc(parser: *std.json.Parser, json: []const u8) !void {
|
fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !void {
|
||||||
var tree = try parser.parse(json);
|
var tree = try parser.parse(json);
|
||||||
defer tree.deinit();
|
defer tree.deinit();
|
||||||
|
|
||||||
@ -458,7 +467,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8) !void {
|
|||||||
const text = document.getValue("text").?.String;
|
const text = document.getValue("text").?.String;
|
||||||
|
|
||||||
try openDocument(uri, text);
|
try openDocument(uri, text);
|
||||||
try publishDiagnostics(&(documents.get(uri).?.value));
|
try publishDiagnostics(&(documents.get(uri).?.value), config);
|
||||||
} else if (std.mem.eql(u8, method, "textDocument/didChange")) {
|
} else if (std.mem.eql(u8, method, "textDocument/didChange")) {
|
||||||
const text_document = params.getValue("textDocument").?.Object;
|
const text_document = params.getValue("textDocument").?.Object;
|
||||||
const uri = text_document.getValue("uri").?.String;
|
const uri = text_document.getValue("uri").?.String;
|
||||||
@ -516,7 +525,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8) !void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try publishDiagnostics(document);
|
try publishDiagnostics(document, config);
|
||||||
} else if (std.mem.eql(u8, method, "textDocument/didSave")) {
|
} else if (std.mem.eql(u8, method, "textDocument/didSave")) {
|
||||||
// noop
|
// noop
|
||||||
} else if (std.mem.eql(u8, method, "textDocument/didClose")) {
|
} else if (std.mem.eql(u8, method, "textDocument/didClose")) {
|
||||||
@ -546,12 +555,12 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8) !void {
|
|||||||
.result = .{
|
.result = .{
|
||||||
.CompletionList = .{
|
.CompletionList = .{
|
||||||
.isIncomplete = false,
|
.isIncomplete = false,
|
||||||
.items = builtin_completions[0..],
|
.items = builtin_completions[@boolToInt(config.enable_snippets)][0..],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} else if (pos_context == .var_access or pos_context == .empty) {
|
} else if (pos_context == .var_access or pos_context == .empty) {
|
||||||
try completeGlobal(id, document);
|
try completeGlobal(id, document, config);
|
||||||
} else {
|
} else {
|
||||||
try respondGeneric(id, no_completions_response);
|
try respondGeneric(id, no_completions_response);
|
||||||
}
|
}
|
||||||
@ -584,7 +593,6 @@ var debug_alloc_state: std.testing.LeakCountAllocator = undefined;
|
|||||||
const debug_alloc: ?*std.testing.LeakCountAllocator = if (build_options.allocation_info) &debug_alloc_state else null;
|
const debug_alloc: ?*std.testing.LeakCountAllocator = if (build_options.allocation_info) &debug_alloc_state else null;
|
||||||
|
|
||||||
pub fn main() anyerror!void {
|
pub fn main() anyerror!void {
|
||||||
|
|
||||||
// TODO: Use a better general purpose allocator once std has one.
|
// TODO: Use a better general purpose allocator once std has one.
|
||||||
// Probably after the generic composable allocators PR?
|
// Probably after the generic composable allocators PR?
|
||||||
// This is not too bad for now since most allocations happen in local arenas.
|
// This is not too bad for now since most allocations happen in local arenas.
|
||||||
@ -609,18 +617,51 @@ pub fn main() anyerror!void {
|
|||||||
const stdin = std.io.getStdIn().inStream();
|
const stdin = std.io.getStdIn().inStream();
|
||||||
stdout = std.io.getStdOut().outStream();
|
stdout = std.io.getStdOut().outStream();
|
||||||
|
|
||||||
|
|
||||||
documents = std.StringHashMap(types.TextDocument).init(allocator);
|
documents = std.StringHashMap(types.TextDocument).init(allocator);
|
||||||
|
|
||||||
|
// Read the configuration, if any.
|
||||||
|
var config = Config{};
|
||||||
|
|
||||||
|
// TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
|
||||||
|
config_read: {
|
||||||
|
var exec_dir_bytes: [std.fs.MAX_PATH_BYTES]u8 = undefined;
|
||||||
|
const exec_dir_path = std.fs.selfExeDirPath(&exec_dir_bytes) catch break :config_read;
|
||||||
|
|
||||||
|
var exec_dir = std.fs.cwd().openDir(exec_dir_path, .{}) catch break :config_read;
|
||||||
|
defer exec_dir.close();
|
||||||
|
|
||||||
|
var conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
|
||||||
|
defer conf_file.close();
|
||||||
|
|
||||||
|
const conf_file_stat = conf_file.stat() catch break :config_read;
|
||||||
|
|
||||||
|
// Allocate enough memory for the whole file.
|
||||||
|
var file_buf = try allocator.alloc(u8, conf_file_stat.size);
|
||||||
|
defer allocator.free(file_buf);
|
||||||
|
|
||||||
|
const bytes_read = conf_file.readAll(file_buf) catch break :config_read;
|
||||||
|
if (bytes_read != conf_file_stat.size) break :config_read;
|
||||||
|
|
||||||
|
// TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
|
||||||
|
// Note that we don't need to pass an allocator to parse since we are not using pointer or slice fields.
|
||||||
|
// Thus, we don't need to even call parseFree.
|
||||||
|
config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{}) catch |err| {
|
||||||
|
std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
|
||||||
|
break :config_read;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// This JSON parser is passed to processJsonRpc and reset.
|
||||||
|
var json_parser = std.json.Parser.init(allocator, false);
|
||||||
|
defer json_parser.deinit();
|
||||||
|
|
||||||
var offset: usize = 0;
|
var offset: usize = 0;
|
||||||
var bytes_read: usize = 0;
|
var bytes_read: usize = 0;
|
||||||
|
|
||||||
var index: usize = 0;
|
var index: usize = 0;
|
||||||
var content_len: usize = 0;
|
var content_len: usize = 0;
|
||||||
|
|
||||||
// This JSON parser is passed to processJsonRpc and reset.
|
|
||||||
var parser = std.json.Parser.init(allocator, false);
|
|
||||||
defer parser.deinit();
|
|
||||||
|
|
||||||
stdin_poll: while (true) {
|
stdin_poll: while (true) {
|
||||||
if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) {
|
if (offset >= 16 and std.mem.startsWith(u8, buffer.items, "Content-Length: ")) {
|
||||||
|
|
||||||
@ -651,8 +692,8 @@ pub fn main() anyerror!void {
|
|||||||
offset += bytes_read;
|
offset += bytes_read;
|
||||||
}
|
}
|
||||||
|
|
||||||
try processJsonRpc(&parser, buffer.items[index .. index + content_len]);
|
try processJsonRpc(&json_parser, buffer.items[index .. index + content_len], config);
|
||||||
parser.reset();
|
json_parser.reset();
|
||||||
|
|
||||||
offset = 0;
|
offset = 0;
|
||||||
content_len = 0;
|
content_len = 0;
|
||||||
|
Loading…
Reference in New Issue
Block a user