Merge pull request #62 from alexnask/master
Added workspace folder and local configuration folder config support
Commit 7939dc5185

.github/workflows/main.yml (vendored): 1 change

@@ -19,6 +19,7 @@ jobs:
 wget --quiet --output-document=- $ZIG | tar Jx
 mv zig-linux-x86_64-* zig
 echo zig version $(./zig/zig version)
+git submodule update --init --recursive
 - name: build
 run: |
 export PATH=./zig:$PATH

.gitmodules (new file, vendored): 3 changes

@@ -0,0 +1,3 @@
+[submodule "src/known-folders"]
+    path = src/known-folders
+    url = https://github.com/ziglibs/known-folders

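The submodule provides the known-folders package that src/main.zig imports by relative path further down in this diff, which is also why the CI job above now runs `git submodule update --init --recursive` (fresh clones need the same step, or `git clone --recurse-submodules`). The two lines that depend on it, quoted from the main.zig hunk below, are:

    const known_folders = @import("known-folders/known-folders.zig");
    const res = try known_folders.getPath(allocator, .local_configuration);
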
README.md

@@ -37,7 +37,12 @@ Then, you can use the `zls` executable in an editor of your choice that has a Zi
 ### Configuration Options
 
-You can configure zls by providing a zls.json file in the same directory as the executable.
+You can configure zls by providing a zls.json file.
+
+zls will look for a zls.json configuration file in multiple locations with the following priority:
+- In the folders open in your workspace (this applies for files in those folders)
+- In the local configuration folder of your OS (as provided by [known-folders](https://github.com/ziglibs/known-folders#folder-list))
+- In the same directory as the executable
 
 The following options are currently available.
 
 | Option | Type | Default value | What it Does |

@@ -60,6 +65,8 @@ Install the `zls-vscode` extension from [here](https://github.com/zigtools/zls-v
   - Uses data provided by `src/data` to perform builtin autocompletion
 - [`zig-lsp` by @xackus](https://github.com/xackus/zig-lsp)
   - Inspiration for `zls`
+- [`known-folders` by @ziglibs](https://github.com/ziglibs/known-folders)
+  - Provides API to access known folders on Linux, Windows and Mac OS
 
 ## License
 MIT

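For reference, a minimal zls.json exercising the two options this diff touches (see src/config.zig below) might look like the following; the library path is a placeholder and both values are purely illustrative:

    {
        "zig_lib_path": "/path/to/zig/lib/zig",
        "enable_snippets": true
    }

Note that loadConfig in src/main.zig rejects a zig_lib_path that is not an absolute path.
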
src/config.zig

@@ -1,7 +1,7 @@
 // Configuration options for zls.
 
 /// Whether to enable snippet completions
-enable_snippets: bool = true,
+enable_snippets: bool = false,
 
 /// zig library path
 zig_lib_path: ?[]const u8 = null,

src/document_store.zig

@@ -22,29 +22,11 @@ pub const Handle = struct {
 
 allocator: *std.mem.Allocator,
 handles: std.StringHashMap(*Handle),
-std_uri: ?[]const u8,
 
-pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !void {
+pub fn init(self: *DocumentStore, allocator: *std.mem.Allocator) !void {
     self.allocator = allocator;
     self.handles = std.StringHashMap(*Handle).init(allocator);
     errdefer self.handles.deinit();
-
-    if (zig_lib_path) |zpath| {
-        const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
-            zpath, "./std/std.zig",
-        }) catch |err| block: {
-            std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err});
-            self.std_uri = null;
-            return;
-        };
-
-        defer allocator.free(std_path);
-        // Get the std_path as a URI, so we can just append to it!
-        self.std_uri = try URI.fromPath(allocator, std_path);
-        std.debug.warn("Standard library base uri: {}\n", .{self.std_uri});
-    } else {
-        self.std_uri = null;
-    }
 }
 
 /// This function asserts the document is not open yet and takes ownership

@@ -120,7 +102,7 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
 }
 
 // Check if the document text is now sane, move it to sane_text if so.
-fn removeOldImports(self: *DocumentStore, handle: *Handle) !void {
+fn removeOldImports(self: *DocumentStore, handle: *Handle, zig_lib_path: ?[]const u8) !void {
     std.debug.warn("New text for document {}\n", .{handle.uri()});
     // TODO: Better algorithm or data structure?
     // Removing the imports is costly since they live in an array list

@@ -143,8 +125,9 @@ fn removeOldImports(self: *DocumentStore, handle: *Handle) !void {
         ex.* = false;
     }
 
+    const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
     for (import_strs.items) |str| {
-        const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str)) orelse continue;
+        const uri = (try uriFromImportStr(self, &arena.allocator, handle.*, str, std_uri)) orelse continue;
 
         var idx: usize = 0;
         exists_loop: while (idx < still_exist.len) : (idx += 1) {

@@ -172,7 +155,12 @@ fn removeOldImports(self: *DocumentStore, handle: *Handle) !void {
     }
 }
 
-pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.json.Array) !void {
+pub fn applyChanges(
+    self: *DocumentStore,
+    handle: *Handle,
+    content_changes: std.json.Array,
+    zig_lib_path: ?[]const u8,
+) !void {
     const document = &handle.document;
 
     for (content_changes.items) |change| {

@@ -225,12 +213,18 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
         }
     }
 
-    try self.removeOldImports(handle);
+    try self.removeOldImports(handle, zig_lib_path);
 }
 
-fn uriFromImportStr(store: *DocumentStore, allocator: *std.mem.Allocator, handle: Handle, import_str: []const u8) !?[]const u8 {
+fn uriFromImportStr(
+    store: *DocumentStore,
+    allocator: *std.mem.Allocator,
+    handle: Handle,
+    import_str: []const u8,
+    std_uri: ?[]const u8,
+) !?[]const u8 {
     return if (std.mem.eql(u8, import_str, "std"))
-        if (store.std_uri) |std_root_uri| try std.mem.dupe(allocator, u8, std_root_uri) else {
+        if (std_uri) |uri| try std.mem.dupe(allocator, u8, uri) else {
             std.debug.warn("Cannot resolve std library import, path is null.\n", .{});
             return null;
         }

@@ -259,6 +253,7 @@ pub const AnalysisContext = struct {
     tree: *std.zig.ast.Tree,
     scope_nodes: []*std.zig.ast.Node,
     last_this_node: *std.zig.ast.Node,
+    std_uri: ?[]const u8,
 
     fn refreshScopeNodes(self: *AnalysisContext) !void {
         var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&self.arena.allocator);

@@ -269,7 +264,13 @@ pub const AnalysisContext = struct {
 
     pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
         const allocator = self.store.allocator;
-        const final_uri = (try uriFromImportStr(self.store, self.store.allocator, self.handle.*, import_str)) orelse return null;
+        const final_uri = (try uriFromImportStr(
+            self.store,
+            self.store.allocator,
+            self.handle.*,
+            import_str,
+            self.std_uri,
+        )) orelse return null;
 
         std.debug.warn("Import final URI: {}\n", .{final_uri});
         var consumed_final_uri = false;

@@ -351,6 +352,7 @@ pub const AnalysisContext = struct {
             .tree = tree,
             .scope_nodes = self.scope_nodes,
             .last_this_node = &tree.root_node.base,
+            .std_uri = self.std_uri,
         };
     }
 

@@ -369,12 +371,36 @@ pub const AnalysisContext = struct {
     }
 };
 
-pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator, position: types.Position) !AnalysisContext {
+fn stdUriFromLibPath(allocator: *std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
+    if (zig_lib_path) |zpath| {
+        const std_path = std.fs.path.resolve(allocator, &[_][]const u8{
+            zpath, "./std/std.zig",
+        }) catch |err| block: {
+            std.debug.warn("Failed to resolve zig std library path, error: {}\n", .{err});
+            return null;
+        };
+
+        defer allocator.free(std_path);
+        // Get the std_path as a URI, so we can just append to it!
+        return try URI.fromPath(allocator, std_path);
+    }
+
+    return null;
+}
+
+pub fn analysisContext(
+    self: *DocumentStore,
+    handle: *Handle,
+    arena: *std.heap.ArenaAllocator,
+    position: types.Position,
+    zig_lib_path: ?[]const u8,
+) !AnalysisContext {
     const tree = try handle.tree(self.allocator);
 
     var scope_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);
     try analysis.declsFromIndex(&scope_nodes, tree, try handle.document.positionToIndex(position));
 
+    const std_uri = try stdUriFromLibPath(&arena.allocator, zig_lib_path);
     return AnalysisContext{
         .store = self,
         .handle = handle,

@@ -382,6 +408,7 @@ pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.A
         .tree = tree,
         .scope_nodes = scope_nodes.items,
         .last_this_node = &tree.root_node.base,
+        .std_uri = std_uri,
     };
 }
 

@@ -400,7 +427,4 @@ pub fn deinit(self: *DocumentStore) void {
     }
 
     self.handles.deinit();
-    if (self.std_uri) |uri| {
-        self.allocator.free(uri);
-    }
 }

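The net effect of these document_store.zig changes: the standard-library URI is no longer computed once in init and cached on the store; instead callers pass a (possibly workspace-specific) zig_lib_path down, and stdUriFromLibPath resolves it on demand into the request's arena allocator. A call site then takes the shape used in src/main.zig later in this diff:

    var analysis_ctx = try document_store.analysisContext(handle, &arena, position, config.zig_lib_path);
    defer analysis_ctx.deinit();
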
src/known-folders (new submodule): 1 change

@@ -0,0 +1 @@
+Subproject commit 42a32b0241a5aaeaa09d7edeceefc5384b4eb466

src/main.zig: 182 changes

@@ -8,6 +8,7 @@ const readRequestHeader = @import("header.zig").readRequestHeader;
 const data = @import("data/" ++ build_options.data_version ++ ".zig");
 const types = @import("types.zig");
 const analysis = @import("analysis.zig");
+const URI = @import("uri.zig");
 
 // Code is largely based off of https://github.com/andersfr/zig-lsp/blob/master/server.zig
 

@@ -15,9 +16,10 @@ var stdout: std.io.BufferedOutStream(4096, std.fs.File.OutStream) = undefined;
 var allocator: *std.mem.Allocator = undefined;
 
 var document_store: DocumentStore = undefined;
+var workspace_folder_configs: std.StringHashMap(?Config) = undefined;
 
 const initialize_response =
-    \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"workspace":{"workspaceFolders":{"supported":true}}}}}
+    \\,"result":{"capabilities":{"signatureHelpProvider":{"triggerCharacters":["(",","]},"textDocumentSync":1,"completionProvider":{"resolveProvider":false,"triggerCharacters":[".",":","@"]},"documentHighlightProvider":false,"codeActionProvider":false,"declarationProvider":true,"definitionProvider":true,"typeDefinitionProvider":true,"workspace":{"workspaceFolders":{"supported":true,"changeNotifications":true}}}}}
 ;
 
 const not_implemented_response =

@@ -319,11 +321,17 @@ fn gotoDefinitionGlobal(id: i64, pos_index: usize, handle: DocumentStore.Handle)
     });
 }
 
-fn gotoDefinitionFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, line_start_idx: usize) !void {
+fn gotoDefinitionFieldAccess(
+    id: i64,
+    handle: *DocumentStore.Handle,
+    position: types.Position,
+    line_start_idx: usize,
+    config: Config,
+) !void {
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
 
-    var analysis_ctx = try document_store.analysisContext(handle, &arena, position);
+    var analysis_ctx = try document_store.analysisContext(handle, &arena, position, config.zig_lib_path);
     defer analysis_ctx.deinit();
 
     const pos_index = try handle.document.positionToIndex(position);

@@ -365,7 +373,7 @@ fn completeGlobal(id: i64, pos_index: usize, handle: *DocumentStore.Handle, conf
     var analysis_ctx = try document_store.analysisContext(handle, &arena, types.Position{
         .line = 0,
         .character = 0,
-    });
+    }, config.zig_lib_path);
     defer analysis_ctx.deinit();
 
     var decl_nodes = std.ArrayList(*std.zig.ast.Node).init(&arena.allocator);

@@ -390,7 +398,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
 
-    var analysis_ctx = try document_store.analysisContext(handle, &arena, position);
+    var analysis_ctx = try document_store.analysisContext(handle, &arena, position, config.zig_lib_path);
     defer analysis_ctx.deinit();
 
     var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);

@@ -573,27 +581,121 @@ fn documentPositionContext(doc: types.TextDocument, pos_index: usize) PositionCo
     return context;
 }
 
+fn loadConfig(folder_path: []const u8) ?Config {
+    var folder = std.fs.cwd().openDir(folder_path, .{}) catch return null;
+    defer folder.close();
+
+    const conf_file = folder.openFile("zls.json", .{}) catch return null;
+    defer conf_file.close();
+
+    // Max 1MB
+    const file_buf = conf_file.inStream().readAllAlloc(allocator, 0x1000000) catch return null;
+    defer allocator.free(file_buf);
+
+    // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
+    var config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), std.json.ParseOptions{ .allocator = allocator }) catch |err| {
+        std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
+        return null;
+    };
+
+    if (config.zig_lib_path) |zig_lib_path| {
+        if (!std.fs.path.isAbsolute(zig_lib_path)) {
+            std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
+            allocator.free(zig_lib_path);
+            config.zig_lib_path = null;
+        }
+    }
+
+    return config;
+}
+
+fn loadWorkspaceConfigs() !void {
+    var folder_config_it = workspace_folder_configs.iterator();
+    while (folder_config_it.next()) |entry| {
+        if (entry.value) |_| continue;
+
+        const folder_path = try URI.parse(allocator, entry.key);
+        defer allocator.free(folder_path);
+
+        entry.value = loadConfig(folder_path);
+    }
+}
+
+fn configFromUriOr(uri: []const u8, default: Config) Config {
+    var folder_config_it = workspace_folder_configs.iterator();
+    while (folder_config_it.next()) |entry| {
+        if (std.mem.startsWith(u8, uri, entry.key)) {
+            return entry.value orelse default;
+        }
+    }
+
+    return default;
+}
+
 fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !void {
     var tree = try parser.parse(json);
     defer tree.deinit();
 
     const root = tree.root;
 
-    std.debug.assert(root.Object.getValue("method") != null);
-
-    const method = root.Object.getValue("method").?.String;
     const id = if (root.Object.getValue("id")) |id| id.Integer else 0;
+    if (id == 1337 and (root.Object.getValue("method") == null or std.mem.eql(u8, root.Object.getValue("method").?.String, ""))) {
+        const result = (root.Object.getValue("result") orelse return).Array;
+
+        for (result.items) |workspace_folder| {
+            const duped_uri = try std.mem.dupe(allocator, u8, workspace_folder.Object.getValue("uri").?.String);
+            try workspace_folder_configs.putNoClobber(duped_uri, null);
+        }
+
+        try loadWorkspaceConfigs();
+        return;
+    }
+
+    std.debug.assert(root.Object.getValue("method") != null);
+    const method = root.Object.getValue("method").?.String;
     const params = root.Object.getValue("params").?.Object;
 
     // Core
     if (std.mem.eql(u8, method, "initialize")) {
         try respondGeneric(id, initialize_response);
     } else if (std.mem.eql(u8, method, "initialized")) {
-        // noop
+        // Send the workspaceFolders request
+        try send(types.Request{
+            .id = .{ .Integer = 1337 },
+            .method = "workspace/workspaceFolders",
+            .params = {},
+        });
     } else if (std.mem.eql(u8, method, "$/cancelRequest")) {
         // noop
     }
+    // Workspace folder changes
+    else if (std.mem.eql(u8, method, "workspace/didChangeWorkspaceFolders")) {
+        const event = params.getValue("event").?.Object;
+        const added = event.getValue("added").?.Array;
+        const removed = event.getValue("removed").?.Array;
+
+        for (removed.items) |rem| {
+            const uri = rem.Object.getValue("uri").?.String;
+            if (workspace_folder_configs.remove(uri)) |entry| {
+                allocator.free(entry.key);
+                if (entry.value) |c| {
+                    std.json.parseFree(Config, c, std.json.ParseOptions{ .allocator = allocator });
+                }
+            }
+        }
+
+        for (added.items) |add| {
+            const duped_uri = try std.mem.dupe(allocator, u8, add.Object.getValue("uri").?.String);
+            if (try workspace_folder_configs.put(duped_uri, null)) |old| {
+                allocator.free(old.key);
+                if (old.value) |c| {
+                    std.json.parseFree(Config, c, std.json.ParseOptions{ .allocator = allocator });
+                }
+            }
+        }
+
+        try loadWorkspaceConfigs();
+    }
     // File changes
     else if (std.mem.eql(u8, method, "textDocument/didOpen")) {
         const document = params.getValue("textDocument").?.Object;

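The workspace discovery itself is ordinary JSON-RPC: after the `initialized` notification the server sends a `workspace/workspaceFolders` request tagged with id 1337, and the matching response (recognized above purely by that id) supplies the folder URIs whose zls.json files loadWorkspaceConfigs then reads. Schematically, the exchange looks roughly like this (payloads are illustrative; only the shape matters):

    server -> client: {"jsonrpc":"2.0","id":1337,"method":"workspace/workspaceFolders"}
    client -> server: {"jsonrpc":"2.0","id":1337,"result":[{"uri":"file:///home/user/project","name":"project"}]}

Later workspace/didChangeWorkspaceFolders notifications update the same map, and configFromUriOr hands out a folder's config for any document whose URI starts with that folder's URI.
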
@@ -601,7 +703,7 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         const text = document.getValue("text").?.String;
 
         const handle = try document_store.openDocument(uri, text);
-        try publishDiagnostics(handle.*, config);
+        try publishDiagnostics(handle.*, configFromUriOr(uri, config));
     } else if (std.mem.eql(u8, method, "textDocument/didChange")) {
         const text_document = params.getValue("textDocument").?.Object;
         const uri = text_document.getValue("uri").?.String;

@@ -612,8 +714,9 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
             return;
         };
 
-        try document_store.applyChanges(handle, content_changes);
-        try publishDiagnostics(handle.*, config);
+        const local_config = configFromUriOr(uri, config);
+        try document_store.applyChanges(handle, content_changes, local_config.zig_lib_path);
+        try publishDiagnostics(handle.*, local_config);
     } else if (std.mem.eql(u8, method, "textDocument/didSave")) {
         // noop
     } else if (std.mem.eql(u8, method, "textDocument/didClose")) {

@@ -641,18 +744,19 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
         const pos_index = try handle.document.positionToIndex(pos);
         const pos_context = documentPositionContext(handle.document, pos_index);
 
+        const this_config = configFromUriOr(uri, config);
         switch (pos_context) {
             .builtin => try send(types.Response{
                 .id = .{ .Integer = id },
                 .result = .{
                     .CompletionList = .{
                         .isIncomplete = false,
-                        .items = builtin_completions[@boolToInt(config.enable_snippets)][0..],
+                        .items = builtin_completions[@boolToInt(this_config.enable_snippets)][0..],
                     },
                 },
             }),
-            .var_access, .empty => try completeGlobal(id, pos_index, handle, config),
-            .field_access => |start_idx| try completeFieldAccess(id, handle, pos, start_idx, config),
+            .var_access, .empty => try completeGlobal(id, pos_index, handle, this_config),
+            .field_access => |start_idx| try completeFieldAccess(id, handle, pos, start_idx, this_config),
             else => try respondGeneric(id, no_completions_response),
         }
     } else {

@@ -685,7 +789,13 @@ fn processJsonRpc(parser: *std.json.Parser, json: []const u8, config: Config) !v
 
         switch (pos_context) {
            .var_access => try gotoDefinitionGlobal(id, pos_index, handle.*),
-            .field_access => |start_idx| try gotoDefinitionFieldAccess(id, handle, pos, start_idx),
+            .field_access => |start_idx| try gotoDefinitionFieldAccess(
+                id,
+                handle,
+                pos,
+                start_idx,
+                configFromUriOr(uri, config),
+            ),
             else => try respondGeneric(id, null_result_response),
         }
     }

@@ -723,39 +833,31 @@ pub fn main() anyerror!void {
     var config = Config{};
     defer std.json.parseFree(Config, config, config_parse_options);
 
-    // TODO: Investigate using std.fs.Watch to detect writes to the config and reload it.
     config_read: {
+        const known_folders = @import("known-folders/known-folders.zig");
+        const res = try known_folders.getPath(allocator, .local_configuration);
+        if (res) |local_config_path| {
+            defer allocator.free(local_config_path);
+            if (loadConfig(local_config_path)) |conf| {
+                config = conf;
+                break :config_read;
+            }
+        }
+
         var exec_dir_bytes: [std.fs.MAX_PATH_BYTES]u8 = undefined;
         const exec_dir_path = std.fs.selfExeDirPath(&exec_dir_bytes) catch break :config_read;
 
-        var exec_dir = std.fs.cwd().openDir(exec_dir_path, .{}) catch break :config_read;
-        defer exec_dir.close();
-
-        const conf_file = exec_dir.openFile("zls.json", .{}) catch break :config_read;
-        defer conf_file.close();
-
-        // Max 1MB
-        const file_buf = conf_file.inStream().readAllAlloc(allocator, 0x1000000) catch break :config_read;
-        defer allocator.free(file_buf);
-
-        // TODO: Better errors? Doesn't seem like std.json can provide us positions or context.
-        config = std.json.parse(Config, &std.json.TokenStream.init(file_buf), config_parse_options) catch |err| {
-            std.debug.warn("Error while parsing configuration file: {}\nUsing default config.\n", .{err});
-            break :config_read;
-        };
-    }
-
-    if (config.zig_lib_path) |zig_lib_path| {
-        if (!std.fs.path.isAbsolute(zig_lib_path)) {
-            std.debug.warn("zig library path is not absolute, defaulting to null.\n", .{});
-            allocator.free(zig_lib_path);
-            config.zig_lib_path = null;
+        if (loadConfig(exec_dir_path)) |conf| {
+            config = conf;
         }
     }
 
-    try document_store.init(allocator, config.zig_lib_path);
+    try document_store.init(allocator);
     defer document_store.deinit();
 
+    workspace_folder_configs = std.StringHashMap(?Config).init(allocator);
+    defer workspace_folder_configs.deinit();
+
     // This JSON parser is passed to processJsonRpc and reset.
     var json_parser = std.json.Parser.init(allocator, false);
     defer json_parser.deinit();

src/types.zig

@@ -40,9 +40,7 @@ pub const RequestId = union(enum) {
 };
 
 /// Params of a request
-pub const RequestParams = union(enum) {
-
-};
+pub const RequestParams = void;
 
 pub const NotificationParams = union(enum) {
     LogMessageParams: LogMessageParams,