Merge branch 'master' into intern-pool
commit 2dffa9ef25

.gitmodules (vendored) | 3
@@ -7,3 +7,6 @@
[submodule "src/tres"]
    path = src/tres
    url = https://github.com/ziglibs/tres.git
[submodule "src/diffz"]
    path = src/diffz
    url = https://github.com/ziglibs/diffz

build.zig | 22

@@ -72,12 +72,14 @@ pub fn build(b: *std.build.Builder) !void {
        b.option(u32, "enable_failing_allocator_likelihood", "The chance that an allocation will fail is `1/likelihood`") orelse 256,
    );

    const build_root_path = b.pathFromRoot(".");

    const version = v: {
        const version_string = b.fmt("{d}.{d}.{d}", .{ zls_version.major, zls_version.minor, zls_version.patch });

        var code: u8 = undefined;
        const git_describe_untrimmed = b.execAllowFail(&[_][]const u8{
            "git", "-C", b.build_root, "describe", "--match", "*.*.*", "--tags",
            "git", "-C", build_root_path, "describe", "--match", "*.*.*", "--tags",
        }, &code, .Ignore) catch break :v version_string;

        const git_describe = std.mem.trim(u8, git_describe_untrimmed, " \n\r");
@@ -120,9 +122,15 @@ pub fn build(b: *std.build.Builder) !void {
    const tres_module = b.createModule(.{ .source_file = .{ .path = tres_path } });
    exe.addModule("tres", tres_module);

    const DIFFZ_DEFAULT_PATH = "src/diffz/DiffMatchPatch.zig";
    const diffz_path = b.option([]const u8, "diffz", "Path to diffz package (default: " ++ DIFFZ_DEFAULT_PATH ++ ")") orelse DIFFZ_DEFAULT_PATH;
    const diffz_module = b.createModule(.{ .source_file = .{ .path = diffz_path } });
    exe.addModule("diffz", diffz_module);

    const check_submodules_step = CheckSubmodulesStep.init(b, &.{
        known_folders_path,
        tres_path,
        diffz_path,
    });
    b.getInstallStep().dependOn(&check_submodules_step.step);

@@ -156,10 +164,10 @@ pub fn build(b: *std.build.Builder) !void {

    const gen_cmd = gen_exe.run();
    gen_cmd.addArgs(&.{
        b.pathJoin(&.{ b.build_root, "src", "Config.zig" }),
        b.pathJoin(&.{ b.build_root, "schema.json" }),
        b.pathJoin(&.{ b.build_root, "README.md" }),
        b.pathJoin(&.{ b.build_root, "src", "data" }),
        b.pathJoin(&.{ build_root_path, "src", "Config.zig" }),
        b.pathJoin(&.{ build_root_path, "schema.json" }),
        b.pathJoin(&.{ build_root_path, "README.md" }),
        b.pathJoin(&.{ build_root_path, "src", "data" }),
    });
    if (b.args) |args| gen_cmd.addArgs(args);

@@ -185,7 +193,7 @@ pub fn build(b: *std.build.Builder) !void {
    tests.setFilter(test_filter);

    if (coverage) {
        const src_dir = b.pathJoin(&.{ b.build_root, "src" });
        const src_dir = b.pathJoin(&.{ build_root_path, "src" });
        const include_pattern = b.fmt("--include-pattern={s}", .{src_dir});

        tests.setExecCmd(&[_]?[]const u8{
@@ -203,11 +211,13 @@ pub fn build(b: *std.build.Builder) !void {
        .dependencies = &.{
            .{ .name = "known-folders", .module = known_folders_module },
            .{ .name = "tres", .module = tres_module },
            .{ .name = "diffz", .module = diffz_module },
            .{ .name = "build_options", .module = build_options_module },
        },
    });
    tests.addModule("zls", zls_module);
    tests.addModule("tres", tres_module);
    tests.addModule("diffz", diffz_module);

    test_step.dependOn(&tests.step);

flake.nix

@@ -13,11 +13,14 @@
    known-folders.url = "github:ziglibs/known-folders";
    known-folders.flake = false;

    diffz.url = "github:ziglibs/diffz";
    diffz.flake = false;

    tres.url = "github:ziglibs/tres";
    tres.flake = false;
  };

  outputs = { self, nixpkgs, zig-overlay, gitignore, flake-utils, known-folders, tres }:
  outputs = { self, nixpkgs, zig-overlay, gitignore, flake-utils, known-folders, tres, diffz }:
    let
      systems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
      inherit (gitignore.lib) gitignoreSource;
@@ -38,7 +41,7 @@
          dontInstall = true;
          buildPhase = ''
            mkdir -p $out
            zig build install -Dcpu=baseline -Doptimize=ReleaseSafe -Ddata_version=master -Dtres=${tres}/tres.zig -Dknown-folders=${known-folders}/known-folders.zig --prefix $out
            zig build install -Dcpu=baseline -Doptimize=ReleaseSafe -Ddata_version=master -Dtres=${tres}/tres.zig -Dknown-folders=${known-folders}/known-folders.zig -Ddiffz=${diffz}/DiffMatchPatch.zig --prefix $out
          '';
          XDG_CACHE_HOME = ".cache";
        };

src/DocumentStore.zig

@@ -858,6 +858,33 @@ pub fn collectDependencies(
    }
}

/// TODO resolve relative paths
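/// Collects include directories from the native target paths and the
/// associated build file into `include_dirs`; build-file entries are duped,
/// so the caller owns the collected paths.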
pub fn collectIncludeDirs(
    store: *const DocumentStore,
    allocator: std.mem.Allocator,
    handle: Handle,
    include_dirs: *std.ArrayListUnmanaged([]const u8),
) !void {
    const target_info = try std.zig.system.NativeTargetInfo.detect(.{});
    var native_paths = try std.zig.system.NativePaths.detect(allocator, target_info);
    defer native_paths.deinit();

    const build_file_includes_paths: []const []const u8 = if (handle.associated_build_file) |build_file_uri|
        store.build_files.get(build_file_uri).?.config.include_dirs
    else
        &.{};

    try include_dirs.ensureTotalCapacity(allocator, native_paths.include_dirs.items.len + build_file_includes_paths.len);

    const native_include_dirs = try native_paths.include_dirs.toOwnedSlice();
    defer allocator.free(native_include_dirs);
    include_dirs.appendSliceAssumeCapacity(native_include_dirs);

    for (build_file_includes_paths) |include_path| {
        include_dirs.appendAssumeCapacity(try allocator.dupe(u8, include_path));
    }
}

/// returns the document behind `@cImport()` where `node` is the `cImport` node
/// if a cImport can't be translated e.g. requires computing a
/// comptime value `resolveCImport` will return null
@@ -876,15 +903,22 @@ pub fn resolveCImport(self: *DocumentStore, handle: Handle, node: Ast.Node.Index
    const result = self.cimports.get(hash) orelse blk: {
        const source: []const u8 = handle.cimports.items(.source)[index];

        const include_dirs: []const []const u8 = if (handle.associated_build_file) |build_file_uri|
            self.build_files.get(build_file_uri).?.config.include_dirs
        else
            &.{};
        var include_dirs: std.ArrayListUnmanaged([]const u8) = .{};
        defer {
            for (include_dirs.items) |path| {
                self.allocator.free(path);
            }
            include_dirs.deinit(self.allocator);
        }
        self.collectIncludeDirs(self.allocator, handle, &include_dirs) catch |err| {
            log.err("failed to resolve include paths: {}", .{err});
            return null;
        };

        var result = (try translate_c.translate(
            self.allocator,
            self.config.*,
            include_dirs,
            include_dirs.items,
            source,
        )) orelse return null;

@@ -940,18 +974,13 @@ pub fn uriFromImportStr(self: *const DocumentStore, allocator: std.mem.Allocator
        }
        return null;
    } else {
        const base = handle.uri;
        var base_len = base.len;
        while (base[base_len - 1] != '/' and base_len > 0) {
            base_len -= 1;
        }
        base_len -= 1;
        if (base_len <= 0) {
            return null;
            // return error.UriBadScheme;
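        // Scan backwards for the last path separator; everything before it is
        // the directory of the importing document.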
        var seperator_index = handle.uri.len;
        while (seperator_index > 0) : (seperator_index -= 1) {
            if (std.fs.path.isSep(handle.uri[seperator_index - 1])) break;
        }
        const base = handle.uri[0 .. seperator_index - 1];

        return URI.pathRelative(allocator, base[0..base_len], import_str) catch |err| switch (err) {
        return URI.pathRelative(allocator, base, import_str) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.UriBadScheme => return null,
        };

src/Server.zig | 209

@@ -70,6 +70,7 @@ const ClientCapabilities = packed struct {
    completion_doc_supports_md: bool = false,
    label_details_support: bool = false,
    supports_configuration: bool = false,
    supports_workspace_did_change_configuration_dynamic_registration: bool = false,
};

pub const Error = std.mem.Allocator.Error || error{
@@ -1191,14 +1192,45 @@ fn hoverDefinitionFieldAccess(

fn gotoDefinitionString(
    server: *Server,
    pos_index: usize,
    pos_context: analysis.PositionContext,
    handle: *const DocumentStore.Handle,
) error{OutOfMemory}!?types.Location {
    const tracy_zone = tracy.trace(@src());
    defer tracy_zone.end();

    const import_str = analysis.getImportStr(handle.tree, 0, pos_index) orelse return null;
    const uri = try server.document_store.uriFromImportStr(server.arena.allocator(), handle.*, import_str);
    const allocator = server.arena.allocator();

    const loc = pos_context.loc().?;
    const import_str_loc = offsets.tokenIndexToLoc(handle.tree.source, loc.start);
    if (import_str_loc.end - import_str_loc.start < 2) return null;
    var import_str = offsets.locToSlice(handle.tree.source, .{
        .start = import_str_loc.start + 1,
        .end = import_str_loc.end - 1,
    });

    const uri = switch (pos_context) {
        .import_string_literal,
        .embedfile_string_literal,
        => try server.document_store.uriFromImportStr(allocator, handle.*, import_str),
        .cinclude_string_literal => try uri_utils.fromPath(
            allocator,
            blk: {
                if (std.fs.path.isAbsolute(import_str)) break :blk import_str;
                var include_dirs: std.ArrayListUnmanaged([]const u8) = .{};
                server.document_store.collectIncludeDirs(allocator, handle.*, &include_dirs) catch |err| {
                    log.err("failed to resolve include paths: {}", .{err});
                    return null;
                };
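                // Probe each include directory and resolve to the first one
                // in which the requested header actually exists on disk.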
                for (include_dirs.items) |dir| {
                    const path = try std.fs.path.join(allocator, &.{ dir, import_str });
                    std.fs.accessAbsolute(path, .{}) catch continue;
                    break :blk path;
                }
                return null;
            },
        ),
        else => unreachable,
    };

    return types.Location{
        .uri = uri orelse return null,
@@ -1624,60 +1656,87 @@ fn completeDot(server: *Server, handle: *const DocumentStore.Handle) error{OutOf
    return completions;
}

fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, store: *const DocumentStore, handle: *const DocumentStore.Handle, completing: []const u8, is_import: bool) ![]types.CompletionItem {
    var subpath_present = false;
    var completions = std.ArrayListUnmanaged(types.CompletionItem){};
fn completeFileSystemStringLiteral(
    arena: std.mem.Allocator,
    store: DocumentStore,
    handle: DocumentStore.Handle,
    pos_context: analysis.PositionContext,
) ![]types.CompletionItem {
    var completions: analysis.CompletionSet = .{};

    fsc: {
        var document_path = try uri_utils.parse(allocator, handle.uri);
        var document_dir_path = std.fs.openIterableDirAbsolute(std.fs.path.dirname(document_path) orelse break :fsc, .{}) catch break :fsc;
        defer document_dir_path.close();
    const loc = pos_context.loc().?;
    var completing = handle.tree.source[loc.start + 1 .. loc.end - 1];

        if (std.mem.lastIndexOfScalar(u8, completing, '/')) |subpath_index| {
            var subpath = completing[0..subpath_index];
    var seperator_index = completing.len;
    while (seperator_index > 0) : (seperator_index -= 1) {
        if (std.fs.path.isSep(completing[seperator_index - 1])) break;
    }
    completing = completing[0..seperator_index];

            if (std.mem.startsWith(u8, subpath, "./") and subpath_index > 2) {
                subpath = completing[2..subpath_index];
            } else if (std.mem.startsWith(u8, subpath, ".") and subpath_index > 1) {
                subpath = completing[1..subpath_index];
    var search_paths: std.ArrayListUnmanaged([]const u8) = .{};
    if (std.fs.path.isAbsolute(completing) and pos_context != .import_string_literal) {
        try search_paths.append(arena, completing);
    } else if (pos_context == .cinclude_string_literal) {
        store.collectIncludeDirs(arena, handle, &search_paths) catch |err| {
            log.err("failed to resolve include paths: {}", .{err});
            return &.{};
        };
    } else {
        var document_path = try uri_utils.parse(arena, handle.uri);
        try search_paths.append(arena, std.fs.path.dirname(document_path).?);
    }

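    // Enumerate each absolute search path, keeping directories and any file
    // whose extension matches the one expected for this string-literal context.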
    for (search_paths.items) |path| {
        if (!std.fs.path.isAbsolute(path)) continue;
        const dir_path = if (std.fs.path.isAbsolute(completing)) path else try std.fs.path.join(arena, &.{ path, completing });

        var iterable_dir = std.fs.openIterableDirAbsolute(dir_path, .{}) catch continue;
        defer iterable_dir.close();
        var it = iterable_dir.iterateAssumeFirstIteration();

        while (it.next() catch null) |entry| {
            const expected_extension = switch (pos_context) {
                .import_string_literal => ".zig",
                .cinclude_string_literal => ".h",
                .embedfile_string_literal => null,
                else => unreachable,
            };
            switch (entry.kind) {
                .File => if (expected_extension) |expected| {
                    const actual_extension = std.fs.path.extension(entry.name);
                    if (!std.mem.eql(u8, actual_extension, expected)) continue;
                },
                .Directory => {},
                else => continue,
            }

            var old = document_dir_path;
            document_dir_path = document_dir_path.dir.openIterableDir(subpath, .{}) catch break :fsc; // NOTE: Is this even safe lol?
            old.close();

            subpath_present = true;
        }

        var dir_iterator = document_dir_path.iterate();
        while (try dir_iterator.next()) |entry| {
            if (std.mem.startsWith(u8, entry.name, ".")) continue;
            if (entry.kind == .File and is_import and !std.mem.endsWith(u8, entry.name, ".zig")) continue;

            const l = try allocator.dupe(u8, entry.name);
            try completions.append(allocator, types.CompletionItem{
                .label = l,
                .insertText = l,
            _ = try completions.getOrPut(arena, types.CompletionItem{
                .label = try arena.dupe(u8, entry.name),
                .detail = if (pos_context == .cinclude_string_literal) path else null,
                .insertText = if (entry.kind == .Directory)
                    try std.fmt.allocPrint(arena, "{s}/", .{entry.name})
                else
                    null,
                .kind = if (entry.kind == .File) .File else .Folder,
            });
        }
    }

    if (!subpath_present and is_import) {
    if (completing.len == 0 and pos_context == .import_string_literal) {
        if (handle.associated_build_file) |uri| {
            const build_file = store.build_files.get(uri).?;
            try completions.ensureUnusedCapacity(allocator, build_file.config.packages.len);
            try completions.ensureUnusedCapacity(arena, build_file.config.packages.len);

            for (build_file.config.packages) |pkg| {
                completions.appendAssumeCapacity(.{
                completions.putAssumeCapacity(.{
                    .label = pkg.name,
                    .kind = .Module,
                });
                }, {});
            }
        }
    }

    return completions.toOwnedSlice(allocator);
    return completions.keys();
}

fn initializeHandler(server: *Server, request: types.InitializeParams) Error!types.InitializeResult {
@@ -1776,7 +1835,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
    server.client_capabilities.supports_configuration = workspace.configuration orelse false;
    if (workspace.didChangeConfiguration) |did_change| {
        if (did_change.dynamicRegistration orelse false) {
            try server.registerCapability("workspace/didChangeConfiguration");
            server.client_capabilities.supports_workspace_did_change_configuration_dynamic_registration = true;
        }
    }
}
@@ -1856,7 +1915,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
    .renameProvider = .{ .bool = true },
    .completionProvider = .{
        .resolveProvider = false,
        .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" },
        .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]", "/" },
        .completionItem = .{ .labelDetailsSupport = true },
    },
    .documentHighlightProvider = .{ .bool = true },
@@ -1918,6 +1977,10 @@ fn initializedHandler(server: *Server, notification: types.InitializedParams) Er

    server.status = .initialized;

    if (server.client_capabilities.supports_workspace_did_change_configuration_dynamic_registration) {
        try server.registerCapability("workspace/didChangeConfiguration");
    }

    if (server.client_capabilities.supports_configuration)
        try server.requestConfiguration();
}
@@ -2013,27 +2076,37 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory}
    const new_value: field.type = switch (ft) {
        []const u8 => switch (value) {
            .String => |s| blk: {
                if (s.len == 0) {
                    if (field.type == ?[]const u8) {
                        break :blk null;
                    } else {
                        break :blk s;
                    }
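                // Treat an empty or "nil" string as an unset value and keep
                // the previously configured one.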
                const trimmed = std.mem.trim(u8, s, " ");
                if (trimmed.len == 0 or std.mem.eql(u8, trimmed, "nil")) {
                    log.warn("Ignoring new value for \"zls.{s}\": the given new value is invalid", .{field.name});
                    break :blk @field(server.config, field.name);
                }
                var nv = try server.allocator.dupe(u8, s);
                var nv = try server.allocator.dupe(u8, trimmed);
                if (@field(server.config, field.name)) |prev_val| server.allocator.free(prev_val);
                break :blk nv;
            }, // TODO: Allocation model? (same with didChangeConfiguration); imo this isn't *that* bad but still
            else => @panic("Invalid configuration value"), // TODO: Handle this
            },
            else => blk: {
                log.warn("Ignoring new value for \"zls.{s}\": the given new value has an invalid type", .{field.name});
                break :blk @field(server.config, field.name);
            },
        },
        else => switch (ti) {
            .Int => switch (value) {
                .Integer => |s| std.math.cast(ft, s) orelse @panic("Invalid configuration value"),
                else => @panic("Invalid configuration value"), // TODO: Handle this
                .Integer => |val| std.math.cast(ft, val) orelse blk: {
                    log.warn("Ignoring new value for \"zls.{s}\": the given new value is invalid", .{field.name});
                    break :blk @field(server.config, field.name);
                },
                else => blk: {
                    log.warn("Ignoring new value for \"zls.{s}\": the given new value has an invalid type", .{field.name});
                    break :blk @field(server.config, field.name);
                },
            },
            .Bool => switch (value) {
                .Bool => |b| b,
                else => @panic("Invalid configuration value"), // TODO: Handle this
                else => blk: {
                    log.warn("Ignoring new value for \"zls.{s}\": the given new value has an invalid type", .{field.name});
                    break :blk @field(server.config, field.name);
                },
            },
            else => @compileError("Not implemented for " ++ @typeName(ft)),
        },
@@ -2068,7 +2141,7 @@ fn changeDocumentHandler(server: *Server, notification: types.DidChangeTextDocum

    const handle = server.document_store.getHandle(notification.textDocument.uri) orelse return;

    const new_text = try diff.applyTextEdits(server.allocator, handle.text, notification.contentChanges, server.offset_encoding);
    const new_text = try diff.applyContentChanges(server.allocator, handle.text, notification.contentChanges, server.offset_encoding);

    try server.document_store.refreshDocument(handle.uri, new_text);

@@ -2165,12 +2238,13 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
        .global_error_set => try server.completeError(handle),
        .enum_literal => try server.completeDot(handle),
        .label => try server.completeLabel(source_index, handle),
        .import_string_literal, .embedfile_string_literal => |loc| blk: {
        .import_string_literal,
        .cinclude_string_literal,
        .embedfile_string_literal,
        => blk: {
            if (!server.config.enable_import_embedfile_argument_completions) break :blk null;

            const completing = offsets.locToSlice(handle.tree.source, loc);
            const is_import = pos_context == .import_string_literal;
            break :blk completeFileSystemStringLiteral(server.arena.allocator(), &server.document_store, handle, completing, is_import) catch |err| {
            break :blk completeFileSystemStringLiteral(server.arena.allocator(), server.document_store, handle.*, pos_context) catch |err| {
                log.err("failed to get file system completions: {}", .{err});
                return null;
            };
@@ -2184,7 +2258,14 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
    // the remaining identifier with the completion instead of just inserting.
    // TODO Identify function call/struct init and replace the whole thing.
    const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
    if (server.client_capabilities.supports_apply_edits and pos_context.loc() != null and lookahead_context.loc() != null and pos_context.loc().?.end != lookahead_context.loc().?.end) {
    if (server.client_capabilities.supports_apply_edits and
        pos_context != .import_string_literal and
        pos_context != .cinclude_string_literal and
        pos_context != .embedfile_string_literal and
        pos_context.loc() != null and
        lookahead_context.loc() != null and
        pos_context.loc().?.end != lookahead_context.loc().?.end)
    {
        var end = lookahead_context.loc().?.end;
        while (end < handle.text.len and (std.ascii.isAlphanumeric(handle.text[end]) or handle.text[end] == '"')) {
            end += 1;
@@ -2266,7 +2347,10 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol
        .builtin => |loc| try server.gotoDefinitionBuiltin(handle, loc),
        .var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
        .field_access => |loc| try server.gotoDefinitionFieldAccess(handle, source_index, loc, resolve_alias),
        .import_string_literal => try server.gotoDefinitionString(source_index, handle),
        .import_string_literal,
        .cinclude_string_literal,
        .embedfile_string_literal,
        => try server.gotoDefinitionString(pos_context, handle),
        .label => try server.gotoDefinitionLabel(source_index, handle),
        else => null,
    };
@@ -2353,7 +2437,7 @@ fn formattingHandler(server: *Server, request: types.DocumentFormattingParams) E
        return text_edits;
    }

    return if (diff.edits(allocator, handle.text, formatted)) |text_edits| text_edits.items else |_| null;
    return if (diff.edits(allocator, handle.text, formatted, server.offset_encoding)) |text_edits| text_edits.items else |_| null;
}

fn didChangeConfigurationHandler(server: *Server, request: configuration.DidChangeConfigurationParams) Error!void {
@@ -2853,7 +2937,12 @@ fn processMessage(server: *Server, message: Message) Error!void {
    },
    .ResponseMessage => |response| {
        if (response.id != .string) return;
        if (std.mem.startsWith(u8, response.id.string, "register")) return;
        if (std.mem.startsWith(u8, response.id.string, "register")) {
            if (response.@"error") |err| {
                log.err("Error response for '{s}': {}, {s}", .{ response.id.string, err.code, err.message });
            }
            return;
        }
        if (std.mem.eql(u8, response.id.string, "apply_edit")) return;

        if (std.mem.eql(u8, response.id.string, "i_haz_configuration")) {
src/analysis.zig

@@ -1457,47 +1457,11 @@ fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize)
    return source_index >= loc.start and source_index <= loc.end;
}

pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
    const node_tags = tree.nodes.items(.tag);

    var buf: [2]Ast.Node.Index = undefined;
    if (tree.fullContainerDecl(&buf, node)) |container_decl| {
        for (container_decl.ast.members) |decl_idx| {
            if (getImportStr(tree, decl_idx, source_index)) |name| {
                return name;
            }
        }
        return null;
    } else if (tree.fullVarDecl(node)) |var_decl| {
        return getImportStr(tree, var_decl.ast.init_node, source_index);
    } else if (node_tags[node] == .@"usingnamespace") {
        return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
    }

    if (!nodeContainsSourceIndex(tree, node, source_index)) return null;

    if (!ast.isBuiltinCall(tree, node)) return null;

    const builtin_token = tree.nodes.items(.main_token)[node];
    const call_name = tree.tokenSlice(builtin_token);

    if (!std.mem.eql(u8, call_name, "@import")) return null;

    var buffer: [2]Ast.Node.Index = undefined;
    const params = ast.builtinCallParams(tree, node, &buffer).?;

    if (params.len != 1) return null;

    if (node_tags[params[0]] != .string_literal) return null;

    const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]);
    return import_str[1 .. import_str.len - 1];
}

pub const PositionContext = union(enum) {
    builtin: offsets.Loc,
    comment,
    import_string_literal: offsets.Loc,
    cinclude_string_literal: offsets.Loc,
    embedfile_string_literal: offsets.Loc,
    string_literal: offsets.Loc,
    field_access: offsets.Loc,
@@ -1514,6 +1478,7 @@ pub const PositionContext = union(enum) {
    .builtin => |r| r,
    .comment => null,
    .import_string_literal => |r| r,
    .cinclude_string_literal => |r| r,
    .embedfile_string_literal => |r| r,
    .string_literal => |r| r,
    .field_access => |r| r,
@@ -1591,7 +1556,7 @@ pub fn getPositionContext(
    };

    while (true) {
        const tok = tokenizer.next();
        var tok = tokenizer.next();
        // Early exits.
        if (tok.loc.start > new_index) break;
        if (tok.loc.start == new_index) {
@@ -1610,7 +1575,12 @@ pub fn getPositionContext(
        },
    };
}
return .other;
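// A quote preceded by '@' starts a quoted identifier (@"..."),
// not a string literal.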
const q = std.mem.lastIndexOf(u8, held_line, "\"") orelse return .other;
if (held_line[q -| 1] == '@') {
    tok.tag = .identifier;
} else {
    tok.tag = .string_literal;
}
},
.doc_comment, .container_doc_comment => return .comment,
.eof => break,
@@ -1630,8 +1600,10 @@ pub fn getPositionContext(
    if (std.mem.eql(u8, builtin_name, "@import")) {
        curr_ctx.ctx = .{ .import_string_literal = tok.loc };
        break :string_lit_block;
    }
    if (std.mem.eql(u8, builtin_name, "@embedFile")) {
    } else if (std.mem.eql(u8, builtin_name, "@cInclude")) {
        curr_ctx.ctx = .{ .cinclude_string_literal = tok.loc };
        break :string_lit_block;
    } else if (std.mem.eql(u8, builtin_name, "@embedFile")) {
        curr_ctx.ctx = .{ .embedfile_string_literal = tok.loc };
        break :string_lit_block;
    }
@@ -1850,6 +1822,7 @@ fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Inde
    .error_set_decl,
    => return,
    .container_decl,
    .container_decl_trailing,
    .container_decl_arg,
    .container_decl_arg_trailing,
    .container_decl_two,

src/debug.zig

@@ -44,7 +44,7 @@ pub fn printDocumentScope(doc_scope: analysis.DocumentScope) void {
    if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!");

    var index: usize = 0;
    while(index < doc_scope.scopes.len) : (index += 1) {
    while (index < doc_scope.scopes.len) : (index += 1) {
        const scope = doc_scope.scopes.get(index);
        if (index != 0) std.debug.print("\n\n", .{});
        std.debug.print(

src/diff.zig | 405

@@ -1,358 +1,47 @@
const std = @import("std");
const types = @import("lsp.zig");
const offsets = @import("offsets.zig");
const DiffMatchPatch = @import("diffz");

pub const Error = error{ OutOfMemory, InvalidRange };

// Whether the `Change` is an addition, deletion, or no change from the
// original string to the new string
const Operation = enum { Deletion, Addition, Nothing };

/// A single character difference between two strings
const Change = struct {
    operation: Operation,
    pos: usize,
    value: ?u8,
const dmp = DiffMatchPatch{
    .diff_timeout = 250,
};

/// Given two input strings, `a` and `b`, return a list of Edits that
/// describe the changes from `a` to `b`
pub const Error = error{ OutOfMemory, InvalidRange, UnknownError };

pub fn edits(
    allocator: std.mem.Allocator,
    a: []const u8,
    b: []const u8,
    before: []const u8,
    after: []const u8,
    encoding: offsets.Encoding,
) Error!std.ArrayListUnmanaged(types.TextEdit) {
    // Given the input strings A and B, we skip over the first N characters
    // where A[0..N] == B[0..N]. We want to trim the start (and end) of the
    // strings that have the same text. This decreases the size of the LCS
    // table and makes the diff comparison more efficient
    var a_trim: []const u8 = a;
    var b_trim: []const u8 = b;
    const a_trim_offset = trim_input(&a_trim, &b_trim);
    var diffs = try dmp.diff(allocator, before, after, true);
    var eds = std.ArrayListUnmanaged(types.TextEdit){};

    const rows = a_trim.len + 1;
    const cols = b_trim.len + 1;

    var lcs = try Array2D.new(allocator, rows, cols);
    defer lcs.deinit();

    calculate_lcs(&lcs, a_trim, b_trim);

    return try get_changes(
        &lcs,
        a,
        a_trim_offset,
        a_trim,
        b_trim,
        allocator,
    );
}

fn trim_input(a_out: *[]const u8, b_out: *[]const u8) usize {
    if (a_out.len == 0 or b_out.len == 0) return 0;

    var a: []const u8 = a_out.*;
    var b: []const u8 = b_out.*;

    // Trim the beginning of the string
    var start: usize = 0;
    while (start < a.len and start < b.len and a[start] == b[start]) : ({
        start += 1;
    }) {}

    // Trim the end of the string
    var end: usize = 1;
    while (end < a.len and end < b.len and a[a.len - end] == b[b.len - end]) : ({
        end += 1;
    }) {}
    end -= 1;

    var a_start = start;
    var a_end = a.len - end;
    var b_start = start;
    var b_end = b.len - end;

    // In certain situations, the trimmed range can be "negative" where
    // `a_start` ends up being after `a_end` in the byte stream. If you
    // consider the following inputs:
    //     a: "xx gg xx"
    //     b: "xx gg xx"
    //
    // This will lead to the following calculations:
    //     a_start: 4
    //     a_end: 4
    //     b_start: 4
    //     b_end: 2
    //
    // In negative range situations, we add the absolute value of the
    // the negative range's length (`b_start - b_end` in this case) to the
    // other range's length (a_end + (b_start - b_end)), and then set the
    // negative range end to the negative range start (b_end = b_start)
    if (a_start > a_end) {
        const difference = a_start - a_end;
        a_end = a_start;
        b_end += difference;
    }
    if (b_start > b_end) {
        const difference = b_start - b_end;
        b_end = b_start;
        a_end += difference;
    }

    a_out.* = a[a_start..a_end];
    b_out.* = b[b_start..b_end];

    return start;
}

/// A 2D array that is addressable as a[row, col]
pub const Array2D = struct {
    const Self = @This();

    data: [*]usize,
    allocator: std.mem.Allocator,
    rows: usize,
    cols: usize,

    pub fn new(
        allocator: std.mem.Allocator,
        rows: usize,
        cols: usize,
    ) error{OutOfMemory}!Self {
        const data = try allocator.alloc(usize, rows * cols);

        return Self{
            .data = data.ptr,
            .allocator = allocator,
            .rows = rows,
            .cols = cols,
        };
    }

    pub fn deinit(self: *Self) void {
        self.allocator.free(self.data[0 .. self.rows * self.cols]);
    }

    pub fn get(self: *Self, row: usize, col: usize) *usize {
        return @ptrCast(*usize, self.data + (row * self.cols) + col);
    }
};

/// Build a Longest Common Subsequence table
fn calculate_lcs(
    lcs: *Array2D,
    astr: []const u8,
    bstr: []const u8,
) void {
    const rows = astr.len + 1;
    const cols = bstr.len + 1;

    std.mem.set(usize, lcs.data[0 .. rows * cols], 0);

    // This approach is a dynamic programming technique to calculate the
    // longest common subsequence between two strings, `a` and `b`. We start
    // at 1 for `i` and `j` because the first column and first row are always
    // set to zero
    //
    // You can find more information about this at the following url:
    // https://en.wikipedia.org/wiki/Longest_common_subsequence_problem
    var i: usize = 1;
    while (i < rows) : (i += 1) {
        var j: usize = 1;
        while (j < cols) : (j += 1) {
            if (astr[i - 1] == bstr[j - 1]) {
                lcs.get(i, j).* = lcs.get(i - 1, j - 1).* + 1;
            } else {
                lcs.get(i, j).* = std.math.max(
                    lcs.get(i - 1, j).*,
                    lcs.get(i, j - 1).*,
                );
            }
        }
    }
}

pub fn get_changes(
    lcs: *Array2D,
    a: []const u8,
    a_trim_offset: usize,
    a_trim: []const u8,
    b_trim: []const u8,
    allocator: std.mem.Allocator,
) Error!std.ArrayListUnmanaged(types.TextEdit) {
    // First we get a list of changes between strings at the character level:
    // "addition", "deletion", and "no change" for each character
    var changes = try std.ArrayListUnmanaged(Change).initCapacity(allocator, a_trim.len);
    defer changes.deinit(allocator);
    try recur_changes(
        lcs,
        &changes,
        a_trim,
        b_trim,
        @intCast(i64, a_trim.len),
        @intCast(i64, b_trim.len),
        allocator,
    );

    // We want to group runs of deletions and additions, and separate them by
    // runs of `.Nothing` changes. This will allow us to calculate the
    // `TextEdit` ranges
    var groups = std.ArrayListUnmanaged([]Change){};
    defer groups.deinit(allocator);

    var active_change: ?[]Change = null;
    for (changes.items) |ch, i| {
        switch (ch.operation) {
            .Addition, .Deletion => {
                if (active_change == null) {
                    active_change = changes.items[i..];
                }
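    // Walk the diffz output once while tracking a byte offset into `before`:
    // deletions become empty-text edits over their range, insertions become
    // zero-width edits at the current offset, and equal runs just advance it.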
    var offset: usize = 0;
    for (diffs.items) |diff| {
        var start = offset;
        switch (diff.operation) {
            .delete => {
                offset += diff.text.len;
                try eds.append(allocator, .{ .range = offsets.locToRange(before, .{ .start = start, .end = offset }, encoding), .newText = "" });
            },
            .Nothing => {
                if (active_change) |*ac| {
                    ac.* = ac.*[0..(i - (changes.items.len - ac.*.len))];
                    try groups.append(allocator, ac.*);
                    active_change = null;
                }
            .equal => {
                offset += diff.text.len;
            },
            .insert => {
                try eds.append(allocator, .{ .range = offsets.locToRange(before, .{ .start = start, .end = start }, encoding), .newText = diff.text });
            },
        }
    }
    if (active_change) |*ac| {
        ac.* = ac.*[0..(changes.items.len - (changes.items.len - ac.*.len))];
        try groups.append(allocator, ac.*);
    }

    // The LCS algorithm works "in reverse", so we're putting everything back
    // in ascending order
    var a_lines = std.mem.split(u8, a, "\n");
    std.mem.reverse([]Change, groups.items);
    for (groups.items) |group| std.mem.reverse(Change, group);

    var edit_results = try std.ArrayListUnmanaged(types.TextEdit).initCapacity(allocator, groups.items.len);
    errdefer {
        for (edit_results.items) |edit| {
            allocator.free(edit.newText);
        }
        edit_results.deinit(allocator);
    }

    // Convert our grouped changes into `Edit`s
    for (groups.items) |group| {
        var range_start = group[0].pos;
        var range_len: usize = 0;
        var newText = std.ArrayListUnmanaged(u8){};
        errdefer newText.deinit(allocator);
        for (group) |ch| {
            switch (ch.operation) {
                .Addition => try newText.append(allocator, ch.value.?),
                .Deletion => range_len += 1,
                else => {},
            }
        }
        var range = try char_pos_to_range(
            &a_lines,
            a_trim_offset + range_start,
            a_trim_offset + range_start + range_len,
        );
        a_lines.reset();
        edit_results.appendAssumeCapacity(.{
            .range = range,
            .newText = try newText.toOwnedSlice(allocator),
        });
    }

    return edit_results;
    return eds;
}

fn recur_changes(
    lcs: *Array2D,
    changes: *std.ArrayListUnmanaged(Change),
    a: []const u8,
    b: []const u8,
    i: i64,
    j: i64,
    allocator: std.mem.Allocator,
) error{OutOfMemory}!void {
    // This function recursively works backwards through the LCS table in
    // order to figure out what kind of changes took place to transform `a`
    // into `b`

    const ii = @intCast(usize, i);
    const jj = @intCast(usize, j);

    if (i > 0 and j > 0 and a[ii - 1] == b[jj - 1]) {
        try changes.append(allocator, .{
            .operation = .Nothing,
            .pos = ii - 1,
            .value = null,
        });
        try recur_changes(lcs, changes, a, b, i - 1, j - 1, allocator);
    } else if (j > 0 and (i == 0 or lcs.get(ii, jj - 1).* >= lcs.get(ii - 1, jj).*)) {
        try changes.append(allocator, .{
            .operation = .Addition,
            .pos = ii,
            .value = b[jj - 1],
        });
        try recur_changes(lcs, changes, a, b, i, j - 1, allocator);
    } else if (i > 0 and (j == 0 or lcs.get(ii, jj - 1).* < lcs.get(ii - 1, jj).*)) {
        try changes.append(allocator, .{
            .operation = .Deletion,
            .pos = ii - 1,
            .value = a[ii - 1],
        });
        try recur_changes(lcs, changes, a, b, i - 1, j, allocator);
    }
}

/// Accept a range that is solely based on buffer/character position and
/// convert it to line number & character position range
fn char_pos_to_range(
    lines: *std.mem.SplitIterator(u8),
    start: usize,
    end: usize,
) Error!types.Range {
    var char_pos: usize = 0;
    var line_pos: usize = 0;
    var result_start_pos: ?types.Position = null;
    var result_end_pos: ?types.Position = null;

    while (lines.next()) |line| : ({
        char_pos += line.len + 1;
        line_pos += 1;
    }) {
        if (start >= char_pos and start <= char_pos + line.len) {
            result_start_pos = .{
                .line = @intCast(u32, line_pos),
                .character = @intCast(u32, start - char_pos),
            };
        }
        if (end >= char_pos and end <= char_pos + line.len) {
            result_end_pos = .{
                .line = @intCast(u32, line_pos),
                .character = @intCast(u32, end - char_pos),
            };
        }
    }

    if (result_start_pos == null) return error.InvalidRange;

    // If we did not find an end position, it is outside the range of the
    // string for some reason so clamp it to the string end position
    if (result_end_pos == null) {
        result_end_pos = types.Position{
            .line = @intCast(u32, line_pos),
            .character = @intCast(u32, char_pos),
        };
    }

    return types.Range{
        .start = result_start_pos.?,
        .end = result_end_pos.?,
    };
}

// Caller owns returned memory.
pub fn applyTextEdits(
/// Caller owns returned memory.
/// NOTE: As far as I know, this implementation is actually incorrect
/// as we use intermediate state, but at the same time, it works so
/// I really don't want to touch it right now. TODO: Investigate + fix.
pub fn applyContentChanges(
    allocator: std.mem.Allocator,
    text: []const u8,
    content_changes: []const types.TextDocumentContentChangeEvent,
@@ -385,3 +74,41 @@ pub fn applyTextEdits

    return try text_array.toOwnedSliceSentinel(allocator, 0);
}

// https://cs.opensource.google/go/x/tools/+/master:internal/lsp/diff/diff.go;l=40

fn textEditLessThan(_: void, lhs: types.TextEdit, rhs: types.TextEdit) bool {
    return offsets.rangeLessThan(lhs.range, rhs.range);
}

/// Caller owns returned memory.
pub fn applyTextEdits(
    allocator: std.mem.Allocator,
    text: []const u8,
    text_edits: []const types.TextEdit,
    encoding: offsets.Encoding,
) ![]const u8 {
    var text_edits_sortable = try allocator.alloc(types.TextEdit, text_edits.len);
    defer allocator.free(text_edits_sortable);

    std.mem.copy(types.TextEdit, text_edits_sortable, text_edits);
    std.sort.sort(types.TextEdit, text_edits_sortable, {}, textEditLessThan);

    var final_text = std.ArrayListUnmanaged(u8){};

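    // Splice: copy the untouched text between consecutive (sorted) edits,
    // then emit each edit's replacement text in order.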
    var last: usize = 0;
    for (text_edits_sortable) |te| {
        const start = offsets.maybePositionToIndex(text, te.range.start, encoding) orelse text.len;
        if (start > last) {
            try final_text.appendSlice(allocator, text[last..start]);
            last = start;
        }
        try final_text.appendSlice(allocator, te.newText);
        last = offsets.maybePositionToIndex(text, te.range.end, encoding) orelse text.len;
    }
    if (last < text.len) {
        try final_text.appendSlice(allocator, text[last..]);
    }

    return try final_text.toOwnedSlice(allocator);
}

src/diffz (submodule) | 1

@@ -0,0 +1 @@
Subproject commit f25b31c4f8d06bc134018133a2d57b6dcbb6941e

src/offsets.zig

@@ -17,6 +17,25 @@ pub fn indexToPosition(text: []const u8, index: usize, encoding: Encoding) types
    };
}

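/// Maps an LSP position to a byte index, returning null when `position.line`
/// does not exist in `text`.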
pub fn maybePositionToIndex(text: []const u8, position: types.Position, encoding: Encoding) ?usize {
    var line: u32 = 0;
    var line_start_index: usize = 0;
    for (text) |c, i| {
        if (line == position.line) break;
        if (c == '\n') {
            line += 1;
            line_start_index = i + 1;
        }
    }

    if (line != position.line) return null;

    const line_text = std.mem.sliceTo(text[line_start_index..], '\n');
    const line_byte_length = getNCodeUnitByteCount(line_text, position.character, encoding);

    return line_start_index + line_byte_length;
}

pub fn positionToIndex(text: []const u8, position: types.Position, encoding: Encoding) usize {
    var line: u32 = 0;
    var line_start_index: usize = 0;
@@ -333,3 +352,22 @@ pub fn getNCodeUnitByteCount(text: []const u8, n: usize, encoding: Encoding) usi
        },
    }
}

pub fn rangeLessThan(a: types.Range, b: types.Range) bool {
    return positionLessThan(a.start, b.start) or positionLessThan(a.end, b.end);
}

pub fn positionLessThan(a: types.Position, b: types.Position) bool {
    if (a.line < b.line) {
        return true;
    }
    if (a.line > b.line) {
        return false;
    }

    if (a.character < b.character) {
        return true;
    }

    return false;
}

src/semantic_tokens.zig

@@ -844,7 +844,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe
    // Maybe we can hook into it instead? Also applies to Identifier and VarDecl
    var bound_type_params = analysis.BoundTypeParams{};
    defer bound_type_params.deinit(builder.store.allocator);

    const lhs_type = try analysis.resolveFieldAccessLhsType(
        builder.store,
        (try analysis.resolveTypeOfNodeInternal(
@ -147,20 +147,6 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
|
||||
return null;
|
||||
};
|
||||
|
||||
const base_include_dirs = blk: {
|
||||
const target_info = std.zig.system.NativeTargetInfo.detect(.{}) catch break :blk null;
|
||||
var native_paths = std.zig.system.NativePaths.detect(allocator, target_info) catch break :blk null;
|
||||
defer native_paths.deinit();
|
||||
|
||||
break :blk try native_paths.include_dirs.toOwnedSlice();
|
||||
};
|
||||
defer if (base_include_dirs) |dirs| {
|
||||
for (dirs) |path| {
|
||||
allocator.free(path);
|
||||
}
|
||||
allocator.free(dirs);
|
||||
};
|
||||
|
||||
const base_args = &[_][]const u8{
|
||||
config.zig_exe_path orelse return null,
|
||||
"translate-c",
|
||||
@ -172,19 +158,12 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
|
||||
"-lc",
|
||||
};
|
||||
|
||||
const argc = base_args.len + 2 * (include_dirs.len + if (base_include_dirs) |dirs| dirs.len else 0) + 1;
|
||||
const argc = base_args.len + 2 * include_dirs.len + 1;
|
||||
var argv = try std.ArrayListUnmanaged([]const u8).initCapacity(allocator, argc);
|
||||
defer argv.deinit(allocator);
|
||||
|
||||
argv.appendSliceAssumeCapacity(base_args);
|
||||
|
||||
if (base_include_dirs) |dirs| {
|
||||
for (dirs) |include_dir| {
|
||||
argv.appendAssumeCapacity("-I");
|
||||
argv.appendAssumeCapacity(include_dir);
|
||||
}
|
||||
}
|
||||
|
||||
for (include_dirs) |include_dir| {
|
||||
argv.appendAssumeCapacity("-I");
|
||||
argv.appendAssumeCapacity(include_dir);
|
||||
|
@ -66,6 +66,9 @@ pub fn pathRelative(allocator: std.mem.Allocator, base: []const u8, rel: []const
|
||||
if (std.mem.eql(u8, component, ".")) {
|
||||
continue;
|
||||
} else if (std.mem.eql(u8, component, "..")) {
|
||||
while ((result.getLastOrNull() orelse return error.UriBadScheme) == '/') {
|
||||
_ = result.pop();
|
||||
}
|
||||
while (true) {
|
||||
const char = result.popOrNull() orelse return error.UriBadScheme;
|
||||
if (char == '/') break;
|
||||
|
@ -13,6 +13,7 @@ pub const types = @import("lsp.zig");
|
||||
pub const URI = @import("uri.zig");
|
||||
pub const DocumentStore = @import("DocumentStore.zig");
|
||||
pub const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
|
||||
pub const diff = @import("diff.zig");
|
||||
pub const analyser = @import("analyser/analyser.zig");
|
||||
|
||||
comptime {
|
||||
|
@ -1,5 +1,6 @@
|
||||
const std = @import("std");
|
||||
const zls = @import("zls");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const tres = @import("tres");
|
||||
|
||||
@ -8,7 +9,6 @@ const Config = zls.Config;
|
||||
const Server = zls.Server;
|
||||
const types = zls.types;
|
||||
|
||||
|
||||
/// initialize request taken from Visual Studio Code with the following changes:
|
||||
/// - removed locale, rootPath, rootUri, trace, workspaceFolders
|
||||
/// - removed capabilities.workspace.configuration
|
||||
@ -149,7 +149,12 @@ pub const Context = struct {
|
||||
}
|
||||
|
||||
// helper
|
||||
pub fn requestDidOpen(self: *Context, uri: []const u8, source: []const u8) !void {
|
||||
pub fn addDocument(self: *Context, source: []const u8) ![]const u8 {
|
||||
const uri: []const u8 = switch (builtin.os.tag) {
|
||||
.windows => "file:///C:\\test.zig",
|
||||
else => "file:///test.zig",
|
||||
};
|
||||
|
||||
const open_document = types.DidOpenTextDocumentParams{
|
||||
.textDocument = .{
|
||||
.uri = uri,
|
||||
@ -160,7 +165,9 @@ pub const Context = struct {
|
||||
};
|
||||
const params = try std.json.stringifyAlloc(allocator, open_document, .{});
|
||||
defer allocator.free(params);
|
||||
|
||||
try self.notification("textDocument/didOpen", params);
|
||||
return uri;
|
||||
}
|
||||
|
||||
pub fn Response(comptime Result: type) type {
|
||||
|
@ -400,12 +400,7 @@ fn testCompletion(source: []const u8, expected_completions: []const Completion)
|
||||
var ctx = try Context.init();
|
||||
defer ctx.deinit();
|
||||
|
||||
const test_uri: []const u8 = switch (builtin.os.tag) {
|
||||
.windows => "file:///C:\\test.zig",
|
||||
else => "file:///test.zig",
|
||||
};
|
||||
|
||||
try ctx.requestDidOpen(test_uri, text);
|
||||
const test_uri = try ctx.addDocument(text);
|
||||
|
||||
const params = types.CompletionParams{
|
||||
.textDocument = .{ .uri = test_uri },
|
||||
|

tests/lsp_features/definition.zig

@@ -53,12 +53,7 @@ fn testDefinition(source: []const u8) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    const test_uri: []const u8 = switch (builtin.os.tag) {
        .windows => "file:///C:\\test.zig",
        else => "file:///test.zig",
    };

    try ctx.requestDidOpen(test_uri, phr.new_source);
    const test_uri = try ctx.addDocument(phr.new_source);

    const params = types.TextDocumentPositionParams{
        .textDocument = .{ .uri = test_uri },
|

tests/lsp_features/document_symbol.zig (new file) | 81

@@ -0,0 +1,81 @@
const std = @import("std");
const zls = @import("zls");
const builtin = @import("builtin");

const tres = @import("tres");

const Context = @import("../context.zig").Context;

const types = zls.types;
const requests = zls.requests;

const allocator: std.mem.Allocator = std.testing.allocator;

test "documentSymbol - smoke" {
    try testDocumentSymbol(
        \\const S = struct {
        \\    fn f() void {}
        \\};
    ,
        \\Variable S
        \\  Function f
    );
}

// FIXME: https://github.com/zigtools/zls/issues/986
test "documentSymbol - nested struct with self" {
    try testDocumentSymbol(
        \\const Foo = struct {
        \\    const Self = @This();
        \\    pub fn foo() !Self {}
        \\    const Bar = struct {};
        \\};
    ,
        \\Variable Foo
        \\  Variable Self
        \\  Function foo
        \\  Variable Bar
    );
}

fn testDocumentSymbol(source: []const u8, want: []const u8) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    const test_uri = try ctx.addDocument(source);

    const params = types.DocumentSymbolParams{
        .textDocument = .{ .uri = test_uri },
    };

    const response = try ctx.requestGetResponse([]types.DocumentSymbol, "textDocument/documentSymbol", params);

    var got = std.ArrayListUnmanaged(u8){};
    defer got.deinit(allocator);

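    // Depth-first walk over the symbol tree using an explicit stack; the
    // current stack depth doubles as the indentation level when printing.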
    var stack: [16][]const types.DocumentSymbol = undefined;
    var stack_len: usize = 0;

    stack[stack_len] = response.result;
    stack_len += 1;

    var writer = got.writer(allocator);
    while (stack_len > 0) {
        const top = &stack[stack_len - 1];
        if (top.len > 0) {
            try std.fmt.format(writer, "{[space]s:[width]}", .{ .space = "", .width = (stack_len - 1) * 2 });
            try std.fmt.format(writer, "{s} {s}\n", .{ @tagName(top.*[0].kind), top.*[0].name });
            if (top.*[0].children) |children| {
                std.debug.assert(stack_len < stack.len);
                stack[stack_len] = children;
                stack_len += 1;
            }
            top.* = top.*[1..];
        } else {
            stack_len -= 1;
        }
    }
    _ = got.pop(); // Final \n

    try std.testing.expectEqualStrings(want, got.items);
}

tests/lsp_features/folding_range.zig

@@ -183,7 +183,7 @@ test "foldingRange - call" {
        \\extern fn foo(a: bool, b: ?usize) void;
        \\const result = foo(
        \\    false,
        \\    null,
        \\    null,
        \\);
    , &.{
        .{ .startLine = 1, .startCharacter = 19, .endLine = 4, .endCharacter = 0 },
@@ -205,12 +205,7 @@ fn testFoldingRange(source: []const u8, expect: []const types.FoldingRange) !voi
    var ctx = try Context.init();
    defer ctx.deinit();

    const test_uri: []const u8 = switch (builtin.os.tag) {
        .windows => "file:///C:\\test.zig",
        else => "file:///test.zig",
    };

    try ctx.requestDidOpen(test_uri, source);
    const test_uri = try ctx.addDocument(source);

    const params = types.FoldingRangeParams{ .textDocument = .{ .uri = test_uri } };

tests/lsp_features/inlay_hints.zig

@@ -73,12 +73,7 @@ fn testInlayHints(source: []const u8) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    const test_uri: []const u8 = switch (builtin.os.tag) {
        .windows => "file:///C:\\test.zig",
        else => "file:///test.zig",
    };

    try ctx.requestDidOpen(test_uri, phr.new_source);
    const test_uri = try ctx.addDocument(phr.new_source);

    const range = types.Range{
        .start = types.Position{ .line = 0, .character = 0 },
@@ -120,10 +115,10 @@ fn testInlayHints(source: []const u8) !void {
    for (hints) |hint| {
        if (position.line != hint.position.line or position.character != hint.position.character) continue;

        if(!std.mem.endsWith(u8, hint.label, ":")) {
            try error_builder.msgAtLoc("label `{s}` must end with a colon!", new_loc, .err, .{ hint.label });
        if (!std.mem.endsWith(u8, hint.label, ":")) {
            try error_builder.msgAtLoc("label `{s}` must end with a colon!", new_loc, .err, .{hint.label});
        }
        const actual_label = hint.label[0..hint.label.len - 1];
        const actual_label = hint.label[0 .. hint.label.len - 1];

        if (!std.mem.eql(u8, expected_label, actual_label)) {
            try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", new_loc, .err, .{ expected_label, actual_label });

tests/lsp_features/references.zig

@@ -115,10 +115,6 @@ test "references - label" {
}

fn testReferences(source: []const u8) !void {
    const file_uri: []const u8 = switch (builtin.os.tag) {
        .windows => "file:///C:\\test.zig",
        else => "file:///test.zig",
    };
    const new_name = "placeholder";

    var phr = try helper.collectReplacePlaceholders(allocator, source, new_name);
@@ -127,7 +123,7 @@ fn testReferences(source: []const u8) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    try ctx.requestDidOpen(file_uri, phr.new_source);
    const file_uri = try ctx.addDocument(phr.new_source);

    try std.testing.expect(phr.locations.len != 0);

tests/lsp_features/selection_range.zig

@@ -31,12 +31,7 @@ fn testSelectionRange(source: []const u8, want: []const []const u8) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    const test_uri: []const u8 = switch (builtin.os.tag) {
        .windows => "file:///C:\\test.zig",
        else => "file:///test.zig",
    };

    try ctx.requestDidOpen(test_uri, phr.new_source);
    const test_uri = try ctx.addDocument(phr.new_source);

    const position = offsets.locToRange(phr.new_source, phr.locations.items(.new)[0], .@"utf-16").start;

tests/lsp_features/semantic_tokens.zig

@@ -44,16 +44,11 @@ test "semantic tokens - string literals" {
    );
}

const file_uri = switch (builtin.os.tag) {
    .windows => "file:///C:/test.zig",
    else => "file:///test.zig",
};

fn testSemanticTokens(source: []const u8, expected: []const u32) !void {
    var ctx = try Context.init();
    defer ctx.deinit();

    try ctx.requestDidOpen(file_uri, source);
    const file_uri = try ctx.addDocument(source);

    const Response = struct {
        data: []const u32,
@@ -62,9 +57,12 @@ fn testSemanticTokens(source: []const u8, expected: []const u32) !void {
    const expected_bytes = try std.json.stringifyAlloc(allocator, Response{ .data = expected }, .{});
    defer allocator.free(expected_bytes);

    const params = try std.json.stringifyAlloc(allocator, .{ .textDocument = .{ .uri = file_uri } }, .{});
    defer allocator.free(params);

    try ctx.request(
        "textDocument/semanticTokens/full",
        "{\"textDocument\":{\"uri\":\"" ++ file_uri ++ "\"}}",
        params,
        expected_bytes,
    );
}

tests/tests.zig

@@ -5,6 +5,7 @@ comptime {
    _ = @import("utility/offsets.zig");
    _ = @import("utility/position_context.zig");
    _ = @import("utility/uri.zig");
    _ = @import("utility/diff.zig");

    // TODO Lifecycle Messages

@@ -13,6 +14,7 @@ comptime {
    // LSP features
    _ = @import("lsp_features/completion.zig");
    _ = @import("lsp_features/definition.zig");
    _ = @import("lsp_features/document_symbol.zig");
    _ = @import("lsp_features/folding_range.zig");
    _ = @import("lsp_features/inlay_hints.zig");
    _ = @import("lsp_features/references.zig");

tests/utility/diff.zig (new file) | 30

@@ -0,0 +1,30 @@
const std = @import("std");
const zls = @import("zls");

const allocator = std.testing.allocator;

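/// Generates a random printable-ASCII string between 16 and 1024 bytes long.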
fn gen(alloc: std.mem.Allocator, rand: std.rand.Random) ![]const u8 {
    var buffer = try alloc.alloc(u8, rand.intRangeAtMost(usize, 16, 1024));
    for (buffer) |*b| b.* = rand.intRangeAtMost(u8, 32, 126);
    return buffer;
}

test "diff - random" {
    var arena = std.heap.ArenaAllocator.init(allocator);
    defer arena.deinit();

    var rand = std.rand.DefaultPrng.init(0);

    var index: usize = 0;

    while (index < 100) : (index += 1) {
        defer _ = arena.reset(.retain_capacity);

        const pre = try gen(arena.allocator(), rand.random());
        const post = try gen(arena.allocator(), rand.random());

        var edits = try zls.diff.edits(arena.allocator(), pre, post, .@"utf-8");
        const applied = try zls.diff.applyTextEdits(arena.allocator(), pre, edits.items, .@"utf-8");
        try std.testing.expectEqualStrings(post, applied);
    }
}

tests/utility/uri.zig

@@ -52,4 +52,8 @@ test "uri - pathRelative" {
    const join2 = try URI.pathRelative(allocator, "file:///project/zig/wow", "../]src]/]main.zig");
    defer allocator.free(join2);
    try std.testing.expectEqualStrings("file:///project/zig/%5Dsrc%5D/%5Dmain.zig", join2);

    const join3 = try URI.pathRelative(allocator, "file:///project/zig/wow//", "../src/main.zig");
    defer allocator.free(join3);
    try std.testing.expectEqualStrings("file:///project/zig/src/main.zig", join3);
}