implement cInclude completions & goto definition (#970)
* implement cInclude completions & goto definition
* fix cImport features on windows
* fix relative path with `..`
parent bf19ed3ea9
commit 37ca1333ed
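For orientation, a minimal user-side sketch of what the first item enables (hypothetical user code, not part of the diff): zls now offers path completions inside the `@cInclude` string, and goto-definition on it opens the header resolved from the collected include directories.

// Hypothetical user code, not part of this commit.
// Completions are offered inside the @cInclude string, and goto-definition
// on "stdio.h" jumps to the header found in the collected include dirs.
const c = @cImport({
    @cInclude("stdio.h");
});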
@@ -854,6 +854,33 @@ pub fn collectDependencies(
     }
 }
 
+/// TODO resolve relative paths
+pub fn collectIncludeDirs(
+    store: *const DocumentStore,
+    allocator: std.mem.Allocator,
+    handle: Handle,
+    include_dirs: *std.ArrayListUnmanaged([]const u8),
+) !void {
+    const target_info = try std.zig.system.NativeTargetInfo.detect(.{});
+    var native_paths = try std.zig.system.NativePaths.detect(allocator, target_info);
+    defer native_paths.deinit();
+
+    const build_file_includes_paths: []const []const u8 = if (handle.associated_build_file) |build_file_uri|
+        store.build_files.get(build_file_uri).?.config.include_dirs
+    else
+        &.{};
+
+    try include_dirs.ensureTotalCapacity(allocator, native_paths.include_dirs.items.len + build_file_includes_paths.len);
+
+    const native_include_dirs = try native_paths.include_dirs.toOwnedSlice();
+    defer allocator.free(native_include_dirs);
+    include_dirs.appendSliceAssumeCapacity(native_include_dirs);
+
+    for (build_file_includes_paths) |include_path| {
+        include_dirs.appendAssumeCapacity(try allocator.dupe(u8, include_path));
+    }
+}
+
 /// returns the document behind `@cImport()` where `node` is the `cImport` node
 /// if a cImport can't be translated e.g. requires computing a
 /// comptime value `resolveCImport` will return null
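A standalone sketch of the first half of this helper, using the same std calls as the hunk above (the std.zig.system API as used by this diff's Zig version); illustrative only:

const std = @import("std");

// Print the host's native C include directories, i.e. what collectIncludeDirs
// gathers before the build-file include_dirs are appended.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const target_info = try std.zig.system.NativeTargetInfo.detect(.{});
    var native_paths = try std.zig.system.NativePaths.detect(allocator, target_info);
    defer native_paths.deinit();

    for (native_paths.include_dirs.items) |dir| {
        std.debug.print("include dir: {s}\n", .{dir});
    }
}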
@@ -872,15 +899,22 @@ pub fn resolveCImport(self: *DocumentStore, handle: Handle, node: Ast.Node.Index
     const result = self.cimports.get(hash) orelse blk: {
         const source: []const u8 = handle.cimports.items(.source)[index];
 
-        const include_dirs: []const []const u8 = if (handle.associated_build_file) |build_file_uri|
-            self.build_files.get(build_file_uri).?.config.include_dirs
-        else
-            &.{};
+        var include_dirs: std.ArrayListUnmanaged([]const u8) = .{};
+        defer {
+            for (include_dirs.items) |path| {
+                self.allocator.free(path);
+            }
+            include_dirs.deinit(self.allocator);
+        }
+        self.collectIncludeDirs(self.allocator, handle, &include_dirs) catch |err| {
+            log.err("failed to resolve include paths: {}", .{err});
+            return null;
+        };
 
         var result = (try translate_c.translate(
             self.allocator,
             self.config.*,
-            include_dirs,
+            include_dirs.items,
             source,
         )) orelse return null;
 
@@ -936,18 +970,13 @@ pub fn uriFromImportStr(self: *const DocumentStore, allocator: std.mem.Allocator
         }
         return null;
     } else {
-        const base = handle.uri;
-        var base_len = base.len;
-        while (base[base_len - 1] != '/' and base_len > 0) {
-            base_len -= 1;
-        }
-        base_len -= 1;
-        if (base_len <= 0) {
-            return null;
-            // return error.UriBadScheme;
-        }
+        var seperator_index = handle.uri.len;
+        while (seperator_index > 0) : (seperator_index -= 1) {
+            if (std.fs.path.isSep(handle.uri[seperator_index - 1])) break;
+        }
+        const base = handle.uri[0 .. seperator_index - 1];
 
-        return URI.pathRelative(allocator, base[0..base_len], import_str) catch |err| switch (err) {
+        return URI.pathRelative(allocator, base, import_str) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.UriBadScheme => return null,
        };
src/Server.zig
@@ -1210,14 +1210,45 @@ fn hoverDefinitionFieldAccess(
 
 fn gotoDefinitionString(
     server: *Server,
-    pos_index: usize,
+    pos_context: analysis.PositionContext,
     handle: *const DocumentStore.Handle,
 ) error{OutOfMemory}!?types.Location {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    const import_str = analysis.getImportStr(handle.tree, 0, pos_index) orelse return null;
-    const uri = try server.document_store.uriFromImportStr(server.arena.allocator(), handle.*, import_str);
+    const allocator = server.arena.allocator();
+
+    const loc = pos_context.loc().?;
+    const import_str_loc = offsets.tokenIndexToLoc(handle.tree.source, loc.start);
+    if (import_str_loc.end - import_str_loc.start < 2) return null;
+    var import_str = offsets.locToSlice(handle.tree.source, .{
+        .start = import_str_loc.start + 1,
+        .end = import_str_loc.end - 1,
+    });
+
+    const uri = switch (pos_context) {
+        .import_string_literal,
+        .embedfile_string_literal,
+        => try server.document_store.uriFromImportStr(allocator, handle.*, import_str),
+        .cinclude_string_literal => try uri_utils.fromPath(
+            allocator,
+            blk: {
+                if (std.fs.path.isAbsolute(import_str)) break :blk import_str;
+                var include_dirs: std.ArrayListUnmanaged([]const u8) = .{};
+                server.document_store.collectIncludeDirs(allocator, handle.*, &include_dirs) catch |err| {
+                    log.err("failed to resolve include paths: {}", .{err});
+                    return null;
+                };
+                for (include_dirs.items) |dir| {
+                    const path = try std.fs.path.join(allocator, &.{ dir, import_str });
+                    std.fs.accessAbsolute(path, .{}) catch continue;
+                    break :blk path;
+                }
+                return null;
+            },
+        ),
+        else => unreachable,
+    };
 
     return types.Location{
         .uri = uri orelse return null,
@@ -1643,60 +1674,87 @@ fn completeDot(server: *Server, handle: *const DocumentStore.Handle) error{OutOf
     return completions;
 }
 
-fn completeFileSystemStringLiteral(allocator: std.mem.Allocator, store: *const DocumentStore, handle: *const DocumentStore.Handle, completing: []const u8, is_import: bool) ![]types.CompletionItem {
-    var subpath_present = false;
-    var completions = std.ArrayListUnmanaged(types.CompletionItem){};
+fn completeFileSystemStringLiteral(
+    arena: std.mem.Allocator,
+    store: DocumentStore,
+    handle: DocumentStore.Handle,
+    pos_context: analysis.PositionContext,
+) ![]types.CompletionItem {
+    var completions: analysis.CompletionSet = .{};
 
-    fsc: {
-        var document_path = try uri_utils.parse(allocator, handle.uri);
-        var document_dir_path = std.fs.openIterableDirAbsolute(std.fs.path.dirname(document_path) orelse break :fsc, .{}) catch break :fsc;
-        defer document_dir_path.close();
+    const loc = pos_context.loc().?;
+    var completing = handle.tree.source[loc.start + 1 .. loc.end - 1];
 
-        if (std.mem.lastIndexOfScalar(u8, completing, '/')) |subpath_index| {
-            var subpath = completing[0..subpath_index];
+    var seperator_index = completing.len;
+    while (seperator_index > 0) : (seperator_index -= 1) {
+        if (std.fs.path.isSep(completing[seperator_index - 1])) break;
+    }
+    completing = completing[0..seperator_index];
 
-            if (std.mem.startsWith(u8, subpath, "./") and subpath_index > 2) {
-                subpath = completing[2..subpath_index];
-            } else if (std.mem.startsWith(u8, subpath, ".") and subpath_index > 1) {
-                subpath = completing[1..subpath_index];
+    var search_paths: std.ArrayListUnmanaged([]const u8) = .{};
+    if (std.fs.path.isAbsolute(completing) and pos_context != .import_string_literal) {
+        try search_paths.append(arena, completing);
+    } else if (pos_context == .cinclude_string_literal) {
+        store.collectIncludeDirs(arena, handle, &search_paths) catch |err| {
+            log.err("failed to resolve include paths: {}", .{err});
+            return &.{};
+        };
+    } else {
+        var document_path = try uri_utils.parse(arena, handle.uri);
+        try search_paths.append(arena, std.fs.path.dirname(document_path).?);
    }
 
+    for (search_paths.items) |path| {
+        if (!std.fs.path.isAbsolute(path)) continue;
+        const dir_path = if (std.fs.path.isAbsolute(completing)) path else try std.fs.path.join(arena, &.{ path, completing });
+
+        var iterable_dir = std.fs.openIterableDirAbsolute(dir_path, .{}) catch continue;
+        defer iterable_dir.close();
+        var it = iterable_dir.iterateAssumeFirstIteration();
+
+        while (it.next() catch null) |entry| {
+            const expected_extension = switch (pos_context) {
+                .import_string_literal => ".zig",
+                .cinclude_string_literal => ".h",
+                .embedfile_string_literal => null,
+                else => unreachable,
+            };
+            switch (entry.kind) {
+                .File => if (expected_extension) |expected| {
+                    const actual_extension = std.fs.path.extension(entry.name);
+                    if (!std.mem.eql(u8, actual_extension, expected)) continue;
+                },
+                .Directory => {},
+                else => continue,
+            }
 
-            var old = document_dir_path;
-            document_dir_path = document_dir_path.dir.openIterableDir(subpath, .{}) catch break :fsc; // NOTE: Is this even safe lol?
-            old.close();
-
-            subpath_present = true;
-        }
-
-        var dir_iterator = document_dir_path.iterate();
-        while (try dir_iterator.next()) |entry| {
-            if (std.mem.startsWith(u8, entry.name, ".")) continue;
-            if (entry.kind == .File and is_import and !std.mem.endsWith(u8, entry.name, ".zig")) continue;
-
-            const l = try allocator.dupe(u8, entry.name);
-            try completions.append(allocator, types.CompletionItem{
-                .label = l,
-                .insertText = l,
+            _ = try completions.getOrPut(arena, types.CompletionItem{
+                .label = try arena.dupe(u8, entry.name),
+                .detail = if (pos_context == .cinclude_string_literal) path else null,
+                .insertText = if (entry.kind == .Directory)
+                    try std.fmt.allocPrint(arena, "{s}/", .{entry.name})
+                else
+                    null,
                 .kind = if (entry.kind == .File) .File else .Folder,
             });
         }
    }
 
-    if (!subpath_present and is_import) {
+    if (completing.len == 0 and pos_context == .import_string_literal) {
        if (handle.associated_build_file) |uri| {
            const build_file = store.build_files.get(uri).?;
-            try completions.ensureUnusedCapacity(allocator, build_file.config.packages.len);
+            try completions.ensureUnusedCapacity(arena, build_file.config.packages.len);
 
            for (build_file.config.packages) |pkg| {
-                completions.appendAssumeCapacity(.{
+                completions.putAssumeCapacity(.{
                    .label = pkg.name,
                    .kind = .Module,
-                });
+                }, {});
            }
        }
    }
 
-    return completions.toOwnedSlice(allocator);
+    return completions.keys();
 }
 
 fn initializeHandler(server: *Server, request: types.InitializeParams) Error!types.InitializeResult {
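The per-context extension filter used inside the directory loop above, isolated as a sketch (the function and test names are made up for illustration, not zls API):

const std = @import("std");

// Imports complete only ".zig" files, cIncludes only ".h" files, and
// @embedFile (a null expected extension) accepts any file.
fn keepsFile(expected_extension: ?[]const u8, name: []const u8) bool {
    const expected = expected_extension orelse return true;
    return std.mem.eql(u8, std.fs.path.extension(name), expected);
}

test "keepsFile" {
    try std.testing.expect(keepsFile(".zig", "main.zig"));
    try std.testing.expect(!keepsFile(".h", "main.zig"));
    try std.testing.expect(keepsFile(null, "data.bin"));
}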
@@ -1875,7 +1933,7 @@ fn initializeHandler(server: *Server, request: types.InitializeParams) Error!typ
         .renameProvider = .{ .bool = true },
         .completionProvider = .{
             .resolveProvider = false,
-            .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]" },
+            .triggerCharacters = &[_][]const u8{ ".", ":", "@", "]", "/" },
             .completionItem = .{ .labelDetailsSupport = true },
         },
         .documentHighlightProvider = .{ .bool = true },
@@ -2184,12 +2242,13 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
         .global_error_set => try server.completeError(handle),
         .enum_literal => try server.completeDot(handle),
         .label => try server.completeLabel(source_index, handle),
-        .import_string_literal, .embedfile_string_literal => |loc| blk: {
+        .import_string_literal,
+        .cinclude_string_literal,
+        .embedfile_string_literal,
+        => blk: {
             if (!server.config.enable_import_embedfile_argument_completions) break :blk null;
 
-            const completing = offsets.locToSlice(handle.tree.source, loc);
-            const is_import = pos_context == .import_string_literal;
-            break :blk completeFileSystemStringLiteral(server.arena.allocator(), &server.document_store, handle, completing, is_import) catch |err| {
+            break :blk completeFileSystemStringLiteral(server.arena.allocator(), server.document_store, handle.*, pos_context) catch |err| {
                 log.err("failed to get file system completions: {}", .{err});
                 return null;
             };
@@ -2203,7 +2262,14 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
     // the remaining identifier with the completion instead of just inserting.
     // TODO Identify function call/struct init and replace the whole thing.
     const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
-    if (server.client_capabilities.supports_apply_edits and pos_context.loc() != null and lookahead_context.loc() != null and pos_context.loc().?.end != lookahead_context.loc().?.end) {
+    if (server.client_capabilities.supports_apply_edits and
+        pos_context != .import_string_literal and
+        pos_context != .cinclude_string_literal and
+        pos_context != .embedfile_string_literal and
+        pos_context.loc() != null and
+        lookahead_context.loc() != null and
+        pos_context.loc().?.end != lookahead_context.loc().?.end)
+    {
         var end = lookahead_context.loc().?.end;
         while (end < handle.text.len and (std.ascii.isAlphanumeric(handle.text[end]) or handle.text[end] == '"')) {
             end += 1;
@@ -2285,7 +2351,10 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol
         .builtin => |loc| try server.gotoDefinitionBuiltin(handle, loc),
         .var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
         .field_access => |loc| try server.gotoDefinitionFieldAccess(handle, source_index, loc, resolve_alias),
-        .import_string_literal => try server.gotoDefinitionString(source_index, handle),
+        .import_string_literal,
+        .cinclude_string_literal,
+        .embedfile_string_literal,
+        => try server.gotoDefinitionString(pos_context, handle),
         .label => try server.gotoDefinitionLabel(source_index, handle),
         else => null,
     };
@@ -1458,47 +1458,11 @@ fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize)
     return source_index >= loc.start and source_index <= loc.end;
 }
 
-pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
-    const node_tags = tree.nodes.items(.tag);
-
-    var buf: [2]Ast.Node.Index = undefined;
-    if (tree.fullContainerDecl(&buf, node)) |container_decl| {
-        for (container_decl.ast.members) |decl_idx| {
-            if (getImportStr(tree, decl_idx, source_index)) |name| {
-                return name;
-            }
-        }
-        return null;
-    } else if (tree.fullVarDecl(node)) |var_decl| {
-        return getImportStr(tree, var_decl.ast.init_node, source_index);
-    } else if (node_tags[node] == .@"usingnamespace") {
-        return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
-    }
-
-    if (!nodeContainsSourceIndex(tree, node, source_index)) return null;
-
-    if (!ast.isBuiltinCall(tree, node)) return null;
-
-    const builtin_token = tree.nodes.items(.main_token)[node];
-    const call_name = tree.tokenSlice(builtin_token);
-
-    if (!std.mem.eql(u8, call_name, "@import")) return null;
-
-    var buffer: [2]Ast.Node.Index = undefined;
-    const params = ast.builtinCallParams(tree, node, &buffer).?;
-
-    if (params.len != 1) return null;
-
-    if (node_tags[params[0]] != .string_literal) return null;
-
-    const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]);
-    return import_str[1 .. import_str.len - 1];
-}
-
 pub const PositionContext = union(enum) {
     builtin: offsets.Loc,
     comment,
     import_string_literal: offsets.Loc,
+    cinclude_string_literal: offsets.Loc,
     embedfile_string_literal: offsets.Loc,
     string_literal: offsets.Loc,
     field_access: offsets.Loc,
@@ -1515,6 +1479,7 @@ pub const PositionContext = union(enum) {
             .builtin => |r| r,
             .comment => null,
             .import_string_literal => |r| r,
+            .cinclude_string_literal => |r| r,
             .embedfile_string_literal => |r| r,
             .string_literal => |r| r,
             .field_access => |r| r,
@@ -1631,8 +1596,10 @@ pub fn getPositionContext(
                     if (std.mem.eql(u8, builtin_name, "@import")) {
                         curr_ctx.ctx = .{ .import_string_literal = tok.loc };
                         break :string_lit_block;
-                    }
-                    if (std.mem.eql(u8, builtin_name, "@embedFile")) {
+                    } else if (std.mem.eql(u8, builtin_name, "@cInclude")) {
+                        curr_ctx.ctx = .{ .cinclude_string_literal = tok.loc };
+                        break :string_lit_block;
+                    } else if (std.mem.eql(u8, builtin_name, "@embedFile")) {
                         curr_ctx.ctx = .{ .embedfile_string_literal = tok.loc };
                         break :string_lit_block;
                     }
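The builtin-name dispatch added above, reduced to a standalone sketch (the enum and helper names are made up for illustration, not zls types):

const std = @import("std");

const StringLiteralContext = enum { import, cinclude, embedfile, other };

// Map the builtin preceding a string literal to its completion context,
// mirroring the @import / @cInclude / @embedFile chain above.
fn contextForBuiltin(name: []const u8) StringLiteralContext {
    if (std.mem.eql(u8, name, "@import")) return .import;
    if (std.mem.eql(u8, name, "@cInclude")) return .cinclude;
    if (std.mem.eql(u8, name, "@embedFile")) return .embedfile;
    return .other;
}

test "contextForBuiltin" {
    try std.testing.expectEqual(StringLiteralContext.cinclude, contextForBuiltin("@cInclude"));
    try std.testing.expectEqual(StringLiteralContext.other, contextForBuiltin("@cImport"));
}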
@@ -147,20 +147,6 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
         return null;
     };
 
-    const base_include_dirs = blk: {
-        const target_info = std.zig.system.NativeTargetInfo.detect(.{}) catch break :blk null;
-        var native_paths = std.zig.system.NativePaths.detect(allocator, target_info) catch break :blk null;
-        defer native_paths.deinit();
-
-        break :blk try native_paths.include_dirs.toOwnedSlice();
-    };
-    defer if (base_include_dirs) |dirs| {
-        for (dirs) |path| {
-            allocator.free(path);
-        }
-        allocator.free(dirs);
-    };
-
     const base_args = &[_][]const u8{
         config.zig_exe_path orelse return null,
         "translate-c",
@@ -172,19 +158,12 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
         "-lc",
     };
 
-    const argc = base_args.len + 2 * (include_dirs.len + if (base_include_dirs) |dirs| dirs.len else 0) + 1;
+    const argc = base_args.len + 2 * include_dirs.len + 1;
     var argv = try std.ArrayListUnmanaged([]const u8).initCapacity(allocator, argc);
     defer argv.deinit(allocator);
 
     argv.appendSliceAssumeCapacity(base_args);
 
-    if (base_include_dirs) |dirs| {
-        for (dirs) |include_dir| {
-            argv.appendAssumeCapacity("-I");
-            argv.appendAssumeCapacity(include_dir);
-        }
-    }
-
     for (include_dirs) |include_dir| {
         argv.appendAssumeCapacity("-I");
         argv.appendAssumeCapacity(include_dir);
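With the native include directories now supplied by the caller (via collectIncludeDirs), the argv sizing simplifies to the formula above. A small self-contained check of that layout, with hypothetical values:

const std = @import("std");

test "translate-c argv layout: base args, one -I/path pair per dir, one source slot" {
    const allocator = std.testing.allocator;
    const base_args = [_][]const u8{ "zig", "translate-c", "-lc" };
    const include_dirs = [_][]const u8{ "/usr/include", "/opt/include" };

    const argc = base_args.len + 2 * include_dirs.len + 1;
    var argv = try std.ArrayListUnmanaged([]const u8).initCapacity(allocator, argc);
    defer argv.deinit(allocator);

    argv.appendSliceAssumeCapacity(&base_args);
    for (include_dirs) |dir| {
        argv.appendAssumeCapacity("-I");
        argv.appendAssumeCapacity(dir);
    }
    argv.appendAssumeCapacity("main.c"); // the file to translate

    try std.testing.expectEqual(@as(usize, argc), argv.items.len);
}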
@@ -66,6 +66,9 @@ pub fn pathRelative(allocator: std.mem.Allocator, base: []const u8, rel: []const
         if (std.mem.eql(u8, component, ".")) {
             continue;
         } else if (std.mem.eql(u8, component, "..")) {
+            while ((result.getLastOrNull() orelse return error.UriBadScheme) == '/') {
+                _ = result.pop();
+            }
             while (true) {
                 const char = result.popOrNull() orelse return error.UriBadScheme;
                 if (char == '/') break;
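The added loop strips trailing '/' characters before the existing pop-until-separator loop, which is what lets a base like "file:///project/zig/wow//" resolve ".." correctly (see the test added below). The combined behaviour as a standalone sketch, using the same ArrayList calls as the hunk above:

const std = @import("std");

// Resolve one ".." against a path buffer: drop trailing slashes, then pop
// characters until the component's leading '/' has been consumed.
fn popParent(result: *std.ArrayList(u8)) error{UriBadScheme}!void {
    while ((result.getLastOrNull() orelse return error.UriBadScheme) == '/') {
        _ = result.pop();
    }
    while (true) {
        const char = result.popOrNull() orelse return error.UriBadScheme;
        if (char == '/') break;
    }
}

test "popParent ignores a trailing separator" {
    var result = std.ArrayList(u8).init(std.testing.allocator);
    defer result.deinit();
    try result.appendSlice("/project/zig/wow//");
    try popParent(&result);
    try std.testing.expectEqualStrings("/project/zig", result.items);
}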
@@ -52,4 +52,8 @@ test "uri - pathRelative" {
     const join2 = try URI.pathRelative(allocator, "file:///project/zig/wow", "../]src]/]main.zig");
     defer allocator.free(join2);
     try std.testing.expectEqualStrings("file:///project/zig/%5Dsrc%5D/%5Dmain.zig", join2);
+
+    const join3 = try URI.pathRelative(allocator, "file:///project/zig/wow//", "../src/main.zig");
+    defer allocator.free(join3);
+    try std.testing.expectEqualStrings("file:///project/zig/src/main.zig", join3);
 }