Merge pull request #665 from Techatrix/cimport-diagnostics

Report cImport failure using `textDocument/publishDiagnostics`
Techatrix 2022-09-20 01:43:04 +00:00 committed by GitHub
commit 0fa788b727
3 changed files with 120 additions and 98 deletions
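In short: `translate_c.translate` now returns a tagged union (`Result`) instead of an optional file path, so when `zig translate-c` fails its stderr is carried back to the server and turned into an error diagnostic on the offending `@cImport` node instead of only being logged. As a rough illustration (hypothetical user code; the exact message text depends on the local zig/clang installation), a document like the following would now get a published diagnostic:

    // example.zig -- hypothetical file opened in the editor
    const c = @cImport({
        @cInclude("definitely_missing.h"); // not on any include path
    });

    // zls would publish roughly the following diagnostic for the @cImport node:
    //   severity: Error, code: "cImport", source: "zls"
    //   message:  "fatal error: 'definitely_missing.h' file not found"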

src/DocumentStore.zig

@@ -274,7 +274,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
         if (handle.associated_build_file) |build_file| {
             log.debug("Opened document `{s}` with build file `{s}`", .{ handle.uri(), build_file.uri });
         } else {
-            log.debug("Opened document `{s}` without a build file", .{ handle.uri() });
+            log.debug("Opened document `{s}` without a build file", .{handle.uri()});
         }
     }
@@ -439,8 +439,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
     handle.cimports = try self.collectCIncludes(handle);
     errdefer {
-        for (handle.cimports) |item| {
-            self.allocator.free(item.uri);
+        for (handle.cimports) |*item| {
+            item.result.deinit(self.allocator);
         }
         self.allocator.free(handle.cimports);
     }
@@ -515,8 +515,8 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
             self.allocator.free(import_uri);
         }
-        for (handle.cimports) |item| {
-            self.allocator.free(item.uri);
+        for (handle.cimports) |*item| {
+            item.result.deinit(self.allocator);
         }
         handle.document_scope.deinit(self.allocator);
@@ -562,7 +562,7 @@ fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8
 }
 pub const CImportSource = struct {
-    /// the `@cInclude` node
+    /// the `@cImport` node
     node: Ast.Node.Index,
     /// hash of c source file
     hash: [Hasher.mac_length]u8,
@@ -607,12 +607,13 @@ fn collectCIncludeSources(self: *DocumentStore, handle: *Handle) ![]CImportSourc
 }
 pub const CImportHandle = struct {
-    /// the `@cInclude` node
+    /// the `@cImport` node
     node: Ast.Node.Index,
     /// hash of the c source file
     hash: [Hasher.mac_length]u8,
-    /// uri to a zig source file generated with translate-c
-    uri: []const u8,
+    /// the result from calling zig translate-c
+    /// see `translate_c.translate`
+    result: translate_c.Result,
 };
 /// Collects all `@cImport` nodes and converts them into zig files using translate-c
@@ -621,12 +622,12 @@ fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
     var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
     defer self.allocator.free(cimport_nodes);
-    var uris = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
+    var cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
     errdefer {
-        for (uris.items) |item| {
-            self.allocator.free(item.uri);
+        for (cimports.items) |*item| {
+            item.result.deinit(self.allocator);
         }
-        uris.deinit(self.allocator);
+        cimports.deinit(self.allocator);
     }
     for (cimport_nodes) |node| {
@@ -636,28 +637,25 @@ fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
         };
         defer self.allocator.free(c_source);
-        const uri = self.translate(handle, c_source) catch |err| {
-            std.log.warn("failed to translate cInclude: {}", .{err});
-            continue;
-        } orelse continue;
-        errdefer self.allocator.free(uri);
+        const result = (try self.translate(handle, c_source)) orelse continue;
+        errdefer result.deinit(self.allocator);
         var hasher = hasher_init;
         hasher.update(c_source);
         var hash: [Hasher.mac_length]u8 = undefined;
         hasher.final(&hash);
-        uris.appendAssumeCapacity(.{
+        cimports.appendAssumeCapacity(.{
             .node = node,
             .hash = hash,
-            .uri = uri,
+            .result = result,
         });
     }
-    return uris.toOwnedSlice(self.allocator);
+    return cimports.toOwnedSlice(self.allocator);
 }
-fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]const u8 {
+fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) error{OutOfMemory}!?translate_c.Result {
     const dirs: []BuildConfig.IncludeDir = if (handle.associated_build_file) |build_file| build_file.config.include_dirs else &.{};
     const include_dirs = blk: {
         var result = try self.allocator.alloc([]const u8, dirs.len);
@@ -671,15 +669,21 @@ fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]cons
     };
     defer self.allocator.free(include_dirs);
-    const file_path = (try translate_c.translate(
+    const maybe_result = try translate_c.translate(
         self.allocator,
         self.config,
         include_dirs,
         source,
-    )) orelse return null;
-    defer self.allocator.free(file_path);
-    return try URI.fromPath(self.allocator, file_path);
+    );
+    if (maybe_result) |result| {
+        switch (result) {
+            .success => |uri| log.debug("Translated cImport into {s}", .{uri}),
+            else => {},
+        }
+    }
+    return maybe_result;
 }
 fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
@@ -703,8 +707,8 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
         }
         self.allocator.free(old_imports);
-        for (old_cimports) |old_cimport| {
-            self.allocator.free(old_cimport.uri);
+        for (old_cimports) |*old_cimport| {
+            old_cimport.result.deinit(self.allocator);
         }
         self.allocator.free(old_cimports);
     }
@@ -712,26 +716,30 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
     var i: usize = 0;
     while (i < handle.imports_used.items.len) {
         const old = handle.imports_used.items[i];
-        still_exists: {
+        const found_new = found: {
             for (handle.import_uris) |new| {
-                if (std.mem.eql(u8, new, old)) {
-                    handle.imports_used.items[i] = new;
-                    break :still_exists;
-                }
+                if (!std.mem.eql(u8, new, old)) continue;
+                break :found new;
             }
             for (handle.cimports) |cimport| {
-                const new = cimport.uri;
-                if (std.mem.eql(u8, new, old)) {
-                    handle.imports_used.items[i] = new;
-                    break :still_exists;
-                }
+                if (cimport.result != .success) continue;
+                const new = cimport.result.success;
+                if (!std.mem.eql(u8, old, new)) continue;
+                break :found new;
             }
+            break :found null;
+        };
+        if (found_new) |new| {
+            handle.imports_used.items[i] = new;
+            i += 1;
+        } else {
            log.debug("Import removed: {s}", .{old});
            self.decrementCount(old);
            _ = handle.imports_used.swapRemove(i);
-            continue;
         }
-        i += 1;
     }
 }
@@ -747,48 +755,23 @@ fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHan
     var old_cimports = handle.cimports;
     var new_cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, new_sources.len);
     errdefer {
-        for (new_cimports.items) |new_cimport| {
-            self.allocator.free(new_cimport.uri);
+        for (new_cimports.items) |*new_cimport| {
+            new_cimport.result.deinit(self.allocator);
         }
         new_cimports.deinit(self.allocator);
     }
-    for (new_sources) |new_source| {
-        const maybe_old_cimport: ?CImportHandle = blk: {
-            const old_cimport: CImportHandle = found: {
-                for (old_cimports) |old_cimport| {
-                    if (new_source.node == old_cimport.node) {
-                        break :found old_cimport;
-                    }
-                }
-                break :blk null;
-            };
+    outer: for (new_sources) |new_source| {
+        // look for a old cimport with identical source hash
+        for (old_cimports) |old_cimport| {
+            if (!std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) continue;
-            // avoid re-translating if the source didn't change
-            if (std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) {
-                break :blk CImportHandle{
-                    .node = old_cimport.node,
-                    .hash = old_cimport.hash,
-                    .uri = try self.allocator.dupe(u8, old_cimport.uri),
-                };
-            }
-            const new_uri = self.translate(handle, new_source.source) catch |err| {
-                std.log.warn("failed to translate cInclude: {}", .{err});
-                continue;
-            } orelse continue;
-            errdefer self.allocator.free(new_uri);
-            break :blk CImportHandle{
+            new_cimports.appendAssumeCapacity(.{
                 .node = old_cimport.node,
                 .hash = old_cimport.hash,
-                .uri = new_uri,
-            };
-        };
-        if (maybe_old_cimport) |cimport| {
-            new_cimports.appendAssumeCapacity(cimport);
-            continue;
+                .result = try old_cimport.result.dupe(self.allocator),
+            });
+            continue :outer;
         }
         const c_source = translate_c.convertCInclude(self.allocator, handle.tree, new_source.node) catch |err| switch (err) {
@@ -802,19 +785,13 @@ fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHan
         hasher.update(c_source);
         hasher.final(&hash);
-        const new_uri = self.translate(
-            handle,
-            c_source,
-        ) catch |err| {
-            std.log.warn("failed to translate cInclude: {}", .{err});
-            continue;
-        } orelse continue;
-        errdefer self.allocator.free(new_uri);
+        const new_result = (try self.translate(handle, new_source.source)) orelse continue;
+        errdefer new_result.deinit(self.allocator);
         new_cimports.appendAssumeCapacity(.{
             .node = new_source.node,
             .hash = hash,
-            .uri = new_uri,
+            .result = new_result,
         });
     }
@@ -996,7 +973,12 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Index) !?*Handle {
     const uri = blk: {
         for (handle.cimports) |item| {
-            if (item.node == node) break :blk item.uri;
+            if (item.node != node) continue;
+            switch (item.result) {
+                .success => |uri| break :blk uri,
+                .failure => return null,
+            }
         }
         return null;
     };
@@ -1079,8 +1061,8 @@ pub fn deinit(self: *DocumentStore) void {
             self.allocator.free(uri);
         }
         self.allocator.free(entry.value_ptr.*.import_uris);
-        for (entry.value_ptr.*.cimports) |cimport| {
-            self.allocator.free(cimport.uri);
+        for (entry.value_ptr.*.cimports) |*cimport| {
+            cimport.result.deinit(self.allocator);
         }
         self.allocator.free(entry.value_ptr.*.cimports);
         entry.value_ptr.*.imports_used.deinit(self.allocator);

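The recurring change in DocumentStore.zig above is ownership: every stored `CImportHandle` now holds a `translate_c.Result` instead of a bare URI string, so cleanup switches from `self.allocator.free(item.uri)` to `item.result.deinit(self.allocator)`, and only the `.success` arm is ever treated as an import URI (see `resolveCImport`). A minimal, self-contained sketch of that contract; `Result` is re-declared locally here purely for illustration:

    const std = @import("std");

    // Mirrors translate_c.Result from the diff; re-declared so this sketch compiles on its own.
    const Result = union(enum) {
        success: []const u8, // URI of the generated Zig file
        failure: []const u8, // stderr captured from `zig translate-c`

        fn deinit(self: *Result, allocator: std.mem.Allocator) void {
            switch (self.*) {
                .success => |uri| allocator.free(uri),
                .failure => |stderr| allocator.free(stderr),
            }
        }
    };

    test "a cimport handle owns its result and must deinit it" {
        const allocator = std.testing.allocator;

        var result = Result{ .failure = try allocator.dupe(u8, "cimport.h:1:10: fatal error: ...") };
        defer result.deinit(allocator); // replaces the old allocator.free(item.uri)

        // Consumers only ever dereference the success payload, mirroring resolveCImport above.
        switch (result) {
            .success => |uri| std.debug.print("translated into {s}\n", .{uri}),
            .failure => {}, // failures are surfaced via textDocument/publishDiagnostics instead
        }
    }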
src/Server.zig

@@ -322,6 +322,24 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
         }
     }
+    for (handle.cimports) |cimport| {
+        if (cimport.result != .failure) continue;
+        const stderr = std.mem.trim(u8, cimport.result.failure, " ");
+        var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
+        _ = pos_and_diag_iterator.next(); // skip file path
+        _ = pos_and_diag_iterator.next(); // skip line
+        _ = pos_and_diag_iterator.next(); // skip character
+        try diagnostics.append(allocator, .{
+            .range = offsets.nodeToRange(handle.tree, cimport.node, server.offset_encoding),
+            .severity = .Error,
+            .code = "cImport",
+            .source = "zls",
+            .message = try allocator.dupe(u8, pos_and_diag_iterator.rest()),
+        });
+    }
     try send(writer, server.arena.allocator(), types.Notification{
         .method = "textDocument/publishDiagnostics",
         .params = .{

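The new loop in publishDiagnostics assumes translate-c reports errors in clang's `path:line:column: message` shape: it splits the captured stderr on `:`, skips the first three fields, and publishes whatever remains as the diagnostic message. A small standalone check of that parsing assumption (the sample stderr line is invented):

    const std = @import("std");

    test "cImport stderr parsing keeps only the message part" {
        // Invented sample in clang's `path:line:column: message` shape.
        const stderr = "/tmp/zls/cimport.h:2:10: fatal error: 'missing.h' file not found";

        var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
        _ = pos_and_diag_iterator.next(); // skip file path
        _ = pos_and_diag_iterator.next(); // skip line
        _ = pos_and_diag_iterator.next(); // skip character

        try std.testing.expectEqualStrings(
            " fatal error: 'missing.h' file not found",
            pos_and_diag_iterator.rest(),
        );
    }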
src/translate_c.zig

@@ -4,6 +4,7 @@ const Config = @import("Config.zig");
 const ast = @import("ast.zig");
 const Ast = std.zig.Ast;
 const URI = @import("uri.zig");
+const log = std.log.scoped(.translate_c);
 /// converts a `@cInclude` node into an equivalent c header file
 /// which can then be handed over to `zig translate-c`
@@ -92,24 +93,47 @@ fn convertCIncludeInternal(
     }
 }
+pub const Result = union(enum) {
+    // uri to the generated zig file
+    success: []const u8,
+    // zig translate-c failed with the given stderr content
+    failure: []const u8,
+    pub fn deinit(self: *Result, allocator: std.mem.Allocator) void {
+        switch (self.*) {
+            .success => |path| allocator.free(path),
+            .failure => |stderr| allocator.free(stderr),
+        }
+    }
+    pub fn dupe(self: Result, allocator: std.mem.Allocator) !Result {
+        return switch (self) {
+            .success => |path| .{ .success = try allocator.dupe(u8, path) },
+            .failure => |stderr| .{ .failure = try allocator.dupe(u8, stderr) },
+        };
+    }
+};
 /// takes a c header file and returns the result from calling `zig translate-c`
-/// returns the file path to the generated zig file
+/// returns a URI to the generated zig file on success or the content of stderr on failure
+/// null indicates a failure which is automatically logged
 /// Caller owns returned memory.
-pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?[]const u8 {
+pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?Result {
     const file_path = try std.fs.path.join(allocator, &[_][]const u8{ config.global_cache_path.?, "cimport.h" });
     defer allocator.free(file_path);
     var file = std.fs.createFileAbsolute(file_path, .{}) catch |err| {
-        std.log.warn("failed to create file '{s}': {}", .{ file_path, err });
+        log.warn("failed to create file '{s}': {}", .{ file_path, err });
         return null;
     };
     defer file.close();
     defer std.fs.deleteFileAbsolute(file_path) catch |err| {
-        std.log.warn("failed to delete file '{s}': {}", .{ file_path, err });
+        log.warn("failed to delete file '{s}': {}", .{ file_path, err });
     };
     _ = file.write(source) catch |err| {
-        std.log.warn("failed to write to '{s}': {}", .{ file_path, err });
+        log.warn("failed to write to '{s}': {}", .{ file_path, err });
         return null;
     };
     const base_include_dirs = blk: {
@@ -161,7 +185,7 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
         .allocator = allocator,
         .argv = argv.items,
     }) catch |err| {
-        std.log.err("Failed to execute zig translate-c process, error: {}", .{err});
+        log.err("Failed to execute zig translate-c process, error: {}", .{err});
         return null;
     };
@@ -170,14 +194,12 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
     return switch (result.term) {
         .Exited => |code| if (code == 0) {
-            return try allocator.dupe(u8, std.mem.sliceTo(result.stdout, '\n'));
+            return Result{ .success = try URI.fromPath(allocator, std.mem.sliceTo(result.stdout, '\n')) };
         } else {
-            // TODO convert failure to `textDocument/publishDiagnostics`
-            std.log.err("zig translate-c process failed, code: {}, stderr: '{s}'", .{ code, result.stderr });
-            return null;
+            return Result{ .failure = try allocator.dupe(u8, std.mem.sliceTo(result.stderr, '\n')) };
         },
         else => {
-            std.log.err("zig translate-c process terminated '{}'", .{result.term});
+            log.err("zig translate-c process terminated '{}'", .{result.term});
             return null;
         },
     };
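Note that both arms of the new return value are trimmed with `std.mem.sliceTo(..., '\n')`: on success, `zig translate-c` prints the path of the generated file followed by a newline, and on failure only the first line of stderr is kept as the payload. A standalone illustration of that helper (the sample path is made up):

    const std = @import("std");

    test "sliceTo keeps everything before the first newline" {
        // Made-up stdout of `zig translate-c`: the generated file path plus a trailing newline.
        const stdout = "/home/user/.cache/zls/cimport.zig\n";
        try std.testing.expectEqualStrings(
            "/home/user/.cache/zls/cimport.zig",
            std.mem.sliceTo(stdout, '\n'),
        );
    }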