Merge pull request #665 from Techatrix/cimport-diagnostics

Report cImport failure using `textDocument/publishDiagnostics`
Techatrix 2022-09-20 01:43:04 +00:00 committed by GitHub
commit 0fa788b727
3 changed files with 120 additions and 98 deletions
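
Summary of the change: `translate_c.translate` no longer returns just a file path. It now returns a tagged union `Result` whose `success` variant carries the URI of the generated Zig file and whose `failure` variant carries the stderr captured from `zig translate-c`. `DocumentStore` keeps that `Result` in each `CImportHandle`, and the `publishDiagnostics` handler in Server.zig turns every `failure` into a diagnostic on the corresponding `@cImport` node. A minimal sketch of how the two variants are meant to be consumed (the `handleCImport` helper below is illustrative only, not part of this PR):

```zig
const std = @import("std");
const translate_c = @import("translate_c.zig");

/// Illustrative helper (not part of this PR): shows how callers branch on
/// the new `translate_c.Result` union.
fn handleCImport(result: translate_c.Result) void {
    switch (result) {
        // translate-c succeeded; the payload is the URI of the generated Zig file
        .success => |uri| std.log.debug("translated cImport into {s}", .{uri}),
        // translate-c failed; the payload is its stderr, which publishDiagnostics
        // converts into a `textDocument/publishDiagnostics` entry on the @cImport node
        .failure => |stderr| std.log.warn("translate-c failed:\n{s}", .{stderr}),
    }
}
```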

src/DocumentStore.zig

@@ -274,7 +274,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
     if (handle.associated_build_file) |build_file| {
         log.debug("Opened document `{s}` with build file `{s}`", .{ handle.uri(), build_file.uri });
     } else {
-        log.debug("Opened document `{s}` without a build file", .{ handle.uri() });
+        log.debug("Opened document `{s}` without a build file", .{handle.uri()});
     }
 }
@@ -439,8 +439,8 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
     handle.cimports = try self.collectCIncludes(handle);
     errdefer {
-        for (handle.cimports) |item| {
-            self.allocator.free(item.uri);
+        for (handle.cimports) |*item| {
+            item.result.deinit(self.allocator);
         }
         self.allocator.free(handle.cimports);
     }
@@ -515,8 +515,8 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
             self.allocator.free(import_uri);
         }
-        for (handle.cimports) |item| {
-            self.allocator.free(item.uri);
+        for (handle.cimports) |*item| {
+            item.result.deinit(self.allocator);
         }
         handle.document_scope.deinit(self.allocator);
@@ -562,7 +562,7 @@ fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8
 }

 pub const CImportSource = struct {
-    /// the `@cInclude` node
+    /// the `@cImport` node
     node: Ast.Node.Index,
     /// hash of c source file
     hash: [Hasher.mac_length]u8,
@@ -607,12 +607,13 @@ fn collectCIncludeSources(self: *DocumentStore, handle: *Handle) ![]CImportSourc
 }

 pub const CImportHandle = struct {
-    /// the `@cInclude` node
+    /// the `@cImport` node
     node: Ast.Node.Index,
     /// hash of the c source file
     hash: [Hasher.mac_length]u8,
-    /// uri to a zig source file generated with translate-c
-    uri: []const u8,
+    /// the result from calling zig translate-c
+    /// see `translate_c.translate`
+    result: translate_c.Result,
 };

 /// Collects all `@cImport` nodes and converts them into zig files using translate-c
@@ -621,12 +622,12 @@ fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
     var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
     defer self.allocator.free(cimport_nodes);

-    var uris = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
+    var cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
     errdefer {
-        for (uris.items) |item| {
-            self.allocator.free(item.uri);
+        for (cimports.items) |*item| {
+            item.result.deinit(self.allocator);
         }
-        uris.deinit(self.allocator);
+        cimports.deinit(self.allocator);
     }

     for (cimport_nodes) |node| {
@@ -636,28 +637,25 @@ fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
         };
         defer self.allocator.free(c_source);

-        const uri = self.translate(handle, c_source) catch |err| {
-            std.log.warn("failed to translate cInclude: {}", .{err});
-            continue;
-        } orelse continue;
-        errdefer self.allocator.free(uri);
+        const result = (try self.translate(handle, c_source)) orelse continue;
+        errdefer result.deinit(self.allocator);

         var hasher = hasher_init;
         hasher.update(c_source);
         var hash: [Hasher.mac_length]u8 = undefined;
         hasher.final(&hash);

-        uris.appendAssumeCapacity(.{
+        cimports.appendAssumeCapacity(.{
             .node = node,
             .hash = hash,
-            .uri = uri,
+            .result = result,
         });
     }

-    return uris.toOwnedSlice(self.allocator);
+    return cimports.toOwnedSlice(self.allocator);
 }

-fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]const u8 {
+fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) error{OutOfMemory}!?translate_c.Result {
     const dirs: []BuildConfig.IncludeDir = if (handle.associated_build_file) |build_file| build_file.config.include_dirs else &.{};
     const include_dirs = blk: {
         var result = try self.allocator.alloc([]const u8, dirs.len);
@@ -671,15 +669,21 @@ fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]cons
     };
     defer self.allocator.free(include_dirs);

-    const file_path = (try translate_c.translate(
+    const maybe_result = try translate_c.translate(
         self.allocator,
         self.config,
         include_dirs,
         source,
-    )) orelse return null;
-    defer self.allocator.free(file_path);
-
-    return try URI.fromPath(self.allocator, file_path);
+    );
+
+    if (maybe_result) |result| {
+        switch (result) {
+            .success => |uri| log.debug("Translated cImport into {s}", .{uri}),
+            else => {},
+        }
+    }
+
+    return maybe_result;
 }

 fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
@@ -703,8 +707,8 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
         }
         self.allocator.free(old_imports);

-        for (old_cimports) |old_cimport| {
-            self.allocator.free(old_cimport.uri);
+        for (old_cimports) |*old_cimport| {
+            old_cimport.result.deinit(self.allocator);
         }
         self.allocator.free(old_cimports);
     }
@@ -712,26 +716,30 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
     var i: usize = 0;
     while (i < handle.imports_used.items.len) {
         const old = handle.imports_used.items[i];
-        still_exists: {
+
+        const found_new = found: {
             for (handle.import_uris) |new| {
-                if (std.mem.eql(u8, new, old)) {
-                    handle.imports_used.items[i] = new;
-                    break :still_exists;
-                }
+                if (!std.mem.eql(u8, new, old)) continue;
+                break :found new;
             }
             for (handle.cimports) |cimport| {
-                const new = cimport.uri;
-                if (std.mem.eql(u8, new, old)) {
-                    handle.imports_used.items[i] = new;
-                    break :still_exists;
-                }
+                if (cimport.result != .success) continue;
+                const new = cimport.result.success;
+                if (!std.mem.eql(u8, old, new)) continue;
+                break :found new;
             }
+            break :found null;
+        };
+
+        if (found_new) |new| {
+            handle.imports_used.items[i] = new;
+            i += 1;
+        } else {
             log.debug("Import removed: {s}", .{old});
             self.decrementCount(old);
             _ = handle.imports_used.swapRemove(i);
-            continue;
         }
-        i += 1;
     }
 }
@@ -747,48 +755,23 @@ fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHan
     var old_cimports = handle.cimports;
     var new_cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, new_sources.len);
     errdefer {
-        for (new_cimports.items) |new_cimport| {
-            self.allocator.free(new_cimport.uri);
+        for (new_cimports.items) |*new_cimport| {
+            new_cimport.result.deinit(self.allocator);
         }
         new_cimports.deinit(self.allocator);
     }

-    for (new_sources) |new_source| {
-        const maybe_old_cimport: ?CImportHandle = blk: {
-            const old_cimport: CImportHandle = found: {
-                for (old_cimports) |old_cimport| {
-                    if (new_source.node == old_cimport.node) {
-                        break :found old_cimport;
-                    }
-                }
-                break :blk null;
-            };
-
-            // avoid re-translating if the source didn't change
-            if (std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) {
-                break :blk CImportHandle{
-                    .node = old_cimport.node,
-                    .hash = old_cimport.hash,
-                    .uri = try self.allocator.dupe(u8, old_cimport.uri),
-                };
-            }
-
-            const new_uri = self.translate(handle, new_source.source) catch |err| {
-                std.log.warn("failed to translate cInclude: {}", .{err});
-                continue;
-            } orelse continue;
-            errdefer self.allocator.free(new_uri);
-
-            break :blk CImportHandle{
-                .node = old_cimport.node,
-                .hash = old_cimport.hash,
-                .uri = new_uri,
-            };
-        };
-
-        if (maybe_old_cimport) |cimport| {
-            new_cimports.appendAssumeCapacity(cimport);
-            continue;
+    outer: for (new_sources) |new_source| {
+        // look for a old cimport with identical source hash
+        for (old_cimports) |old_cimport| {
+            if (!std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) continue;
+
+            new_cimports.appendAssumeCapacity(.{
+                .node = old_cimport.node,
+                .hash = old_cimport.hash,
+                .result = try old_cimport.result.dupe(self.allocator),
+            });
+            continue :outer;
         }

         const c_source = translate_c.convertCInclude(self.allocator, handle.tree, new_source.node) catch |err| switch (err) {
@@ -802,19 +785,13 @@ fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHan
         hasher.update(c_source);
         hasher.final(&hash);

-        const new_uri = self.translate(
-            handle,
-            c_source,
-        ) catch |err| {
-            std.log.warn("failed to translate cInclude: {}", .{err});
-            continue;
-        } orelse continue;
-        errdefer self.allocator.free(new_uri);
+        const new_result = (try self.translate(handle, new_source.source)) orelse continue;
+        errdefer new_result.deinit(self.allocator);

         new_cimports.appendAssumeCapacity(.{
             .node = new_source.node,
             .hash = hash,
-            .uri = new_uri,
+            .result = new_result,
         });
     }
@@ -996,7 +973,12 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
 pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Index) !?*Handle {
     const uri = blk: {
         for (handle.cimports) |item| {
-            if (item.node == node) break :blk item.uri;
+            if (item.node != node) continue;
+
+            switch (item.result) {
+                .success => |uri| break :blk uri,
+                .failure => return null,
+            }
         }
         return null;
     };
@@ -1079,8 +1061,8 @@ pub fn deinit(self: *DocumentStore) void {
             self.allocator.free(uri);
         }
         self.allocator.free(entry.value_ptr.*.import_uris);
-        for (entry.value_ptr.*.cimports) |cimport| {
-            self.allocator.free(cimport.uri);
+        for (entry.value_ptr.*.cimports) |*cimport| {
+            cimport.result.deinit(self.allocator);
         }
         self.allocator.free(entry.value_ptr.*.cimports);
         entry.value_ptr.*.imports_used.deinit(self.allocator);

src/Server.zig

@@ -322,6 +322,24 @@ fn publishDiagnostics(server: *Server, writer: anytype, handle: DocumentStore.Ha
         }
     }

+    for (handle.cimports) |cimport| {
+        if (cimport.result != .failure) continue;
+        const stderr = std.mem.trim(u8, cimport.result.failure, " ");
+
+        var pos_and_diag_iterator = std.mem.split(u8, stderr, ":");
+        _ = pos_and_diag_iterator.next(); // skip file path
+        _ = pos_and_diag_iterator.next(); // skip line
+        _ = pos_and_diag_iterator.next(); // skip character
+
+        try diagnostics.append(allocator, .{
+            .range = offsets.nodeToRange(handle.tree, cimport.node, server.offset_encoding),
+            .severity = .Error,
+            .code = "cImport",
+            .source = "zls",
+            .message = try allocator.dupe(u8, pos_and_diag_iterator.rest()),
+        });
+    }
+
     try send(writer, server.arena.allocator(), types.Notification{
         .method = "textDocument/publishDiagnostics",
         .params = .{
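
For context, the diagnostic message added above is taken from `zig translate-c`'s stderr, which follows the usual clang-style `path:line:column: message` layout; the iterator skips the first three `:`-separated fields so only the message part lands in the diagnostic. A small sketch of that parsing in isolation (the stderr line is invented for illustration):

```zig
const std = @import("std");

test "strip the path:line:column prefix from translate-c stderr" {
    // example stderr line, made up for illustration
    const stderr = "/tmp/cimport.h:3:10: error: unknown type name 'foo'";

    var it = std.mem.split(u8, stderr, ":");
    _ = it.next(); // skip file path
    _ = it.next(); // skip line
    _ = it.next(); // skip character

    // everything after the third ':' is kept as the diagnostic message
    try std.testing.expectEqualStrings(" error: unknown type name 'foo'", it.rest());
}
```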

src/translate_c.zig

@@ -4,6 +4,7 @@ const Config = @import("Config.zig");
 const ast = @import("ast.zig");
 const Ast = std.zig.Ast;
 const URI = @import("uri.zig");
+const log = std.log.scoped(.translate_c);

 /// converts a `@cInclude` node into an equivalent c header file
 /// which can then be handed over to `zig translate-c`
@@ -92,24 +93,47 @@ fn convertCIncludeInternal(
     }
 }

+pub const Result = union(enum) {
+    // uri to the generated zig file
+    success: []const u8,
+    // zig translate-c failed with the given stderr content
+    failure: []const u8,
+
+    pub fn deinit(self: *Result, allocator: std.mem.Allocator) void {
+        switch (self.*) {
+            .success => |path| allocator.free(path),
+            .failure => |stderr| allocator.free(stderr),
+        }
+    }
+
+    pub fn dupe(self: Result, allocator: std.mem.Allocator) !Result {
+        return switch (self) {
+            .success => |path| .{ .success = try allocator.dupe(u8, path) },
+            .failure => |stderr| .{ .failure = try allocator.dupe(u8, stderr) },
+        };
+    }
+};
+
 /// takes a c header file and returns the result from calling `zig translate-c`
-/// returns the file path to the generated zig file
+/// returns a URI to the generated zig file on success or the content of stderr on failure
+/// null indicates a failure which is automatically logged
 /// Caller owns returned memory.
-pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?[]const u8 {
+pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?Result {
     const file_path = try std.fs.path.join(allocator, &[_][]const u8{ config.global_cache_path.?, "cimport.h" });
     defer allocator.free(file_path);

     var file = std.fs.createFileAbsolute(file_path, .{}) catch |err| {
-        std.log.warn("failed to create file '{s}': {}", .{ file_path, err });
+        log.warn("failed to create file '{s}': {}", .{ file_path, err });
         return null;
     };
     defer file.close();
     defer std.fs.deleteFileAbsolute(file_path) catch |err| {
-        std.log.warn("failed to delete file '{s}': {}", .{ file_path, err });
+        log.warn("failed to delete file '{s}': {}", .{ file_path, err });
     };

     _ = file.write(source) catch |err| {
-        std.log.warn("failed to write to '{s}': {}", .{ file_path, err });
+        log.warn("failed to write to '{s}': {}", .{ file_path, err });
+        return null;
     };

     const base_include_dirs = blk: {
@@ -161,7 +185,7 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
         .allocator = allocator,
         .argv = argv.items,
     }) catch |err| {
-        std.log.err("Failed to execute zig translate-c process, error: {}", .{err});
+        log.err("Failed to execute zig translate-c process, error: {}", .{err});
         return null;
     };
@@ -170,14 +194,12 @@ pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []c
     return switch (result.term) {
         .Exited => |code| if (code == 0) {
-            return try allocator.dupe(u8, std.mem.sliceTo(result.stdout, '\n'));
+            return Result{ .success = try URI.fromPath(allocator, std.mem.sliceTo(result.stdout, '\n')) };
         } else {
-            // TODO convert failure to `textDocument/publishDiagnostics`
-            std.log.err("zig translate-c process failed, code: {}, stderr: '{s}'", .{ code, result.stderr });
-            return null;
+            return Result{ .failure = try allocator.dupe(u8, std.mem.sliceTo(result.stderr, '\n')) };
         },
         else => {
-            std.log.err("zig translate-c process terminated '{}'", .{result.term});
+            log.err("zig translate-c process terminated '{}'", .{result.term});
             return null;
         },
     };