add basic cImport support

This commit is contained in:
Techatrix 2022-08-19 00:00:46 +02:00
parent 0436b05b80
commit d57f8e3a64
3 changed files with 569 additions and 77 deletions

View File

@ -9,9 +9,15 @@ const BuildAssociatedConfig = @import("BuildAssociatedConfig.zig");
const BuildConfig = @import("special/build_runner.zig").BuildConfig; const BuildConfig = @import("special/build_runner.zig").BuildConfig;
const tracy = @import("tracy.zig"); const tracy = @import("tracy.zig");
const Config = @import("Config.zig"); const Config = @import("Config.zig");
const translate_c = @import("translate_c.zig");
const DocumentStore = @This(); const DocumentStore = @This();
pub const Hasher = std.crypto.auth.siphash.SipHash128(1, 3);
/// Initial state, that can be copied.
pub const hasher_init: Hasher = Hasher.init(&[_]u8{0} ** Hasher.key_length);
const BuildFile = struct { const BuildFile = struct {
refs: usize, refs: usize,
uri: []const u8, uri: []const u8,
@ -30,7 +36,9 @@ pub const Handle = struct {
count: usize, count: usize,
/// Contains one entry for every import in the document /// Contains one entry for every import in the document
import_uris: []const []const u8, import_uris: []const []const u8,
/// Items in this array list come from `import_uris` /// Contains one entry for every cimport in the document
cimports: []CImportHandle,
/// Items in this array list come from `import_uris` and `cimports`
imports_used: std.ArrayListUnmanaged([]const u8), imports_used: std.ArrayListUnmanaged([]const u8),
tree: Ast, tree: Ast,
document_scope: analysis.DocumentScope, document_scope: analysis.DocumentScope,
@ -206,6 +214,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
handle.* = Handle{ handle.* = Handle{
.count = 1, .count = 1,
.import_uris = &.{}, .import_uris = &.{},
.cimports = &.{},
.imports_used = .{}, .imports_used = .{},
.document = .{ .document = .{
.uri = uri, .uri = uri,
@ -359,6 +368,14 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
self.allocator.free(handle.import_uris); self.allocator.free(handle.import_uris);
} }
handle.cimports = try self.collectCIncludes(handle);
errdefer {
for (handle.cimports) |item| {
self.allocator.free(item.uri);
}
self.allocator.free(handle.cimports);
}
try self.handles.putNoClobber(self.allocator, uri, handle); try self.handles.putNoClobber(self.allocator, uri, handle);
return handle; return handle;
} }
@ -430,9 +447,14 @@ fn decrementCount(self: *DocumentStore, uri: []const u8) void {
self.allocator.free(import_uri); self.allocator.free(import_uri);
} }
for (handle.cimports) |item| {
self.allocator.free(item.uri);
}
handle.document_scope.deinit(self.allocator); handle.document_scope.deinit(self.allocator);
handle.imports_used.deinit(self.allocator); handle.imports_used.deinit(self.allocator);
self.allocator.free(handle.import_uris); self.allocator.free(handle.import_uris);
self.allocator.free(handle.cimports);
self.allocator.destroy(handle); self.allocator.destroy(handle);
const uri_key = entry.key_ptr.*; const uri_key = entry.key_ptr.*;
std.debug.assert(self.handles.remove(uri)); std.debug.assert(self.handles.remove(uri));
@ -449,27 +471,146 @@ pub fn getHandle(self: *DocumentStore, uri: []const u8) ?*Handle {
} }
fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8 { fn collectImportUris(self: *DocumentStore, handle: *Handle) ![]const []const u8 {
var new_imports = std.ArrayList([]const u8).init(self.allocator); const collected_imports = try analysis.collectImports(self.allocator, handle.tree);
var imports = std.ArrayList([]const u8).fromOwnedSlice(self.allocator, collected_imports);
errdefer { errdefer {
for (new_imports.items) |imp| { for (imports.items) |imp| {
self.allocator.free(imp); self.allocator.free(imp);
} }
new_imports.deinit(); imports.deinit();
} }
try analysis.collectImports(&new_imports, handle.tree);
// Convert to URIs // Convert to URIs
var i: usize = 0; var i: usize = 0;
while (i < new_imports.items.len) { while (i < imports.items.len) {
if (try self.uriFromImportStr(self.allocator, handle.*, new_imports.items[i])) |uri| { if (try self.uriFromImportStr(self.allocator, handle.*, imports.items[i])) |uri| {
// The raw import strings are owned by the document and do not need to be freed here. // The raw import strings are owned by the document and do not need to be freed here.
new_imports.items[i] = uri; imports.items[i] = uri;
i += 1; i += 1;
} else { } else {
_ = new_imports.swapRemove(i); _ = imports.swapRemove(i);
} }
} }
return new_imports.toOwnedSlice(); return imports.toOwnedSlice();
}
pub const CImportSource = struct {
    /// the `@cImport` node this source was generated from
    node: Ast.Node.Index,
    /// hash of c source file
    hash: [Hasher.mac_length]u8,
    /// c source file
    source: []const u8,
};
/// Collects all `@cImport` nodes and converts them into c source code
/// the translation process is defined in `translate_c.convertCInclude`
/// Caller owns returned memory (both the slice and every `source` in it).
fn collectCIncludeSources(self: *DocumentStore, handle: *Handle) ![]CImportSource {
    var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
    defer self.allocator.free(cimport_nodes);

    var sources = try std.ArrayListUnmanaged(CImportSource).initCapacity(self.allocator, cimport_nodes.len);
    errdefer {
        // on error, release every source converted so far
        for (sources.items) |item| {
            self.allocator.free(item.source);
        }
        sources.deinit(self.allocator);
    }

    for (cimport_nodes) |node| {
        // nodes that cannot be converted (error.Unsupported) are skipped
        const c_source = translate_c.convertCInclude(self.allocator, handle.tree, node) catch |err| switch (err) {
            error.Unsupported => continue,
            error.OutOfMemory => return error.OutOfMemory,
        };

        // hash the generated c source so refreshes can detect unchanged cimports
        var hasher = hasher_init;
        hasher.update(c_source);
        var hash: [Hasher.mac_length]u8 = undefined;
        hasher.final(&hash);

        sources.appendAssumeCapacity(.{
            .node = node,
            .hash = hash,
            .source = c_source,
        });
    }

    return sources.toOwnedSlice(self.allocator);
}
pub const CImportHandle = struct {
    /// the `@cImport` node this handle was generated from
    node: Ast.Node.Index,
    /// hash of the c source file
    hash: [Hasher.mac_length]u8,
    /// uri to a zig source file generated with translate-c
    uri: []const u8,
};
/// Collects all `@cImport` nodes and converts them into zig files using translate-c
/// Caller owns returned memory (both the slice and every `uri` in it).
fn collectCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
    var cimport_nodes = try analysis.collectCImportNodes(self.allocator, handle.tree);
    defer self.allocator.free(cimport_nodes);

    var uris = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, cimport_nodes.len);
    errdefer {
        // on error, release every uri collected so far
        for (uris.items) |item| {
            self.allocator.free(item.uri);
        }
        uris.deinit(self.allocator);
    }

    for (cimport_nodes) |node| {
        // convert the `@cImport` block into c header source;
        // nodes that cannot be converted are skipped
        const c_source = translate_c.convertCInclude(self.allocator, handle.tree, node) catch |err| switch (err) {
            error.Unsupported => continue,
            error.OutOfMemory => return error.OutOfMemory,
        };
        defer self.allocator.free(c_source);

        // run `zig translate-c`; failures are logged and the node skipped
        const uri = self.translate(handle, c_source) catch |err| {
            std.log.warn("failed to translate cInclude: {}", .{err});
            continue;
        } orelse continue;
        errdefer self.allocator.free(uri);

        // hash the c source so refreshes can skip re-translating unchanged cimports
        var hasher = hasher_init;
        hasher.update(c_source);
        var hash: [Hasher.mac_length]u8 = undefined;
        hasher.final(&hash);

        uris.appendAssumeCapacity(.{
            .node = node,
            .hash = hash,
            .uri = uri,
        });
    }

    return uris.toOwnedSlice(self.allocator);
}
fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]const u8 {
const dirs: []BuildConfig.IncludeDir = if (handle.associated_build_file) |build_file| build_file.config.include_dirs else &.{};
const include_dirs = blk: {
var result = try self.allocator.alloc([]const u8, dirs.len);
errdefer self.allocator.free(result);
for (dirs) |dir, i| {
result[i] = dir.getPath();
}
break :blk result;
};
defer self.allocator.free(include_dirs);
return translate_c.translate(
self.allocator,
self.config,
include_dirs,
source,
);
} }
fn refreshDocument(self: *DocumentStore, handle: *Handle) !void { fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
@ -480,28 +621,37 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
handle.document_scope.deinit(self.allocator); handle.document_scope.deinit(self.allocator);
handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree); handle.document_scope = try analysis.makeDocumentScope(self.allocator, handle.tree);
const new_imports = try self.collectImportUris(handle); var old_imports = handle.import_uris;
errdefer { var old_cimports = handle.cimports;
for (new_imports) |imp| {
self.allocator.free(imp); handle.import_uris = try self.collectImportUris(handle);
}
self.allocator.free(new_imports); handle.cimports = try self.refreshDocumentCIncludes(handle);
}
const old_imports = handle.import_uris;
handle.import_uris = new_imports;
defer { defer {
for (old_imports) |uri| { for (old_imports) |uri| {
self.allocator.free(uri); self.allocator.free(uri);
} }
self.allocator.free(old_imports); self.allocator.free(old_imports);
for (old_cimports) |old_cimport| {
self.allocator.free(old_cimport.uri);
}
self.allocator.free(old_cimports);
} }
var i: usize = 0; var i: usize = 0;
while (i < handle.imports_used.items.len) { while (i < handle.imports_used.items.len) {
const old = handle.imports_used.items[i]; const old = handle.imports_used.items[i];
still_exists: { still_exists: {
for (new_imports) |new| { for (handle.import_uris) |new| {
if (std.mem.eql(u8, new, old)) {
handle.imports_used.items[i] = new;
break :still_exists;
}
}
for (handle.cimports) |cimport| {
const new = cimport.uri;
if (std.mem.eql(u8, new, old)) { if (std.mem.eql(u8, new, old)) {
handle.imports_used.items[i] = new; handle.imports_used.items[i] = new;
break :still_exists; break :still_exists;
@ -516,6 +666,92 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
} }
} }
/// Re-collects the `@cImport`s of `handle` after its tree changed.
/// Reuses the previous translation (duping its uri) when the generated c
/// source is unchanged — compared via hash — instead of invoking
/// `zig translate-c` again.
/// Returns the new cimports slice; caller owns it. `handle.cimports` itself
/// is read but not modified here — the caller frees the old slice.
fn refreshDocumentCIncludes(self: *DocumentStore, handle: *Handle) ![]CImportHandle {
    const new_sources: []CImportSource = try self.collectCIncludeSources(handle);
    defer {
        for (new_sources) |new_source| {
            self.allocator.free(new_source.source);
        }
        self.allocator.free(new_sources);
    }

    const old_cimports = handle.cimports;

    var new_cimports = try std.ArrayListUnmanaged(CImportHandle).initCapacity(self.allocator, new_sources.len);
    errdefer {
        for (new_cimports.items) |new_cimport| {
            self.allocator.free(new_cimport.uri);
        }
        new_cimports.deinit(self.allocator);
    }

    outer: for (new_sources) |new_source| {
        // avoid re-translating if the source didn't change
        for (old_cimports) |old_cimport| {
            if (new_source.node != old_cimport.node) continue;
            if (std.mem.eql(u8, &new_source.hash, &old_cimport.hash)) {
                new_cimports.appendAssumeCapacity(.{
                    .node = old_cimport.node,
                    .hash = old_cimport.hash,
                    .uri = try self.allocator.dupe(u8, old_cimport.uri),
                });
                continue :outer;
            }
            break;
        }

        // either a brand-new cimport or one whose source changed:
        // translate the already-collected source (no need to re-run
        // convertCInclude — collectCIncludeSources did that).
        const new_uri = self.translate(handle, new_source.source) catch |err| {
            std.log.warn("failed to translate cInclude: {}", .{err});
            continue;
        } orelse continue;
        errdefer self.allocator.free(new_uri);

        new_cimports.appendAssumeCapacity(.{
            .node = new_source.node,
            // store the hash of the *new* source; the previous version stored
            // the stale old hash here, which forced a re-translation on every
            // subsequent refresh even when nothing changed.
            .hash = new_source.hash,
            .uri = new_uri,
        });
    }

    return new_cimports.toOwnedSlice(self.allocator);
}
pub fn applySave(self: *DocumentStore, handle: *Handle) !void { pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
if (handle.is_build_file) |build_file| { if (handle.is_build_file) |build_file| {
loadBuildConfiguration(.{ loadBuildConfiguration(.{
@ -680,39 +916,66 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
} }
// New document, read the file then call into openDocument. // New document, read the file then call into openDocument.
const file_path = try URI.parse(allocator, final_uri); var document_handle = try self.newDocumentFromUri(final_uri);
defer allocator.free(file_path);
var file = std.fs.cwd().openFile(file_path, .{}) catch { // Add to import table of current handle.
log.debug("Cannot open import file {s}", .{file_path}); try handle.imports_used.append(allocator, handle_uri);
return document_handle;
}
pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Index) !?*Handle {
const uri = blk: {
for (handle.cimports) |item| {
if (item.node == node) break :blk item.uri;
}
return null; return null;
}; };
defer file.close(); // Check if the import is already opened by others.
{ if (self.getHandle(uri)) |new_handle| {
const file_contents = file.readToEndAllocOptions( // If it is, append it to our imports, increment the count, set our new handle
allocator, // and return the parsed tree root node.
std.math.maxInt(usize), try handle.imports_used.append(self.allocator, uri);
null, new_handle.count += 1;
@alignOf(u8), return new_handle;
0,
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
log.debug("Could not read from file {s}", .{file_path});
return null;
},
};
errdefer allocator.free(file_contents);
// Add to import table of current handle.
try handle.imports_used.append(self.allocator, handle_uri);
// Swap handles.
// This takes ownership of the passed uri and text.
const duped_final_uri = try allocator.dupe(u8, final_uri);
errdefer allocator.free(duped_final_uri);
return try self.newDocument(duped_final_uri, file_contents);
} }
// New document, read the file then call into openDocument.
var document_handle = try self.newDocumentFromUri(uri);
// Add to cimport table of current handle.
try handle.imports_used.append(self.allocator, uri);
return document_handle;
}
/// Reads the file behind `uri` and creates a new document from its contents.
/// Returns null (after logging) when the file cannot be opened or read;
/// only error.OutOfMemory is propagated as an error.
fn newDocumentFromUri(self: *DocumentStore, uri: []const u8) !?*Handle {
    const file_path = try URI.parse(self.allocator, uri);
    defer self.allocator.free(file_path);

    var file = std.fs.openFileAbsolute(file_path, .{}) catch |err| {
        log.debug("Cannot open file '{s}': {}", .{ file_path, err });
        return null;
    };
    defer file.close();

    // read the whole file as a null-terminated buffer (sentinel 0),
    // which is the form the zig parser expects
    const file_contents = file.readToEndAllocOptions(
        self.allocator,
        std.math.maxInt(usize),
        null,
        @alignOf(u8),
        0,
    ) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        else => {
            log.debug("Could not read from file {s}", .{file_path});
            return null;
        },
    };
    errdefer self.allocator.free(file_contents);

    // newDocument takes ownership of the duped uri and the file contents.
    // NOTE(review): if newDocument fails, the duped uri may leak since no
    // errdefer covers it here — confirm newDocument's ownership on error.
    return try self.newDocument(try self.allocator.dupe(u8, uri), file_contents);
} }
fn stdUriFromLibPath(allocator: std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 { fn stdUriFromLibPath(allocator: std.mem.Allocator, zig_lib_path: ?[]const u8) !?[]const u8 {
@ -742,6 +1005,10 @@ pub fn deinit(self: *DocumentStore) void {
self.allocator.free(uri); self.allocator.free(uri);
} }
self.allocator.free(entry.value_ptr.*.import_uris); self.allocator.free(entry.value_ptr.*.import_uris);
for (entry.value_ptr.*.cimports) |cimport| {
self.allocator.free(cimport.uri);
}
self.allocator.free(entry.value_ptr.*.cimports);
entry.value_ptr.*.imports_used.deinit(self.allocator); entry.value_ptr.*.imports_used.deinit(self.allocator);
self.allocator.free(entry.key_ptr.*); self.allocator.free(entry.key_ptr.*);
self.allocator.destroy(entry.value_ptr.*); self.allocator.destroy(entry.value_ptr.*);
@ -778,6 +1045,7 @@ fn tagStoreCompletionItems(self: DocumentStore, arena: *std.heap.ArenaAllocator,
result_set.putAssumeCapacity(completion, {}); result_set.putAssumeCapacity(completion, {});
} }
} }
return result_set.entries.items(.key); return result_set.entries.items(.key);
} }

View File

@ -334,7 +334,8 @@ fn resolveVarDeclAliasInternal(store: *DocumentStore, arena: *std.heap.ArenaAllo
const lhs = datas[node_handle.node].lhs; const lhs = datas[node_handle.node].lhs;
const container_node = if (ast.isBuiltinCall(tree, lhs)) block: { const container_node = if (ast.isBuiltinCall(tree, lhs)) block: {
if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[lhs]), "@import")) const name = tree.tokenSlice(main_tokens[lhs]);
if (!std.mem.eql(u8, name, "@import") and !std.mem.eql(u8, name, "@cImport"))
return null; return null;
const inner_node = (try resolveTypeOfNode(store, arena, .{ .node = lhs, .handle = handle })) orelse return null; const inner_node = (try resolveTypeOfNode(store, arena, .{ .node = lhs, .handle = handle })) orelse return null;
@ -901,20 +902,28 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
return resolved_type; return resolved_type;
} }
if (!std.mem.eql(u8, call_name, "@import")) return null; if (std.mem.eql(u8, call_name, "@import")) {
if (params.len == 0) return null; if (params.len == 0) return null;
const import_param = params[0];
if (node_tags[import_param] != .string_literal) return null;
const import_param = params[0]; const import_str = tree.tokenSlice(main_tokens[import_param]);
if (node_tags[import_param] != .string_literal) return null; const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| {
log.debug("Error {} while processing import {s}", .{ err, import_str });
return null;
}) orelse return null;
const import_str = tree.tokenSlice(main_tokens[import_param]); // reference to node '0' which is root
const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| { return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
log.debug("Error {} while processing import {s}", .{ err, import_str }); } else if (std.mem.eql(u8, call_name, "@cImport")) {
return null; const new_handle = (store.resolveCImport(handle, node) catch |err| {
}) orelse return null; log.debug("Error {} while processing cImport", .{err}); // TODO improve
return null;
}) orelse return null;
// reference to node '0' which is root // reference to node '0' which is root
return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle }); return TypeWithHandle.typeVal(.{ .node = 0, .handle = new_handle });
}
}, },
.fn_proto, .fn_proto,
.fn_proto_multi, .fn_proto_multi,
@ -1074,8 +1083,17 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator,
return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params); return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params);
} }
/// Collects all imports we can find into a slice of import paths (without quotes). /// Collects all `@import`'s we can find into a slice of import paths (without quotes).
pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Ast) !void { /// Caller owns returned memory.
pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}![][]const u8 {
var imports = std.ArrayListUnmanaged([]const u8){};
errdefer {
for (imports.items) |imp| {
allocator.free(imp);
}
imports.deinit(allocator);
}
const tags = tree.tokens.items(.tag); const tags = tree.tokens.items(.tag);
var i: usize = 0; var i: usize = 0;
@ -1095,9 +1113,33 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Ast) !void {
continue; continue;
const str = tree.tokenSlice(@intCast(u32, i + 2)); const str = tree.tokenSlice(@intCast(u32, i + 2));
try import_arr.append(str[1 .. str.len - 1]); try imports.append(allocator, str[1 .. str.len - 1]);
} }
} }
return imports.toOwnedSlice(allocator);
}
/// Collects all `@cImport` nodes
/// Caller owns returned memory.
pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}![]Ast.Node.Index {
    var import_nodes = std.ArrayListUnmanaged(Ast.Node.Index){};
    errdefer import_nodes.deinit(allocator);

    const node_tags = tree.nodes.items(.tag);
    const main_tokens = tree.nodes.items(.main_token);

    // linear scan over every node in the tree; a node is collected when it
    // is a builtin call whose main token is exactly "@cImport"
    var i: usize = 0;
    while (i < node_tags.len) : (i += 1) {
        const node = @intCast(Ast.Node.Index, i);

        if (!ast.isBuiltinCall(tree, node)) continue;
        if (!std.mem.eql(u8, Ast.tokenSlice(tree, main_tokens[node]), "@cImport")) continue;

        try import_nodes.append(allocator, node);
    }

    return import_nodes.toOwnedSlice(allocator);
} }
pub const NodeWithHandle = struct { pub const NodeWithHandle = struct {
@ -1338,26 +1380,22 @@ pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]con
return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index); return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
} }
if (!nodeContainsSourceIndex(tree, node, source_index)) { if (!nodeContainsSourceIndex(tree, node, source_index)) return null;
return null;
}
if (ast.isBuiltinCall(tree, node)) { if (!ast.isBuiltinCall(tree, node)) return null;
const builtin_token = tree.nodes.items(.main_token)[node];
const call_name = tree.tokenSlice(builtin_token);
if (!std.mem.eql(u8, call_name, "@import")) return null; const builtin_token = tree.nodes.items(.main_token)[node];
const call_name = tree.tokenSlice(builtin_token);
var buffer: [2]Ast.Node.Index = undefined; if (!std.mem.eql(u8, call_name, "@import")) return null;
const params = ast.builtinCallParams(tree, node, &buffer).?;
if (params.len != 1) return null; var buffer: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(tree, node, &buffer).?;
const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]); if (params.len != 1) return null;
return import_str[1 .. import_str.len - 1];
}
return null; const import_str = tree.tokenSlice(tree.nodes.items(.main_token)[params[0]]);
return import_str[1 .. import_str.len - 1];
} }
pub const SourceRange = std.zig.Token.Loc; pub const SourceRange = std.zig.Token.Loc;

186
src/translate_c.zig Normal file
View File

@ -0,0 +1,186 @@
const std = @import("std");
const builtin = @import("builtin");
const Config = @import("Config.zig");
const ast = @import("ast.zig");
const Ast = std.zig.Ast;
const URI = @import("uri.zig");
/// Converts a `@cImport` block into equivalent c header source,
/// which can then be handed over to `zig translate-c`.
/// Caller owns returned memory.
///
/// **Example**
/// ```zig
/// const glfw = @cImport(
///     @cDefine("GLFW_INCLUDE_VULKAN", {})
///     @cInclude("GLFW/glfw3.h")
/// );
/// ```
/// gets converted into:
/// ```c
/// #define GLFW_INCLUDE_VULKAN
/// #include <GLFW/glfw3.h>
/// ```
pub fn convertCInclude(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index) error{ OutOfMemory, Unsupported }![]const u8 {
    const main_tokens = tree.nodes.items(.main_token);

    // only valid on an actual `@cImport` builtin call
    std.debug.assert(ast.isBuiltinCall(tree, node));
    std.debug.assert(std.mem.eql(u8, Ast.tokenSlice(tree, main_tokens[node]), "@cImport"));

    var source = std.ArrayList(u8).init(allocator);
    errdefer source.deinit();

    // small conversions run on a 512-byte stack buffer; larger ones
    // fall back to `allocator`
    var fallback = std.heap.stackFallback(512, allocator);

    var params_buffer: [2]Ast.Node.Index = undefined;
    const params = ast.builtinCallParams(tree, node, &params_buffer).?;
    for (params) |param| {
        try convertCIncludeInternal(fallback.get(), tree, param, &source);
    }

    return source.toOwnedSlice();
}
/// Appends the c-preprocessor equivalent of one `@cImport` child node
/// (`@cInclude`, `@cDefine`, `@cUndef`, or a block of them) to `output`.
/// Returns error.Unsupported for constructs that cannot be converted,
/// e.g. non-string-literal arguments.
fn convertCIncludeInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast.Node.Index, output: *std.ArrayList(u8)) error{ OutOfMemory, Unsupported }!void {
    const node_tags = tree.nodes.items(.tag);
    const main_tokens = tree.nodes.items(.main_token);

    var buffer: [2]Ast.Node.Index = undefined;
    if (ast.isBlock(tree, node)) {
        // recurse into blocks via @asyncCall with an explicitly allocated
        // frame, keeping stack usage bounded for deeply nested input
        const FrameSize = @sizeOf(@Frame(convertCIncludeInternal));
        var child_frame = try allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize);
        defer allocator.free(child_frame);

        for (ast.blockStatements(tree, node, &buffer).?) |statement| {
            try await @asyncCall(child_frame, {}, convertCIncludeInternal, .{ allocator, tree, statement, output });
        }
    } else if (ast.builtinCallParams(tree, node, &buffer)) |params| {
        if (params.len < 1) return;

        const call_name = Ast.tokenSlice(tree, main_tokens[node]);

        if (node_tags[params[0]] != .string_literal) return error.Unsupported;
        const first = extractString(Ast.tokenSlice(tree, main_tokens[params[0]]));

        if (std.mem.eql(u8, call_name, "@cInclude")) {
            try output.writer().print("#include <{s}>\n", .{first});
        } else if (std.mem.eql(u8, call_name, "@cDefine")) {
            if (params.len < 2) return;

            var buffer2: [2]Ast.Node.Index = undefined;
            // `@cDefine("FOO", {})` (empty block value) maps to a value-less #define
            const is_void = if (ast.blockStatements(tree, params[1], &buffer2)) |block| block.len == 0 else false;

            if (is_void) {
                try output.writer().print("#define {s}\n", .{first});
            } else {
                if (node_tags[params[1]] != .string_literal) return error.Unsupported;
                const second = extractString(Ast.tokenSlice(tree, main_tokens[params[1]]));
                try output.writer().print("#define {s} {s}\n", .{ first, second });
            }
        } else if (std.mem.eql(u8, call_name, "@cUndef")) {
            // fix: the c preprocessor directive is `#undef`; the previous
            // `#undefine` is not a valid directive and would make the
            // generated header fail to compile
            try output.writer().print("#undef {s}\n", .{first});
        } else {
            return error.Unsupported;
        }
    }
}
/// Takes a c header source, writes it to a temporary file in the global
/// cache directory, and invokes `zig translate-c` on it.
/// Returns a `file://` uri to the generated zig file, or null on failure
/// (failures are logged rather than returned as errors).
/// Caller owns returned memory.
pub fn translate(allocator: std.mem.Allocator, config: Config, include_dirs: []const []const u8, source: []const u8) error{OutOfMemory}!?[]const u8 {
    const file_path = try std.fs.path.join(allocator, &[_][]const u8{ config.global_cache_path.?, "cimport.h" });
    defer allocator.free(file_path);

    var file = std.fs.createFileAbsolute(file_path, .{}) catch |err| {
        std.log.warn("failed to create file '{s}': {}", .{ file_path, err });
        return null;
    };
    defer file.close();
    defer std.fs.deleteFileAbsolute(file_path) catch |err| {
        std.log.warn("failed to delete file '{s}': {}", .{ file_path, err });
    };

    // writeAll retries on short writes; the previous bare `write` could
    // persist only a prefix of `source` and then still run translate-c on
    // a truncated header. Bail out on failure instead of continuing.
    file.writeAll(source) catch |err| {
        std.log.warn("failed to write to '{s}': {}", .{ file_path, err });
        return null;
    };

    // native system include paths (libc etc.), best effort — null when
    // detection fails
    const base_include_dirs = blk: {
        const target_info = std.zig.system.NativeTargetInfo.detect(allocator, .{}) catch break :blk null;
        var native_paths = std.zig.system.NativePaths.detect(allocator, target_info) catch break :blk null;
        defer native_paths.deinit();

        break :blk native_paths.include_dirs.toOwnedSlice();
    };
    defer if (base_include_dirs) |dirs| {
        for (dirs) |path| {
            allocator.free(path);
        }
        allocator.free(dirs);
    };

    const base_args = &[_][]const u8{
        config.zig_exe_path.?,
        "translate-c",
        "--enable-cache",
        "--zig-lib-dir",
        config.zig_lib_path.?,
        "--cache-dir",
        config.global_cache_path.?,
    };

    // base args + ("-I", dir) per include dir + the header path itself
    const argc = base_args.len + 2 * (include_dirs.len + if (base_include_dirs) |dirs| dirs.len else 0) + 1;
    var argv = try std.ArrayListUnmanaged([]const u8).initCapacity(allocator, argc);
    defer argv.deinit(allocator);

    argv.appendSliceAssumeCapacity(base_args);
    if (base_include_dirs) |dirs| {
        for (dirs) |include_dir| {
            argv.appendAssumeCapacity("-I");
            argv.appendAssumeCapacity(include_dir);
        }
    }
    for (include_dirs) |include_dir| {
        argv.appendAssumeCapacity("-I");
        argv.appendAssumeCapacity(include_dir);
    }
    argv.appendAssumeCapacity(file_path);

    const result = std.ChildProcess.exec(.{
        .allocator = allocator,
        .argv = argv.items,
    }) catch |err| {
        std.log.err("Failed to execute zig translate-c process, error: {}", .{err});
        return null;
    };
    defer allocator.free(result.stdout);
    defer allocator.free(result.stderr);

    return switch (result.term) {
        .Exited => |code| if (code == 0) {
            // on success translate-c prints the generated file's path on stdout
            return try std.mem.join(allocator, "", &.{
                "file://",
                std.mem.sliceTo(result.stdout, '\n'),
            });
        } else {
            // TODO convert failure to `textDocument/publishDiagnostics`
            std.log.err("zig translate-c process failed, code: {}, stderr: '{s}'", .{ code, result.stderr });
            return null;
        },
        else => {
            std.log.err("zig translate-c process terminated '{}'", .{result.term});
            return null;
        },
    };
}
/// Strips one pair of surrounding double quotes from `str` if present;
/// otherwise returns `str` unchanged.
fn extractString(str: []const u8) []const u8 {
    const quoted = std.mem.startsWith(u8, str, "\"") and std.mem.endsWith(u8, str, "\"");
    return if (quoted) str[1 .. str.len - 1] else str;
}