emit JSON formatted packages & include dirs in build_runner.zig
parent 41a467b0f3
commit 1859afd4c5
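With this change the modified build runner prints a single JSON document on stdout via std.json.stringify (instead of the old `name\x00path` lines), and DocumentStore parses it back into a BuildConfig with std.json.parse. As a rough illustration only (the package name and path below are invented, include_dirs is left empty, and the exact whitespace depends on the stringify options and std.json version), the emitted output looks something like:

    {
        "packages": [
            {
                "name": "known-folders",
                "uri": "/home/user/project/libs/known-folders/known-folders.zig"
            }
        ],
        "include_dirs": []
    }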
DocumentStore.zig

@@ -6,20 +6,16 @@ const offsets = @import("offsets.zig");
 const log = std.log.scoped(.doc_store);
 const Ast = std.zig.Ast;
 const BuildAssociatedConfig = @import("BuildAssociatedConfig.zig");
+const BuildConfig = @import("special/build_runner.zig").BuildConfig;
 const tracy = @import("tracy.zig");
 const Config = @import("Config.zig");
 
 const DocumentStore = @This();
 
 const BuildFile = struct {
-    const Pkg = struct {
-        name: []const u8,
-        uri: []const u8,
-    };
-
     refs: usize,
     uri: []const u8,
-    packages: std.ArrayListUnmanaged(Pkg),
+    config: BuildConfig,
 
     builtin_uri: ?[]const u8 = null,
 
@@ -110,7 +106,7 @@ fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: *B
     }
 }
 
-const LoadPackagesContext = struct {
+const LoadBuildConfigContext = struct {
     build_file: *BuildFile,
     allocator: std.mem.Allocator,
     build_runner_path: []const u8,
@@ -121,7 +117,7 @@ const LoadPackagesContext = struct {
     global_cache_root: []const u8,
 };
 
-fn loadPackages(context: LoadPackagesContext) !void {
+fn loadBuildConfiguration(context: LoadBuildConfigContext) !void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
@@ -167,46 +163,24 @@ fn loadPackages(context: LoadPackagesContext) !void {
         defer allocator.free(joined);
 
         log.err(
-            "Failed to execute build runner to collect packages, command:\n{s}\nError: {s}",
+            "Failed to execute build runner to collect build configuration, command:\n{s}\nError: {s}",
             .{ joined, zig_run_result.stderr },
         );
     }
 
     switch (zig_run_result.term) {
         .Exited => |exit_code| {
-            if (exit_code == 0) {
-                log.debug("Finished zig run for build file {s}", .{build_file.uri});
+            if (exit_code != 0) return error.RunFailed;
 
-                for (build_file.packages.items) |old_pkg| {
-                    allocator.free(old_pkg.name);
-                    allocator.free(old_pkg.uri);
-                }
+            const parse_options = std.json.ParseOptions{ .allocator = allocator };
 
-                build_file.packages.shrinkAndFree(allocator, 0);
-                var line_it = std.mem.split(u8, zig_run_result.stdout, "\n");
-                while (line_it.next()) |line| {
-                    if (std.mem.indexOfScalar(u8, line, '\x00')) |zero_byte_idx| {
-                        const name = line[0..zero_byte_idx];
-                        const rel_path = line[zero_byte_idx + 1 ..];
+            std.json.parseFree(BuildConfig, build_file.config, parse_options);
 
-                        const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ directory_path, rel_path });
-                        defer allocator.free(pkg_abs_path);
-
-                        const pkg_uri = try URI.fromPath(allocator, pkg_abs_path);
-                        errdefer allocator.free(pkg_uri);
-
-                        const duped_name = try allocator.dupe(u8, name);
-                        errdefer allocator.free(duped_name);
-
-                        (try build_file.packages.addOne(allocator)).* = .{
-                            .name = duped_name,
-                            .uri = pkg_uri,
-                        };
-                    }
-                }
-            } else {
-                return error.RunFailed;
-            }
+            build_file.config = std.json.parse(
+                BuildConfig,
+                &std.json.TokenStream.init(zig_run_result.stdout),
+                parse_options,
+            ) catch return error.RunFailed;
         },
         else => return error.RunFailed,
     }
@@ -256,7 +230,10 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
         build_file.* = .{
             .refs = 1,
             .uri = try self.allocator.dupe(u8, uri),
-            .packages = .{},
+            .config = .{
+                .packages = &.{},
+                .include_dirs = &.{},
+            },
         };
 
         const build_file_path = try URI.parse(self.allocator, build_file.uri);
@@ -274,7 +251,7 @@ fn newDocument(self: *DocumentStore, uri: []const u8, text: [:0]u8) anyerror!*Ha
 
         // TODO: Do this in a separate thread?
         // It can take quite long.
-        loadPackages(.{
+        loadBuildConfiguration(.{
             .build_file = build_file,
             .allocator = self.allocator,
             .build_runner_path = self.config.build_runner_path.?,
@@ -409,11 +386,8 @@ fn decrementBuildFileRefs(self: *DocumentStore, build_file: *BuildFile) void {
     build_file.refs -= 1;
     if (build_file.refs == 0) {
         log.debug("Freeing build file {s}", .{build_file.uri});
-        for (build_file.packages.items) |pkg| {
-            self.allocator.free(pkg.name);
-            self.allocator.free(pkg.uri);
-        }
-        build_file.packages.deinit(self.allocator);
+
+        std.json.parseFree(BuildConfig, build_file.config, .{ .allocator = self.allocator });
 
         // Decrement count of the document since one count comes
         // from the build file existing.
@@ -544,7 +518,7 @@ fn refreshDocument(self: *DocumentStore, handle: *Handle) !void {
 
 pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
     if (handle.is_build_file) |build_file| {
-        loadPackages(.{
+        loadBuildConfiguration(.{
             .build_file = build_file,
             .allocator = self.allocator,
             .build_runner_path = self.config.build_runner_path.?,
@@ -553,7 +527,7 @@ pub fn applySave(self: *DocumentStore, handle: *Handle) !void {
             .cache_root = self.zig_cache_root,
             .global_cache_root = self.zig_global_cache_root,
         }) catch |err| {
-            log.err("Failed to load packages of build file {s} (error: {})", .{ build_file.uri, err });
+            log.err("Failed to load build configuration for {s} (error: {})", .{ build_file.uri, err });
         };
     }
 }
@@ -643,7 +617,7 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand
         return null;
     } else if (!std.mem.endsWith(u8, import_str, ".zig")) {
         if (handle.associated_build_file) |build_file| {
-            for (build_file.packages.items) |pkg| {
+            for (build_file.config.packages) |pkg| {
                 if (std.mem.eql(u8, import_str, pkg.name)) {
                     return try allocator.dupe(u8, pkg.uri);
                 }
@@ -686,7 +660,7 @@ pub fn resolveImport(self: *DocumentStore, handle: *Handle, import_str: []const
             }
         }
         if (handle.associated_build_file) |bf| {
-            for (bf.packages.items) |pkg| {
+            for (bf.config.packages) |pkg| {
                 if (std.mem.eql(u8, pkg.uri, final_uri)) {
                     break :find_uri pkg.uri;
                 }
@@ -775,11 +749,7 @@ pub fn deinit(self: *DocumentStore) void {
 
     self.handles.deinit(self.allocator);
     for (self.build_files.items) |build_file| {
-        for (build_file.packages.items) |pkg| {
-            self.allocator.free(pkg.name);
-            self.allocator.free(pkg.uri);
-        }
-        build_file.packages.deinit(self.allocator);
+        std.json.parseFree(BuildConfig, build_file.config, .{ .allocator = self.allocator });
         self.allocator.free(build_file.uri);
         build_file.destroy(self.allocator);
     }
Server.zig

@@ -2048,9 +2048,9 @@ fn completionHandler(server: *Server, writer: anytype, id: types.RequestId, req:
 
     if (!subpath_present and pos_context == .import_string_literal) {
         if (handle.associated_build_file) |bf| {
-            try fsl_completions.ensureUnusedCapacity(server.arena.allocator(), bf.packages.items.len);
+            try fsl_completions.ensureUnusedCapacity(server.arena.allocator(), bf.config.packages.len);
 
-            for (bf.packages.items) |pkg| {
+            for (bf.config.packages) |pkg| {
                 try fsl_completions.append(server.arena.allocator(), .{
                     .label = pkg.name,
                     .kind = .Module,
special/build_runner.zig

@@ -5,14 +5,39 @@ const io = std.io;
 const log = std.log;
 const process = std.process;
 const Builder = std.build.Builder;
-const Pkg = std.build.Pkg;
 const InstallArtifactStep = std.build.InstallArtifactStep;
 const LibExeObjStep = std.build.LibExeObjStep;
 const ArrayList = std.ArrayList;
 
+pub const BuildConfig = struct {
+    packages: []Pkg,
+    include_dirs: []IncludeDir,
+
+    pub const Pkg = struct {
+        name: []const u8,
+        uri: []const u8,
+    };
+
+    pub const IncludeDir = union(enum) {
+        raw_path: []const u8,
+        raw_path_system: []const u8,
+
+        pub fn getPath(self: IncludeDir) []const u8 {
+            return switch (self) {
+                .raw_path => |path| return path,
+                .raw_path_system => |path| return path,
+            };
+        }
+
+        pub fn eql(a: IncludeDir, b: IncludeDir) bool {
+            return @enumToInt(a) == @enumToInt(b) and
+                std.mem.eql(u8, a.getPath(), b.getPath());
+        }
+    };
+};
+
 ///! This is a modified build runner to extract information out of build.zig
-///! Modified from the std.special.build_runner
+///! Modified version of lib/build_runner.zig
 pub fn main() !void {
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
@@ -55,48 +80,93 @@ pub fn main() !void {
     builder.resolveInstallPrefix(null, Builder.DirList{});
     try runBuild(builder);
 
-    const stdout_stream = io.getStdOut().writer();
+    var packages = ArrayList(BuildConfig.Pkg).init(allocator);
+    defer packages.deinit();
+
+    var include_dirs = ArrayList(BuildConfig.IncludeDir).init(allocator);
+    defer include_dirs.deinit();
 
     // TODO: We currently add packages from every LibExeObj step that the install step depends on.
     // Should we error out or keep one step or something similar?
     // We also flatten them, we should probably keep the nested structure.
     for (builder.top_level_steps.items) |tls| {
         for (tls.step.dependencies.items) |step| {
-            try processStep(stdout_stream, step);
+            try processStep(&packages, &include_dirs, step);
         }
     }
 
+    try std.json.stringify(
+        BuildConfig{
+            .packages = packages.items,
+            .include_dirs = include_dirs.items,
+        },
+        .{ .whitespace = .{} },
+        io.getStdOut().writer(),
+    );
 }
 
-fn processStep(stdout_stream: anytype, step: *std.build.Step) anyerror!void {
+fn processStep(
+    packages: *ArrayList(BuildConfig.Pkg),
+    include_dirs: *ArrayList(BuildConfig.IncludeDir),
+    step: *std.build.Step,
+) anyerror!void {
     if (step.cast(InstallArtifactStep)) |install_exe| {
+        try processIncludeDirs(include_dirs, install_exe.artifact.include_dirs.items);
         for (install_exe.artifact.packages.items) |pkg| {
-            try processPackage(stdout_stream, pkg);
+            try processPackage(packages, pkg);
         }
     } else if (step.cast(LibExeObjStep)) |exe| {
+        try processIncludeDirs(include_dirs, exe.include_dirs.items);
         for (exe.packages.items) |pkg| {
-            try processPackage(stdout_stream, pkg);
+            try processPackage(packages, pkg);
         }
     } else {
        for (step.dependencies.items) |unknown_step| {
-            try processStep(stdout_stream, unknown_step);
+            try processStep(packages, include_dirs, unknown_step);
        }
    }
 }
 
-fn processPackage(out_stream: anytype, pkg: Pkg) anyerror!void {
-    const source = if (@hasField(Pkg, "source")) pkg.source else pkg.path;
+fn processPackage(
+    packages: *ArrayList(BuildConfig.Pkg),
+    pkg: std.build.Pkg,
+) anyerror!void {
+    for (packages.items) |package| {
+        if (std.mem.eql(u8, package.name, pkg.name)) return;
+    }
+
+    const source = if (@hasField(std.build.Pkg, "source")) pkg.source else pkg.path;
     switch (source) {
-        .path => |path| try out_stream.print("{s}\x00{s}\n", .{ pkg.name, path }),
-        .generated => |generated| if (generated.path != null) try out_stream.print("{s}\x00{s}\n", .{ pkg.name, generated.path.? }),
+        .path => |path| try packages.append(.{ .name = pkg.name, .uri = path }),
+        .generated => |generated| if (generated.path != null) try packages.append(.{ .name = pkg.name, .uri = generated.path.? }),
     }
 
     if (pkg.dependencies) |dependencies| {
         for (dependencies) |dep| {
-            try processPackage(out_stream, dep);
+            try processPackage(packages, dep);
         }
     }
 }
 
+fn processIncludeDirs(
+    include_dirs: *ArrayList(BuildConfig.IncludeDir),
+    dirs: []std.build.LibExeObjStep.IncludeDir,
+) !void {
+    outer: for (dirs) |dir| {
+        const candidate: BuildConfig.IncludeDir = switch (dir) {
+            .raw_path => |path| .{ .raw_path = path },
+            .raw_path_system => |path| .{ .raw_path_system = path },
+            else => continue,
+        };
+
+        for (include_dirs.items) |include_dir| {
+            if (candidate.eql(include_dir)) continue :outer;
+        }
+
+        try include_dirs.append(candidate);
+    }
+}
+
 fn runBuild(builder: *Builder) anyerror!void {
     switch (@typeInfo(@typeInfo(@TypeOf(root.build)).Fn.return_type.?)) {
         .Void => root.build(builder),