Merge pull request #597 from Techatrix/build-runner-fix

Fix Path <-> Uri mismatch
Lee Cannon 2022-08-22 17:40:59 +01:00 committed by GitHub
commit b21039d512
2 changed files with 95 additions and 39 deletions
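
In short: the build runner used to report package locations in a field named uri that actually held a filesystem path, and include directories as a tagged union (raw_path / raw_path_system). With this change the runner emits plain paths (Pkg.path, and IncludeDir as { path, system }), and the document store performs the path-to-URI conversion itself, storing the result in a new BuildFileConfig.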

View File

@@ -21,8 +21,7 @@ pub const hasher_init: Hasher = Hasher.init(&[_]u8{0} ** Hasher.key_length);
const BuildFile = struct {
refs: usize,
uri: []const u8,
config: BuildConfig,
config: BuildFileConfig,
builtin_uri: ?[]const u8 = null,
pub fn destroy(self: *BuildFile, allocator: std.mem.Allocator) void {
@@ -31,6 +30,31 @@ const BuildFile = struct {
}
};
pub const BuildFileConfig = struct {
packages: []Pkg,
include_dirs: []IncludeDir,
pub fn deinit(self: BuildFileConfig, allocator: std.mem.Allocator) void {
for (self.packages) |pkg| {
allocator.free(pkg.name);
allocator.free(pkg.uri);
}
allocator.free(self.packages);
for (self.include_dirs) |dir| {
allocator.free(dir.path);
}
allocator.free(self.include_dirs);
}
pub const Pkg = struct {
name: []const u8,
uri: []const u8,
};
pub const IncludeDir = BuildConfig.IncludeDir;
};
pub const Handle = struct {
document: types.TextDocument,
count: usize,
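
Unlike the JSON-parsed BuildConfig, which is released with std.json.parseFree, the new BuildFileConfig owns duplicated copies of every string it holds, so it is torn down with a plain deinit. A minimal sketch of that contract as a leak-check test, assuming it sits in the same file as the struct above (the package name and URI are invented for illustration):

test "BuildFileConfig frees everything it owns" {
    const gpa = std.testing.allocator;

    var pkgs = try gpa.alloc(BuildFileConfig.Pkg, 1);
    pkgs[0] = .{
        // invented package name and URI, purely for the leak check
        .name = try gpa.dupe(u8, "example-pkg"),
        .uri = try gpa.dupe(u8, "file:///tmp/example-pkg/main.zig"),
    };

    const cfg = BuildFileConfig{
        .packages = pkgs,
        .include_dirs = try gpa.alloc(BuildFileConfig.IncludeDir, 0),
    };

    // deinit frees each name, uri and include path plus both slices,
    // so std.testing.allocator reports no leaks when the test ends.
    cfg.deinit(gpa);
}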
@@ -182,13 +206,56 @@ fn loadBuildConfiguration(context: LoadBuildConfigContext) !void {
const parse_options = std.json.ParseOptions{ .allocator = allocator };
std.json.parseFree(BuildConfig, build_file.config, parse_options);
build_file.config.deinit(allocator);
build_file.config = std.json.parse(
const config: BuildConfig = std.json.parse(
BuildConfig,
&std.json.TokenStream.init(zig_run_result.stdout),
parse_options,
) catch return error.RunFailed;
defer std.json.parseFree(BuildConfig, config, parse_options);
var packages = try std.ArrayListUnmanaged(BuildFileConfig.Pkg).initCapacity(allocator, config.packages.len);
errdefer {
for (packages.items) |pkg| {
allocator.free(pkg.name);
allocator.free(pkg.uri);
}
packages.deinit(allocator);
}
var include_dirs = try std.ArrayListUnmanaged(BuildFileConfig.IncludeDir).initCapacity(allocator, config.include_dirs.len);
errdefer {
for (include_dirs.items) |dir| {
allocator.free(dir.path);
}
include_dirs.deinit(allocator);
}
for (config.packages) |pkg| {
const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ directory_path, pkg.path });
defer allocator.free(pkg_abs_path);
const uri = try URI.fromPath(allocator, pkg_abs_path);
errdefer allocator.free(uri);
const name = try allocator.dupe(u8, pkg.name);
errdefer allocator.free(name);
packages.appendAssumeCapacity(.{ .name = name, .uri = uri });
}
for (config.include_dirs) |dir| {
const path = try allocator.dupe(u8, dir.path);
errdefer allocator.free(path);
include_dirs.appendAssumeCapacity(.{ .path = path, .system = dir.system });
}
build_file.config = .{
.packages = packages.toOwnedSlice(allocator),
.include_dirs = include_dirs.toOwnedSlice(allocator),
};
},
else => return error.RunFailed,
}
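
The heart of the fix is the loop above: each package path reported by the build runner is resolved against the directory containing build.zig and only then converted with URI.fromPath. A self-contained, hypothetical stand-in for that step (zls's real URI.fromPath also percent-encodes the path and handles Windows drive letters) could look like:

const std = @import("std");

// Hypothetical simplification of the path -> URI conversion done above.
fn pkgPathToUri(allocator: std.mem.Allocator, build_dir: []const u8, pkg_path: []const u8) ![]u8 {
    // Make the package path absolute relative to the build.zig directory.
    const abs = try std.fs.path.resolve(allocator, &[_][]const u8{ build_dir, pkg_path });
    defer allocator.free(abs);
    return std.fmt.allocPrint(allocator, "file://{s}", .{abs});
}

test "relative package path becomes a file URI" {
    // Assumes POSIX-style paths; Windows paths need extra handling.
    const uri = try pkgPathToUri(std.testing.allocator, "/home/user/project", "libs/dep/dep.zig");
    defer std.testing.allocator.free(uri);
    try std.testing.expectEqualStrings("file:///home/user/project/libs/dep/dep.zig", uri);
}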
@@ -404,7 +471,7 @@ fn decrementBuildFileRefs(self: *DocumentStore, build_file: *BuildFile) void {
if (build_file.refs == 0) {
log.debug("Freeing build file {s}", .{build_file.uri});
std.json.parseFree(BuildConfig, build_file.config, .{ .allocator = self.allocator });
build_file.config.deinit(self.allocator);
// Decrement count of the document since one count comes
// from the build file existing.
@@ -598,7 +665,7 @@ fn translate(self: *DocumentStore, handle: *Handle, source: []const u8) !?[]cons
errdefer self.allocator.free(result);
for (dirs) |dir, i| {
result[i] = dir.getPath();
result[i] = dir.path;
}
break :blk result;
@@ -1019,7 +1086,7 @@ pub fn deinit(self: *DocumentStore) void {
self.handles.deinit(self.allocator);
for (self.build_files.items) |build_file| {
std.json.parseFree(BuildConfig, build_file.config, .{ .allocator = self.allocator });
build_file.config.deinit(self.allocator);
self.allocator.free(build_file.uri);
build_file.destroy(self.allocator);
}

View File

@@ -1,13 +1,10 @@
const root = @import("@build@");
const std = @import("std");
const fmt = std.fmt;
const io = std.io;
const log = std.log;
const process = std.process;
const Builder = std.build.Builder;
const InstallArtifactStep = std.build.InstallArtifactStep;
const LibExeObjStep = std.build.LibExeObjStep;
const ArrayList = std.ArrayList;
pub const BuildConfig = struct {
packages: []Pkg,
@@ -15,24 +12,12 @@ pub const BuildConfig = struct {
pub const Pkg = struct {
name: []const u8,
uri: []const u8,
path: []const u8,
};
pub const IncludeDir = union(enum) {
raw_path: []const u8,
raw_path_system: []const u8,
pub fn getPath(self: IncludeDir) []const u8 {
return switch (self) {
.raw_path => |path| return path,
.raw_path_system => |path| return path,
};
}
pub fn eql(a: IncludeDir, b: IncludeDir) bool {
return @enumToInt(a) == @enumToInt(b) and
std.mem.eql(u8, a.getPath(), b.getPath());
}
pub const IncludeDir = struct {
path: []const u8,
system: bool,
};
};
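
With IncludeDir flattened to a plain struct, the JSON the runner writes to stdout (and the server parses into BuildConfig above) takes roughly this shape; the concrete values are made up for illustration:

{
    "packages": [
        {
            "name": "example-pkg",
            "path": "libs/example-pkg/main.zig"
        }
    ],
    "include_dirs": [
        {
            "path": "/usr/include",
            "system": true
        }
    ]
}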
@@ -80,10 +65,10 @@ pub fn main() !void {
builder.resolveInstallPrefix(null, Builder.DirList{});
try runBuild(builder);
var packages = ArrayList(BuildConfig.Pkg).init(allocator);
var packages = std.ArrayList(BuildConfig.Pkg).init(allocator);
defer packages.deinit();
var include_dirs = ArrayList(BuildConfig.IncludeDir).init(allocator);
var include_dirs = std.ArrayList(BuildConfig.IncludeDir).init(allocator);
defer include_dirs.deinit();
// TODO: We currently add packages from every LibExeObj step that the install step depends on.
@@ -101,13 +86,13 @@ pub fn main() !void {
.include_dirs = include_dirs.items,
},
.{ .whitespace = .{} },
io.getStdOut().writer(),
std.io.getStdOut().writer(),
);
}
fn processStep(
packages: *ArrayList(BuildConfig.Pkg),
include_dirs: *ArrayList(BuildConfig.IncludeDir),
packages: *std.ArrayList(BuildConfig.Pkg),
include_dirs: *std.ArrayList(BuildConfig.IncludeDir),
step: *std.build.Step,
) anyerror!void {
if (step.cast(InstallArtifactStep)) |install_exe| {
@@ -128,7 +113,7 @@ fn processStep(
}
fn processPackage(
packages: *ArrayList(BuildConfig.Pkg),
packages: *std.ArrayList(BuildConfig.Pkg),
pkg: std.build.Pkg,
) anyerror!void {
for (packages.items) |package| {
@@ -136,9 +121,13 @@ fn processPackage(
}
const source = if (@hasField(std.build.Pkg, "source")) pkg.source else pkg.path;
switch (source) {
.path => |path| try packages.append(.{ .name = pkg.name, .uri = path }),
.generated => |generated| if (generated.path != null) try packages.append(.{ .name = pkg.name, .uri = generated.path.? }),
const maybe_path = switch (source) {
.path => |path| path,
.generated => |generated| generated.path,
};
if (maybe_path) |path| {
try packages.append(.{ .name = pkg.name, .path = path });
}
if (pkg.dependencies) |dependencies| {
@@ -149,18 +138,18 @@ fn processPackage(
}
fn processIncludeDirs(
include_dirs: *ArrayList(BuildConfig.IncludeDir),
include_dirs: *std.ArrayList(BuildConfig.IncludeDir),
dirs: []std.build.LibExeObjStep.IncludeDir,
) !void {
outer: for (dirs) |dir| {
const candidate: BuildConfig.IncludeDir = switch (dir) {
.raw_path => |path| .{ .raw_path = path },
.raw_path_system => |path| .{ .raw_path_system = path },
.raw_path => |path| .{ .path = path, .system = false },
.raw_path_system => |path| .{ .path = path, .system = true },
else => continue,
};
for (include_dirs.items) |include_dir| {
if (candidate.eql(include_dir)) continue :outer;
if (std.mem.eql(u8, candidate.path, include_dir.path)) continue :outer;
}
try include_dirs.append(candidate);