Merge pull request #1249 from FalsePattern/master
Update to 0.11.0-dev.3737+9eb008717
commit d5cbb5b6c8
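Most of this update is a mechanical rename of Zig's conversion builtins to their new names (@enumToInt -> @intFromEnum, @intToEnum -> @enumFromInt, @boolToInt -> @intFromBool, @ptrToInt -> @intFromPtr, @errorToInt -> @intFromError, and so on). A minimal sketch of the old and new spellings, using a hypothetical MyEnum type (illustrative only, not part of the diff; at this dev version @enumFromInt still takes the destination type as its first argument, exactly as the hunks below do):

    const MyEnum = enum(u32) { a, b }; // hypothetical example type

    test "renamed conversion builtins" {
        // old spellings: @enumToInt, @intToEnum, @boolToInt
        const tag: u32 = @intFromEnum(MyEnum.b); // was @enumToInt(MyEnum.b)
        const e: MyEnum = @enumFromInt(MyEnum, tag); // was @intToEnum(MyEnum, tag)
        const bit: u1 = @intFromBool(tag == 1); // was @boolToInt(tag == 1)
        _ = e;
        _ = bit;
    }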
@@ -7,7 +7,7 @@ const zls_version = std.SemanticVersion{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
const min_zig = std.SemanticVersion.parse("0.11.0-dev.3696+8d0a8c285") catch unreachable; // std.builtin.Version -> std.SemanticVersion
const min_zig = std.SemanticVersion.parse("0.11.0-dev.3737+9eb008717") catch unreachable; // std.builtin.Version -> std.SemanticVersion
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
@@ -8,16 +8,17 @@
.hash = "122028c00915d9b37296059be8a3883c718dbb5bd174350caedf152fed1f46f99607",
},
.tres = .{
.url = "https://github.com/ziglibs/tres/archive/220d01f3931595e3a2e2a6a0693363c0bfaf47e9.tar.gz",
.hash = "12204d32c44b494c0cbe553811dedfb7a73da37b13b492872dd4edc0340983072697",
.url = "https://github.com/ziglibs/tres/archive/67d5b6305135d15ea84024e447c9070ce14fa5d0.tar.gz",
.hash = "1220b23398142f65003da68986b8e7b015720d4b62f7bcb5f73a111c9220422658f6",
},
.diffz = .{
.url = "https://github.com/ziglibs/diffz/archive/2fd03fc72760a700e41f30f2b180f26e11c3365b.tar.gz",
.hash = "1220de3226674f638ef4afcc919d121e06207a868cd163b24426b55d77c1048fc608",
.url = "https://github.com/ziglibs/diffz/archive/df02d432be9b40d55b5d7de5c1c53ec80aad6d5d.tar.gz",
.hash = "122041f6531ee2cd10e7d5f81817c50b45037affc95d748cbcd71a766866fb6030d4",
},
.binned_allocator = .{
.url = "https://gist.github.com/silversquirl/c1e4840048fdf48e669b6eac76d80634/archive/8bbe137e65f26854ff936046d884a45d4fa156de.tar.gz",
.hash = "1220044bd25cc02da476d0ddf988667a29751dae6bf988128b32ac258b21c23d0f47",
// upstream: https://gist.github.com/silversquirl/c1e4840048fdf48e669b6eac76d80634
.url = "https://gist.github.com/FalsePattern/48fded613c115e16e91c46db8642c7e4/archive/75e3d5e6a0e0cf23dbf7abfe16831e23c38721bc.tar.gz",
.hash = "1220ba896ddd4258eed9274b36284d4cc4600ee69c4c0978cbe237ec09a524a2e252",
},
},
}
@@ -137,8 +137,8 @@ pub fn huntItDown(

var current_namespace = namespace;
while (current_namespace != .none) {
const decls = interpreter.namespaces.items(.decls)[@enumToInt(current_namespace)];
defer current_namespace = interpreter.namespaces.items(.parent)[@enumToInt(current_namespace)];
const decls = interpreter.namespaces.items(.decls)[@intFromEnum(current_namespace)];
defer current_namespace = interpreter.namespaces.items(.parent)[@intFromEnum(current_namespace)];

if (decls.get(decl_name)) |decl| {
return decl;
@@ -195,7 +195,7 @@ pub fn interpret(
.node_idx = node_idx,
.ty = .none,
});
const container_namespace = @intToEnum(Namespace.Index, interpreter.namespaces.len - 1);
const container_namespace = @enumFromInt(Namespace.Index, interpreter.namespaces.len - 1);

const struct_index = try interpreter.ip.createStruct(interpreter.allocator, .{
.fields = .{},
@@ -244,7 +244,7 @@ pub fn interpret(
}

const struct_type = try interpreter.ip.get(interpreter.allocator, Key{ .struct_type = struct_index });
interpreter.namespaces.items(.ty)[@enumToInt(container_namespace)] = struct_type;
interpreter.namespaces.items(.ty)[@intFromEnum(container_namespace)] = struct_type;

return InterpretResult{ .value = Value{
.interpreter = interpreter,
@@ -261,7 +261,7 @@ pub fn interpret(
.aligned_var_decl,
.simple_var_decl,
=> {
var decls: *std.StringArrayHashMapUnmanaged(InternPool.DeclIndex) = &interpreter.namespaces.items(.decls)[@enumToInt(namespace)];
var decls: *std.StringArrayHashMapUnmanaged(InternPool.DeclIndex) = &interpreter.namespaces.items(.decls)[@intFromEnum(namespace)];

const name = analysis.getDeclName(tree, node_idx).?;
const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
@@ -324,7 +324,7 @@ pub fn interpret(
.node_idx = node_idx,
.ty = .none,
});
const block_namespace = @intToEnum(Namespace.Index, interpreter.namespaces.len - 1);
const block_namespace = @enumFromInt(Namespace.Index, interpreter.namespaces.len - 1);

var buffer: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(tree, node_idx, &buffer).?;
@@ -333,7 +333,7 @@ pub fn interpret(
const ret = try interpreter.interpret(idx, block_namespace, options);
switch (ret) {
.@"break" => |lllll| {
const maybe_block_label_string = if (interpreter.namespaces.get(@enumToInt(namespace)).getLabel(tree)) |i| tree.tokenSlice(i) else null;
const maybe_block_label_string = if (interpreter.namespaces.get(@intFromEnum(namespace)).getLabel(tree)) |i| tree.tokenSlice(i) else null;
if (lllll) |l| {
if (maybe_block_label_string) |ls| {
if (std.mem.eql(u8, l, ls)) {
@@ -345,7 +345,7 @@ pub fn interpret(
}
},
.break_with_value => |bwv| {
const maybe_block_label_string = if (interpreter.namespaces.get(@enumToInt(namespace)).getLabel(tree)) |i| tree.tokenSlice(i) else null;
const maybe_block_label_string = if (interpreter.namespaces.get(@intFromEnum(namespace)).getLabel(tree)) |i| tree.tokenSlice(i) else null;

if (bwv.label) |l| {
if (maybe_block_label_string) |ls| {
@@ -753,7 +753,7 @@ pub fn interpret(
var big_int = try std.math.big.int.Managed.init(interpreter.allocator);
defer big_int.deinit();
const prefix_length: usize = if (base != .decimal) 2 else 0;
try big_int.setString(@enumToInt(base), s[prefix_length..]);
try big_int.setString(@intFromEnum(base), s[prefix_length..]);
std.debug.assert(number_type == .comptime_int_type);
break :blk Key{ .int_big_value = .{ .ty = number_type, .int = big_int.toConst() } };
},
@@ -947,7 +947,7 @@ pub fn interpret(

const name = interpreter.ip.indexToKey(field_name.index).bytes; // TODO add checks

const decls = interpreter.namespaces.items(.decls)[@enumToInt(value_namespace)];
const decls = interpreter.namespaces.items(.decls)[@intFromEnum(value_namespace)];
const has_decl = decls.contains(name);

return InterpretResult{ .value = Value{
@@ -1058,7 +1058,7 @@ pub fn interpret(
// fnd.return_type = value.value_data.@"type";

if (namespace != .none) {
const decls = &interpreter.namespaces.items(.decls)[@enumToInt(namespace)];
const decls = &interpreter.namespaces.items(.decls)[@intFromEnum(namespace)];

const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
.name = name,
@@ -1236,7 +1236,7 @@ pub fn call(
.node_idx = func_node_idx,
.ty = .none,
});
const fn_namespace = @intToEnum(Namespace.Index, interpreter.namespaces.len - 1);
const fn_namespace = @enumFromInt(Namespace.Index, interpreter.namespaces.len - 1);

var arg_it = proto.iterate(&tree);
var arg_index: usize = 0;
@@ -1257,7 +1257,7 @@ pub fn call(
if (param.name_token) |name_token| {
const name = offsets.tokenToSlice(tree, name_token);

const decls = &interpreter.namespaces.items(.decls)[@enumToInt(fn_namespace)];
const decls = &interpreter.namespaces.items(.decls)[@intFromEnum(fn_namespace)];
const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
.name = name,
.node_idx = name_token,
@@ -17,6 +17,8 @@ const AstGen = @import("stage2/AstGen.zig");
const Zir = @import("stage2/Zir.zig");
const InternPool = @import("analyser/InternPool.zig");

const legacy_json = @import("legacy_json.zig");

const DocumentStore = @This();

pub const Uri = []const u8;
@@ -43,10 +45,10 @@ const BuildFile = struct {

pub fn deinit(self: *BuildFile, allocator: std.mem.Allocator) void {
allocator.free(self.uri);
std.json.parseFree(BuildConfig, allocator, self.config);
legacy_json.parseFree(BuildConfig, allocator, self.config);
if (self.builtin_uri) |builtin_uri| allocator.free(builtin_uri);
if (self.build_associated_config) |cfg| {
std.json.parseFree(BuildAssociatedConfig, allocator, cfg);
legacy_json.parseFree(BuildAssociatedConfig, allocator, cfg);
}
}
};
@@ -301,7 +303,7 @@ pub fn applySave(self: *DocumentStore, handle: *const Handle) !void {
return;
};

std.json.parseFree(BuildConfig, self.allocator, build_file.config);
legacy_json.parseFree(BuildConfig, self.allocator, build_file.config);
build_file.config = build_config;
}
}
@@ -326,7 +328,7 @@ pub fn invalidateBuildFiles(self: *DocumentStore) void {
return;
};

std.json.parseFree(BuildConfig, self.allocator, build_file.config);
legacy_json.parseFree(BuildConfig, self.allocator, build_file.config);
build_file.config = build_config;
}
}
@@ -457,7 +459,7 @@ pub fn isInStd(uri: Uri) bool {
}

/// looks for a `zls.build.json` file in the build file directory
/// has to be freed with `std.json.parseFree`
/// has to be freed with `json_compat.parseFree`
fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: BuildFile) !BuildAssociatedConfig {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
@@ -473,7 +475,7 @@ fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: Bu
const file_buf = try config_file.readToEndAlloc(allocator, std.math.maxInt(usize));
defer allocator.free(file_buf);

return try std.json.parseFromSlice(BuildAssociatedConfig, allocator, file_buf, .{});
return try legacy_json.parseFromSlice(BuildAssociatedConfig, allocator, file_buf, .{});
}

/// Caller owns returned memory!
@@ -510,7 +512,7 @@ pub fn executeBuildRunner(
}

/// Runs the build.zig and extracts include directories and packages
/// Has to be freed with `std.json.parseFree`
/// Has to be freed with `json_compat.parseFree`
pub fn loadBuildConfiguration(
allocator: std.mem.Allocator,
build_file: BuildFile,
@@ -564,13 +566,13 @@ pub fn loadBuildConfiguration(
// to the BuildConfig type
.ignore_unknown_fields = true,
};
const build_config = std.json.parseFromSlice(
const build_config = legacy_json.parseFromSlice(
BuildConfig,
allocator,
zig_run_result.stdout,
parse_options,
) catch return error.RunFailed;
errdefer std.json.parseFree(BuildConfig, allocator, build_config);
errdefer legacy_json.parseFree(BuildConfig, allocator, build_config);

for (build_config.packages) |*pkg| {
const pkg_abs_path = try std.fs.path.resolve(allocator, &[_][]const u8{ build_file_path, "..", pkg.path });
@@ -31,7 +31,7 @@ pub fn parse(allocator: std.mem.Allocator, include_carriage_return: bool, reader
}

const header_name = header[0 .. std.mem.indexOf(u8, header, ": ") orelse return error.MissingColon];
const header_value = header[header_name.len + 2 .. header.len - @boolToInt(include_carriage_return)];
const header_value = header[header_name.len + 2 .. header.len - @intFromBool(include_carriage_return)];
if (std.mem.eql(u8, header_name, "Content-Length")) {
if (header_value.len == 0) return error.MissingHeaderValue;
r.content_length = std.fmt.parseInt(usize, header_value, 10) catch return error.InvalidContentLength;
@@ -1286,12 +1286,12 @@ const Message = union(enum) {
};
}

pub fn fromJsonValueTree(tree: std.json.ValueTree) error{InvalidRequest}!Message {
pub fn fromJsonValueTree(root: std.json.Value) error{InvalidRequest}!Message {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

if (tree.root != .object) return error.InvalidRequest;
const object = tree.root.object;
if (root != .object) return error.InvalidRequest;
const object = root.object;

if (object.get("id")) |id_obj| {
comptime std.debug.assert(!tres.isAllocatorRequired(types.RequestId));
@@ -1348,23 +1348,20 @@ pub fn processJsonRpc(
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

var parser = std.json.Parser.init(server.arena.allocator(), .alloc_always);
defer parser.deinit();

var tree = parser.parse(json) catch |err| {
var tree = std.json.parseFromSlice(std.json.Value, server.arena.allocator(), json, .{}) catch |err| {
log.err("failed to parse message: {}", .{err});
return; // maybe panic?
};
defer tree.deinit();

const message = Message.fromJsonValueTree(tree) catch |err| {
const message = Message.fromJsonValueTree(tree.value) catch |err| {
log.err("failed to parse message: {}", .{err});
return; // maybe panic?
};

server.processMessage(message) catch |err| switch (message) {
.RequestMessage => |request| server.sendResponseError(request.id, .{
.code = @errorToInt(err),
.code = @intFromError(err),
.message = @errorName(err),
}),
else => {},
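For reference, a minimal sketch of the std.json usage the two hunks above migrate to (the function and variable names here are illustrative, not from the codebase): std.json.parseFromSlice(std.json.Value, ...) replaces the removed std.json.Parser/ValueTree API, returning a handle whose .value field holds the parsed root and whose deinit() frees it:

    const std = @import("std");

    fn parseMessage(allocator: std.mem.Allocator, json: []const u8) !void {
        const tree = try std.json.parseFromSlice(std.json.Value, allocator, json, .{});
        defer tree.deinit();

        // the root value now lives in tree.value rather than tree.root
        if (tree.value != .object) return error.InvalidRequest;
        const object = tree.value.object;
        _ = object.get("id");
    }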
@@ -120,7 +120,7 @@ fn bswap(x: anytype) @TypeOf(x) {

const T = @TypeOf(x);
switch (@typeInfo(T)) {
.Enum => return @intToEnum(T, @byteSwap(@enumToInt(x))),
.Enum => return @enumFromInt(T, @byteSwap(@intFromEnum(x))),
.Int => return @byteSwap(x),
.Struct => |info| switch (info.layout) {
.Extern => {
@@ -153,7 +153,7 @@ fn bswap_and_workaround_u32(bytes_ptr: *const [4]u8) u32 {
/// workaround for https://github.com/ziglang/zig/issues/14904
fn bswap_and_workaround_tag(bytes_ptr: *const [4]u8) InMessage.Tag {
const int = std.mem.readIntLittle(u32, bytes_ptr);
return @intToEnum(InMessage.Tag, int);
return @enumFromInt(InMessage.Tag, int);
}

const OutMessage = std.zig.Client.Message;
@ -1199,9 +1199,9 @@ pub const Index = enum(u32) {
|
||||
|
||||
comptime {
|
||||
const Zir = @import("../stage2/Zir.zig");
|
||||
assert(@enumToInt(Zir.Inst.Ref.generic_poison_type) == @enumToInt(Index.generic_poison_type));
|
||||
assert(@enumToInt(Zir.Inst.Ref.undef) == @enumToInt(Index.undefined_value));
|
||||
assert(@enumToInt(Zir.Inst.Ref.one_usize) == @enumToInt(Index.one_usize));
|
||||
assert(@intFromEnum(Zir.Inst.Ref.generic_poison_type) == @intFromEnum(Index.generic_poison_type));
|
||||
assert(@intFromEnum(Zir.Inst.Ref.undef) == @intFromEnum(Index.undefined_value));
|
||||
assert(@intFromEnum(Zir.Inst.Ref.one_usize) == @intFromEnum(Index.one_usize));
|
||||
}
|
||||
|
||||
pub const NamespaceIndex = enum(u32) {
|
||||
@ -1513,11 +1513,11 @@ pub fn deinit(ip: *InternPool, gpa: Allocator) void {
|
||||
|
||||
pub fn indexToKey(ip: InternPool, index: Index) Key {
|
||||
assert(index != .none);
|
||||
const item = ip.items.get(@enumToInt(index));
|
||||
const item = ip.items.get(@intFromEnum(index));
|
||||
const data = item.data;
|
||||
return switch (item.tag) {
|
||||
.simple_type => .{ .simple_type = @intToEnum(SimpleType, data) },
|
||||
.simple_value => .{ .simple_value = @intToEnum(SimpleValue, data) },
|
||||
.simple_type => .{ .simple_type = @enumFromInt(SimpleType, data) },
|
||||
.simple_value => .{ .simple_value = @enumFromInt(SimpleValue, data) },
|
||||
|
||||
.type_int_signed => .{ .int_type = .{
|
||||
.signedness = .signed,
|
||||
@ -1529,17 +1529,17 @@ pub fn indexToKey(ip: InternPool, index: Index) Key {
|
||||
} },
|
||||
.type_pointer => .{ .pointer_type = ip.extraData(Pointer, data) },
|
||||
.type_array => .{ .array_type = ip.extraData(Array, data) },
|
||||
.type_optional => .{ .optional_type = .{ .payload_type = @intToEnum(Index, data) } },
|
||||
.type_anyframe => .{ .anyframe_type = .{ .child = @intToEnum(Index, data) } },
|
||||
.type_optional => .{ .optional_type = .{ .payload_type = @enumFromInt(Index, data) } },
|
||||
.type_anyframe => .{ .anyframe_type = .{ .child = @enumFromInt(Index, data) } },
|
||||
.type_error_union => .{ .error_union_type = ip.extraData(ErrorUnion, data) },
|
||||
.type_error_set => .{ .error_set_type = ip.extraData(ErrorSet, data) },
|
||||
.type_function => .{ .function_type = ip.extraData(Function, data) },
|
||||
.type_tuple => .{ .tuple_type = ip.extraData(Tuple, data) },
|
||||
.type_vector => .{ .vector_type = ip.extraData(Vector, data) },
|
||||
|
||||
.type_struct => .{ .struct_type = @intToEnum(StructIndex, data) },
|
||||
.type_enum => .{ .enum_type = @intToEnum(EnumIndex, data) },
|
||||
.type_union => .{ .union_type = @intToEnum(UnionIndex, data) },
|
||||
.type_struct => .{ .struct_type = @enumFromInt(StructIndex, data) },
|
||||
.type_enum => .{ .enum_type = @enumFromInt(EnumIndex, data) },
|
||||
.type_union => .{ .union_type = @enumFromInt(UnionIndex, data) },
|
||||
|
||||
.int_u64 => .{ .int_u64_value = ip.extraData(U64Value, data) },
|
||||
.int_i64 => .{ .int_i64_value = ip.extraData(I64Value, data) },
|
||||
@ -1567,29 +1567,29 @@ pub fn indexToKey(ip: InternPool, index: Index) Key {
|
||||
.slice => .{ .slice = ip.extraData(Slice, data) },
|
||||
.aggregate => .{ .aggregate = ip.extraData(Aggregate, data) },
|
||||
.union_value => .{ .union_value = ip.extraData(UnionValue, data) },
|
||||
.null_value => .{ .null_value = .{ .ty = @intToEnum(Index, data) } },
|
||||
.undefined_value => .{ .undefined_value = .{ .ty = @intToEnum(Index, data) } },
|
||||
.unknown_value => .{ .unknown_value = .{ .ty = @intToEnum(Index, data) } },
|
||||
.null_value => .{ .null_value = .{ .ty = @enumFromInt(Index, data) } },
|
||||
.undefined_value => .{ .undefined_value = .{ .ty = @enumFromInt(Index, data) } },
|
||||
.unknown_value => .{ .unknown_value = .{ .ty = @enumFromInt(Index, data) } },
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
const adapter: KeyAdapter = .{ .ip = ip };
|
||||
const gop = try ip.map.getOrPutAdapted(gpa, key, adapter);
|
||||
if (gop.found_existing) return @intToEnum(Index, gop.index);
|
||||
if (gop.found_existing) return @enumFromInt(Index, gop.index);
|
||||
|
||||
const tag: Tag = key.tag();
|
||||
const data: u32 = switch (key) {
|
||||
.simple_type => |simple| @enumToInt(simple),
|
||||
.simple_value => |simple| @enumToInt(simple),
|
||||
.simple_type => |simple| @intFromEnum(simple),
|
||||
.simple_value => |simple| @intFromEnum(simple),
|
||||
|
||||
.int_type => |int_ty| int_ty.bits,
|
||||
.optional_type => |optional_ty| @enumToInt(optional_ty.payload_type),
|
||||
.anyframe_type => |anyframe_ty| @enumToInt(anyframe_ty.child),
|
||||
.optional_type => |optional_ty| @intFromEnum(optional_ty.payload_type),
|
||||
.anyframe_type => |anyframe_ty| @intFromEnum(anyframe_ty.child),
|
||||
|
||||
.struct_type => |struct_index| @enumToInt(struct_index),
|
||||
.enum_type => |enum_index| @enumToInt(enum_index),
|
||||
.union_type => |union_index| @enumToInt(union_index),
|
||||
.struct_type => |struct_index| @intFromEnum(struct_index),
|
||||
.enum_type => |enum_index| @intFromEnum(enum_index),
|
||||
.union_type => |union_index| @intFromEnum(union_index),
|
||||
|
||||
.int_u64_value => |int_val| try ip.addExtra(gpa, int_val),
|
||||
.int_i64_value => |int_val| try ip.addExtra(gpa, int_val),
|
||||
@ -1599,9 +1599,9 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
}),
|
||||
.float_16_value => |float_val| @bitCast(u16, float_val),
|
||||
.float_32_value => |float_val| @bitCast(u32, float_val),
|
||||
.null_value => |null_val| @enumToInt(null_val.ty),
|
||||
.undefined_value => |undefined_val| @enumToInt(undefined_val.ty),
|
||||
.unknown_value => |unknown_val| @enumToInt(unknown_val.ty),
|
||||
.null_value => |null_val| @intFromEnum(null_val.ty),
|
||||
.undefined_value => |undefined_val| @intFromEnum(undefined_val.ty),
|
||||
.unknown_value => |unknown_val| @intFromEnum(unknown_val.ty),
|
||||
inline else => |data| try ip.addExtra(gpa, data), // TODO sad stage1 noises :(
|
||||
};
|
||||
|
||||
@ -1609,47 +1609,47 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
.tag = tag,
|
||||
.data = data,
|
||||
});
|
||||
return @intToEnum(Index, ip.items.len - 1);
|
||||
return @enumFromInt(Index, ip.items.len - 1);
|
||||
}
|
||||
|
||||
pub fn contains(ip: InternPool, key: Key) ?Index {
|
||||
const adapter: KeyAdapter = .{ .ip = &ip };
|
||||
const index = ip.map.getIndexAdapted(key, adapter) orelse return null;
|
||||
return @intToEnum(Index, index);
|
||||
return @enumFromInt(Index, index);
|
||||
}
|
||||
|
||||
pub fn getDecl(ip: InternPool, index: InternPool.DeclIndex) *InternPool.Decl {
|
||||
var decls = ip.decls;
|
||||
return decls.at(@enumToInt(index));
|
||||
return decls.at(@intFromEnum(index));
|
||||
}
|
||||
pub fn getStruct(ip: InternPool, index: InternPool.StructIndex) *InternPool.Struct {
|
||||
var structs = ip.structs;
|
||||
return structs.at(@enumToInt(index));
|
||||
return structs.at(@intFromEnum(index));
|
||||
}
|
||||
pub fn getEnum(ip: InternPool, index: InternPool.EnumIndex) *InternPool.Enum {
|
||||
var enums = ip.enums;
|
||||
return enums.at(@enumToInt(index));
|
||||
return enums.at(@intFromEnum(index));
|
||||
}
|
||||
pub fn getUnion(ip: InternPool, index: InternPool.UnionIndex) *InternPool.Union {
|
||||
var unions = ip.unions;
|
||||
return unions.at(@enumToInt(index));
|
||||
return unions.at(@intFromEnum(index));
|
||||
}
|
||||
|
||||
pub fn createDecl(ip: *InternPool, gpa: Allocator, decl: InternPool.Decl) Allocator.Error!InternPool.DeclIndex {
|
||||
try ip.decls.append(gpa, decl);
|
||||
return @intToEnum(InternPool.DeclIndex, ip.decls.count() - 1);
|
||||
return @enumFromInt(InternPool.DeclIndex, ip.decls.count() - 1);
|
||||
}
|
||||
pub fn createStruct(ip: *InternPool, gpa: Allocator, struct_info: InternPool.Struct) Allocator.Error!InternPool.StructIndex {
|
||||
try ip.structs.append(gpa, struct_info);
|
||||
return @intToEnum(InternPool.StructIndex, ip.structs.count() - 1);
|
||||
return @enumFromInt(InternPool.StructIndex, ip.structs.count() - 1);
|
||||
}
|
||||
pub fn createEnum(ip: *InternPool, gpa: Allocator, enum_info: InternPool.Enum) Allocator.Error!InternPool.EnumIndex {
|
||||
try ip.enums.append(gpa, enum_info);
|
||||
return @intToEnum(InternPool.EnumIndex, ip.enums.count() - 1);
|
||||
return @enumFromInt(InternPool.EnumIndex, ip.enums.count() - 1);
|
||||
}
|
||||
pub fn createUnion(ip: *InternPool, gpa: Allocator, union_info: InternPool.Union) Allocator.Error!InternPool.UnionIndex {
|
||||
try ip.unions.append(gpa, union_info);
|
||||
return @intToEnum(InternPool.UnionIndex, ip.unions.count() - 1);
|
||||
return @enumFromInt(InternPool.UnionIndex, ip.unions.count() - 1);
|
||||
}
|
||||
|
||||
fn addExtra(ip: *InternPool, gpa: Allocator, extra: anytype) Allocator.Error!u32 {
|
||||
@ -1675,7 +1675,7 @@ const KeyAdapter = struct {
|
||||
|
||||
pub fn eql(ctx: @This(), a: Key, b_void: void, b_map_index: usize) bool {
|
||||
_ = b_void;
|
||||
return a.eql(ctx.ip.indexToKey(@intToEnum(Index, b_map_index)));
|
||||
return a.eql(ctx.ip.indexToKey(@enumFromInt(Index, b_map_index)));
|
||||
}
|
||||
|
||||
pub fn hash(ctx: @This(), a: Key) u32 {
|
||||
@ -1751,8 +1751,8 @@ fn deepHash(hasher: anytype, key: anytype) void {
|
||||
}
|
||||
},
|
||||
|
||||
.Bool => deepHash(hasher, @boolToInt(key)),
|
||||
.Enum => deepHash(hasher, @enumToInt(key)),
|
||||
.Bool => deepHash(hasher, @intFromBool(key)),
|
||||
.Enum => deepHash(hasher, @intFromEnum(key)),
|
||||
.Float => |info| deepHash(hasher, switch (info.bits) {
|
||||
16 => @bitCast(u16, key),
|
||||
32 => @bitCast(u32, key),
|
||||
@ -3402,9 +3402,9 @@ test "bytes value" {
|
||||
try expect(bytes_value1 == bytes_value2);
|
||||
try expect(bytes_value2 != bytes_value3);
|
||||
|
||||
try expect(@ptrToInt(&str1) != @ptrToInt(ip.indexToKey(bytes_value1).bytes.ptr));
|
||||
try expect(@ptrToInt(&str2) != @ptrToInt(ip.indexToKey(bytes_value2).bytes.ptr));
|
||||
try expect(@ptrToInt(&str3) != @ptrToInt(ip.indexToKey(bytes_value3).bytes.ptr));
|
||||
try expect(@intFromPtr(&str1) != @intFromPtr(ip.indexToKey(bytes_value1).bytes.ptr));
|
||||
try expect(@intFromPtr(&str2) != @intFromPtr(ip.indexToKey(bytes_value2).bytes.ptr));
|
||||
try expect(@intFromPtr(&str3) != @intFromPtr(ip.indexToKey(bytes_value3).bytes.ptr));
|
||||
|
||||
try std.testing.expectEqual(ip.indexToKey(bytes_value1).bytes.ptr, ip.indexToKey(bytes_value2).bytes.ptr);
|
||||
|
||||
|
@@ -23,7 +23,7 @@ pub fn encode(extra: *std.ArrayList(u8), comptime T: type, data: anytype) Alloca
=> @compileError("Unable to encode type " ++ @typeName(T)),

.Void => {},
.Bool => try encode(extra, u1, @boolToInt(data)),
.Bool => try encode(extra, u1, @intFromBool(data)),
.Int => try extra.appendSlice(std.mem.asBytes(&data)),
.Float => |info| switch (info.bits) {
16 => try encode(extra, u16, @bitCast(u16, data)),
@@ -81,7 +81,7 @@ pub fn encode(extra: *std.ArrayList(u8), comptime T: type, data: anytype) Alloca
try encode(extra, item);
}
},
.Enum => |info| try encode(extra, info.tag_type, @enumToInt(data)),
.Enum => |info| try encode(extra, info.tag_type, @intFromEnum(data)),
.Union => @compileError("TODO"),
.Vector => |info| {
const array: [info.len]info.child = data;
@@ -174,7 +174,7 @@ pub fn decode(extra: *[]const u8, comptime T: type) T {
break :blk decode(extra, info.child);
}
},
.Enum => |info| @intToEnum(T, decode(extra, info.tag_type)),
.Enum => |info| @enumFromInt(T, decode(extra, info.tag_type)),
.Union => @compileError("TODO"),
.Vector => |info| decode(extra, [info.len]info.child),
};
@@ -192,10 +192,10 @@ pub fn canEncodeAsBytes(comptime T: type) bool {

/// forward aligns `extra` until it has the given alignment
pub fn alignForward(extra: []const u8, alignment: usize) []const u8 {
const unaligned = @ptrToInt(extra.ptr);
const unaligned = @intFromPtr(extra.ptr);
const offset = std.mem.alignForward(usize, unaligned, alignment) - unaligned;
const result = extra[offset..];
std.debug.assert(std.mem.isAligned(@ptrToInt(result.ptr), alignment));
std.debug.assert(std.mem.isAligned(@intFromPtr(result.ptr), alignment));
return result;
}
@ -167,7 +167,7 @@ pub fn getFunctionSnippet(allocator: std.mem.Allocator, tree: Ast, func: Ast.ful
|
||||
var i: usize = 0;
|
||||
while (ast.nextFnParam(&it)) |param| : (i += 1) {
|
||||
if (skip_self_param and i == 0) continue;
|
||||
if (i != @boolToInt(skip_self_param))
|
||||
if (i != @intFromBool(skip_self_param))
|
||||
try buf_stream.writeAll(", ${")
|
||||
else
|
||||
try buf_stream.writeAll("${");
|
||||
@ -834,7 +834,7 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
|
||||
return null;
|
||||
};
|
||||
|
||||
const root_namespace = @intToEnum(ComptimeInterpreter.Namespace.Index, 0);
|
||||
const root_namespace = @enumFromInt(ComptimeInterpreter.Namespace.Index, 0);
|
||||
|
||||
// TODO: Start from current/nearest-current scope
|
||||
const result = interpreter.interpret(node, root_namespace, .{}) catch |err| {
|
||||
@ -1033,7 +1033,7 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
|
||||
const new_handle = analyser.store.getOrLoadHandle(builtin_uri) orelse return null;
|
||||
const root_scope_decls = new_handle.document_scope.scopes.items(.decls)[0];
|
||||
const decl_index = root_scope_decls.get("Type") orelse return null;
|
||||
const decl = new_handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const decl = new_handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
if (decl != .ast_node) return null;
|
||||
|
||||
const var_decl = new_handle.tree.fullVarDecl(decl.ast_node) orelse return null;
|
||||
@ -1173,7 +1173,7 @@ pub const TypeWithHandle = struct {
|
||||
// duplicates
|
||||
|
||||
fn hashType(hasher: *std.hash.Wyhash, ty: Type) void {
|
||||
hasher.update(&.{ @boolToInt(ty.is_type_val), @enumToInt(ty.data) });
|
||||
hasher.update(&.{ @intFromBool(ty.is_type_val), @intFromEnum(ty.data) });
|
||||
|
||||
switch (ty.data) {
|
||||
.pointer,
|
||||
@ -1209,7 +1209,7 @@ pub const TypeWithHandle = struct {
|
||||
|
||||
if (!std.mem.eql(u8, a.handle.uri, b.handle.uri)) return false;
|
||||
if (a.type.is_type_val != b.type.is_type_val) return false;
|
||||
if (@enumToInt(a.type.data) != @enumToInt(b.type.data)) return false;
|
||||
if (@intFromEnum(a.type.data) != @intFromEnum(b.type.data)) return false;
|
||||
|
||||
switch (a.type.data) {
|
||||
inline .pointer,
|
||||
@ -2132,7 +2132,7 @@ pub const DeclWithHandle = struct {
|
||||
|
||||
const name = tree.tokenSlice(main_tokens[pay.items[0]]);
|
||||
const decl_index = scope_decls[scope_index].get(name) orelse return null;
|
||||
const decl = switch_expr_type.handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const decl = switch_expr_type.handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
|
||||
switch (decl) {
|
||||
.ast_node => |node| {
|
||||
@ -2192,7 +2192,7 @@ fn iterateSymbolsContainerInternal(
|
||||
const container_scope_index = findContainerScopeIndex(container_handle) orelse return;
|
||||
|
||||
for (scope_decls[container_scope_index].values()) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const decl = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
switch (decl.*) {
|
||||
.ast_node => |node| {
|
||||
if (node_tags[node].isContainerField()) {
|
||||
@ -2252,9 +2252,9 @@ pub const EnclosingScopeIterator = struct {
|
||||
pub fn next(self: *EnclosingScopeIterator) ?Scope.Index {
|
||||
if (self.current_scope == .none) return null;
|
||||
|
||||
const child_scopes = self.scope_children[@enumToInt(self.current_scope)];
|
||||
const child_scopes = self.scope_children[@intFromEnum(self.current_scope)];
|
||||
defer self.current_scope = for (child_scopes.items) |child_scope| {
|
||||
const child_loc = self.scope_locs[@enumToInt(child_scope)];
|
||||
const child_loc = self.scope_locs[@intFromEnum(child_scope)];
|
||||
if (child_loc.start <= self.source_index and self.source_index <= child_loc.end) {
|
||||
break child_scope;
|
||||
}
|
||||
@ -2268,7 +2268,7 @@ fn iterateEnclosingScopes(document_scope: DocumentScope, source_index: usize) En
|
||||
return .{
|
||||
.scope_locs = document_scope.scopes.items(.loc),
|
||||
.scope_children = document_scope.scopes.items(.child_scopes),
|
||||
.current_scope = @intToEnum(Scope.Index, 0),
|
||||
.current_scope = @enumFromInt(Scope.Index, 0),
|
||||
.source_index = source_index,
|
||||
};
|
||||
}
|
||||
@ -2290,8 +2290,8 @@ pub fn iterateLabels(handle: *const DocumentStore.Handle, source_index: usize, c
|
||||
|
||||
var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
|
||||
while (scope_iterator.next()) |scope_index| {
|
||||
for (scope_decls[@enumToInt(scope_index)].values()) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
for (scope_decls[@intFromEnum(scope_index)].values()) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
if (decl.* != .label_decl) continue;
|
||||
try callback(context, DeclWithHandle{ .decl = decl, .handle = handle });
|
||||
}
|
||||
@ -2310,14 +2310,14 @@ fn iterateSymbolsGlobalInternal(
|
||||
|
||||
var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
|
||||
while (scope_iterator.next()) |scope_index| {
|
||||
for (scope_decls[@enumToInt(scope_index)].values()) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
for (scope_decls[@intFromEnum(scope_index)].values()) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
if (decl.* == .ast_node and handle.tree.nodes.items(.tag)[decl.ast_node].isContainerField()) continue;
|
||||
if (decl.* == .label_decl) continue;
|
||||
try callback(context, DeclWithHandle{ .decl = decl, .handle = handle });
|
||||
}
|
||||
|
||||
for (scope_uses[@enumToInt(scope_index)]) |use| {
|
||||
for (scope_uses[@intFromEnum(scope_index)]) |use| {
|
||||
const gop = try analyser.using_trail.getOrPut(analyser.gpa, use);
|
||||
if (gop.found_existing) continue;
|
||||
|
||||
@ -2365,9 +2365,9 @@ pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) As
|
||||
|
||||
var scope_index = innermostBlockScopeIndex(handle, source_index);
|
||||
while (true) {
|
||||
defer scope_index = scope_parents[@enumToInt(scope_index)];
|
||||
switch (scope_datas[@enumToInt(scope_index)]) {
|
||||
.container, .function, .block => return scope_datas[@enumToInt(scope_index)].toNodeIndex().?,
|
||||
defer scope_index = scope_parents[@intFromEnum(scope_index)];
|
||||
switch (scope_datas[@intFromEnum(scope_index)]) {
|
||||
.container, .function, .block => return scope_datas[@intFromEnum(scope_index)].toNodeIndex().?,
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
@ -2381,7 +2381,7 @@ pub fn innermostContainer(handle: *const DocumentStore.Handle, source_index: usi
|
||||
|
||||
var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
|
||||
while (scope_iterator.next()) |scope_index| {
|
||||
switch (scope_datas[@enumToInt(scope_index)]) {
|
||||
switch (scope_datas[@intFromEnum(scope_index)]) {
|
||||
.container => |node| current = node,
|
||||
else => {},
|
||||
}
|
||||
@ -2427,8 +2427,8 @@ pub fn lookupLabel(
|
||||
|
||||
var scope_iterator = iterateEnclosingScopes(handle.document_scope, source_index);
|
||||
while (scope_iterator.next()) |scope_index| {
|
||||
const decl_index = scope_decls[@enumToInt(scope_index)].get(symbol) orelse continue;
|
||||
const decl = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const decl_index = scope_decls[@intFromEnum(scope_index)].get(symbol) orelse continue;
|
||||
const decl = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
|
||||
if (decl.* != .label_decl) continue;
|
||||
|
||||
@ -2445,10 +2445,10 @@ pub fn lookupSymbolGlobal(analyser: *Analyser, handle: *const DocumentStore.Hand
|
||||
var current_scope = innermostBlockScopeIndex(handle.*, source_index);
|
||||
|
||||
while (current_scope != .none) {
|
||||
const scope_index = @enumToInt(current_scope);
|
||||
const scope_index = @intFromEnum(current_scope);
|
||||
defer current_scope = scope_parents[scope_index];
|
||||
if (scope_decls[scope_index].get(symbol)) |decl_index| {
|
||||
const candidate = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const candidate = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
switch (candidate.*) {
|
||||
.ast_node => |node| {
|
||||
if (handle.tree.nodes.items(.tag)[node].isContainerField()) continue;
|
||||
@ -2485,7 +2485,7 @@ pub fn lookupSymbolContainer(
|
||||
|
||||
if (findContainerScopeIndex(container_handle)) |container_scope_index| {
|
||||
if (scope_decls[container_scope_index].get(symbol)) |decl_index| {
|
||||
const decl = &handle.document_scope.decls.items[@enumToInt(decl_index)];
|
||||
const decl = &handle.document_scope.decls.items[@intFromEnum(decl_index)];
|
||||
switch (decl.*) {
|
||||
.ast_node => |node| {
|
||||
if (node_tags[node].isContainerField()) {
|
||||
@ -2626,16 +2626,16 @@ const ScopeContext = struct {
|
||||
.loc = loc,
|
||||
.data = data,
|
||||
});
|
||||
const new_scope = @intToEnum(Scope.Index, context.doc_scope.scopes.len - 1);
|
||||
const new_scope = @enumFromInt(Scope.Index, context.doc_scope.scopes.len - 1);
|
||||
if (context.current_scope.* != .none) {
|
||||
try context.doc_scope.scopes.items(.child_scopes)[@enumToInt(context.current_scope.*)].append(context.allocator, new_scope);
|
||||
try context.doc_scope.scopes.items(.child_scopes)[@intFromEnum(context.current_scope.*)].append(context.allocator, new_scope);
|
||||
}
|
||||
context.current_scope.* = new_scope;
|
||||
return new_scope;
|
||||
}
|
||||
|
||||
fn popScope(context: ScopeContext) void {
|
||||
const parent_scope = context.doc_scope.scopes.items(.parent)[@enumToInt(context.current_scope.*)];
|
||||
const parent_scope = context.doc_scope.scopes.items(.parent)[@intFromEnum(context.current_scope.*)];
|
||||
context.current_scope.* = parent_scope;
|
||||
}
|
||||
|
||||
@ -2645,9 +2645,9 @@ const ScopeContext = struct {
|
||||
try context.doc_scope.decls.append(context.allocator, decl);
|
||||
errdefer _ = context.doc_scope.decls.pop();
|
||||
|
||||
const decl_index = @intToEnum(Declaration.Index, context.doc_scope.decls.items.len - 1);
|
||||
const decl_index = @enumFromInt(Declaration.Index, context.doc_scope.decls.items.len - 1);
|
||||
|
||||
try context.doc_scope.scopes.items(.decls)[@enumToInt(scope)].put(context.allocator, name, decl_index);
|
||||
try context.doc_scope.scopes.items(.decls)[@intFromEnum(scope)].put(context.allocator, name, decl_index);
|
||||
}
|
||||
};
|
||||
|
||||
@ -2713,8 +2713,8 @@ fn makeInnerScope(context: ScopeContext, tree: Ast, node_idx: Ast.Node.Index) er
|
||||
}
|
||||
}
|
||||
|
||||
scopes.items(.tests)[@enumToInt(scope_index)] = try tests.toOwnedSlice(allocator);
|
||||
scopes.items(.uses)[@enumToInt(scope_index)] = try uses.toOwnedSlice(allocator);
|
||||
scopes.items(.tests)[@intFromEnum(scope_index)] = try tests.toOwnedSlice(allocator);
|
||||
scopes.items(.uses)[@intFromEnum(scope_index)] = try uses.toOwnedSlice(allocator);
|
||||
}
|
||||
|
||||
/// If `node_idx` is a block it's scope index will be returned
|
||||
@ -2734,7 +2734,7 @@ fn makeBlockScopeInternal(context: ScopeContext, tree: Ast, node_idx: Ast.Node.I
|
||||
.block_two_semicolon,
|
||||
=> {
|
||||
std.debug.assert(context.doc_scope.scopes.items(.data)[block_scope_index] == .block);
|
||||
return @intToEnum(Scope.Index, block_scope_index);
|
||||
return @enumFromInt(Scope.Index, block_scope_index);
|
||||
},
|
||||
else => {
|
||||
const new_scope = try context.pushScope(
|
||||
@ -2903,7 +2903,7 @@ fn makeScopeInternal(context: ScopeContext, tree: Ast, node_idx: Ast.Node.Index)
|
||||
const then_scope = (try makeBlockScopeInternal(context, tree, if_node.ast.then_expr)).?;
|
||||
|
||||
if (if_node.payload_token) |payload| {
|
||||
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
|
||||
const name_token = payload + @intFromBool(token_tags[payload] == .asterisk);
|
||||
std.debug.assert(token_tags[name_token] == .identifier);
|
||||
|
||||
const name = tree.tokenSlice(name_token);
|
||||
@ -2960,7 +2960,7 @@ fn makeScopeInternal(context: ScopeContext, tree: Ast, node_idx: Ast.Node.Index)
|
||||
}
|
||||
|
||||
if (while_node.payload_token) |payload| {
|
||||
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
|
||||
const name_token = payload + @intFromBool(token_tags[payload] == .asterisk);
|
||||
std.debug.assert(token_tags[name_token] == .identifier);
|
||||
|
||||
const name = tree.tokenSlice(name_token);
|
||||
@ -2999,7 +2999,7 @@ fn makeScopeInternal(context: ScopeContext, tree: Ast, node_idx: Ast.Node.Index)
|
||||
for (for_node.ast.inputs) |input| {
|
||||
if (capture_token + 1 >= tree.tokens.len) break;
|
||||
const capture_is_ref = token_tags[capture_token] == .asterisk;
|
||||
const name_token = capture_token + @boolToInt(capture_is_ref);
|
||||
const name_token = capture_token + @intFromBool(capture_is_ref);
|
||||
capture_token = name_token + 2;
|
||||
|
||||
try context.putDecl(
|
||||
@ -3040,7 +3040,7 @@ fn makeScopeInternal(context: ScopeContext, tree: Ast, node_idx: Ast.Node.Index)
|
||||
if (switch_case.payload_token) |payload| {
|
||||
const expr_index = (try makeBlockScopeInternal(context, tree, switch_case.ast.target_expr)).?;
|
||||
// if payload is *name than get next token
|
||||
const name_token = payload + @boolToInt(token_tags[payload] == .asterisk);
|
||||
const name_token = payload + @intFromBool(token_tags[payload] == .asterisk);
|
||||
const name = tree.tokenSlice(name_token);
|
||||
|
||||
try context.putDecl(expr_index, name, .{
|
||||
|
@@ -215,7 +215,7 @@ fn fullForComponents(tree: Ast, info: full.For.Components) full.For {
result.label_token = tok_i -| 1;
}
const last_cond_token = lastToken(tree, info.inputs[info.inputs.len - 1]);
result.payload_token = last_cond_token + 3 + @boolToInt(token_tags[last_cond_token + 1] == .comma);
result.payload_token = last_cond_token + 3 + @intFromBool(token_tags[last_cond_token + 1] == .comma);
if (info.else_expr != 0) {
result.else_token = lastToken(tree, info.then_expr) + 1;
}
@@ -964,7 +964,7 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
},
.@"for" => {
const extra = @bitCast(Node.For, datas[n].rhs);
n = tree.extra_data[datas[n].lhs + extra.inputs + @boolToInt(extra.has_else)];
n = tree.extra_data[datas[n].lhs + extra.inputs + @intFromBool(extra.has_else)];
},
.@"suspend" => {
if (datas[n].lhs != 0) {
@@ -271,13 +271,10 @@ fn generateVSCodeConfigFile(allocator: std.mem.Allocator, config: Config, path:
const default: ?std.json.Value = blk: {
if (option.@"enum" != null) break :blk .{ .string = option.default };

var parser = std.json.Parser.init(allocator, .alloc_always);
defer parser.deinit();

var value = try parser.parse(option.default);
var value = try std.json.parseFromSlice(std.json.Value, allocator, option.default, .{});
defer value.deinit();

break :blk if (value.root != .null) value.root else null;
break :blk if (value.value != .null) value.value else null;
};

configuration.putAssumeCapacityNoClobber(name, .{
@@ -1050,8 +1047,9 @@ pub fn main() !void {
}
}

const config = try std.json.parseFromSlice(Config, gpa, @embedFile("config.json"), .{});
defer std.json.parseFree(Config, gpa, config);
const config_json = try std.json.parseFromSlice(Config, gpa, @embedFile("config.json"), .{});
defer config_json.deinit();
const config = config_json.value;

try generateConfigFile(gpa, config, config_path);
try generateSchemaFile(gpa, config, schema_path);
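The same pattern applies to typed results, as in the main() hunk above: parseFromSlice now returns a Parsed(T) handle, so the old std.json.parseFree call becomes deinit() on that handle and the payload is read from .value. A small sketch with a hypothetical minimal Config struct (names are illustrative):

    const std = @import("std");

    const Config = struct { enable_snippets: bool = false }; // hypothetical minimal config

    fn loadConfig(gpa: std.mem.Allocator, bytes: []const u8) !void {
        const parsed = try std.json.parseFromSlice(Config, gpa, bytes, .{});
        defer parsed.deinit(); // replaces std.json.parseFree(Config, gpa, config)
        const config: Config = parsed.value;
        std.debug.print("enable_snippets = {}\n", .{config.enable_snippets});
    }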
@@ -10,6 +10,8 @@ const offsets = @import("offsets.zig");

const logger = std.log.scoped(.zls_config);

const legacy_json = @import("legacy_json.zig");

pub fn loadFromFile(allocator: std.mem.Allocator, file_path: []const u8) ?Config {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
@@ -34,7 +36,7 @@ pub fn loadFromFile(allocator: std.mem.Allocator, file_path: []const u8) ?Config
scanner.enableDiagnostics(&parse_diagnostics);

// TODO: report errors using "textDocument/publishDiagnostics"
var config = std.json.parseFromTokenSource(Config, allocator, &scanner, parse_options) catch |err| {
var config = legacy_json.parseFromTokenSource(Config, allocator, &scanner, parse_options) catch |err| {
logger.warn(
"{s}:{d}:{d}: Error while parsing configuration file {}",
.{ file_path, parse_diagnostics.getLine(), parse_diagnostics.getColumn(), err },
@@ -82,7 +84,7 @@ pub fn configChanged(config: *Config, runtime_zig_version: *?ZigVersionWrapper,
logger.info("Using zig executable '{s}'", .{exe_path});

var env = getZigEnv(allocator, exe_path) orelse break :blk;
defer std.json.parseFree(Env, allocator, env);
defer legacy_json.parseFree(Env, allocator, env);

if (config.zig_lib_path) |lib_path| allocator.free(lib_path);
// Make sure the path is absolute
@@ -177,7 +179,7 @@ pub const Env = struct {
target: ?[]const u8 = null,
};

/// result has to be freed with `std.json.parseFree`
/// result has to be freed with `json_compat.parseFree`
pub fn getZigEnv(allocator: std.mem.Allocator, zig_exe_path: []const u8) ?Env {
const zig_env_result = std.ChildProcess.exec(.{
.allocator = allocator,
@@ -202,7 +204,7 @@ pub fn getZigEnv(allocator: std.mem.Allocator, zig_exe_path: []const u8) ?Env {
else => logger.err("zig env invocation failed", .{}),
}

return std.json.parseFromSlice(
return legacy_json.parseFromSlice(
Env,
allocator,
zig_env_result.stdout,
@ -150,7 +150,7 @@ pub const builtins = [_]Builtin{
|
||||
\\
|
||||
\\Asserts that `@sizeOf(@TypeOf(value)) == @sizeOf(DestType)`.
|
||||
\\
|
||||
\\Asserts that `@typeInfo(DestType) != .Pointer`. Use `@ptrCast` or `@intToPtr` if you need this.
|
||||
\\Asserts that `@typeInfo(DestType) != .Pointer`. Use `@ptrCast` or `@ptrFromInt` if you need this.
|
||||
\\
|
||||
\\Can be used for these things for example:
|
||||
\\
|
||||
@ -178,9 +178,9 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@boolToInt",
|
||||
.signature = "@boolToInt(value: bool) u1",
|
||||
.snippet = "@boolToInt(${1:value: bool})",
|
||||
.name = "@intFromBool",
|
||||
.signature = "@intFromBool(value: bool) u1",
|
||||
.snippet = "@intFromBool(${1:value: bool})",
|
||||
.documentation =
|
||||
\\Converts `true` to `@as(u1, 1)` and `false` to `@as(u1, 0)`.
|
||||
,
|
||||
@ -669,9 +669,9 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@enumToInt",
|
||||
.signature = "@enumToInt(enum_or_tagged_union: anytype) anytype",
|
||||
.snippet = "@enumToInt(${1:enum_or_tagged_union: anytype})",
|
||||
.name = "@intFromEnum",
|
||||
.signature = "@intFromEnum(enum_or_tagged_union: anytype) anytype",
|
||||
.snippet = "@intFromEnum(${1:enum_or_tagged_union: anytype})",
|
||||
.documentation =
|
||||
\\Converts an enumeration value into its integer tag type. When a tagged union is passed, the tag value is used as the enumeration value.
|
||||
\\
|
||||
@ -704,9 +704,9 @@ pub const builtins = [_]Builtin{
|
||||
.arguments = &.{},
|
||||
},
|
||||
.{
|
||||
.name = "@errorToInt",
|
||||
.signature = "@errorToInt(err: anytype) std.meta.Int(.unsigned, @sizeOf(anyerror) * 8)",
|
||||
.snippet = "@errorToInt(${1:err: anytype})",
|
||||
.name = "@intFromError",
|
||||
.signature = "@intFromError(err: anytype) std.meta.Int(.unsigned, @sizeOf(anyerror) * 8)",
|
||||
.snippet = "@intFromError(${1:err: anytype})",
|
||||
.documentation =
|
||||
\\Supports the following types:
|
||||
\\
|
||||
@ -854,9 +854,9 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@floatToInt",
|
||||
.signature = "@floatToInt(comptime DestType: type, float: anytype) DestType",
|
||||
.snippet = "@floatToInt(${1:comptime DestType: type}, ${2:float: anytype})",
|
||||
.name = "@intFromFloat",
|
||||
.signature = "@intFromFloat(comptime DestType: type, float: anytype) DestType",
|
||||
.snippet = "@intFromFloat(${1:comptime DestType: type}, ${2:float: anytype})",
|
||||
.documentation =
|
||||
\\Converts the integer part of a floating point number to the destination type.
|
||||
\\
|
||||
@ -988,9 +988,9 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@intToEnum",
|
||||
.signature = "@intToEnum(comptime DestType: type, integer: anytype) DestType",
|
||||
.snippet = "@intToEnum(${1:comptime DestType: type}, ${2:integer: anytype})",
|
||||
.name = "@enumFromInt",
|
||||
.signature = "@enumFromInt(comptime DestType: type, integer: anytype) DestType",
|
||||
.snippet = "@enumFromInt(${1:comptime DestType: type}, ${2:integer: anytype})",
|
||||
.documentation =
|
||||
\\Converts an integer into an [enum](https://ziglang.org/documentation/master/#enum) value.
|
||||
\\
|
||||
@ -1002,9 +1002,9 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@intToError",
|
||||
.signature = "@intToError(value: std.meta.Int(.unsigned, @sizeOf(anyerror) * 8)) anyerror",
|
||||
.snippet = "@intToError(${1:value: std.meta.Int(.unsigned, @sizeOf(anyerror) * 8})",
|
||||
.name = "@errorFromInt",
|
||||
.signature = "@errorFromInt(value: std.meta.Int(.unsigned, @sizeOf(anyerror) * 8)) anyerror",
|
||||
.snippet = "@errorFromInt(${1:value: std.meta.Int(.unsigned, @sizeOf(anyerror) * 8})",
|
||||
.documentation =
|
||||
\\Converts from the integer representation of an error into [The Global Error Set](https://ziglang.org/documentation/master/#The-Global-Error-Set) type.
|
||||
\\
|
||||
@ -1017,11 +1017,11 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@intToFloat",
|
||||
.signature = "@intToFloat(comptime DestType: type, int: anytype) DestType",
|
||||
.snippet = "@intToFloat(${1:comptime DestType: type}, ${2:int: anytype})",
|
||||
.name = "@floatFromInt",
|
||||
.signature = "@floatFromInt(comptime DestType: type, int: anytype) DestType",
|
||||
.snippet = "@floatFromInt(${1:comptime DestType: type}, ${2:int: anytype})",
|
||||
.documentation =
|
||||
\\Converts an integer to the closest floating point representation. To convert the other way, use [@floatToInt](https://ziglang.org/documentation/master/#floatToInt). This cast is always safe.
|
||||
\\Converts an integer to the closest floating point representation. To convert the other way, use [@intFromFloat](https://ziglang.org/documentation/master/#intFromFloat). This cast is always safe.
|
||||
,
|
||||
.arguments = &.{
|
||||
"comptime DestType: type",
|
||||
@ -1029,11 +1029,11 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@intToPtr",
|
||||
.signature = "@intToPtr(comptime DestType: type, address: usize) DestType",
|
||||
.snippet = "@intToPtr(${1:comptime DestType: type}, ${2:address: usize})",
|
||||
.name = "@ptrFromInt",
|
||||
.signature = "@ptrFromInt(comptime DestType: type, address: usize) DestType",
|
||||
.snippet = "@ptrFromInt(${1:comptime DestType: type}, ${2:address: usize})",
|
||||
.documentation =
|
||||
\\Converts an integer to a [pointer](https://ziglang.org/documentation/master/#Pointers). To convert the other way, use [@ptrToInt](https://ziglang.org/documentation/master/#ptrToInt). Casting an address of 0 to a destination type which in not [optional](https://ziglang.org/documentation/master/#Optional-Pointers) and does not have the `allowzero` attribute will result in a [Pointer Cast Invalid Null](https://ziglang.org/documentation/master/#Pointer-Cast-Invalid-Null) panic when runtime safety checks are enabled.
|
||||
\\Converts an integer to a [pointer](https://ziglang.org/documentation/master/#Pointers). To convert the other way, use [@intFromPtr](https://ziglang.org/documentation/master/#intFromPtr). Casting an address of 0 to a destination type which in not [optional](https://ziglang.org/documentation/master/#Optional-Pointers) and does not have the `allowzero` attribute will result in a [Pointer Cast Invalid Null](https://ziglang.org/documentation/master/#Pointer-Cast-Invalid-Null) panic when runtime safety checks are enabled.
|
||||
\\
|
||||
\\If the destination pointer type does not allow address zero and `address` is zero, this invokes safety-checked [Undefined Behavior](https://ziglang.org/documentation/master/#Undefined-Behavior).
|
||||
,
|
||||
@ -1274,13 +1274,13 @@ pub const builtins = [_]Builtin{
|
||||
},
|
||||
},
|
||||
.{
|
||||
.name = "@ptrToInt",
|
||||
.signature = "@ptrToInt(value: anytype) usize",
|
||||
.snippet = "@ptrToInt(${1:value: anytype})",
|
||||
.name = "@intFromPtr",
|
||||
.signature = "@intFromPtr(value: anytype) usize",
|
||||
.snippet = "@intFromPtr(${1:value: anytype})",
|
||||
.documentation =
|
||||
\\Converts `value` to a `usize` which is the address of the pointer. `value` can be `*T` or `?*T`.
|
||||
\\
|
||||
\\To convert the other way, use [@intToPtr](https://ziglang.org/documentation/master/#intToPtr)
|
||||
\\To convert the other way, use [@ptrFromInt](https://ziglang.org/documentation/master/#ptrFromInt)
|
||||
,
|
||||
.arguments = &.{
|
||||
"value: anytype",
|
||||
@ -1555,7 +1555,7 @@ pub const builtins = [_]Builtin{
|
||||
\\test "vector @splat" {
|
||||
\\ const scalar: u32 = 5;
|
||||
\\ const result = @splat(4, scalar);
|
||||
\\ comptime try expect(@TypeOf(result) == @Vector(4, u32));
|
||||
\\ try comptime expect(@TypeOf(result) == @Vector(4, u32));
|
||||
\\ try expect(std.mem.eql(u32, &@as([4]u32, result), &[_]u32{ 5, 5, 5, 5 }));
|
||||
\\}
|
||||
\\```
|
||||
@ -1588,9 +1588,9 @@ pub const builtins = [_]Builtin{
|
||||
\\ const value = @Vector(4, i32){ 1, -1, 1, -1 };
|
||||
\\ const result = value > @splat(4, @as(i32, 0));
|
||||
\\ // result is { true, false, true, false };
|
||||
\\ comptime try expect(@TypeOf(result) == @Vector(4, bool));
|
||||
\\ try comptime expect(@TypeOf(result) == @Vector(4, bool));
|
||||
\\ const is_all_true = @reduce(.And, result);
|
||||
\\ comptime try expect(@TypeOf(is_all_true) == bool);
|
||||
\\ try comptime expect(@TypeOf(is_all_true) == bool);
|
||||
\\ try expect(is_all_true == false);
|
||||
\\}
|
||||
\\```
|
||||
@ -1642,7 +1642,7 @@ pub const builtins = [_]Builtin{
|
||||
.signature = "@sin(value: anytype) @TypeOf(value)",
|
||||
.snippet = "@sin(${1:value: anytype})",
|
||||
.documentation =
|
||||
\\Sine trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
|
||||
\\Sine trigonometric function on a floating point number in radians. Uses a dedicated hardware instruction when available.
|
||||
\\
|
||||
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
|
||||
,
|
||||
@ -1655,7 +1655,7 @@ pub const builtins = [_]Builtin{
|
||||
.signature = "@cos(value: anytype) @TypeOf(value)",
|
||||
.snippet = "@cos(${1:value: anytype})",
|
||||
.documentation =
|
||||
\\Cosine trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
|
||||
\\Cosine trigonometric function on a floating point number in radians. Uses a dedicated hardware instruction when available.
|
||||
\\
|
||||
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
|
||||
,
|
||||
@ -1668,7 +1668,7 @@ pub const builtins = [_]Builtin{
|
||||
.signature = "@tan(value: anytype) @TypeOf(value)",
|
||||
.snippet = "@tan(${1:value: anytype})",
|
||||
.documentation =
|
||||
\\Tangent trigonometric function on a floating point number. Uses a dedicated hardware instruction when available.
|
||||
\\Tangent trigonometric function on a floating point number in radians. Uses a dedicated hardware instruction when available.
|
||||
\\
|
||||
\\Supports [Floats](https://ziglang.org/documentation/master/#Floats) and [Vectors](https://ziglang.org/documentation/master/#Vectors) of floats.
|
||||
,
|
||||
@ -1977,7 +1977,7 @@ pub const builtins = [_]Builtin{
|
||||
\\test "no runtime side effects" {
|
||||
\\ var data: i32 = 0;
|
||||
\\ const T = @TypeOf(foo(i32, &data));
|
||||
\\ comptime try expect(T == i32);
|
||||
\\ try comptime expect(T == i32);
|
||||
\\ try expect(data == 0);
|
||||
\\}
|
||||
\\fn foo(comptime T: type, ptr: *T) T {
|
||||
|
@ -184,8 +184,7 @@ fn handleUnusedCapture(
|
||||
// this means bare loop/switch captures (w/out curlies) aren't supported.
|
||||
var block_start = capture_loc.end + 1;
|
||||
var is_comment = false;
|
||||
while (block_start < builder.handle.text.len) : (block_start += 1)
|
||||
{
|
||||
while (block_start < builder.handle.text.len) : (block_start += 1) {
|
||||
switch (builder.handle.text[block_start]) {
|
||||
'/' => if (block_start + 1 < builder.handle.text.len and builder.handle.text[block_start + 1] == '/') {
|
||||
is_comment = true;
|
||||
@ -197,7 +196,7 @@ fn handleUnusedCapture(
|
||||
is_comment = false;
|
||||
},
|
||||
//If the character is not a whitespace, and we're not in a comment then break out of the loop
|
||||
else => |c| if(!std.ascii.isWhitespace(c) and !is_comment) break,
|
||||
else => |c| if (!std.ascii.isWhitespace(c) and !is_comment) break,
|
||||
}
|
||||
}
|
||||
if (builder.handle.text[block_start] != '{') {
|
||||
@ -420,7 +419,7 @@ const DiagnosticKind = union(enum) {
|
||||
inline for (std.meta.fields(T)) |field| {
|
||||
if (std.mem.startsWith(u8, message, field.name)) {
|
||||
// is there a better way to achieve this?
|
||||
return @intToEnum(T, field.value);
|
||||
return @enumFromInt(T, field.value);
|
||||
}
|
||||
}
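Many of the hunks in this update are mechanical renames of the cast builtins in this Zig dev version: `@enumToInt` → `@intFromEnum`, `@intToEnum` → `@enumFromInt`, `@boolToInt` → `@intFromBool`. A minimal sketch of the mapping (the `Color` enum is hypothetical):

```zig
const std = @import("std");

const Color = enum(u8) { red, green };

test "renamed cast builtins" {
    const n: u8 = @intFromEnum(Color.green); // was @enumToInt(Color.green)
    const c = @enumFromInt(Color, n); // was @intToEnum(Color, n); the type argument is still explicit at this dev version
    const flag: u1 = @intFromBool(c == .green); // was @boolToInt(c == .green)
    try std.testing.expect(n == 1 and flag == 1);
}
```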
|
||||
|
||||
|
@ -319,7 +319,7 @@ fn getDiagnosticsFromZir(
|
||||
|
||||
const allocator = server.arena.allocator();
|
||||
|
||||
const payload_index = handle.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)];
|
||||
const payload_index = handle.zir.extra[@intFromEnum(Zir.ExtraIndex.compile_errors)];
|
||||
if (payload_index == 0) return;
|
||||
|
||||
const header = handle.zir.extraData(Zir.Inst.CompileErrors, payload_index);
|
||||
|
@ -182,7 +182,7 @@ pub fn generateFoldingRanges(allocator: std.mem.Allocator, tree: Ast, encoding:
|
||||
const last_param = fn_proto.ast.params[fn_proto.ast.params.len - 1];
|
||||
const last_param_tok = ast.lastToken(tree, last_param);
|
||||
const param_has_comma = last_param_tok + 1 < tree.tokens.len and token_tags[last_param_tok + 1] == .comma;
|
||||
const list_end_tok = last_param_tok + @boolToInt(param_has_comma);
|
||||
const list_end_tok = last_param_tok + @intFromBool(param_has_comma);
|
||||
|
||||
if (list_start_tok > list_end_tok) continue; // Incomplete, ie `fn a()`
|
||||
try builder.add(null, list_start_tok, list_end_tok, .exclusive, .inclusive);
|
||||
|
@ -155,7 +155,7 @@ const Builder = struct {
|
||||
@truncate(u32, delta.line),
|
||||
@truncate(u32, delta.character),
|
||||
@truncate(u32, length),
|
||||
@enumToInt(token_type),
|
||||
@intFromEnum(token_type),
|
||||
@bitCast(u16, token_modifiers),
|
||||
});
|
||||
self.previous_source_index = loc.start;
|
||||
@ -472,7 +472,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
|
||||
// check if it's 'else'
|
||||
if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword);
|
||||
if (switch_case.payload_token) |payload_token| {
|
||||
const actual_payload = payload_token + @boolToInt(token_tags[payload_token] == .asterisk);
|
||||
const actual_payload = payload_token + @intFromBool(token_tags[payload_token] == .asterisk);
|
||||
try writeTokenMod(builder, actual_payload, .variable, .{ .declaration = true });
|
||||
}
|
||||
try callWriteNodeTokens(allocator, .{ builder, switch_case.ast.target_expr });
|
||||
@ -488,7 +488,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
|
||||
try callWriteNodeTokens(allocator, .{ builder, while_node.ast.cond_expr });
|
||||
if (while_node.payload_token) |payload| {
|
||||
const capture_is_ref = token_tags[payload] == .asterisk;
|
||||
const name_token = payload + @boolToInt(capture_is_ref);
|
||||
const name_token = payload + @intFromBool(capture_is_ref);
|
||||
try writeTokenMod(builder, name_token, .variable, .{ .declaration = true });
|
||||
}
|
||||
try callWriteNodeTokens(allocator, .{ builder, while_node.ast.cont_expr });
|
||||
@ -519,7 +519,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
|
||||
var capture_token = for_node.payload_token;
|
||||
for (for_node.ast.inputs) |_| {
|
||||
const capture_is_ref = token_tags[capture_token] == .asterisk;
|
||||
const name_token = capture_token + @boolToInt(capture_is_ref);
|
||||
const name_token = capture_token + @intFromBool(capture_is_ref);
|
||||
capture_token = name_token + 2;
|
||||
|
||||
try writeTokenMod(builder, name_token, .variable, .{ .declaration = true });
|
||||
@ -541,7 +541,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
|
||||
|
||||
if (if_node.payload_token) |payload_token| {
|
||||
const capture_is_ref = token_tags[payload_token] == .asterisk;
|
||||
const actual_payload = payload_token + @boolToInt(capture_is_ref);
|
||||
const actual_payload = payload_token + @intFromBool(capture_is_ref);
|
||||
try writeTokenMod(builder, actual_payload, .variable, .{ .declaration = true });
|
||||
}
|
||||
try callWriteNodeTokens(allocator, .{ builder, if_node.ast.then_expr });
|
||||
|
@ -18,7 +18,7 @@ fn fnProtoToSignatureInfo(analyser: *Analyser, alloc: std.mem.Allocator, commas:
|
||||
|
||||
const arg_idx = if (skip_self_param) blk: {
|
||||
const has_self_param = try analyser.hasSelfParam(handle, proto);
|
||||
break :blk commas + @boolToInt(has_self_param);
|
||||
break :blk commas + @intFromBool(has_self_param);
|
||||
} else commas;
|
||||
|
||||
var params = std.ArrayListUnmanaged(types.ParameterInformation){};
|
||||
|
180
src/legacy_json.zig
Normal file
@ -0,0 +1,180 @@
|
||||
// TODO This is a *very* temporary fix until a proper rewrite is made.
|
||||
// See discussions in https://github.com/zigtools/zls/pull/1249
|
||||
// and https://github.com/zigtools/zls/issues/1248
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ParseError = std.json.ParseError;
|
||||
const Scanner = std.json.Scanner;
|
||||
const ParseOptions = std.json.ParseOptions;
|
||||
pub fn parseFromSlice(
|
||||
comptime T: type,
|
||||
allocator: Allocator,
|
||||
s: []const u8,
|
||||
options: ParseOptions,
|
||||
) (ParseError(Scanner) || std.mem.Allocator.Error)!T {
|
||||
const json = try std.json.parseFromSlice(T, allocator, s, options);
|
||||
defer json.deinit();
|
||||
return deepCopy(T, allocator, json.value);
|
||||
}
|
||||
pub fn parseFromTokenSource(
|
||||
comptime T: type,
|
||||
allocator: Allocator,
|
||||
scanner_or_reader: anytype,
|
||||
options: ParseOptions,
|
||||
) (ParseError(@TypeOf(scanner_or_reader.*)) || std.mem.Allocator.Error)!T {
|
||||
const json = try std.json.parseFromTokenSource(T, allocator, scanner_or_reader, options);
|
||||
defer json.deinit();
|
||||
return try deepCopy(T, allocator, json.value);
|
||||
}
|
||||
|
||||
/// Recursively copies a struct, reallocating pointers and slices
|
||||
fn deepCopy(comptime T: type, allocator: Allocator, value: T) !T {
|
||||
switch (@typeInfo(T)) {
|
||||
.Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => return value,
|
||||
.Optional => {
|
||||
if (value) |v| {
|
||||
return try deepCopy(@TypeOf(v), allocator, v);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
.Union => |unionInfo| {
|
||||
if (unionInfo.tag_type) |UnionTagType| {
|
||||
inline for (unionInfo.fields) |u_field| {
|
||||
if (value == @field(UnionTagType, u_field.name)) {
|
||||
return @unionInit(T, u_field.name, try deepCopy(u_field.type, allocator, @field(value, u_field.name)));
}
}
unreachable; // a tagged union value always matches one of its fields
|
||||
} else {
|
||||
unreachable;
|
||||
}
|
||||
},
|
||||
.Struct => |structInfo| {
|
||||
var result: T = undefined;
|
||||
inline for (structInfo.fields) |field| {
|
||||
if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
|
||||
const field_value = @field(value, field.name);
|
||||
@field(result, field.name) = try deepCopy(@TypeOf(field_value), allocator, field_value);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
.Array, .Vector => {
|
||||
var r: T = undefined;
|
||||
for (value, 0..) |v, i| {
|
||||
r[i] = try deepCopy(@TypeOf(v), allocator, v);
|
||||
}
|
||||
return r;
|
||||
},
|
||||
.Pointer => |ptrInfo| {
|
||||
switch (ptrInfo.size) {
|
||||
.One => {
|
||||
const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
|
||||
errdefer allocator.destroy(r);
|
||||
r.* = try deepCopy(ptrInfo.child, allocator, value.*);
|
||||
return r;
|
||||
},
|
||||
.Slice => {
|
||||
var result = std.ArrayList(ptrInfo.child).init(allocator);
|
||||
errdefer result.deinit();
|
||||
for (value) |v| {
|
||||
try result.append(try deepCopy(ptrInfo.child, allocator, v));
|
||||
}
|
||||
if (ptrInfo.sentinel) |some| {
|
||||
const sentinel_value = @ptrCast(*align(1) const ptrInfo.child, some).*;
|
||||
return try result.toOwnedSliceSentinel(sentinel_value);
|
||||
}
|
||||
return try result.toOwnedSlice();
|
||||
},
|
||||
|
||||
else => @compileError("Unable to deepCopy type '" ++ @typeName(T) ++ "'"),
|
||||
}
|
||||
},
|
||||
|
||||
else => @compileError("Unable to deepCopy type '" ++ @typeName(T) ++ "'"),
|
||||
}
|
||||
}
|
||||
/// Releases resources created by parseFromSlice() or parseFromTokenSource().
|
||||
pub fn parseFree(comptime T: type, allocator: Allocator, value: T) void {
|
||||
switch (@typeInfo(T)) {
|
||||
.Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
|
||||
.Optional => {
|
||||
if (value) |v| {
|
||||
return parseFree(@TypeOf(v), allocator, v);
|
||||
}
|
||||
},
|
||||
.Union => |unionInfo| {
|
||||
if (unionInfo.tag_type) |UnionTagType| {
|
||||
inline for (unionInfo.fields) |u_field| {
|
||||
if (value == @field(UnionTagType, u_field.name)) {
|
||||
parseFree(u_field.type, allocator, @field(value, u_field.name));
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
unreachable;
|
||||
}
|
||||
},
|
||||
.Struct => |structInfo| {
|
||||
inline for (structInfo.fields) |field| {
|
||||
var should_free = true;
|
||||
if (field.default_value) |default| {
|
||||
switch (@typeInfo(field.type)) {
|
||||
// We must not attempt to free pointers to struct default values
|
||||
.Pointer => |fieldPtrInfo| {
|
||||
const field_value = @field(value, field.name);
|
||||
const field_ptr = switch (fieldPtrInfo.size) {
|
||||
.One => field_value,
|
||||
.Slice => field_value.ptr,
|
||||
else => unreachable, // Other pointer types are not parseable
|
||||
};
|
||||
const field_addr = @intFromPtr(field_ptr);
|
||||
|
||||
const casted_default = @ptrCast(*const field.type, @alignCast(@alignOf(field.type), default)).*;
|
||||
const default_ptr = switch (fieldPtrInfo.size) {
|
||||
.One => casted_default,
|
||||
.Slice => casted_default.ptr,
|
||||
else => unreachable, // Other pointer types are not parseable
|
||||
};
|
||||
const default_addr = @intFromPtr(default_ptr);
|
||||
|
||||
if (field_addr == default_addr) {
|
||||
should_free = false;
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
if (should_free) {
|
||||
parseFree(field.type, allocator, @field(value, field.name));
|
||||
}
|
||||
}
|
||||
},
|
||||
.Array => |arrayInfo| {
|
||||
for (value) |v| {
|
||||
parseFree(arrayInfo.child, allocator, v);
|
||||
}
|
||||
},
|
||||
.Vector => |vecInfo| {
|
||||
var i: usize = 0;
|
||||
while (i < vecInfo.len) : (i += 1) {
|
||||
parseFree(vecInfo.child, allocator, value[i]);
|
||||
}
|
||||
},
|
||||
.Pointer => |ptrInfo| {
|
||||
switch (ptrInfo.size) {
|
||||
.One => {
|
||||
parseFree(ptrInfo.child, allocator, value.*);
|
||||
allocator.destroy(value);
|
||||
},
|
||||
.Slice => {
|
||||
for (value) |v| {
|
||||
parseFree(ptrInfo.child, allocator, v);
|
||||
}
|
||||
allocator.free(value);
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
}
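For context, a minimal usage sketch of this shim (the `Settings` struct is hypothetical; the callers below use it for `Config`). The point of `deepCopy` is that the returned value no longer references the arena owned by `std.json.Parsed`, so it can outlive the parse and must later be released with `parseFree`:

```zig
const std = @import("std");
const legacy_json = @import("legacy_json.zig");

const Settings = struct {
    name: []const u8 = "",
    warnings: bool = true,
};

test "parse and free a config-like struct" {
    const allocator = std.testing.allocator;
    const json_text = "{\"name\": \"zls\", \"warnings\": false}";

    // the slice stored in `settings.name` is a fresh allocation, not arena memory
    const settings = try legacy_json.parseFromSlice(Settings, allocator, json_text, .{});
    defer legacy_json.parseFree(Settings, allocator, settings);

    try std.testing.expectEqualStrings("zls", settings.name);
    try std.testing.expect(!settings.warnings);
}
```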
|
15
src/main.zig
@ -10,13 +10,14 @@ const Server = @import("Server.zig");
|
||||
const Header = @import("Header.zig");
|
||||
const debug = @import("debug.zig");
|
||||
const binned_allocator = @import("binned_allocator");
|
||||
const legacy_json = @import("legacy_json.zig");
|
||||
|
||||
const logger = std.log.scoped(.zls_main);
|
||||
const message_logger = std.log.scoped(.message);
|
||||
|
||||
var actual_log_level: std.log.Level = switch (zig_builtin.mode) {
|
||||
.Debug => .debug,
|
||||
else => @intToEnum(std.log.Level, @enumToInt(build_options.log_level)), // temporary fix to build failing on release-safe due to a Zig bug
|
||||
else => @enumFromInt(std.log.Level, @intFromEnum(build_options.log_level)), // temporary fix to build failing on release-safe due to a Zig bug
|
||||
};
|
||||
|
||||
pub const std_options = struct {
|
||||
@ -30,7 +31,7 @@ pub const std_options = struct {
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
if (@enumToInt(level) > @enumToInt(actual_log_level)) return;
|
||||
if (@intFromEnum(level) > @intFromEnum(actual_log_level)) return;
|
||||
|
||||
const level_txt = comptime level.asText();
|
||||
const scope_txt = comptime @tagName(scope);
|
||||
@ -147,8 +148,8 @@ fn updateConfig(
|
||||
defer allocator.free(json_message);
|
||||
try file.reader().readNoEof(json_message);
|
||||
|
||||
const new_config = try std.json.parseFromSlice(Config, allocator, json_message, .{});
|
||||
std.json.parseFree(Config, allocator, config.*);
|
||||
const new_config = try legacy_json.parseFromSlice(Config, allocator, json_message, .{});
|
||||
legacy_json.parseFree(Config, allocator, config.*);
|
||||
config.* = new_config;
|
||||
}
|
||||
}
|
||||
@ -227,7 +228,7 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult {
|
||||
const KV = struct { []const u8, ArgId };
|
||||
var pairs: [fields.len]KV = undefined;
|
||||
for (&pairs, fields) |*pair, field| {
|
||||
pair.* = .{ field.name, @intToEnum(ArgId, field.value) };
|
||||
pair.* = .{ field.name, @enumFromInt(ArgId, field.value) };
|
||||
}
|
||||
break :blk pairs[0..];
|
||||
});
|
||||
@ -334,7 +335,7 @@ fn parseArgs(allocator: std.mem.Allocator) !ParseArgsResult {
|
||||
if (specified.get(.@"show-config-path")) {
|
||||
const new_config = try getConfig(allocator, result.config_path);
|
||||
defer if (new_config.config_path) |path| allocator.free(path);
|
||||
defer std.json.parseFree(Config, allocator, new_config.config);
|
||||
defer legacy_json.parseFree(Config, allocator, new_config.config);
|
||||
|
||||
const full_path = if (new_config.config_path) |path| blk: {
|
||||
break :blk try std.fs.path.resolve(allocator, &.{ path, "zls.json" });
|
||||
@ -392,7 +393,7 @@ pub fn main() !void {
|
||||
logger.info("Starting ZLS {s} @ '{s}'", .{ build_options.version, result.zls_exe_path });
|
||||
|
||||
var config = try getConfig(allocator, result.config_path);
|
||||
defer std.json.parseFree(Config, allocator, config.config);
|
||||
defer legacy_json.parseFree(Config, allocator, config.config);
|
||||
defer if (config.config_path) |path| allocator.free(path);
|
||||
|
||||
if (result.replay_enabled and config.config.replay_session_path == null and config.config.record_session_path == null) {
|
||||
|
@ -89,7 +89,7 @@ fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
|
||||
inline for (fields) |field| {
|
||||
astgen.extra.items[i] = switch (field.type) {
|
||||
u32 => @field(extra, field.name),
|
||||
Zir.Inst.Ref => @enumToInt(@field(extra, field.name)),
|
||||
Zir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
|
||||
i32 => @bitCast(u32, @field(extra, field.name)),
|
||||
Zir.Inst.Call.Flags => @bitCast(u32, @field(extra, field.name)),
|
||||
Zir.Inst.BuiltinCall.Flags => @bitCast(u32, @field(extra, field.name)),
|
||||
@ -175,7 +175,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
|
||||
try lowerAstErrors(&astgen);
|
||||
}
|
||||
|
||||
const err_index = @enumToInt(Zir.ExtraIndex.compile_errors);
|
||||
const err_index = @intFromEnum(Zir.ExtraIndex.compile_errors);
|
||||
if (astgen.compile_errors.items.len == 0) {
|
||||
astgen.extra.items[err_index] = 0;
|
||||
} else {
|
||||
@ -191,7 +191,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
|
||||
}
|
||||
}
|
||||
|
||||
const imports_index = @enumToInt(Zir.ExtraIndex.imports);
|
||||
const imports_index = @intFromEnum(Zir.ExtraIndex.imports);
|
||||
if (astgen.imports.count() == 0) {
|
||||
astgen.extra.items[imports_index] = 0;
|
||||
} else {
|
||||
@ -1469,7 +1469,7 @@ fn arrayInitExprRlNone(
|
||||
|
||||
for (elements) |elem_init| {
|
||||
const elem_ref = try expr(gz, scope, .{ .rl = .none }, elem_init);
|
||||
astgen.extra.items[extra_index] = @enumToInt(elem_ref);
|
||||
astgen.extra.items[extra_index] = @intFromEnum(elem_ref);
|
||||
extra_index += 1;
|
||||
}
|
||||
return try gz.addPlNodePayloadIndex(tag, node, payload_index);
|
||||
@ -1486,13 +1486,13 @@ fn arrayInitExprInner(
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const astgen = gz.astgen;
|
||||
|
||||
const len = elements.len + @boolToInt(array_ty_inst != .none);
|
||||
const len = elements.len + @intFromBool(array_ty_inst != .none);
|
||||
const payload_index = try addExtra(astgen, Zir.Inst.MultiOp{
|
||||
.operands_len = @intCast(u32, len),
|
||||
});
|
||||
var extra_index = try reserveExtra(astgen, len);
|
||||
if (array_ty_inst != .none) {
|
||||
astgen.extra.items[extra_index] = @enumToInt(array_ty_inst);
|
||||
astgen.extra.items[extra_index] = @intFromEnum(array_ty_inst);
|
||||
extra_index += 1;
|
||||
}
|
||||
|
||||
@ -1504,14 +1504,14 @@ fn arrayInitExprInner(
|
||||
.tag = .elem_type_index,
|
||||
.data = .{ .bin = .{
|
||||
.lhs = array_ty_inst,
|
||||
.rhs = @intToEnum(Zir.Inst.Ref, i),
|
||||
.rhs = @enumFromInt(Zir.Inst.Ref, i),
|
||||
} },
|
||||
});
|
||||
break :ri ResultInfo{ .rl = .{ .coerced_ty = ty_expr } };
|
||||
} else ResultInfo{ .rl = .{ .none = {} } };
|
||||
|
||||
const elem_ref = try expr(gz, scope, ri, elem_init);
|
||||
astgen.extra.items[extra_index] = @enumToInt(elem_ref);
|
||||
astgen.extra.items[extra_index] = @intFromEnum(elem_ref);
|
||||
extra_index += 1;
|
||||
}
|
||||
|
||||
@ -3461,17 +3461,17 @@ fn ptrType(
|
||||
.src_node = gz.nodeIndexToRelative(node),
|
||||
});
|
||||
if (sentinel_ref != .none) {
|
||||
gz.astgen.extra.appendAssumeCapacity(@enumToInt(sentinel_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@intFromEnum(sentinel_ref));
|
||||
}
|
||||
if (align_ref != .none) {
|
||||
gz.astgen.extra.appendAssumeCapacity(@enumToInt(align_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@intFromEnum(align_ref));
|
||||
}
|
||||
if (addrspace_ref != .none) {
|
||||
gz.astgen.extra.appendAssumeCapacity(@enumToInt(addrspace_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@intFromEnum(addrspace_ref));
|
||||
}
|
||||
if (bit_start_ref != .none) {
|
||||
gz.astgen.extra.appendAssumeCapacity(@enumToInt(bit_start_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@enumToInt(bit_end_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@intFromEnum(bit_start_ref));
|
||||
gz.astgen.extra.appendAssumeCapacity(@intFromEnum(bit_end_ref));
|
||||
}
|
||||
|
||||
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
|
||||
@ -3590,10 +3590,10 @@ const WipMembers = struct {
|
||||
assert(index < self.decls_start);
|
||||
const bit_bag: u32 = if (self.decl_index % decls_per_u32 == 0) 0 else self.payload.items[index];
|
||||
self.payload.items[index] = (bit_bag >> bits_per_decl) |
|
||||
(@as(u32, @boolToInt(is_pub)) << 28) |
|
||||
(@as(u32, @boolToInt(is_export)) << 29) |
|
||||
(@as(u32, @boolToInt(has_align)) << 30) |
|
||||
(@as(u32, @boolToInt(has_section_or_addrspace)) << 31);
|
||||
(@as(u32, @intFromBool(is_pub)) << 28) |
|
||||
(@as(u32, @intFromBool(is_export)) << 29) |
|
||||
(@as(u32, @intFromBool(has_align)) << 30) |
|
||||
(@as(u32, @intFromBool(has_section_or_addrspace)) << 31);
|
||||
self.decl_index += 1;
|
||||
}
|
||||
|
||||
@ -3605,7 +3605,7 @@ const WipMembers = struct {
|
||||
bit_bag >>= bits_per_field;
|
||||
comptime var i = 0;
|
||||
inline while (i < bits_per_field) : (i += 1) {
|
||||
bit_bag |= @as(u32, @boolToInt(bits[i])) << (32 - bits_per_field + i);
|
||||
bit_bag |= @as(u32, @intFromBool(bits[i])) << (32 - bits_per_field + i);
|
||||
}
|
||||
self.payload.items[index] = bit_bag;
|
||||
self.field_index += 1;
|
||||
@ -4179,11 +4179,11 @@ fn globalVarDecl(
|
||||
wip_members.appendToDecl(block_inst);
|
||||
wip_members.appendToDecl(doc_comment_index); // doc_comment wip
|
||||
if (align_inst != .none) {
|
||||
wip_members.appendToDecl(@enumToInt(align_inst));
|
||||
wip_members.appendToDecl(@intFromEnum(align_inst));
|
||||
}
|
||||
if (has_section_or_addrspace) {
|
||||
wip_members.appendToDecl(@enumToInt(section_inst));
|
||||
wip_members.appendToDecl(@enumToInt(addrspace_inst));
|
||||
wip_members.appendToDecl(@intFromEnum(section_inst));
|
||||
wip_members.appendToDecl(@intFromEnum(addrspace_inst));
|
||||
}
|
||||
}
|
||||
|
||||
@ -4673,7 +4673,7 @@ fn structDeclInner(
|
||||
wip_members.appendToField(@intCast(u32, astgen.scratch.items.len - old_scratch_len));
|
||||
block_scope.instructions.items.len = block_scope.instructions_top;
|
||||
} else {
|
||||
wip_members.appendToField(@enumToInt(field_type));
|
||||
wip_members.appendToField(@intFromEnum(field_type));
|
||||
}
|
||||
|
||||
if (have_align) {
|
||||
@ -4824,13 +4824,13 @@ fn unionDeclInner(
|
||||
|
||||
if (have_type) {
|
||||
const field_type = try typeExpr(&block_scope, &namespace.base, member.ast.type_expr);
|
||||
wip_members.appendToField(@enumToInt(field_type));
|
||||
wip_members.appendToField(@intFromEnum(field_type));
|
||||
} else if (arg_inst == .none and auto_enum_tok == null) {
|
||||
return astgen.failNode(member_node, "union field missing type", .{});
|
||||
}
|
||||
if (have_align) {
|
||||
const align_inst = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .u32_type } }, member.ast.align_expr);
|
||||
wip_members.appendToField(@enumToInt(align_inst));
|
||||
wip_members.appendToField(@intFromEnum(align_inst));
|
||||
}
|
||||
if (have_value) {
|
||||
if (arg_inst == .none) {
|
||||
@ -4862,7 +4862,7 @@ fn unionDeclInner(
|
||||
);
|
||||
}
|
||||
const tag_value = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
|
||||
wip_members.appendToField(@enumToInt(tag_value));
|
||||
wip_members.appendToField(@intFromEnum(tag_value));
|
||||
}
|
||||
}
|
||||
|
||||
@ -5113,7 +5113,7 @@ fn containerDecl(
|
||||
}
|
||||
namespace.base.tag = .enum_namespace;
|
||||
const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
|
||||
wip_members.appendToField(@enumToInt(tag_value_inst));
|
||||
wip_members.appendToField(@intFromEnum(tag_value_inst));
|
||||
}
|
||||
}
|
||||
|
||||
@ -5792,7 +5792,7 @@ fn ifExpr(
|
||||
else
|
||||
.err_union_payload_unsafe;
|
||||
const payload_inst = try then_scope.addUnNode(tag, cond.inst, then_node);
|
||||
const token_name_index = payload_token + @boolToInt(payload_is_ref);
|
||||
const token_name_index = payload_token + @intFromBool(payload_is_ref);
|
||||
const ident_name = try astgen.identAsString(token_name_index);
|
||||
const token_name_str = tree.tokenSlice(token_name_index);
|
||||
if (mem.eql(u8, "_", token_name_str))
|
||||
@ -5946,8 +5946,8 @@ fn setCondBrPayload(
|
||||
const astgen = then_scope.astgen;
|
||||
const then_body = then_scope.instructionsSliceUpto(else_scope);
|
||||
const else_body = else_scope.instructionsSlice();
|
||||
const then_body_len = astgen.countBodyLenAfterFixups(then_body) + @boolToInt(then_break != 0);
|
||||
const else_body_len = astgen.countBodyLenAfterFixups(else_body) + @boolToInt(else_break != 0);
|
||||
const then_body_len = astgen.countBodyLenAfterFixups(then_body) + @intFromBool(then_break != 0);
|
||||
const else_body_len = astgen.countBodyLenAfterFixups(else_body) + @intFromBool(else_break != 0);
|
||||
try astgen.extra.ensureUnusedCapacity(
|
||||
astgen.gpa,
|
||||
@typeInfo(Zir.Inst.CondBr).Struct.fields.len + then_body_len + else_body_len,
|
||||
@ -5982,8 +5982,8 @@ fn setCondBrPayloadElideBlockStorePtr(
|
||||
const else_body = else_scope.instructionsSlice();
|
||||
const has_then_break = then_break != 0;
|
||||
const has_else_break = else_break != 0;
|
||||
const then_body_len = astgen.countBodyLenAfterFixups(then_body) + @boolToInt(has_then_break);
|
||||
const else_body_len = astgen.countBodyLenAfterFixups(else_body) + @boolToInt(has_else_break);
|
||||
const then_body_len = astgen.countBodyLenAfterFixups(then_body) + @intFromBool(has_then_break);
|
||||
const else_body_len = astgen.countBodyLenAfterFixups(else_body) + @intFromBool(has_else_break);
|
||||
try astgen.extra.ensureUnusedCapacity(
|
||||
astgen.gpa,
|
||||
@typeInfo(Zir.Inst.CondBr).Struct.fields.len + then_body_len + else_body_len,
|
||||
@ -6143,7 +6143,7 @@ fn whileExpr(
|
||||
const ident_bytes = tree.tokenSlice(ident_token);
|
||||
if (mem.eql(u8, "_", ident_bytes))
|
||||
break :s &then_scope.base;
|
||||
const payload_name_loc = payload_token + @boolToInt(payload_is_ref);
|
||||
const payload_name_loc = payload_token + @intFromBool(payload_is_ref);
|
||||
const ident_name = try astgen.identAsString(payload_name_loc);
|
||||
try astgen.detectLocalShadowing(&then_scope.base, ident_name, payload_name_loc, ident_bytes, .capture);
|
||||
payload_val_scope = .{
|
||||
@ -6385,7 +6385,7 @@ fn forExpr(
|
||||
for (for_full.ast.inputs, 0..) |input, i_usize| {
|
||||
const i = @intCast(u32, i_usize);
|
||||
const capture_is_ref = token_tags[capture_token] == .asterisk;
|
||||
const ident_tok = capture_token + @boolToInt(capture_is_ref);
|
||||
const ident_tok = capture_token + @intFromBool(capture_is_ref);
|
||||
const is_discard = mem.eql(u8, tree.tokenSlice(ident_tok), "_");
|
||||
|
||||
if (is_discard and capture_is_ref) {
|
||||
@ -6513,7 +6513,7 @@ fn forExpr(
|
||||
for (for_full.ast.inputs, 0..) |input, i_usize| {
|
||||
const i = @intCast(u32, i_usize);
|
||||
const capture_is_ref = token_tags[capture_token] == .asterisk;
|
||||
const ident_tok = capture_token + @boolToInt(capture_is_ref);
|
||||
const ident_tok = capture_token + @intFromBool(capture_is_ref);
|
||||
const capture_name = tree.tokenSlice(ident_tok);
|
||||
// Skip over the comma, and on to the next capture (or the ending pipe character).
|
||||
capture_token = ident_tok + 2;
|
||||
@ -6537,7 +6537,7 @@ fn forExpr(
|
||||
// indexables, we use it as an element index. This is so similar
|
||||
// that they can share the same code paths, branching only on the
|
||||
// ZIR tag.
|
||||
const switch_cond = (@as(u2, @boolToInt(capture_is_ref)) << 1) | @boolToInt(is_counter);
|
||||
const switch_cond = (@as(u2, @intFromBool(capture_is_ref)) << 1) | @intFromBool(is_counter);
|
||||
const tag: Zir.Inst.Tag = switch (switch_cond) {
|
||||
0b00 => .elem_val,
|
||||
0b01 => .add,
|
||||
@ -6789,7 +6789,7 @@ fn switchExpr(
|
||||
const payloads = &astgen.scratch;
|
||||
const scratch_top = astgen.scratch.items.len;
|
||||
const case_table_start = scratch_top;
|
||||
const scalar_case_table = case_table_start + @boolToInt(special_prong != .none);
|
||||
const scalar_case_table = case_table_start + @intFromBool(special_prong != .none);
|
||||
const multi_case_table = scalar_case_table + scalar_cases_len;
|
||||
const case_table_end = multi_case_table + multi_cases_len;
|
||||
try astgen.scratch.resize(gpa, case_table_end);
|
||||
@ -6856,8 +6856,8 @@ fn switchExpr(
|
||||
},
|
||||
});
|
||||
} else {
|
||||
const is_multi_case_bits: u2 = @boolToInt(is_multi_case);
|
||||
const is_ptr_bits: u2 = @boolToInt(is_ptr);
|
||||
const is_multi_case_bits: u2 = @intFromBool(is_multi_case);
|
||||
const is_ptr_bits: u2 = @intFromBool(is_ptr);
|
||||
const capture_tag: Zir.Inst.Tag = switch ((is_multi_case_bits << 1) | is_ptr_bits) {
|
||||
0b00 => .switch_capture,
|
||||
0b01 => .switch_capture_ref,
|
||||
@ -6936,7 +6936,7 @@ fn switchExpr(
|
||||
items_len += 1;
|
||||
|
||||
const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node);
|
||||
try payloads.append(gpa, @enumToInt(item_inst));
|
||||
try payloads.append(gpa, @intFromEnum(item_inst));
|
||||
}
|
||||
|
||||
// ranges
|
||||
@ -6948,7 +6948,7 @@ fn switchExpr(
|
||||
const first = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].lhs);
|
||||
const last = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].rhs);
|
||||
try payloads.appendSlice(gpa, &[_]u32{
|
||||
@enumToInt(first), @enumToInt(last),
|
||||
@intFromEnum(first), @intFromEnum(last),
|
||||
});
|
||||
}
|
||||
|
||||
@ -6965,7 +6965,7 @@ fn switchExpr(
|
||||
try payloads.resize(gpa, header_index + 2); // item, body_len
|
||||
const item_node = case.ast.values[0];
|
||||
const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node);
|
||||
payloads.items[header_index] = @enumToInt(item_inst);
|
||||
payloads.items[header_index] = @intFromEnum(item_inst);
|
||||
break :blk header_index + 1;
|
||||
};
|
||||
|
||||
@ -6995,7 +6995,7 @@ fn switchExpr(
|
||||
const case_slice = case_scope.instructionsSlice();
|
||||
const body_len = astgen.countBodyLenAfterFixups(case_slice);
|
||||
try payloads.ensureUnusedCapacity(gpa, body_len);
|
||||
const inline_bit = @as(u32, @boolToInt(case.inline_token != null)) << 31;
|
||||
const inline_bit = @as(u32, @intFromBool(case.inline_token != null)) << 31;
|
||||
payloads.items[body_len_index] = body_len | inline_bit;
|
||||
appendBodyWithFixupsArrayList(astgen, payloads, case_slice);
|
||||
}
|
||||
@ -7004,7 +7004,7 @@ fn switchExpr(
|
||||
try parent_gz.instructions.append(gpa, switch_block);
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.SwitchBlock).Struct.fields.len +
|
||||
@boolToInt(multi_cases_len != 0) +
|
||||
@intFromBool(multi_cases_len != 0) +
|
||||
payloads.items.len - case_table_end);
|
||||
|
||||
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.SwitchBlock{
|
||||
@ -7562,8 +7562,8 @@ fn numberLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index, source_node:
|
||||
const gpa = astgen.gpa;
|
||||
var big_int = try std.math.big.int.Managed.init(gpa);
|
||||
defer big_int.deinit();
|
||||
const prefix_offset = @as(u8, 2) * @boolToInt(base != .decimal);
|
||||
big_int.setString(@enumToInt(base), bytes[prefix_offset..]) catch |err| switch (err) {
|
||||
const prefix_offset = @as(u8, 2) * @intFromBool(base != .decimal);
|
||||
big_int.setString(@intFromEnum(base), bytes[prefix_offset..]) catch |err| switch (err) {
|
||||
error.InvalidCharacter => unreachable, // caught in `parseNumberLiteral`
|
||||
error.InvalidBase => unreachable, // we only pass 16, 8, 2, see above
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
@ -7668,7 +7668,7 @@ fn asmExpr(
|
||||
},
|
||||
else => .{
|
||||
.tag = .asm_expr,
|
||||
.tmpl = @enumToInt(try comptimeExpr(gz, scope, .{ .rl = .none }, full.ast.template)),
|
||||
.tmpl = @intFromEnum(try comptimeExpr(gz, scope, .{ .rl = .none }, full.ast.template)),
|
||||
},
|
||||
};
|
||||
|
||||
@ -7906,7 +7906,7 @@ fn typeOf(
|
||||
|
||||
for (args, 0..) |arg, i| {
|
||||
const param_ref = try reachableExpr(&typeof_scope, &typeof_scope.base, .{ .rl = .none }, arg, node);
|
||||
astgen.extra.items[args_index + i] = @enumToInt(param_ref);
|
||||
astgen.extra.items[args_index + i] = @intFromEnum(param_ref);
|
||||
}
|
||||
_ = try typeof_scope.addBreak(.break_inline, refToIndex(typeof_inst).?, .void_value);
|
||||
|
||||
@ -7955,7 +7955,7 @@ fn minMax(
|
||||
var extra_index = try reserveExtra(gz.astgen, args.len);
|
||||
for (args) |arg| {
|
||||
const arg_ref = try expr(gz, scope, .{ .rl = .none }, arg);
|
||||
astgen.extra.items[extra_index] = @enumToInt(arg_ref);
|
||||
astgen.extra.items[extra_index] = @intFromEnum(arg_ref);
|
||||
extra_index += 1;
|
||||
}
|
||||
const tag: Zir.Inst.Extended = switch (op) {
|
||||
@ -8030,7 +8030,7 @@ fn builtinCall(
|
||||
var extra_index = try reserveExtra(gz.astgen, params.len);
|
||||
for (params) |param| {
|
||||
const param_ref = try expr(gz, scope, .{ .rl = .none }, param);
|
||||
astgen.extra.items[extra_index] = @enumToInt(param_ref);
|
||||
astgen.extra.items[extra_index] = @intFromEnum(param_ref);
|
||||
extra_index += 1;
|
||||
}
|
||||
const result = try gz.addExtendedMultiOpPayloadIndex(.compile_log, payload_index, params.len);
|
||||
@ -8264,7 +8264,7 @@ fn builtinCall(
|
||||
.tag = .extended,
|
||||
.data = .{ .extended = .{
|
||||
.opcode = .reify,
|
||||
.small = @enumToInt(gz.anon_name_strategy),
|
||||
.small = @intFromEnum(gz.anon_name_strategy),
|
||||
.operand = payload_index,
|
||||
} },
|
||||
});
|
||||
@ -8974,7 +8974,7 @@ fn callExpr(
|
||||
.callee = callee,
|
||||
.flags = .{
|
||||
.pop_error_return_trace = !propagate_error_trace,
|
||||
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @enumToInt(modifier)),
|
||||
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @intFromEnum(modifier)),
|
||||
.args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len),
|
||||
},
|
||||
});
|
||||
@ -10183,63 +10183,63 @@ fn rvalue(
|
||||
},
|
||||
.ty => |ty_inst| {
|
||||
// Quickly eliminate some common, unnecessary type coercion.
|
||||
const as_ty = @as(u64, @enumToInt(Zir.Inst.Ref.type_type)) << 32;
|
||||
const as_comptime_int = @as(u64, @enumToInt(Zir.Inst.Ref.comptime_int_type)) << 32;
|
||||
const as_bool = @as(u64, @enumToInt(Zir.Inst.Ref.bool_type)) << 32;
|
||||
const as_usize = @as(u64, @enumToInt(Zir.Inst.Ref.usize_type)) << 32;
|
||||
const as_void = @as(u64, @enumToInt(Zir.Inst.Ref.void_type)) << 32;
|
||||
switch ((@as(u64, @enumToInt(ty_inst)) << 32) | @as(u64, @enumToInt(result))) {
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u1_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u8_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.i8_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u16_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u29_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.i16_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u32_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.i32_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.u64_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.i64_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.usize_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.isize_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_char_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_short_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_ushort_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_int_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_uint_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_long_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_ulong_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_longlong_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_ulonglong_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.c_longdouble_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.f16_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.f32_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.f64_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.f80_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.f128_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.anyopaque_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.bool_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.void_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.type_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.anyerror_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.comptime_int_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.comptime_float_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.noreturn_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.null_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.undefined_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.fn_noreturn_no_args_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.fn_void_no_args_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.fn_naked_noreturn_no_args_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.fn_ccc_void_no_args_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.single_const_pointer_to_comptime_int_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.const_slice_u8_type),
|
||||
as_ty | @enumToInt(Zir.Inst.Ref.enum_literal_type),
|
||||
as_comptime_int | @enumToInt(Zir.Inst.Ref.zero),
|
||||
as_comptime_int | @enumToInt(Zir.Inst.Ref.one),
|
||||
as_bool | @enumToInt(Zir.Inst.Ref.bool_true),
|
||||
as_bool | @enumToInt(Zir.Inst.Ref.bool_false),
|
||||
as_usize | @enumToInt(Zir.Inst.Ref.zero_usize),
|
||||
as_usize | @enumToInt(Zir.Inst.Ref.one_usize),
|
||||
as_void | @enumToInt(Zir.Inst.Ref.void_value),
|
||||
const as_ty = @as(u64, @intFromEnum(Zir.Inst.Ref.type_type)) << 32;
|
||||
const as_comptime_int = @as(u64, @intFromEnum(Zir.Inst.Ref.comptime_int_type)) << 32;
|
||||
const as_bool = @as(u64, @intFromEnum(Zir.Inst.Ref.bool_type)) << 32;
|
||||
const as_usize = @as(u64, @intFromEnum(Zir.Inst.Ref.usize_type)) << 32;
|
||||
const as_void = @as(u64, @intFromEnum(Zir.Inst.Ref.void_type)) << 32;
|
||||
switch ((@as(u64, @intFromEnum(ty_inst)) << 32) | @as(u64, @intFromEnum(result))) {
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u1_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u8_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.i8_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u16_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u29_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.i16_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u32_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.i32_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.u64_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.i64_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.usize_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.isize_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_char_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_short_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_ushort_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_int_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_uint_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_long_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_ulong_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_longlong_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_ulonglong_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.c_longdouble_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.f16_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.f32_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.f64_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.f80_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.f128_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.anyopaque_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.bool_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.void_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.type_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.anyerror_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.comptime_int_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.comptime_float_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.noreturn_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.null_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.undefined_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.fn_noreturn_no_args_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.fn_void_no_args_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.fn_naked_noreturn_no_args_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.fn_ccc_void_no_args_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.single_const_pointer_to_comptime_int_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.const_slice_u8_type),
|
||||
as_ty | @intFromEnum(Zir.Inst.Ref.enum_literal_type),
|
||||
as_comptime_int | @intFromEnum(Zir.Inst.Ref.zero),
|
||||
as_comptime_int | @intFromEnum(Zir.Inst.Ref.one),
|
||||
as_bool | @intFromEnum(Zir.Inst.Ref.bool_true),
|
||||
as_bool | @intFromEnum(Zir.Inst.Ref.bool_false),
|
||||
as_usize | @intFromEnum(Zir.Inst.Ref.zero_usize),
|
||||
as_usize | @intFromEnum(Zir.Inst.Ref.one_usize),
|
||||
as_void | @intFromEnum(Zir.Inst.Ref.void_value),
|
||||
=> return result, // type of result is already correct
|
||||
|
||||
// Need an explicit type coercion instruction.
|
||||
@ -11329,8 +11329,8 @@ const GenZir = struct {
|
||||
fancyFnExprExtraLen(astgen, cc_body, args.cc_ref) +
|
||||
fancyFnExprExtraLen(astgen, ret_body, ret_ref) +
|
||||
body_len + src_locs.len +
|
||||
@boolToInt(args.lib_name != 0) +
|
||||
@boolToInt(args.noalias_bits != 0),
|
||||
@intFromBool(args.lib_name != 0) +
|
||||
@intFromBool(args.noalias_bits != 0),
|
||||
);
|
||||
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.FuncFancy{
|
||||
.param_block = args.param_block,
|
||||
@ -11368,7 +11368,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[align_body[align_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (args.align_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.align_ref));
|
||||
}
|
||||
if (addrspace_body.len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(countBodyLenAfterFixups(astgen, addrspace_body));
|
||||
@ -11376,7 +11376,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[addrspace_body[addrspace_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (args.addrspace_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.addrspace_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.addrspace_ref));
|
||||
}
|
||||
if (section_body.len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(countBodyLenAfterFixups(astgen, section_body));
|
||||
@ -11384,7 +11384,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[section_body[section_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (args.section_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.section_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.section_ref));
|
||||
}
|
||||
if (cc_body.len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(countBodyLenAfterFixups(astgen, cc_body));
|
||||
@ -11392,7 +11392,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[cc_body[cc_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (args.cc_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.cc_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.cc_ref));
|
||||
}
|
||||
if (ret_body.len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(countBodyLenAfterFixups(astgen, ret_body));
|
||||
@ -11400,7 +11400,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[ret_body[ret_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (ret_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(ret_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(ret_ref));
|
||||
}
|
||||
|
||||
if (args.noalias_bits != 0) {
|
||||
@ -11442,7 +11442,7 @@ const GenZir = struct {
|
||||
const ret_body_len = if (ret_body.len != 0)
|
||||
countBodyLenAfterFixups(astgen, ret_body)
|
||||
else
|
||||
@boolToInt(ret_ref != .none);
|
||||
@intFromBool(ret_ref != .none);
|
||||
|
||||
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.Func{
|
||||
.param_block = args.param_block,
|
||||
@ -11456,7 +11456,7 @@ const GenZir = struct {
|
||||
const inst_data = zir_datas[ret_body[ret_body.len - 1]].@"break";
|
||||
astgen.extra.items[inst_data.payload_index] = new_index;
|
||||
} else if (ret_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(ret_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(ret_ref));
|
||||
}
|
||||
astgen.appendBodyWithFixups(body);
|
||||
astgen.extra.appendSliceAssumeCapacity(src_locs);
|
||||
@ -11487,7 +11487,7 @@ const GenZir = struct {
|
||||
fn fancyFnExprExtraLen(astgen: *AstGen, body: []Zir.Inst.Index, ref: Zir.Inst.Ref) u32 {
|
||||
// In the case of non-empty body, there is one for the body length,
|
||||
// and then one for each instruction.
|
||||
return countBodyLenAfterFixups(astgen, body) + @boolToInt(ref != .none);
|
||||
return countBodyLenAfterFixups(astgen, body) + @intFromBool(ref != .none);
|
||||
}
|
||||
|
||||
fn addVar(gz: *GenZir, args: struct {
|
||||
@ -11507,9 +11507,9 @@ const GenZir = struct {
|
||||
try astgen.extra.ensureUnusedCapacity(
|
||||
gpa,
|
||||
@typeInfo(Zir.Inst.ExtendedVar).Struct.fields.len +
|
||||
@boolToInt(args.lib_name != 0) +
|
||||
@boolToInt(args.align_inst != .none) +
|
||||
@boolToInt(args.init != .none),
|
||||
@intFromBool(args.lib_name != 0) +
|
||||
@intFromBool(args.align_inst != .none) +
|
||||
@intFromBool(args.init != .none),
|
||||
);
|
||||
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.ExtendedVar{
|
||||
.var_type = args.var_type,
|
||||
@ -11518,10 +11518,10 @@ const GenZir = struct {
|
||||
astgen.extra.appendAssumeCapacity(args.lib_name);
|
||||
}
|
||||
if (args.align_inst != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.align_inst));
|
||||
}
|
||||
if (args.init != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.init));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.init));
|
||||
}
|
||||
|
||||
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
|
||||
@ -12108,23 +12108,23 @@ const GenZir = struct {
|
||||
try astgen.extra.ensureUnusedCapacity(
|
||||
gpa,
|
||||
@typeInfo(Zir.Inst.AllocExtended).Struct.fields.len +
|
||||
@as(usize, @boolToInt(args.type_inst != .none)) +
|
||||
@as(usize, @boolToInt(args.align_inst != .none)),
|
||||
@as(usize, @intFromBool(args.type_inst != .none)) +
|
||||
@as(usize, @intFromBool(args.align_inst != .none)),
|
||||
);
|
||||
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.AllocExtended{
|
||||
.src_node = gz.nodeIndexToRelative(args.node),
|
||||
});
|
||||
if (args.type_inst != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.type_inst));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.type_inst));
|
||||
}
|
||||
if (args.align_inst != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.align_inst));
|
||||
}
|
||||
|
||||
const has_type: u4 = @boolToInt(args.type_inst != .none);
|
||||
const has_align: u4 = @boolToInt(args.align_inst != .none);
|
||||
const is_const: u4 = @boolToInt(args.is_const);
|
||||
const is_comptime: u4 = @boolToInt(args.is_comptime);
|
||||
const has_type: u4 = @intFromBool(args.type_inst != .none);
|
||||
const has_align: u4 = @intFromBool(args.align_inst != .none);
|
||||
const is_const: u4 = @intFromBool(args.is_const);
|
||||
const is_comptime: u4 = @intFromBool(args.is_comptime);
|
||||
const small: u16 = has_type | (has_align << 1) | (is_const << 2) | (is_comptime << 3);
|
||||
|
||||
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
|
||||
@ -12184,7 +12184,7 @@ const GenZir = struct {
|
||||
const small: u16 = @intCast(u16, args.outputs.len) |
|
||||
@intCast(u16, args.inputs.len << 5) |
|
||||
@intCast(u16, args.clobbers.len << 10) |
|
||||
(@as(u16, @boolToInt(args.is_volatile)) << 15);
|
||||
(@as(u16, @intFromBool(args.is_volatile)) << 15);
|
||||
|
||||
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
|
||||
astgen.instructions.appendAssumeCapacity(.{
|
||||
@ -12262,7 +12262,7 @@ const GenZir = struct {
|
||||
if (args.backing_int_ref != .none) {
|
||||
astgen.extra.appendAssumeCapacity(args.backing_int_body_len);
|
||||
if (args.backing_int_body_len == 0) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.backing_int_ref));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.backing_int_ref));
|
||||
}
|
||||
}
|
||||
astgen.instructions.set(inst, .{
|
||||
@ -12305,7 +12305,7 @@ const GenZir = struct {
|
||||
astgen.extra.appendAssumeCapacity(@bitCast(u32, node_offset));
|
||||
}
|
||||
if (args.tag_type != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.tag_type));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
|
||||
}
|
||||
if (args.body_len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(args.body_len);
|
||||
@ -12354,7 +12354,7 @@ const GenZir = struct {
|
||||
astgen.extra.appendAssumeCapacity(@bitCast(u32, node_offset));
|
||||
}
|
||||
if (args.tag_type != .none) {
|
||||
astgen.extra.appendAssumeCapacity(@enumToInt(args.tag_type));
|
||||
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
|
||||
}
|
||||
if (args.body_len != 0) {
|
||||
astgen.extra.appendAssumeCapacity(args.body_len);
|
||||
@ -12848,10 +12848,10 @@ fn lowerAstErrors(astgen: *AstGen) !void {
|
||||
var notes: std.ArrayListUnmanaged(u32) = .{};
|
||||
defer notes.deinit(gpa);
|
||||
|
||||
if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) {
|
||||
const tok = parse_err.token + @boolToInt(parse_err.token_is_prev);
|
||||
const bad_off = @intCast(u32, tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len);
|
||||
const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off;
|
||||
if (token_tags[parse_err.token + @intFromBool(parse_err.token_is_prev)] == .invalid) {
|
||||
const tok = parse_err.token + @intFromBool(parse_err.token_is_prev);
|
||||
const bad_off = @intCast(u32, tree.tokenSlice(parse_err.token + @intFromBool(parse_err.token_is_prev)).len);
|
||||
const byte_abs = token_starts[parse_err.token + @intFromBool(parse_err.token_is_prev)] + bad_off;
|
||||
try notes.append(gpa, try astgen.errNoteTokOff(tok, bad_off, "invalid byte: '{'}'", .{
|
||||
std.zig.fmtEscapes(tree.source[byte_abs..][0..1]),
|
||||
}));
|
||||
|
@ -70,7 +70,7 @@ pub fn extraData(code: Zir, comptime T: type, index: usize) struct { data: T, en
|
||||
inline for (fields) |field| {
|
||||
@field(result, field.name) = switch (field.type) {
|
||||
u32 => code.extra[i],
|
||||
Inst.Ref => @intToEnum(Inst.Ref, code.extra[i]),
|
||||
Inst.Ref => @enumFromInt(Inst.Ref, code.extra[i]),
|
||||
i32 => @bitCast(i32, code.extra[i]),
|
||||
Inst.Call.Flags => @bitCast(Inst.Call.Flags, code.extra[i]),
|
||||
Inst.BuiltinCall.Flags => @bitCast(Inst.BuiltinCall.Flags, code.extra[i]),
|
||||
@ -101,7 +101,7 @@ pub fn refSlice(code: Zir, start: usize, len: usize) []Inst.Ref {
|
||||
}
|
||||
|
||||
pub fn hasCompileErrors(code: Zir) bool {
|
||||
return code.extra[@enumToInt(ExtraIndex.compile_errors)] != 0;
|
||||
return code.extra[@intFromEnum(ExtraIndex.compile_errors)] != 0;
|
||||
}
|
||||
|
||||
pub fn deinit(code: *Zir, gpa: Allocator) void {
|
||||
@ -2724,8 +2724,8 @@ pub const Inst = struct {
|
||||
pub const ScalarCasesLen = u29;
|
||||
|
||||
pub fn specialProng(bits: Bits) SpecialProng {
|
||||
const has_else: u2 = @boolToInt(bits.has_else);
|
||||
const has_under: u2 = @boolToInt(bits.has_under);
|
||||
const has_else: u2 = @intFromBool(bits.has_else);
|
||||
const has_under: u2 = @intFromBool(bits.has_under);
|
||||
return switch ((has_else << 1) | has_under) {
|
||||
0b00 => .none,
|
||||
0b01 => .under,
|
||||
@ -2765,7 +2765,7 @@ pub const Inst = struct {
|
||||
|
||||
var scalar_i: usize = 0;
|
||||
while (true) : (scalar_i += 1) {
|
||||
const item = @intToEnum(Ref, zir.extra[extra_index]);
|
||||
const item = @enumFromInt(Ref, zir.extra[extra_index]);
|
||||
extra_index += 1;
|
||||
const body_len = @truncate(u31, zir.extra[extra_index]);
|
||||
extra_index += 1;
|
||||
@ -3373,8 +3373,8 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {
|
||||
.struct_decl => {
|
||||
const small = @bitCast(Inst.StructDecl.Small, extended.small);
|
||||
var extra_index: usize = extended.operand;
|
||||
extra_index += @boolToInt(small.has_src_node);
|
||||
extra_index += @boolToInt(small.has_fields_len);
|
||||
extra_index += @intFromBool(small.has_src_node);
|
||||
extra_index += @intFromBool(small.has_fields_len);
|
||||
const decls_len = if (small.has_decls_len) decls_len: {
|
||||
const decls_len = zir.extra[extra_index];
|
||||
extra_index += 1;
|
||||
@ -3396,10 +3396,10 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {
|
||||
.enum_decl => {
|
||||
const small = @bitCast(Inst.EnumDecl.Small, extended.small);
|
||||
var extra_index: usize = extended.operand;
|
||||
extra_index += @boolToInt(small.has_src_node);
|
||||
extra_index += @boolToInt(small.has_tag_type);
|
||||
extra_index += @boolToInt(small.has_body_len);
|
||||
extra_index += @boolToInt(small.has_fields_len);
|
||||
extra_index += @intFromBool(small.has_src_node);
|
||||
extra_index += @intFromBool(small.has_tag_type);
|
||||
extra_index += @intFromBool(small.has_body_len);
|
||||
extra_index += @intFromBool(small.has_fields_len);
|
||||
const decls_len = if (small.has_decls_len) decls_len: {
|
||||
const decls_len = zir.extra[extra_index];
|
||||
extra_index += 1;
|
||||
@ -3411,10 +3411,10 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {
|
||||
.union_decl => {
|
||||
const small = @bitCast(Inst.UnionDecl.Small, extended.small);
|
||||
var extra_index: usize = extended.operand;
|
||||
extra_index += @boolToInt(small.has_src_node);
|
||||
extra_index += @boolToInt(small.has_tag_type);
|
||||
extra_index += @boolToInt(small.has_body_len);
|
||||
extra_index += @boolToInt(small.has_fields_len);
|
||||
extra_index += @intFromBool(small.has_src_node);
|
||||
extra_index += @intFromBool(small.has_tag_type);
|
||||
extra_index += @intFromBool(small.has_body_len);
|
||||
extra_index += @intFromBool(small.has_fields_len);
|
||||
const decls_len = if (small.has_decls_len) decls_len: {
|
||||
const decls_len = zir.extra[extra_index];
|
||||
extra_index += 1;
|
||||
@ -3426,7 +3426,7 @@ pub fn declIterator(zir: Zir, decl_inst: u32) DeclIterator {
|
||||
.opaque_decl => {
|
||||
const small = @bitCast(Inst.OpaqueDecl.Small, extended.small);
|
||||
var extra_index: usize = extended.operand;
|
||||
extra_index += @boolToInt(small.has_src_node);
|
||||
extra_index += @intFromBool(small.has_src_node);
|
||||
const decls_len = if (small.has_decls_len) decls_len: {
|
||||
const decls_len = zir.extra[extra_index];
|
||||
extra_index += 1;
|
||||
@ -3499,7 +3499,7 @@ fn findDeclsInner(
|
||||
const inst_data = datas[inst].pl_node;
|
||||
const extra = zir.extraData(Inst.FuncFancy, inst_data.payload_index);
|
||||
var extra_index: usize = extra.end;
|
||||
extra_index += @boolToInt(extra.data.bits.has_lib_name);
|
||||
extra_index += @intFromBool(extra.data.bits.has_lib_name);
|
||||
|
||||
if (extra.data.bits.has_align_body) {
|
||||
const body_len = zir.extra[extra_index];
|
||||
@ -3551,7 +3551,7 @@ fn findDeclsInner(
|
||||
extra_index += 1;
|
||||
}
|
||||
|
||||
extra_index += @boolToInt(extra.data.bits.has_any_noalias);
|
||||
extra_index += @intFromBool(extra.data.bits.has_any_noalias);
|
||||
|
||||
const body = zir.extra[extra_index..][0..extra.data.body_len];
|
||||
return zir.findDeclsBody(list, body);
|
||||
@ -3730,7 +3730,7 @@ pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
|
||||
ret_ty_ref = .void_type;
|
||||
},
|
||||
1 => {
|
||||
ret_ty_ref = @intToEnum(Inst.Ref, zir.extra[extra_index]);
|
||||
ret_ty_ref = @enumFromInt(Inst.Ref, zir.extra[extra_index]);
|
||||
extra_index += 1;
|
||||
},
|
||||
else => {
|
||||
@ -3757,7 +3757,7 @@ pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
|
||||
var ret_ty_ref: Inst.Ref = .void_type;
|
||||
var ret_ty_body: []const Inst.Index = &.{};
|
||||
|
||||
extra_index += @boolToInt(extra.data.bits.has_lib_name);
|
||||
extra_index += @intFromBool(extra.data.bits.has_lib_name);
|
||||
if (extra.data.bits.has_align_body) {
|
||||
extra_index += zir.extra[extra_index] + 1;
|
||||
} else if (extra.data.bits.has_align_ref) {
|
||||
@ -3784,11 +3784,11 @@ pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
|
||||
ret_ty_body = zir.extra[extra_index..][0..body_len];
|
||||
extra_index += ret_ty_body.len;
|
||||
} else if (extra.data.bits.has_ret_ty_ref) {
|
||||
ret_ty_ref = @intToEnum(Inst.Ref, zir.extra[extra_index]);
|
||||
ret_ty_ref = @enumFromInt(Inst.Ref, zir.extra[extra_index]);
|
||||
extra_index += 1;
|
||||
}
|
||||
|
||||
extra_index += @boolToInt(extra.data.bits.has_any_noalias);
|
||||
extra_index += @intFromBool(extra.data.bits.has_any_noalias);
|
||||
|
||||
const body = zir.extra[extra_index..][0..extra.data.body_len];
|
||||
extra_index += body.len;
|
||||
@ -3825,14 +3825,14 @@ pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
|
||||
};
|
||||
}
|
||||
|
||||
pub const ref_start_index: u32 = @enumToInt(Inst.Ref.ref_start_index);
|
||||
pub const ref_start_index: u32 = @intFromEnum(Inst.Ref.ref_start_index);
|
||||
|
||||
pub fn indexToRef(inst: Inst.Index) Inst.Ref {
|
||||
return @intToEnum(Inst.Ref, ref_start_index + inst);
|
||||
return @enumFromInt(Inst.Ref, ref_start_index + inst);
|
||||
}
|
||||
|
||||
pub fn refToIndex(inst: Inst.Ref) ?Inst.Index {
|
||||
const ref_int = @enumToInt(inst);
|
||||
const ref_int = @intFromEnum(inst);
|
||||
if (ref_int >= ref_start_index) {
|
||||
return ref_int - ref_start_index;
|
||||
} else {
|
||||
|
|
@ -30,6 +30,7 @@ pub const goto = @import("features/goto.zig");
pub const hover_handler = @import("features/hover.zig");
pub const selection_range = @import("features/selection_range.zig");
pub const diagnostics = @import("features/diagnostics.zig");
pub const legacy_json = @import("legacy_json.zig");

comptime {
const std = @import("std");
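The new legacy_json module exported here is what lets the test suite keep freeing values parsed with the pre-0.11 std.json API after std.json.parseFree was removed upstream. Judging only from the call sites in this diff, its parseFree mirrors the old signature; a hedged usage sketch, where Config is a stand-in rather than the real zls configuration type:

const std = @import("std");
const zls = @import("zls"); // the zls package, as the test suite imports it

// Stand-in for illustration; the tests in this diff actually pass zls.Config.
const Config = struct { zig_exe_path: ?[]const u8 = null };

fn freeConfig(allocator: std.mem.Allocator, config: Config) void {
    // was: std.json.parseFree(Config, allocator, config);
    // assumed to have the same shape as the removed std.json.parseFree
    zls.legacy_json.parseFree(Config, allocator, config);
}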
|
@ -60,7 +60,7 @@ pub const Context = struct {
}

pub fn deinit(self: *Context) void {
std.json.parseFree(Config, allocator, self.config.*);
zls.legacy_json.parseFree(Config, allocator, self.config.*);
allocator.destroy(self.config);

self.request("shutdown", "{}", null) catch {};
@ -129,14 +129,10 @@ pub const Context = struct {

const expected = expect orelse return;

// parse the response
var parser = std.json.Parser.init(allocator, .alloc_always);
defer parser.deinit();

var tree = try parser.parse(response_bytes);
var tree = try std.json.parseFromSlice(std.json.Value, allocator, response_bytes, .{});
defer tree.deinit();

const response = tree.root.object;
const response = tree.value.object;

// assertions
try std.testing.expectEqualStrings("2.0", response.get("jsonrpc").?.string);
@ -195,11 +191,10 @@ pub const Context = struct {
const response_bytes = try self.requestAlloc(method, buffer.items);
defer self.server.allocator.free(response_bytes);

var parser = std.json.Parser.init(self.arena.allocator(), .alloc_always);
var tree = try parser.parse(try self.arena.allocator().dupe(u8, response_bytes));
var tree = try std.json.parseFromSlice(std.json.Value, self.arena.allocator(), try self.arena.allocator().dupe(u8, response_bytes), .{});

// TODO validate jsonrpc and id

return tres.parse(Response(Result), tree.root, self.arena.allocator());
return tres.parse(Response(Result), tree.value, self.arena.allocator());
}
};
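These test-harness hunks move from the removed streaming std.json.Parser to Zig 0.11's std.json.parseFromSlice, which returns a Parsed(T) wrapper: the payload sits behind .value and everything allocated during parsing is released by .deinit(). A minimal sketch of the new pattern against a made-up JSON payload:

const std = @import("std");

test "parseFromSlice into a dynamic std.json.Value" {
    const allocator = std.testing.allocator;
    const response_bytes =
        \\{"jsonrpc": "2.0", "id": 1}
    ;

    // old pattern (removed): std.json.Parser.init(allocator, .alloc_always) + parser.parse(...)
    var tree = try std.json.parseFromSlice(std.json.Value, allocator, response_bytes, .{});
    defer tree.deinit(); // frees everything allocated while parsing

    const response = tree.value.object; // was tree.root.object
    try std.testing.expectEqualStrings("2.0", response.get("jsonrpc").?.string);
}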
|
@ -108,7 +108,7 @@ fn testTranslate(c_source: []const u8) !translate_c.Result {
if (!std.process.can_spawn) return error.SkipZigTest;

var config: zls.Config = .{};
defer std.json.parseFree(zls.Config, allocator, config);
defer zls.legacy_json.parseFree(zls.Config, allocator, config);

var runtime_zig_version: ?zls.ZigVersionWrapper = null;
defer if (runtime_zig_version) |*v| v.free();
|
@ -377,7 +377,7 @@ const Context = struct {
};
}

const namespace = @intToEnum(ComptimeInterpreter.Namespace.Index, 0); // root namespace
const namespace = @enumFromInt(ComptimeInterpreter.Namespace.Index, 0); // root namespace
const result = (try self.interpreter.call(namespace, func_node, args, .{})).result;

const val = self.interpreter.ip.indexToKey(result.value.index);
@ -390,7 +390,7 @@ const Context = struct {
}

pub fn interpret(self: *Context, node: Ast.Node.Index) !KV {
const namespace = @intToEnum(ComptimeInterpreter.Namespace.Index, 0); // root namespace
const namespace = @enumFromInt(ComptimeInterpreter.Namespace.Index, 0); // root namespace
const result = try (try self.interpreter.interpret(node, namespace, .{})).getValue();

const val = self.interpreter.ip.indexToKey(result.index);
|
@ -1048,7 +1048,7 @@ fn testSemanticTokens(source: [:0]const u8, expected_tokens: []const TokenData)
const delta_line = token_data[0];
const delta_start = token_data[1];
const length = token_data[2];
const token_type = @intToEnum(zls.semantic_tokens.TokenType, token_data[3]);
const token_type = @enumFromInt(zls.semantic_tokens.TokenType, token_data[3]);
const token_modifiers = @bitCast(zls.semantic_tokens.TokenModifiers, @intCast(u16, token_data[4]));

position.line += delta_line;
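The semantic-token assertion above unpacks each five-integer LSP token record; only the @intToEnum spelling changes to @enumFromInt. A small sketch of that decoding step under the dev.3737 builtin names (still the two-argument forms, as in the hunk), with simplified stand-ins for the real zls.semantic_tokens.TokenType and TokenModifiers declarations:

const std = @import("std");

// Simplified stand-ins for zls.semantic_tokens.TokenType / TokenModifiers.
const TokenType = enum(u32) { namespace, type, function, variable, _ };
const TokenModifiers = packed struct(u16) {
    declaration: bool = false,
    readonly: bool = false,
    _padding: u14 = 0,
};

test "decode one LSP semantic token record" {
    // delta_line, delta_start, length, token type, token modifiers
    const token_data = [5]u32{ 0, 4, 3, 2, 1 };

    const token_type = @enumFromInt(TokenType, token_data[3]); // was @intToEnum
    const token_modifiers = @bitCast(TokenModifiers, @intCast(u16, token_data[4]));

    try std.testing.expectEqual(TokenType.function, token_type);
    try std.testing.expect(token_modifiers.declaration);
}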