Compare commits

...

10 Commits

Author SHA1 Message Date
Sora 6d7305f5f0 Remove zon comment (#1265) 2023-06-25 01:57:00 +01:00
Techatrix 4dc7652aa2 fix timeout in ZCS receiveMessage (#1264) 2023-06-24 22:17:56 +01:00
Techatrix 0b1fc7eb6a include anytype parameters in inlay hint function parameter count (#1263) 2023-06-24 22:17:07 +01:00
Lee Cannon 7487308948 Merge pull request #1262 from FnControlOption/block-type (Resolve type of simple labeled block) 2023-06-24 22:16:54 +01:00
fn ⌃ ⌥ 0baae921ed Add completion test 2023-06-24 10:14:58 -07:00
fn ⌃ ⌥ a8c81522cd Resolve type of simple labeled block 2023-06-24 09:00:12 -07:00
Techatrix a16fb19797 improve self param detection in inlay hints (#1258) 2023-06-24 02:25:12 +01:00
Techatrix 7e19a88ad2 update stage2 sources (#1257) 2023-06-23 22:14:08 +01:00
Techatrix 0e57f694be update analysis of builtin functions 2023-06-23 21:10:26 +01:00
Lee Cannon ecb18949df disable translate c test on windows (#1253) 2023-06-23 11:47:54 -04:00
11 changed files with 678 additions and 626 deletions

View File

@ -16,7 +16,6 @@
.hash = "122041f6531ee2cd10e7d5f81817c50b45037affc95d748cbcd71a766866fb6030d4",
},
.binned_allocator = .{
// upstream: https://gist.github.com/silversquirl/c1e4840048fdf48e669b6eac76d80634
.url = "https://gist.github.com/FalsePattern/48fded613c115e16e91c46db8642c7e4/archive/75e3d5e6a0e0cf23dbf7abfe16831e23c38721bc.tar.gz",
.hash = "1220ba896ddd4258eed9274b36284d4cc4600ee69c4c0978cbe237ec09a524a2e252",
},

View File

@ -32,32 +32,27 @@ pub fn receiveMessage(client: *Client) !InMessage.Header {
const Header = InMessage.Header;
const fifo = client.pooler.fifo(.in);
while (try client.pooler.poll()) {
var first_run = true;
var header: ?Header = null;
while (first_run or try client.pooler.poll()) {
first_run = false;
if (header == null) {
if (fifo.readableLength() < @sizeOf(Header)) continue;
const buf = fifo.readableSlice(0);
assert(fifo.readableLength() == buf.len);
if (buf.len >= @sizeOf(Header)) {
// workaround for https://github.com/ziglang/zig/issues/14904
const bytes_len = bswap_and_workaround_u32(buf[4..][0..4]);
const tag = bswap_and_workaround_tag(buf[0..][0..4]);
if (buf.len - @sizeOf(Header) >= bytes_len) {
fifo.discard(@sizeOf(Header));
return .{
header = Header{
.tag = tag,
.bytes_len = bytes_len,
};
} else {
const needed = bytes_len - (buf.len - @sizeOf(Header));
const write_buffer = try fifo.writableWithSize(needed);
const amt = try client.in.readAll(write_buffer);
fifo.update(amt);
continue;
}
fifo.discard(@sizeOf(Header));
}
const write_buffer = try fifo.writableWithSize(256);
const amt = try client.in.read(write_buffer);
fifo.update(amt);
if (header) |h| {
if (fifo.readableLength() < h.bytes_len) continue;
return h;
}
}
return error.Timeout;
}
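
For reference, a minimal sketch of the message framing this loop appears to parse, inferred only from the byte offsets used above (not copied from the std.zig protocol definitions, and the struct name here is invented): an 8-byte header of two u32 fields, followed by `bytes_len` payload bytes. The rewrite returns the header only once the whole payload is buffered, and processes already-buffered data before the first poll so it cannot time out spuriously.

```zig
const std = @import("std");

// Assumed wire format (see the offsets used above):
//   buf[0..4]  tag        (decoded via bswap_and_workaround_tag)
//   buf[4..8]  bytes_len  (decoded via bswap_and_workaround_u32)
//   buf[8..]   bytes_len bytes of payload
const HeaderSketch = extern struct {
    tag: u32,
    bytes_len: u32,
};

comptime {
    // The fifo checks above compare readableLength() against @sizeOf(Header).
    std.debug.assert(@sizeOf(HeaderSketch) == 8);
}
```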

View File

@ -338,7 +338,7 @@ pub const Key = union(enum) {
.anyerror => .ErrorSet,
.noreturn => .NoReturn,
.anyframe_type => .AnyFrame,
.empty_struct_literal => .Struct,
.empty_struct_type => .Struct,
.null_type => .Null,
.undefined_type => .Undefined,
.enum_literal_type => .EnumLiteral,
@ -634,7 +634,7 @@ pub const Key = union(enum) {
.enum_literal_type,
=> Index.none,
.empty_struct_literal => Index.empty_aggregate,
.empty_struct_type => Index.empty_aggregate,
.void => Index.void_value,
.noreturn => Index.unreachable_value,
.null_type => Index.null_value,
@ -659,8 +659,8 @@ pub const Key = union(enum) {
.int_type => |int_info| {
if (int_info.bits == 0) {
switch (int_info.signedness) {
.unsigned => return Index.zero,
.signed => return Index.zero, // do we need a signed zero?
.unsigned => return Index.zero_comptime_int,
.signed => return Index.zero_comptime_int, // do we need a signed zero?
}
}
return Index.none;
@ -835,7 +835,7 @@ pub const Key = union(enum) {
.null_type => try writer.writeAll("@TypeOf(null)"),
.undefined_type => try writer.writeAll("@TypeOf(undefined)"),
.empty_struct_literal => try writer.writeAll("@TypeOf(.{})"),
.empty_struct_type => try writer.writeAll("@TypeOf(.{})"),
.enum_literal_type => try writer.writeAll("@TypeOf(.enum_literal)"),
.atomic_order => try writer.writeAll("std.builtin.AtomicOrder"),
@ -1104,7 +1104,7 @@ pub const Index = enum(u32) {
comptime_float_type,
noreturn_type,
anyframe_type,
empty_struct_literal,
empty_struct_type,
null_type,
undefined_type,
enum_literal_type,
@ -1114,19 +1114,22 @@ pub const Index = enum(u32) {
address_space_type,
float_mode_type,
reduce_op_type,
modifier_type,
call_modifier_type,
prefetch_options_type,
export_options_type,
extern_options_type,
type_info_type,
manyptr_u8_type,
manyptr_const_u8_type,
manyptr_const_u8_sentinel_0_type,
fn_noreturn_no_args_type,
fn_void_no_args_type,
fn_naked_noreturn_no_args_type,
fn_ccc_void_no_args_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
slice_const_u8_type,
slice_const_u8_sentinel_0_type,
optional_noreturn_type,
anyerror_void_error_union_type,
generic_poison_type,
unknown_type,
@ -1134,9 +1137,17 @@ pub const Index = enum(u32) {
/// `undefined` (untyped)
undefined_value,
/// `0` (comptime_int)
zero,
zero_comptime_int,
/// `0` (u8)
zero_u8,
/// `0` (usize)
zero_usize,
/// `1` (comptime_int)
one,
one_comptime_int,
/// `1` (u8)
one_u8,
/// `1` (usize)
one_usize,
/// `{}`
void_value,
/// `unreachable` (noreturn type)
@ -1149,10 +1160,6 @@ pub const Index = enum(u32) {
bool_false,
/// `.{}` (untyped)
empty_aggregate,
/// `0` (usize)
zero_usize,
/// `1` (usize)
one_usize,
the_only_possible_value,
generic_poison,
// unknown value of unknown type
@ -1345,7 +1352,7 @@ pub const SimpleType = enum(u32) {
comptime_float,
noreturn,
anyframe_type,
empty_struct_literal,
empty_struct_type,
null_type,
undefined_type,
enum_literal_type,
@ -1424,7 +1431,7 @@ pub fn init(gpa: Allocator) Allocator.Error!InternPool {
.{ .index = .comptime_float_type, .key = .{ .simple_type = .comptime_float } },
.{ .index = .noreturn_type, .key = .{ .simple_type = .noreturn } },
.{ .index = .anyframe_type, .key = .{ .simple_type = .anyframe_type } },
.{ .index = .empty_struct_literal, .key = .{ .simple_type = .empty_struct_literal } },
.{ .index = .empty_struct_type, .key = .{ .simple_type = .empty_struct_type } },
.{ .index = .null_type, .key = .{ .simple_type = .null_type } },
.{ .index = .undefined_type, .key = .{ .simple_type = .undefined_type } },
.{ .index = .enum_literal_type, .key = .{ .simple_type = .enum_literal_type } },
@ -1435,41 +1442,45 @@ pub fn init(gpa: Allocator) Allocator.Error!InternPool {
.{ .index = .address_space_type, .key = .{ .simple_type = .address_space } },
.{ .index = .float_mode_type, .key = .{ .simple_type = .float_mode } },
.{ .index = .reduce_op_type, .key = .{ .simple_type = .reduce_op } },
.{ .index = .modifier_type, .key = .{ .simple_type = .modifier } },
.{ .index = .call_modifier_type, .key = .{ .simple_type = .modifier } },
.{ .index = .prefetch_options_type, .key = .{ .simple_type = .prefetch_options } },
.{ .index = .export_options_type, .key = .{ .simple_type = .export_options } },
.{ .index = .extern_options_type, .key = .{ .simple_type = .extern_options } },
.{ .index = .type_info_type, .key = .{ .simple_type = .type_info } },
.{ .index = .manyptr_u8_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .size = .Many } } },
.{ .index = .manyptr_const_u8_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .size = .Many, .is_const = true } } },
.{ .index = .manyptr_const_u8_sentinel_0_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .sentinel = .zero_u8, .size = .Many, .is_const = true } } },
.{ .index = .fn_noreturn_no_args_type, .key = .{ .function_type = .{ .args = &.{}, .return_type = .noreturn_type } } },
.{ .index = .fn_void_no_args_type, .key = .{ .function_type = .{ .args = &.{}, .return_type = .void_type } } },
.{ .index = .fn_naked_noreturn_no_args_type, .key = .{ .function_type = .{ .args = &.{}, .return_type = .void_type, .calling_convention = .Naked } } },
.{ .index = .fn_ccc_void_no_args_type, .key = .{ .function_type = .{ .args = &.{}, .return_type = .void_type, .calling_convention = .C } } },
.{ .index = .single_const_pointer_to_comptime_int_type, .key = .{ .pointer_type = .{ .elem_type = .comptime_int_type, .size = .One, .is_const = true } } },
.{ .index = .const_slice_u8_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .size = .Slice, .is_const = true } } },
.{ .index = .slice_const_u8_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .size = .Slice, .is_const = true } } },
.{ .index = .slice_const_u8_sentinel_0_type, .key = .{ .pointer_type = .{ .elem_type = .u8_type, .sentinel = .zero_u8, .size = .Slice, .is_const = true } } },
.{ .index = .optional_noreturn_type, .key = .{ .optional_type = .{ .payload_type = .noreturn_type } } },
.{ .index = .anyerror_void_error_union_type, .key = .{ .error_union_type = .{ .error_set_type = .anyerror_type, .payload_type = .void_type } } },
.{ .index = .generic_poison_type, .key = .{ .simple_type = .generic_poison } },
.{ .index = .unknown_type, .key = .{ .simple_type = .unknown } },
.{ .index = .undefined_value, .key = .{ .simple_value = .undefined_value } },
.{ .index = .zero, .key = .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 0 } } },
.{ .index = .one, .key = .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 1 } } },
.{ .index = .zero_comptime_int, .key = .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 0 } } },
.{ .index = .zero_u8, .key = .{ .int_u64_value = .{ .ty = .u8_type, .int = 0 } } },
.{ .index = .zero_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 0 } } },
.{ .index = .one_comptime_int, .key = .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 1 } } },
.{ .index = .one_u8, .key = .{ .int_u64_value = .{ .ty = .u8_type, .int = 1 } } },
.{ .index = .one_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 1 } } },
.{ .index = .void_value, .key = .{ .simple_value = .void_value } },
.{ .index = .unreachable_value, .key = .{ .simple_value = .unreachable_value } },
.{ .index = .null_value, .key = .{ .simple_value = .null_value } },
.{ .index = .bool_true, .key = .{ .simple_value = .bool_true } },
.{ .index = .bool_false, .key = .{ .simple_value = .bool_false } },
.{ .index = .empty_aggregate, .key = .{ .aggregate = .{ .ty = .empty_struct_literal, .values = &.{} } } },
.{ .index = .zero_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 0 } } },
.{ .index = .one_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 1 } } },
.{ .index = .empty_aggregate, .key = .{ .aggregate = .{ .ty = .empty_struct_type, .values = &.{} } } },
.{ .index = .the_only_possible_value, .key = .{ .simple_value = .the_only_possible_value } },
.{ .index = .generic_poison, .key = .{ .simple_value = .generic_poison } },
.{ .index = .unknown_unknown, .key = .{ .unknown_value = .{ .ty = .unknown_type } } },
};
const extra_count = 4 * @sizeOf(Pointer) + @sizeOf(ErrorUnion) + 4 * @sizeOf(Function) + 4 * @sizeOf(InternPool.U64Value);
const extra_count = 6 * @sizeOf(Pointer) + @sizeOf(ErrorUnion) + 4 * @sizeOf(Function) + 6 * @sizeOf(InternPool.U64Value);
try ip.map.ensureTotalCapacity(gpa, items.len);
try ip.items.ensureTotalCapacity(gpa, items.len);
@ -3107,7 +3118,7 @@ test "pointer type" {
const @"[*:0]u32" = try ip.get(gpa, .{ .pointer_type = .{
.elem_type = .u32_type,
.size = .Many,
.sentinel = .zero,
.sentinel = .zero_comptime_int,
} });
const @"[]u32" = try ip.get(gpa, .{ .pointer_type = .{
.elem_type = .u32_type,
@ -3116,7 +3127,7 @@ test "pointer type" {
const @"[:0]u32" = try ip.get(gpa, .{ .pointer_type = .{
.elem_type = .u32_type,
.size = .Slice,
.sentinel = .zero,
.sentinel = .zero_comptime_int,
} });
const @"[*c]u32" = try ip.get(gpa, .{ .pointer_type = .{
.elem_type = .u32_type,
@ -3233,7 +3244,7 @@ test "array type" {
const u32_0_0_array_type = try ip.get(gpa, .{ .array_type = .{
.len = 3,
.child = .u32_type,
.sentinel = .zero,
.sentinel = .zero_comptime_int,
} });
try expect(i32_3_array_type != u32_0_0_array_type);

View File

@ -989,16 +989,22 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
const cast_map = std.ComptimeStringMap(void, .{
.{"@as"},
.{"@atomicLoad"},
.{"@atomicRmw"},
.{"@atomicStore"},
.{"@bitCast"},
.{"@fieldParentPtr"},
.{"@mulAdd"},
.{"@errSetCast"},
.{"@fieldParentPtr"}, // the return type is actually a pointer
.{"@floatCast"},
.{"@floatToInt"},
.{"@intFromFloat"},
.{"@intCast"},
.{"@intToEnum"},
.{"@intToFloat"},
.{"@intToPtr"},
.{"@truncate"},
.{"@enumFromInt"},
.{"@floatFromInt"},
.{"@ptrFromInt"},
.{"@ptrCast"},
.{"@truncate"},
.{"@unionInit"},
});
if (cast_map.has(call_name)) {
if (params.len < 1) return null;
@ -1134,6 +1140,33 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
.handle = handle,
};
},
.block,
.block_semicolon,
.block_two,
.block_two_semicolon,
=> {
const first_token = tree.firstToken(node);
if (token_tags[first_token] != .identifier) return null;
const block_label = tree.tokenSlice(first_token);
var buffer: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(tree, node, &buffer).?;
for (statements) |child_idx| {
// TODO: Recursively find matching `break :label` (e.g. inside `if`)
if (node_tags[child_idx] == .@"break") {
if (datas[child_idx].lhs == 0) continue;
if (datas[child_idx].rhs == 0) continue;
const break_label = tree.tokenSlice(datas[child_idx].lhs);
if (!std.mem.eql(u8, block_label, break_label)) continue;
const operand = .{ .node = datas[child_idx].rhs, .handle = handle };
return try analyser.resolveTypeOfNodeInternal(operand);
}
}
},
else => {},
}
return null;
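
For illustration, a small example (ordinary Zig with invented names, not part of the diff) of the "simple labeled block" case handled above: the block's type is resolved from the operand of a top-level `break :label`, while breaks nested inside `if`/loops remain TODO.

```zig
test "simple labeled block" {
    const x = blk: {
        var y: u32 = 5;
        y *= 2;
        break :blk y; // top-level `break :blk` found by the loop above
    };
    _ = x; // ZLS can now resolve `x` as `u32` from the break operand
}
```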

View File

@ -84,24 +84,31 @@ fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: Analyser.D
var buffer: [1]Ast.Node.Index = undefined;
const fn_proto = decl_tree.fullFnProto(&buffer, fn_node) orelse return;
var i: usize = 0;
var it = fn_proto.iterate(&decl_tree);
var params = try std.ArrayListUnmanaged(Ast.full.FnProto.Param).initCapacity(builder.arena, fn_proto.ast.params.len);
defer params.deinit(builder.arena);
if (try builder.analyser.hasSelfParam(decl_handle.handle, fn_proto)) {
_ = ast.nextFnParam(&it);
var it = fn_proto.iterate(&decl_tree);
while (ast.nextFnParam(&it)) |param| {
try params.append(builder.arena, param);
}
while (ast.nextFnParam(&it)) |param| : (i += 1) {
if (i >= call.ast.params.len) break;
const has_self_param = tree.tokens.items(.tag)[call.ast.lparen - 2] == .period and
call.ast.params.len + 1 == params.items.len and
try builder.analyser.hasSelfParam(decl_handle.handle, fn_proto);
const parameters = params.items[@intFromBool(has_self_param)..];
const arguments = call.ast.params;
const min_len = @min(parameters.len, arguments.len);
for (parameters[0..min_len], call.ast.params[0..min_len]) |param, arg| {
const name_token = param.name_token orelse continue;
const name = decl_tree.tokenSlice(name_token);
if (builder.config.inlay_hints_hide_redundant_param_names or builder.config.inlay_hints_hide_redundant_param_names_last_token) {
const last_param_token = tree.lastToken(call.ast.params[i]);
const param_name = tree.tokenSlice(last_param_token);
const last_arg_token = tree.lastToken(arg);
const arg_name = tree.tokenSlice(last_arg_token);
if (std.mem.eql(u8, param_name, name)) {
if (tree.firstToken(call.ast.params[i]) == last_param_token) {
if (std.mem.eql(u8, arg_name, name)) {
if (tree.firstToken(arg) == last_arg_token) {
if (builder.config.inlay_hints_hide_redundant_param_names)
continue;
} else {
@ -122,7 +129,7 @@ fn writeCallHint(builder: *Builder, call: Ast.full.Call, decl_handle: Analyser.D
offsets.nodeToSlice(decl_tree, param.type_expr);
try builder.appendParameterHint(
tree.firstToken(call.ast.params[i]),
tree.firstToken(arg),
name,
tooltip,
no_alias,
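
For context, a small example (plain Zig with invented names, not part of the diff) of the case the `has_self_param` check above distinguishes: with method-call syntax the bound `self` argument receives no inlay hint, while an explicit call hints every parameter.

```zig
const Rect = struct {
    w: u32,
    h: u32,

    fn scaled(self: Rect, factor: u32) Rect {
        return .{ .w = self.w * factor, .h = self.h * factor };
    }
};

test "self parameter and inlay hints" {
    const r = Rect{ .w = 2, .h = 3 };
    _ = r.scaled(4); // method syntax: only `factor:` is hinted, `self` is skipped
    _ = Rect.scaled(r, 4); // explicit call: both `self:` and `factor:` are hinted
}
```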

View File

@ -856,10 +856,35 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return rvalue(gz, ri, result, node);
},
.slice => {
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
const lhs_node = node_datas[node].lhs;
const lhs_tag = node_tags[lhs_node];
const lhs_is_slice_sentinel = lhs_tag == .slice_sentinel;
const lhs_is_open_slice = lhs_tag == .slice_open or
(lhs_is_slice_sentinel and tree.extraData(node_datas[lhs_node].rhs, Ast.Node.SliceSentinel).end == 0);
if (lhs_is_open_slice and nodeIsTriviallyZero(tree, extra.start)) {
const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[lhs_node].lhs);
const start = if (lhs_is_slice_sentinel) start: {
const lhs_extra = tree.extraData(node_datas[lhs_node].rhs, Ast.Node.SliceSentinel);
break :start try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, lhs_extra.start);
} else try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[lhs_node].rhs);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
const len = if (extra.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end) else .none;
try emitDbgStmt(gz, cursor);
const result = try gz.addPlNode(.slice_length, node, Zir.Inst.SliceLength{
.lhs = lhs,
.start = start,
.len = len,
.start_src_node_offset = gz.nodeIndexToRelative(lhs_node),
.sentinel = .none,
});
return rvalue(gz, ri, result, node);
}
const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start);
const end = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end);
try emitDbgStmt(gz, cursor);
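
A minimal example (ordinary Zig with invented names, not part of the diff) of the slice shape the new branch above recognizes and lowers to a single `slice_length` instruction:

```zig
const std = @import("std");

fn window(bytes: []const u8, start: usize, len: usize) []const u8 {
    // `bytes[start..][0..len]`: an open slice whose result is immediately
    // re-sliced from 0 is the pattern detected above and emitted as one
    // `slice_length` ZIR instruction instead of two slice operations.
    return bytes[start..][0..len];
}

test "slice_length shape" {
    try std.testing.expectEqualStrings("world", window("hello world", 6, 5));
}
```
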
@ -871,10 +896,36 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return rvalue(gz, ri, result, node);
},
.slice_sentinel => {
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
const lhs_node = node_datas[node].lhs;
const lhs_tag = node_tags[lhs_node];
const lhs_is_slice_sentinel = lhs_tag == .slice_sentinel;
const lhs_is_open_slice = lhs_tag == .slice_open or
(lhs_is_slice_sentinel and tree.extraData(node_datas[lhs_node].rhs, Ast.Node.SliceSentinel).end == 0);
if (lhs_is_open_slice and nodeIsTriviallyZero(tree, extra.start)) {
const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[lhs_node].lhs);
const start = if (lhs_is_slice_sentinel) start: {
const lhs_extra = tree.extraData(node_datas[lhs_node].rhs, Ast.Node.SliceSentinel);
break :start try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, lhs_extra.start);
} else try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[lhs_node].rhs);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
const len = if (extra.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end) else .none;
const sentinel = try expr(gz, scope, .{ .rl = .none }, extra.sentinel);
try emitDbgStmt(gz, cursor);
const result = try gz.addPlNode(.slice_length, node, Zir.Inst.SliceLength{
.lhs = lhs,
.start = start,
.len = len,
.start_src_node_offset = gz.nodeIndexToRelative(lhs_node),
.sentinel = sentinel,
});
return rvalue(gz, ri, result, node);
}
const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start);
const end = if (extra.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end) else .none;
const sentinel = try expr(gz, scope, .{ .rl = .none }, extra.sentinel);
@ -1579,7 +1630,7 @@ fn structInitExpr(
if (struct_init.ast.type_expr == 0) {
if (struct_init.ast.fields.len == 0) {
return rvalue(gz, ri, .empty_struct, node);
return rvalue(gz, ri, .empty_struct_type, node);
}
} else array: {
const node_tags = tree.nodes.items(.tag);
@ -2438,7 +2489,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
switch (zir_tags[inst]) {
// For some instructions, modify the zir data
// so we can avoid a separate ensure_result_used instruction.
.call => {
.call, .field_call => {
const extra_index = gz.astgen.instructions.items(.data)[inst].pl_node.payload_index;
const slot = &gz.astgen.extra.items[extra_index];
var flags = @bitCast(Zir.Inst.Call.Flags, slot.*);
@ -2513,7 +2564,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.field_ptr,
.field_ptr_init,
.field_val,
.field_call_bind,
.field_ptr_named,
.field_val_named,
.func,
@ -2564,15 +2614,10 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.slice_start,
.slice_end,
.slice_sentinel,
.slice_length,
.import,
.switch_block,
.switch_cond,
.switch_cond_ref,
.switch_capture,
.switch_capture_ref,
.switch_capture_multi,
.switch_capture_multi_ref,
.switch_capture_tag,
.switch_block_ref,
.struct_init_empty,
.struct_init,
.struct_init_ref,
@ -2588,15 +2633,15 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.error_set_decl,
.error_set_decl_anon,
.error_set_decl_func,
.int_to_enum,
.enum_to_int,
.enum_from_int,
.int_from_enum,
.type_info,
.size_of,
.bit_size_of,
.typeof_log2_int_type,
.ptr_to_int,
.int_from_ptr,
.align_of,
.bool_to_int,
.int_from_bool,
.embed_file,
.error_name,
.sqrt,
@ -2617,9 +2662,9 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.type_name,
.frame_type,
.frame_size,
.float_to_int,
.int_to_float,
.int_to_ptr,
.int_from_float,
.float_from_int,
.ptr_from_int,
.float_cast,
.int_cast,
.ptr_cast,
@ -2916,7 +2961,7 @@ fn deferStmt(
try gz.astgen.instructions.append(gz.astgen.gpa, .{
.tag = .extended,
.data = .{ .extended = .{
.opcode = .errdefer_err_code,
.opcode = .value_placeholder,
.small = undefined,
.operand = undefined,
} },
@ -2938,11 +2983,27 @@ fn deferStmt(
if (have_err_code) try gz.addDbgBlockEnd();
_ = try defer_gen.addBreak(.break_inline, 0, .void_value);
// We must handle ref_table for remapped_err_code manually.
const body = defer_gen.instructionsSlice();
const body_len = gz.astgen.countBodyLenAfterFixups(body);
const body_len = blk: {
var refs: u32 = 0;
if (have_err_code) {
var cur_inst = remapped_err_code;
while (gz.astgen.ref_table.get(cur_inst)) |ref_inst| {
refs += 1;
cur_inst = ref_inst;
}
}
break :blk gz.astgen.countBodyLenAfterFixups(body) + refs;
};
const index = @intCast(u32, gz.astgen.extra.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gz.astgen.gpa, body_len);
if (have_err_code) {
if (gz.astgen.ref_table.fetchRemove(remapped_err_code)) |kv| {
gz.astgen.appendPossiblyRefdBodyInst(&gz.astgen.extra, kv.value);
}
}
gz.astgen.appendBodyWithFixups(body);
const defer_scope = try block_arena.create(Scope.Defer);
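
For reference, a small example (ordinary Zig with invented names, not part of the diff) of the `errdefer` form whose error-code capture the body-length and ref_table fixups above account for:

```zig
const std = @import("std");

fn mayFail(fail: bool) !u32 {
    // `errdefer |err| ...` is the `have_err_code` case handled above: the error
    // value is captured and usable inside the deferred expression.
    errdefer |err| std.debug.print("mayFail exiting with {s}\n", .{@errorName(err)});
    if (fail) return error.Oops;
    return 42;
}

test "errdefer error capture" {
    try std.testing.expectError(error.Oops, mayFail(true));
    try std.testing.expectEqual(@as(u32, 42), try mayFail(false));
}
```
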
@ -3874,7 +3935,7 @@ fn fnDecl(
var section_gz = decl_gz.makeSubBlock(params_scope);
defer section_gz.unstack();
const section_ref: Zir.Inst.Ref = if (fn_proto.ast.section_expr == 0) .none else inst: {
const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .const_slice_u8_type } }, fn_proto.ast.section_expr);
const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, fn_proto.ast.section_expr);
if (section_gz.instructionsSlice().len == 0) {
// In this case we will send a len=0 body which can be encoded more efficiently.
break :inst inst;
@ -3908,9 +3969,9 @@ fn fnDecl(
break :blk inst;
} else if (is_extern) {
// note: https://github.com/ziglang/zig/issues/5269
break :blk .calling_convention_c;
break :blk .unknown_unknown; // TODO calling_convention_c
} else if (has_inline_keyword) {
break :blk .calling_convention_inline;
break :blk .unknown_unknown; // calling_convention_inline
} else {
break :blk .none;
}
@ -4077,7 +4138,7 @@ fn globalVarDecl(
break :inst try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .address_space_type } }, var_decl.ast.addrspace_node);
};
const section_inst: Zir.Inst.Ref = if (var_decl.ast.section_node == 0) .none else inst: {
break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .const_slice_u8_type } }, var_decl.ast.section_node);
break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .slice_const_u8_type } }, var_decl.ast.section_node);
};
const has_section_or_addrspace = section_inst != .none or addrspace_inst != .none;
wip_members.nextDecl(is_pub, is_export, align_inst != .none, has_section_or_addrspace);
@ -4437,7 +4498,7 @@ fn testDecl(
.cc_gz = null,
.align_ref = .none,
.align_gz = null,
.ret_ref = .void_type,
.ret_ref = .anyerror_void_error_union_type,
.ret_gz = null,
.section_ref = .none,
.section_gz = null,
@ -4450,7 +4511,7 @@ fn testDecl(
.body_gz = &fn_block,
.lib_name = 0,
.is_var_args = false,
.is_inferred_error = true,
.is_inferred_error = false,
.is_test = true,
.is_extern = false,
.is_noinline = false,
@ -6655,6 +6716,7 @@ fn switchExpr(
// for the following variables, make note of the special prong AST node index,
// and bail out with a compile error if there are multiple special prongs present.
var any_payload_is_ref = false;
var any_has_tag_capture = false;
var scalar_cases_len: u32 = 0;
var multi_cases_len: u32 = 0;
var inline_cases_len: u32 = 0;
@ -6665,8 +6727,12 @@ fn switchExpr(
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
if (case.payload_token) |payload_token| {
if (token_tags[payload_token] == .asterisk) {
const ident = if (token_tags[payload_token] == .asterisk) blk: {
any_payload_is_ref = true;
break :blk payload_token + 1;
} else payload_token;
if (token_tags[ident + 1] == .comma) {
any_has_tag_capture = true;
}
}
// Check for else/`_` prong.
@ -6775,13 +6841,7 @@ fn switchExpr(
const operand_lc = LineColumn{ astgen.source_line - parent_gz.decl_line, astgen.source_column };
const raw_operand = try expr(parent_gz, scope, operand_ri, operand_node);
const cond_tag: Zir.Inst.Tag = if (any_payload_is_ref) .switch_cond_ref else .switch_cond;
const cond = try parent_gz.addUnNode(cond_tag, raw_operand, operand_node);
// Sema expects a dbg_stmt immediately after switch_cond(_ref)
try emitDbgStmt(parent_gz, operand_lc);
// We need the type of the operand to use as the result location for all the prong items.
const cond_ty_inst = try parent_gz.addUnNode(.typeof, cond, operand_node);
const item_ri: ResultInfo = .{ .rl = .{ .ty = cond_ty_inst } };
const item_ri: ResultInfo = .{ .rl = .none };
// This contains the data that goes into the `extra` array for the SwitchBlock/SwitchBlockMulti,
// except the first cases_nodes.len slots are a table that indexes payloads later in the array, with
@ -6800,13 +6860,30 @@ fn switchExpr(
block_scope.instructions_top = GenZir.unstacked_top;
block_scope.setBreakResultInfo(ri);
// Sema expects a dbg_stmt immediately before switch_block(_ref)
try emitDbgStmt(parent_gz, operand_lc);
// This gets added to the parent block later, after the item expressions.
const switch_block = try parent_gz.makeBlockInst(.switch_block, switch_node);
const switch_tag: Zir.Inst.Tag = if (any_payload_is_ref) .switch_block_ref else .switch_block;
const switch_block = try parent_gz.makeBlockInst(switch_tag, switch_node);
// We re-use this same scope for all cases, including the special prong, if any.
var case_scope = parent_gz.makeSubBlock(&block_scope.base);
case_scope.instructions_top = GenZir.unstacked_top;
// If any prong has an inline tag capture, allocate a shared dummy instruction for it
const tag_inst = if (any_has_tag_capture) tag_inst: {
const inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
try astgen.instructions.append(astgen.gpa, .{
.tag = .extended,
.data = .{ .extended = .{
.opcode = .value_placeholder,
.small = undefined,
.operand = undefined,
} }, // TODO rename opcode
});
break :tag_inst inst;
} else undefined;
// In this pass we generate all the item and prong expressions.
var multi_case_index: u32 = 0;
var scalar_case_index: u32 = 0;
@ -6820,17 +6897,22 @@ fn switchExpr(
var dbg_var_inst: Zir.Inst.Ref = undefined;
var dbg_var_tag_name: ?u32 = null;
var dbg_var_tag_inst: Zir.Inst.Ref = undefined;
var capture_inst: Zir.Inst.Index = 0;
var tag_inst: Zir.Inst.Index = 0;
var has_tag_capture = false;
var capture_val_scope: Scope.LocalVal = undefined;
var tag_scope: Scope.LocalVal = undefined;
var capture: Zir.Inst.SwitchBlock.ProngInfo.Capture = .none;
const sub_scope = blk: {
const payload_token = case.payload_token orelse break :blk &case_scope.base;
const ident = if (token_tags[payload_token] == .asterisk)
payload_token + 1
else
payload_token;
const is_ptr = ident != payload_token;
capture = if (is_ptr) .by_ref else .by_val;
const ident_slice = tree.tokenSlice(ident);
var payload_sub_scope: *Scope = undefined;
if (mem.eql(u8, ident_slice, "_")) {
@ -6839,53 +6921,18 @@ fn switchExpr(
}
payload_sub_scope = &case_scope.base;
} else {
if (case_node == special_node) {
const capture_tag: Zir.Inst.Tag = if (is_ptr)
.switch_capture_ref
else
.switch_capture;
capture_inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
try astgen.instructions.append(gpa, .{
.tag = capture_tag,
.data = .{
.switch_capture = .{
.switch_inst = switch_block,
// Max int communicates that this is the else/underscore prong.
.prong_index = std.math.maxInt(u32),
},
},
});
} else {
const is_multi_case_bits: u2 = @intFromBool(is_multi_case);
const is_ptr_bits: u2 = @intFromBool(is_ptr);
const capture_tag: Zir.Inst.Tag = switch ((is_multi_case_bits << 1) | is_ptr_bits) {
0b00 => .switch_capture,
0b01 => .switch_capture_ref,
0b10 => .switch_capture_multi,
0b11 => .switch_capture_multi_ref,
};
const capture_index = if (is_multi_case) multi_case_index else scalar_case_index;
capture_inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
try astgen.instructions.append(gpa, .{
.tag = capture_tag,
.data = .{ .switch_capture = .{
.switch_inst = switch_block,
.prong_index = capture_index,
} },
});
}
const capture_name = try astgen.identAsString(ident);
try astgen.detectLocalShadowing(&case_scope.base, capture_name, ident, ident_slice, .capture);
capture_val_scope = .{
.parent = &case_scope.base,
.gen_zir = &case_scope,
.name = capture_name,
.inst = indexToRef(capture_inst),
.inst = indexToRef(switch_block),
.token_src = payload_token,
.id_cat = .capture,
};
dbg_var_name = capture_name;
dbg_var_inst = indexToRef(capture_inst);
dbg_var_inst = indexToRef(switch_block);
payload_sub_scope = &capture_val_scope.base;
}
@ -6901,14 +6948,9 @@ fn switchExpr(
}
const tag_name = try astgen.identAsString(tag_token);
try astgen.detectLocalShadowing(payload_sub_scope, tag_name, tag_token, tag_slice, .@"switch tag capture");
tag_inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
try astgen.instructions.append(gpa, .{
.tag = .switch_capture_tag,
.data = .{ .un_tok = .{
.operand = cond,
.src_tok = case_scope.tokenIndexToRelative(tag_token),
} },
});
assert(any_has_tag_capture);
has_tag_capture = true;
tag_scope = .{
.parent = payload_sub_scope,
@ -6974,8 +7016,6 @@ fn switchExpr(
case_scope.instructions_top = parent_gz.instructions.items.len;
defer case_scope.unstack();
if (capture_inst != 0) try case_scope.instructions.append(gpa, capture_inst);
if (tag_inst != 0) try case_scope.instructions.append(gpa, tag_inst);
try case_scope.addDbgBlockBegin();
if (dbg_var_name) |some| {
try case_scope.addDbgVar(.dbg_var_val, some, dbg_var_inst);
@ -6993,10 +7033,42 @@ fn switchExpr(
}
const case_slice = case_scope.instructionsSlice();
const body_len = astgen.countBodyLenAfterFixups(case_slice);
// Since we use the switch_block instruction itself to refer to the
// capture, which will not be added to the child block, we need to
// handle ref_table manually, and the same for the inline tag
// capture instruction.
const refs_len = refs: {
var n: usize = 0;
var check_inst = switch_block;
while (astgen.ref_table.get(check_inst)) |ref_inst| {
n += 1;
check_inst = ref_inst;
}
if (has_tag_capture) {
check_inst = tag_inst;
while (astgen.ref_table.get(check_inst)) |ref_inst| {
n += 1;
check_inst = ref_inst;
}
}
break :refs n;
};
const body_len = refs_len + astgen.countBodyLenAfterFixups(case_slice);
try payloads.ensureUnusedCapacity(gpa, body_len);
const inline_bit = @as(u32, @intFromBool(case.inline_token != null)) << 31;
payloads.items[body_len_index] = body_len | inline_bit;
payloads.items[body_len_index] = @bitCast(u32, Zir.Inst.SwitchBlock.ProngInfo{
.body_len = @intCast(u28, body_len),
.capture = capture,
.is_inline = case.inline_token != null,
.has_tag_capture = has_tag_capture,
});
if (astgen.ref_table.fetchRemove(switch_block)) |kv| {
appendPossiblyRefdBodyInst(astgen, payloads, kv.value);
}
if (has_tag_capture) {
if (astgen.ref_table.fetchRemove(tag_inst)) |kv| {
appendPossiblyRefdBodyInst(astgen, payloads, kv.value);
}
}
appendBodyWithFixupsArrayList(astgen, payloads, case_slice);
}
}
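
For reference, a small Zig sketch (invented names, not part of the diff) of the prong shapes the new `ProngInfo` packing above encodes: by-value and by-reference payload captures, plus the inline tag capture tracked by `has_tag_capture`.

```zig
const U = union(enum) { a: u32, b: bool };

// `capture = .by_val` for `.a`, `.by_ref` for `.b`.
fn toInt(u: U) u32 {
    return switch (u) {
        .a => |x| x,
        .b => |*p| if (p.*) 1 else 0,
    };
}

// An inline prong with a second capture: `tag` is the comptime-known tag
// (the `has_tag_capture` case, backed by the shared placeholder instruction above).
fn nameOf(u: U) []const u8 {
    switch (u) {
        inline else => |payload, tag| {
            _ = payload;
            return @tagName(tag);
        },
    }
}
```
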
@ -7005,14 +7077,16 @@ fn switchExpr(
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.SwitchBlock).Struct.fields.len +
@intFromBool(multi_cases_len != 0) +
@intFromBool(any_has_tag_capture) +
payloads.items.len - case_table_end);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.SwitchBlock{
.operand = cond,
.operand = raw_operand,
.bits = Zir.Inst.SwitchBlock.Bits{
.has_multi_cases = multi_cases_len != 0,
.has_else = special_prong == .@"else",
.has_under = special_prong == .under,
.any_has_tag_capture = any_has_tag_capture,
.scalar_cases_len = @intCast(Zir.Inst.SwitchBlock.Bits.ScalarCasesLen, scalar_cases_len),
},
});
@ -7021,6 +7095,10 @@ fn switchExpr(
astgen.extra.appendAssumeCapacity(multi_cases_len);
}
if (any_has_tag_capture) {
astgen.extra.appendAssumeCapacity(tag_inst);
}
const zir_datas = astgen.instructions.items(.data);
const zir_tags = astgen.instructions.items(.tag);
@ -7043,7 +7121,7 @@ fn switchExpr(
end_index += 3 + items_len + 2 * ranges_len;
}
const body_len = @truncate(u31, payloads.items[body_len_index]);
const body_len = @bitCast(Zir.Inst.SwitchBlock.ProngInfo, payloads.items[body_len_index]).body_len;
end_index += body_len;
switch (strat.tag) {
@ -7818,7 +7896,7 @@ fn unionInit(
params: []const Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const union_type = try typeExpr(gz, scope, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]);
const field_type = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{
.container_type = union_type,
.field_name = field_name,
@ -8040,12 +8118,12 @@ fn builtinCall(
if (ri.rl == .ref) {
return gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{
.lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]),
.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]),
.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]),
});
}
const result = try gz.addPlNode(.field_val_named, node, Zir.Inst.FieldNamed{
.lhs = try expr(gz, scope, .{ .rl = .none }, params[0]),
.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]),
.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]),
});
return rvalue(gz, ri, result, node);
},
@ -8210,12 +8288,12 @@ fn builtinCall(
.bit_size_of => return simpleUnOpType(gz, scope, ri, node, params[0], .bit_size_of),
.align_of => return simpleUnOpType(gz, scope, ri, node, params[0], .align_of),
.ptr_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .ptr_to_int),
.compile_error => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .compile_error),
.int_from_ptr => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .int_from_ptr),
.compile_error => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .compile_error),
.set_eval_branch_quota => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0], .set_eval_branch_quota),
.enum_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .enum_to_int),
.bool_to_int => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .bool_to_int),
.embed_file => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .embed_file),
.int_from_enum => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .int_from_enum),
.int_from_bool => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .int_from_bool),
.embed_file => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .embed_file),
.error_name => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .anyerror_type } }, params[0], .error_name),
.set_runtime_safety => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .set_runtime_safety),
.sqrt => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .sqrt),
@ -8237,10 +8315,10 @@ fn builtinCall(
.Frame => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .frame_type),
.frame_size => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .frame_size),
.float_to_int => return typeCast(gz, scope, ri, node, params[0], params[1], .float_to_int),
.int_to_float => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_float),
.int_to_ptr => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_ptr),
.int_to_enum => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_enum),
.int_from_float => return typeCast(gz, scope, ri, node, params[0], params[1], .int_from_float),
.float_from_int => return typeCast(gz, scope, ri, node, params[0], params[1], .float_from_int),
.ptr_from_int => return typeCast(gz, scope, ri, node, params[0], params[1], .ptr_from_int),
.enum_from_int => return typeCast(gz, scope, ri, node, params[0], params[1], .enum_from_int),
.float_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .float_cast),
.int_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .int_cast),
.ptr_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .ptr_cast),
@ -8274,24 +8352,24 @@ fn builtinCall(
},
.panic => {
try emitDbgNode(gz, node);
return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .panic);
return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .panic);
},
.trap => {
try emitDbgNode(gz, node);
_ = try gz.addNode(.trap, node);
return rvalue(gz, ri, .void_value, node);
return rvalue(gz, ri, .unreachable_value, node);
},
.error_to_int => {
.int_from_error => {
const operand = try expr(gz, scope, .{ .rl = .none }, params[0]);
const result = try gz.addExtendedPayload(.error_to_int, Zir.Inst.UnNode{
const result = try gz.addExtendedPayload(.int_from_error, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(node),
.operand = operand,
});
return rvalue(gz, ri, result, node);
},
.int_to_error => {
.error_from_int => {
const operand = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, params[0]);
const result = try gz.addExtendedPayload(.int_to_error, Zir.Inst.UnNode{
const result = try gz.addExtendedPayload(.error_from_int, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(node),
.operand = operand,
});
@ -8390,7 +8468,7 @@ fn builtinCall(
},
.c_define => {
if (!gz.c_import) return gz.astgen.failNode(node, "C define valid only inside C import block", .{});
const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0]);
const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0]);
const value = try comptimeExpr(gz, scope, .{ .rl = .none }, params[1]);
const result = try gz.addExtendedPayload(.c_define, Zir.Inst.BinNode{
.node = gz.nodeIndexToRelative(node),
@ -8470,8 +8548,8 @@ fn builtinCall(
return rvalue(gz, ri, result, node);
},
.call => {
const modifier = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .modifier_type } }, params[0]);
const callee = try calleeExpr(gz, scope, params[1]);
const modifier = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .call_modifier_type } }, params[0]);
const callee = try expr(gz, scope, .{ .rl = .none }, params[1]);
const args = try expr(gz, scope, .{ .rl = .none }, params[2]);
const result = try gz.addPlNode(.builtin_call, node, Zir.Inst.BuiltinCall{
.modifier = modifier,
@ -8486,7 +8564,7 @@ fn builtinCall(
},
.field_parent_ptr => {
const parent_type = try typeExpr(gz, scope, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]);
const result = try gz.addPlNode(.field_parent_ptr, node, Zir.Inst.FieldParentPtr{
.parent_type = parent_type,
.field_name = field_name,
@ -8641,7 +8719,7 @@ fn hasDeclOrField(
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const container_type = try typeExpr(gz, scope, lhs_node);
const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node);
const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, rhs_node);
const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
.lhs = container_type,
.rhs = name,
@ -8698,7 +8776,7 @@ fn simpleUnOp(
else
try expr(gz, scope, operand_ri, operand_node);
switch (tag) {
.tag_name, .error_name, .ptr_to_int => try emitDbgStmt(gz, cursor),
.tag_name, .error_name, .int_from_ptr => try emitDbgStmt(gz, cursor),
else => {},
}
const result = try gz.addUnNode(tag, operand, node);
@ -8791,7 +8869,7 @@ fn simpleCBuiltin(
) InnerError!Zir.Inst.Ref {
const name: []const u8 = if (tag == .c_undef) "C undef" else "C include";
if (!gz.c_import) return gz.astgen.failNode(node, "{s} valid only inside C import block", .{name});
const operand = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, operand_node);
const operand = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, operand_node);
_ = try gz.addExtendedPayload(tag, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(node),
.operand = operand,
@ -8809,7 +8887,7 @@ fn offsetOf(
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const type_inst = try typeExpr(gz, scope, lhs_node);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, rhs_node);
const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
.lhs = type_inst,
.rhs = field_name,
@ -8931,7 +9009,10 @@ fn callExpr(
} });
}
assert(callee != .none);
switch (callee) {
.direct => |obj| assert(obj != .none),
.field => |field| assert(field.obj_ptr != .none),
}
assert(node != 0);
const call_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
@ -8970,8 +9051,10 @@ fn callExpr(
else => false,
};
switch (callee) {
.direct => |callee_obj| {
const payload_index = try addExtra(astgen, Zir.Inst.Call{
.callee = callee,
.callee = callee_obj,
.flags = .{
.pop_error_return_trace = !propagate_error_trace,
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @intFromEnum(modifier)),
@ -8988,71 +9071,78 @@ fn callExpr(
.payload_index = payload_index,
} },
});
},
.field => |callee_field| {
const payload_index = try addExtra(astgen, Zir.Inst.FieldCall{
.obj_ptr = callee_field.obj_ptr,
.field_name_start = callee_field.field_name_start,
.flags = .{
.pop_error_return_trace = !propagate_error_trace,
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @intFromEnum(modifier)),
.args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len),
},
});
if (call.ast.params.len != 0) {
try astgen.extra.appendSlice(astgen.gpa, astgen.scratch.items[scratch_top..]);
}
gz.astgen.instructions.set(call_index, .{
.tag = .field_call,
.data = .{ .pl_node = .{
.src_node = gz.nodeIndexToRelative(node),
.payload_index = payload_index,
} },
});
},
}
return rvalue(gz, ri, call_inst, node); // TODO function call with result location
}
/// calleeExpr generates the function part of a call expression (f in f(x)), or the
/// callee argument to the @call() builtin. If the lhs is a field access or the
/// @field() builtin, we need to generate a special field_call_bind instruction
/// instead of the normal field_val or field_ptr. If this is a inst.func() call,
/// this instruction will capture the value of the first argument before evaluating
/// the other arguments. We need to use .ref here to guarantee we will be able to
/// promote an lvalue to an address if the first parameter requires it. This
/// unfortunately also means we need to take a reference to any types on the lhs.
const Callee = union(enum) {
field: struct {
/// A *pointer* to the object the field is fetched on, so that we can
/// promote the lvalue to an address if the first parameter requires it.
obj_ptr: Zir.Inst.Ref,
/// Offset into `string_bytes`.
field_name_start: u32,
},
direct: Zir.Inst.Ref,
};
/// calleeExpr generates the function part of a call expression (f in f(x)), but
/// *not* the callee argument to the @call() builtin. Its purpose is to
/// distinguish between standard calls and method call syntax `a.b()`. Thus, if
/// the lhs is a field access, we return using the `field` union field;
/// otherwise, we use the `direct` union field.
fn calleeExpr(
gz: *GenZir,
scope: *Scope,
node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
) InnerError!Callee {
const astgen = gz.astgen;
const tree = astgen.tree;
const tag = tree.nodes.items(.tag)[node];
switch (tag) {
.field_access => return addFieldAccess(.field_call_bind, gz, scope, .{ .rl = .ref }, node),
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
const node_datas = tree.nodes.items(.data);
.field_access => {
const main_tokens = tree.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_name = tree.tokenSlice(builtin_token);
const node_datas = tree.nodes.items(.data);
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
const str_index = try astgen.identAsString(field_ident);
// Capture the object by reference so we can promote it to an
// address in Sema if needed.
const lhs = try expr(gz, scope, .{ .rl = .ref }, object_node);
var inline_params: [2]Ast.Node.Index = undefined;
var params: []Ast.Node.Index = switch (tag) {
.builtin_call,
.builtin_call_comma,
=> tree.extra_data[node_datas[node].lhs..node_datas[node].rhs],
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
try emitDbgStmt(gz, cursor);
.builtin_call_two,
.builtin_call_two_comma,
=> blk: {
inline_params = .{ node_datas[node].lhs, node_datas[node].rhs };
const len: usize = if (inline_params[0] == 0) @as(usize, 0) else if (inline_params[1] == 0) @as(usize, 1) else @as(usize, 2);
break :blk inline_params[0..len];
return .{ .field = .{
.obj_ptr = lhs,
.field_name_start = str_index,
} };
},
else => unreachable,
};
// If anything is wrong, fall back to builtinCall.
// It will emit any necessary compile errors and notes.
if (std.mem.eql(u8, builtin_name, "@field") and params.len == 2) {
const lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
return gz.addExtendedPayload(.field_call_bind_named, Zir.Inst.FieldNamedNode{
.node = gz.nodeIndexToRelative(node),
.lhs = lhs,
.field_name = field_name,
});
}
return builtinCall(gz, scope, .{ .rl = .none }, node, params);
},
else => return expr(gz, scope, .{ .rl = .none }, node),
else => return .{ .direct = try expr(gz, scope, .{ .rl = .none }, node) },
}
}
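
A minimal illustration (ordinary Zig with invented names, not part of the diff) of the two callee shapes the `Callee` union above distinguishes:

```zig
const S = struct {
    fn get(self: S) u32 {
        _ = self;
        return 1;
    }
};

fn direct() u32 {
    return 2;
}

test "callee shapes" {
    const s = S{};
    _ = direct(); // plain callee           -> `Callee.direct`, lowered as `call`
    _ = s.get();  // `a.b()` method syntax  -> `Callee.field`, lowered as `field_call`
}
```
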
@ -10199,6 +10289,8 @@ fn rvalue(
as_ty | @intFromEnum(Zir.Inst.Ref.i32_type),
as_ty | @intFromEnum(Zir.Inst.Ref.u64_type),
as_ty | @intFromEnum(Zir.Inst.Ref.i64_type),
as_ty | @intFromEnum(Zir.Inst.Ref.u128_type),
as_ty | @intFromEnum(Zir.Inst.Ref.i128_type),
as_ty | @intFromEnum(Zir.Inst.Ref.usize_type),
as_ty | @intFromEnum(Zir.Inst.Ref.isize_type),
as_ty | @intFromEnum(Zir.Inst.Ref.c_char_type),
@ -10224,15 +10316,30 @@ fn rvalue(
as_ty | @intFromEnum(Zir.Inst.Ref.comptime_int_type),
as_ty | @intFromEnum(Zir.Inst.Ref.comptime_float_type),
as_ty | @intFromEnum(Zir.Inst.Ref.noreturn_type),
as_ty | @intFromEnum(Zir.Inst.Ref.anyframe_type),
as_ty | @intFromEnum(Zir.Inst.Ref.null_type),
as_ty | @intFromEnum(Zir.Inst.Ref.undefined_type),
as_ty | @intFromEnum(Zir.Inst.Ref.fn_noreturn_no_args_type),
as_ty | @intFromEnum(Zir.Inst.Ref.fn_void_no_args_type),
as_ty | @intFromEnum(Zir.Inst.Ref.fn_naked_noreturn_no_args_type),
as_ty | @intFromEnum(Zir.Inst.Ref.fn_ccc_void_no_args_type),
as_ty | @intFromEnum(Zir.Inst.Ref.single_const_pointer_to_comptime_int_type),
as_ty | @intFromEnum(Zir.Inst.Ref.const_slice_u8_type),
as_ty | @intFromEnum(Zir.Inst.Ref.enum_literal_type),
as_ty | @intFromEnum(Zir.Inst.Ref.atomic_order_type),
as_ty | @intFromEnum(Zir.Inst.Ref.atomic_rmw_op_type),
as_ty | @intFromEnum(Zir.Inst.Ref.calling_convention_type),
as_ty | @intFromEnum(Zir.Inst.Ref.address_space_type),
as_ty | @intFromEnum(Zir.Inst.Ref.float_mode_type),
as_ty | @intFromEnum(Zir.Inst.Ref.reduce_op_type),
as_ty | @intFromEnum(Zir.Inst.Ref.call_modifier_type),
as_ty | @intFromEnum(Zir.Inst.Ref.prefetch_options_type),
as_ty | @intFromEnum(Zir.Inst.Ref.export_options_type),
as_ty | @intFromEnum(Zir.Inst.Ref.extern_options_type),
as_ty | @intFromEnum(Zir.Inst.Ref.type_info_type),
as_ty | @intFromEnum(Zir.Inst.Ref.manyptr_u8_type),
as_ty | @intFromEnum(Zir.Inst.Ref.manyptr_const_u8_type),
as_ty | @intFromEnum(Zir.Inst.Ref.manyptr_const_u8_sentinel_0_type),
as_ty | @intFromEnum(Zir.Inst.Ref.single_const_pointer_to_comptime_int_type),
as_ty | @intFromEnum(Zir.Inst.Ref.slice_const_u8_type),
as_ty | @intFromEnum(Zir.Inst.Ref.slice_const_u8_sentinel_0_type),
as_ty | @intFromEnum(Zir.Inst.Ref.anyerror_void_error_union_type),
as_ty | @intFromEnum(Zir.Inst.Ref.generic_poison_type),
as_ty | @intFromEnum(Zir.Inst.Ref.empty_struct_type),
as_comptime_int | @intFromEnum(Zir.Inst.Ref.zero),
as_comptime_int | @intFromEnum(Zir.Inst.Ref.one),
as_bool | @intFromEnum(Zir.Inst.Ref.bool_true),
@ -10605,8 +10712,8 @@ fn identAsString(astgen: *AstGen, ident_token: Ast.TokenIndex) !u32 {
const string_bytes = &astgen.string_bytes;
const str_index = @intCast(u32, string_bytes.items.len);
try astgen.appendIdentStr(ident_token, string_bytes);
const key = string_bytes.items[str_index..];
const gop = try astgen.string_table.getOrPutContextAdapted(gpa, @as([]const u8, key), StringIndexAdapter{
const key: []const u8 = string_bytes.items[str_index..];
const gop = try astgen.string_table.getOrPutContextAdapted(gpa, key, StringIndexAdapter{
.bytes = string_bytes,
}, StringIndexContext{
.bytes = string_bytes,
@ -11689,9 +11796,9 @@ const GenZir = struct {
) !Zir.Inst.Index {
const gpa = gz.astgen.gpa;
const param_body = param_gz.instructionsSlice();
const body_len = gz.astgen.countBodyLenAfterFixups(param_body);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Param).Struct.fields.len +
param_body.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Param).Struct.fields.len + body_len);
const doc_comment_index = if (first_doc_comment) |first|
try gz.astgen.docCommentAsStringFromFirst(abs_tok_index, first)
@ -11701,9 +11808,9 @@ const GenZir = struct {
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Param{
.name = name,
.doc_comment = doc_comment_index,
.body_len = @intCast(u32, param_body.len),
.body_len = @intCast(u32, body_len),
});
gz.astgen.extra.appendSliceAssumeCapacity(param_body);
gz.astgen.appendBodyWithFixups(param_body);
param_gz.unstack();
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);

View File

@ -12,7 +12,7 @@ pub const Tag = enum {
atomic_store,
bit_cast,
bit_offset_of,
bool_to_int,
int_from_bool,
bit_size_of,
breakpoint,
mul_add,
@ -39,10 +39,10 @@ pub const Tag = enum {
div_floor,
div_trunc,
embed_file,
enum_to_int,
int_from_enum,
error_name,
error_return_trace,
error_to_int,
int_from_error,
err_set_cast,
@"export",
@"extern",
@ -50,7 +50,7 @@ pub const Tag = enum {
field,
field_parent_ptr,
float_cast,
float_to_int,
int_from_float,
frame,
Frame,
frame_address,
@ -60,10 +60,10 @@ pub const Tag = enum {
import,
in_comptime,
int_cast,
int_to_enum,
int_to_error,
int_to_float,
int_to_ptr,
enum_from_int,
error_from_int,
float_from_int,
ptr_from_int,
max,
memcpy,
memset,
@ -76,7 +76,7 @@ pub const Tag = enum {
pop_count,
prefetch,
ptr_cast,
ptr_to_int,
int_from_ptr,
rem,
return_address,
select,
@ -238,9 +238,9 @@ pub const list = list: {
},
},
.{
"@boolToInt",
"@intFromBool",
.{
.tag = .bool_to_int,
.tag = .int_from_bool,
.param_count = 1,
},
},
@ -425,9 +425,9 @@ pub const list = list: {
},
},
.{
"@enumToInt",
"@intFromEnum",
.{
.tag = .enum_to_int,
.tag = .int_from_enum,
.param_count = 1,
},
},
@ -446,9 +446,9 @@ pub const list = list: {
},
},
.{
"@errorToInt",
"@intFromError",
.{
.tag = .error_to_int,
.tag = .int_from_error,
.param_count = 1,
},
},
@ -506,9 +506,9 @@ pub const list = list: {
},
},
.{
"@floatToInt",
"@intFromFloat",
.{
.tag = .float_to_int,
.tag = .int_from_float,
.param_count = 2,
},
},
@ -576,31 +576,31 @@ pub const list = list: {
},
},
.{
"@intToEnum",
"@enumFromInt",
.{
.tag = .int_to_enum,
.tag = .enum_from_int,
.param_count = 2,
},
},
.{
"@intToError",
"@errorFromInt",
.{
.tag = .int_to_error,
.tag = .error_from_int,
.eval_to_error = .always,
.param_count = 1,
},
},
.{
"@intToFloat",
"@floatFromInt",
.{
.tag = .int_to_float,
.tag = .float_from_int,
.param_count = 2,
},
},
.{
"@intToPtr",
"@ptrFromInt",
.{
.tag = .int_to_ptr,
.tag = .ptr_from_int,
.param_count = 2,
},
},
@ -689,9 +689,9 @@ pub const list = list: {
},
},
.{
"@ptrToInt",
"@intFromPtr",
.{
.tag = .ptr_to_int,
.tag = .int_from_ptr,
.param_count = 1,
},
},

View File

@ -19,6 +19,7 @@ const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Ast = std.zig.Ast;
const InternPool = @import("../analyser/InternPool.zig");
const Zir = @This();
const Module = @import("Module.zig");
const LazySrcLoc = Module.LazySrcLoc;
@ -294,6 +295,14 @@ pub const Inst = struct {
/// Uses the `pl_node` union field with payload `Call`.
/// AST node is the function call.
call,
/// Function call using `a.b()` syntax.
/// Uses the named field as the callee. If there is no such field, searches in the type for
/// a decl matching the field name. The decl is resolved and we ensure that it's a function
/// which can accept the object as the first parameter, with one pointer fixup. This
/// function is then used as the callee, with the object as an implicit first parameter.
/// Uses the `pl_node` union field with payload `FieldCall`.
/// AST node is the function call.
field_call,
/// Implements the `@call` builtin.
/// Uses the `pl_node` union field with payload `BuiltinCall`.
/// AST node is the builtin call.
@ -429,15 +438,6 @@ pub const Inst = struct {
/// This instruction also accepts a pointer.
/// Uses `pl_node` field. The AST node is the a.b syntax. Payload is Field.
field_val,
/// Given a pointer to a struct or object that contains virtual fields, returns the
/// named field. If there is no named field, searches in the type for a decl that
/// matches the field name. The decl is resolved and we ensure that it's a function
/// which can accept the object as the first parameter, with one pointer fixup. If
/// all of that works, this instruction produces a special "bound function" value
/// which contains both the function and the saved first parameter value.
/// Bound functions may only be used as the function parameter to a `call` or
/// `builtin_call` instruction. Any other use is invalid zir and may crash the compiler.
field_call_bind,
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
/// to the named field. The field name is a comptime instruction. Used by @field.
/// Uses `pl_node` field. The AST node is the builtin call. Payload is FieldNamed.
@ -567,6 +567,10 @@ pub const Inst = struct {
/// Returns a pointer to the subslice.
/// Uses the `pl_node` field. AST node is the slice syntax. Payload is `SliceSentinel`.
slice_sentinel,
/// Slice operation `array_ptr[start..][0..len]`. Optional sentinel.
/// Returns a pointer to the subslice.
/// Uses the `pl_node` field. AST node is the slice syntax. Payload is `SliceLength`.
slice_length,
/// Write a value to a pointer. For loading, see `load`.
/// Source location is assumed to be same as previous instruction.
/// Uses the `bin` union field.
@ -660,38 +664,9 @@ pub const Inst = struct {
/// A switch expression. Uses the `pl_node` union field.
/// AST node is the switch, payload is `SwitchBlock`.
switch_block,
/// Produces the value that will be switched on. For example, for
/// integers, it returns the integer with no modifications. For tagged unions, it
/// returns the active enum tag.
/// Uses the `un_node` union field.
switch_cond,
/// Same as `switch_cond`, except the input operand is a pointer to
/// what will be switched on.
/// Uses the `un_node` union field.
switch_cond_ref,
/// Produces the capture value for a switch prong.
/// Uses the `switch_capture` field.
/// If the `prong_index` field is max int, it means this is the capture
/// for the else/`_` prong.
switch_capture,
/// Produces the capture value for a switch prong.
/// Result is a pointer to the value.
/// Uses the `switch_capture` field.
/// If the `prong_index` field is max int, it means this is the capture
/// for the else/`_` prong.
switch_capture_ref,
/// Produces the capture value for a switch prong.
/// The prong is one of the multi cases.
/// Uses the `switch_capture` field.
switch_capture_multi,
/// Produces the capture value for a switch prong.
/// The prong is one of the multi cases.
/// Result is a pointer to the value.
/// Uses the `switch_capture` field.
switch_capture_multi_ref,
/// Produces the capture value for an inline switch prong tag capture.
/// Uses the `un_tok` field.
switch_capture_tag,
/// A switch expression. Uses the `pl_node` union field.
/// AST node is the switch, payload is `SwitchBlock`. Operand is a pointer.
switch_block_ref,
/// Given a
/// *A returns *A
/// *E!A returns *A
@ -771,9 +746,9 @@ pub const Inst = struct {
/// Implements the `@bitSizeOf` builtin. Uses `un_node`.
bit_size_of,
/// Implement builtin `@ptrToInt`. Uses `un_node`.
/// Implement builtin `@intFromPtr`. Uses `un_node`.
/// Convert a pointer to a `usize` integer.
ptr_to_int,
int_from_ptr,
/// Emit an error message and fail compilation.
/// Uses the `un_node` field.
compile_error,
@ -783,11 +758,11 @@ pub const Inst = struct {
set_eval_branch_quota,
/// Converts an enum value into an integer. Resulting type will be the tag type
/// of the enum. Uses `un_node`.
enum_to_int,
int_from_enum,
/// Implement builtin `@alignOf`. Uses `un_node`.
align_of,
/// Implement builtin `@boolToInt`. Uses `un_node`.
bool_to_int,
/// Implement builtin `@intFromBool`. Uses `un_node`.
int_from_bool,
/// Implement builtin `@embedFile`. Uses `un_node`.
embed_file,
/// Implement builtin `@errorName`. Uses `un_node`.
@ -836,18 +811,18 @@ pub const Inst = struct {
/// Implement builtin `@frameSize`. Uses `un_node`.
frame_size,
/// Implements the `@floatToInt` builtin.
/// Implements the `@intFromFloat` builtin.
/// Uses `pl_node` with payload `Bin`. `lhs` is dest type, `rhs` is operand.
float_to_int,
/// Implements the `@intToFloat` builtin.
int_from_float,
/// Implements the `@floatFromInt` builtin.
/// Uses `pl_node` with payload `Bin`. `lhs` is dest type, `rhs` is operand.
int_to_float,
/// Implements the `@intToPtr` builtin.
float_from_int,
/// Implements the `@ptrFromInt` builtin.
/// Uses `pl_node` with payload `Bin`. `lhs` is dest type, `rhs` is operand.
int_to_ptr,
ptr_from_int,
/// Converts an integer into an enum value.
/// Uses `pl_node` with payload `Bin`. `lhs` is dest type, `rhs` is operand.
int_to_enum,
enum_from_int,
/// Convert a larger float type to any other float type, possibly causing
/// a loss of precision.
/// Uses the `pl_node` field. AST is the `@floatCast` syntax.
@ -1044,6 +1019,7 @@ pub const Inst = struct {
.bool_br_or,
.bool_not,
.call,
.field_call,
.cmp_lt,
.cmp_lte,
.cmp_eq,
@ -1076,7 +1052,6 @@ pub const Inst = struct {
.field_ptr,
.field_ptr_init,
.field_val,
.field_call_bind,
.field_ptr_named,
.field_val_named,
.func,
@ -1132,18 +1107,13 @@ pub const Inst = struct {
.slice_start,
.slice_end,
.slice_sentinel,
.slice_length,
.import,
.typeof_log2_int_type,
.resolve_inferred_alloc,
.set_eval_branch_quota,
.switch_capture,
.switch_capture_ref,
.switch_capture_multi,
.switch_capture_multi_ref,
.switch_capture_tag,
.switch_block,
.switch_cond,
.switch_cond_ref,
.switch_block_ref,
.array_base_ptr,
.field_base_ptr,
.validate_array_init_ty,
@ -1163,14 +1133,14 @@ pub const Inst = struct {
.union_init,
.field_type,
.field_type_ref,
.int_to_enum,
.enum_to_int,
.enum_from_int,
.int_from_enum,
.type_info,
.size_of,
.bit_size_of,
.ptr_to_int,
.int_from_ptr,
.align_of,
.bool_to_int,
.int_from_bool,
.embed_file,
.error_name,
.set_runtime_safety,
@ -1192,9 +1162,9 @@ pub const Inst = struct {
.type_name,
.frame_type,
.frame_size,
.float_to_int,
.int_to_float,
.int_to_ptr,
.int_from_float,
.float_from_int,
.ptr_from_int,
.float_cast,
.int_cast,
.ptr_cast,
@ -1353,6 +1323,7 @@ pub const Inst = struct {
.bool_br_or,
.bool_not,
.call,
.field_call,
.cmp_lt,
.cmp_lte,
.cmp_eq,
@ -1375,7 +1346,6 @@ pub const Inst = struct {
.field_ptr,
.field_ptr_init,
.field_val,
.field_call_bind,
.field_ptr_named,
.field_val_named,
.func,
@ -1427,16 +1397,11 @@ pub const Inst = struct {
.slice_start,
.slice_end,
.slice_sentinel,
.slice_length,
.import,
.typeof_log2_int_type,
.switch_capture,
.switch_capture_ref,
.switch_capture_multi,
.switch_capture_multi_ref,
.switch_capture_tag,
.switch_block,
.switch_cond,
.switch_cond_ref,
.switch_block_ref,
.array_base_ptr,
.field_base_ptr,
.struct_init_empty,
@ -1451,14 +1416,14 @@ pub const Inst = struct {
.union_init,
.field_type,
.field_type_ref,
.int_to_enum,
.enum_to_int,
.enum_from_int,
.int_from_enum,
.type_info,
.size_of,
.bit_size_of,
.ptr_to_int,
.int_from_ptr,
.align_of,
.bool_to_int,
.int_from_bool,
.embed_file,
.error_name,
.sqrt,
@ -1479,9 +1444,9 @@ pub const Inst = struct {
.type_name,
.frame_type,
.frame_size,
.float_to_int,
.int_to_float,
.int_to_ptr,
.int_from_float,
.float_from_int,
.ptr_from_int,
.float_cast,
.int_cast,
.ptr_cast,
@ -1592,6 +1557,7 @@ pub const Inst = struct {
.check_comptime_control_flow = .un_node,
.for_len = .pl_node,
.call = .pl_node,
.field_call = .pl_node,
.cmp_lt = .pl_node,
.cmp_lte = .pl_node,
.cmp_eq = .pl_node,
@ -1632,7 +1598,6 @@ pub const Inst = struct {
.field_val = .pl_node,
.field_ptr_named = .pl_node,
.field_val_named = .pl_node,
.field_call_bind = .pl_node,
.func = .pl_node,
.func_inferred = .pl_node,
.func_fancy = .pl_node,
@ -1664,6 +1629,7 @@ pub const Inst = struct {
.slice_start = .pl_node,
.slice_end = .pl_node,
.slice_sentinel = .pl_node,
.slice_length = .pl_node,
.store = .bin,
.store_node = .pl_node,
.store_to_block_ptr = .bin,
@ -1686,13 +1652,7 @@ pub const Inst = struct {
.err_union_code_ptr = .un_node,
.enum_literal = .str_tok,
.switch_block = .pl_node,
.switch_cond = .un_node,
.switch_cond_ref = .un_node,
.switch_capture = .switch_capture,
.switch_capture_ref = .switch_capture,
.switch_capture_multi = .switch_capture,
.switch_capture_multi_ref = .switch_capture,
.switch_capture_tag = .un_tok,
.switch_block_ref = .pl_node,
.array_base_ptr = .un_node,
.field_base_ptr = .un_node,
.validate_array_init_ty = .pl_node,
@ -1716,12 +1676,12 @@ pub const Inst = struct {
.size_of = .un_node,
.bit_size_of = .un_node,
.ptr_to_int = .un_node,
.int_from_ptr = .un_node,
.compile_error = .un_node,
.set_eval_branch_quota = .un_node,
.enum_to_int = .un_node,
.int_from_enum = .un_node,
.align_of = .un_node,
.bool_to_int = .un_node,
.int_from_bool = .un_node,
.embed_file = .un_node,
.error_name = .un_node,
.panic = .un_node,
@ -1746,10 +1706,10 @@ pub const Inst = struct {
.frame_type = .un_node,
.frame_size = .un_node,
.float_to_int = .pl_node,
.int_to_float = .pl_node,
.int_to_ptr = .pl_node,
.int_to_enum = .pl_node,
.int_from_float = .pl_node,
.float_from_int = .pl_node,
.ptr_from_int = .pl_node,
.enum_from_int = .pl_node,
.float_cast = .pl_node,
.int_cast = .pl_node,
.ptr_cast = .pl_node,
@ -1945,16 +1905,6 @@ pub const Inst = struct {
/// The `@prefetch` builtin.
/// `operand` is payload index to `BinNode`.
prefetch,
/// Given a pointer to a struct or object that contains virtual fields, returns the
/// named field. If there is no named field, searches in the type for a decl that
/// matches the field name. The decl is resolved and we ensure that it's a function
/// which can accept the object as the first parameter, with one pointer fixup. If
/// all of that works, this instruction produces a special "bound function" value
/// which contains both the function and the saved first parameter value.
/// Bound functions may only be used as the function parameter to a `call` or
/// `builtin_call` instruction. Any other use is invalid zir and may crash the compiler.
/// Uses `pl_node` field. The AST node is the `@field` builtin. Payload is FieldNamedNode.
field_call_bind_named,
/// Implements the `@fence` builtin.
/// `operand` is payload index to `UnNode`.
fence,
@ -1980,10 +1930,10 @@ pub const Inst = struct {
select,
/// Implement builtin `@errToInt`.
/// `operand` is payload index to `UnNode`.
error_to_int,
/// Implement builtin `@intToError`.
int_from_error,
/// Implement builtin `@errorFromInt`.
/// `operand` is payload index to `UnNode`.
int_to_error,
error_from_int,
/// Implement builtin `@Type`.
/// `operand` is payload index to `UnNode`.
/// `small` contains `NameStrategy`.
@ -2028,9 +1978,10 @@ pub const Inst = struct {
/// Implements the `@inComptime` builtin.
/// `operand` is `src_node: i32`.
in_comptime,
/// Used as a placeholder for the capture of an `errdefer`.
/// This is replaced by Sema with the captured value.
errdefer_err_code,
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
/// with a specific value. For instance, this is used for the capture of an `errdefer`.
/// This should never appear in a body.
value_placeholder,
pub const InstData = struct {
opcode: Extended,
@ -2058,107 +2009,89 @@ pub const Inst = struct {
/// The tag type is specified so that it is safe to bitcast between `[]u32`
/// and `[]Ref`.
pub const Ref = enum(u32) {
u1_type,
u8_type,
i8_type,
u16_type,
i16_type,
u29_type,
u32_type,
i32_type,
u64_type,
i64_type,
u128_type,
i128_type,
usize_type,
isize_type,
c_char_type,
c_short_type,
c_ushort_type,
c_int_type,
c_uint_type,
c_long_type,
c_ulong_type,
c_longlong_type,
c_ulonglong_type,
c_longdouble_type,
f16_type,
f32_type,
f64_type,
f80_type,
f128_type,
anyopaque_type,
bool_type,
void_type,
type_type,
anyerror_type,
comptime_int_type,
comptime_float_type,
noreturn_type,
anyframe_type,
empty_struct_literal,
null_type,
undefined_type,
enum_literal_type,
atomic_order_type,
atomic_rmw_op_type,
calling_convention_type,
address_space_type,
float_mode_type,
reduce_op_type,
modifier_type,
prefetch_options_type,
export_options_type,
extern_options_type,
type_info_type,
manyptr_u8_type,
manyptr_const_u8_type,
fn_noreturn_no_args_type,
fn_void_no_args_type,
fn_naked_noreturn_no_args_type,
fn_ccc_void_no_args_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
anyerror_void_error_union_type,
generic_poison_type,
unknown_type,
u1_type = @intFromEnum(InternPool.Index.u1_type),
u8_type = @intFromEnum(InternPool.Index.u8_type),
i8_type = @intFromEnum(InternPool.Index.i8_type),
u16_type = @intFromEnum(InternPool.Index.u16_type),
i16_type = @intFromEnum(InternPool.Index.i16_type),
u29_type = @intFromEnum(InternPool.Index.u29_type),
u32_type = @intFromEnum(InternPool.Index.u32_type),
i32_type = @intFromEnum(InternPool.Index.i32_type),
u64_type = @intFromEnum(InternPool.Index.u64_type),
i64_type = @intFromEnum(InternPool.Index.i64_type),
u128_type = @intFromEnum(InternPool.Index.u128_type),
i128_type = @intFromEnum(InternPool.Index.i128_type),
usize_type = @intFromEnum(InternPool.Index.usize_type),
isize_type = @intFromEnum(InternPool.Index.isize_type),
c_char_type = @intFromEnum(InternPool.Index.c_char_type),
c_short_type = @intFromEnum(InternPool.Index.c_short_type),
c_ushort_type = @intFromEnum(InternPool.Index.c_ushort_type),
c_int_type = @intFromEnum(InternPool.Index.c_int_type),
c_uint_type = @intFromEnum(InternPool.Index.c_uint_type),
c_long_type = @intFromEnum(InternPool.Index.c_long_type),
c_ulong_type = @intFromEnum(InternPool.Index.c_ulong_type),
c_longlong_type = @intFromEnum(InternPool.Index.c_longlong_type),
c_ulonglong_type = @intFromEnum(InternPool.Index.c_ulonglong_type),
c_longdouble_type = @intFromEnum(InternPool.Index.c_longdouble_type),
f16_type = @intFromEnum(InternPool.Index.f16_type),
f32_type = @intFromEnum(InternPool.Index.f32_type),
f64_type = @intFromEnum(InternPool.Index.f64_type),
f80_type = @intFromEnum(InternPool.Index.f80_type),
f128_type = @intFromEnum(InternPool.Index.f128_type),
anyopaque_type = @intFromEnum(InternPool.Index.anyopaque_type),
bool_type = @intFromEnum(InternPool.Index.bool_type),
void_type = @intFromEnum(InternPool.Index.void_type),
type_type = @intFromEnum(InternPool.Index.type_type),
anyerror_type = @intFromEnum(InternPool.Index.anyerror_type),
comptime_int_type = @intFromEnum(InternPool.Index.comptime_int_type),
comptime_float_type = @intFromEnum(InternPool.Index.comptime_float_type),
noreturn_type = @intFromEnum(InternPool.Index.noreturn_type),
anyframe_type = @intFromEnum(InternPool.Index.anyframe_type),
empty_struct_type = @intFromEnum(InternPool.Index.empty_struct_type),
null_type = @intFromEnum(InternPool.Index.null_type),
undefined_type = @intFromEnum(InternPool.Index.undefined_type),
enum_literal_type = @intFromEnum(InternPool.Index.enum_literal_type),
atomic_order_type = @intFromEnum(InternPool.Index.atomic_order_type),
atomic_rmw_op_type = @intFromEnum(InternPool.Index.atomic_rmw_op_type),
calling_convention_type = @intFromEnum(InternPool.Index.calling_convention_type),
address_space_type = @intFromEnum(InternPool.Index.address_space_type),
float_mode_type = @intFromEnum(InternPool.Index.float_mode_type),
reduce_op_type = @intFromEnum(InternPool.Index.reduce_op_type),
call_modifier_type = @intFromEnum(InternPool.Index.call_modifier_type),
prefetch_options_type = @intFromEnum(InternPool.Index.prefetch_options_type),
export_options_type = @intFromEnum(InternPool.Index.export_options_type),
extern_options_type = @intFromEnum(InternPool.Index.extern_options_type),
type_info_type = @intFromEnum(InternPool.Index.type_info_type),
manyptr_u8_type = @intFromEnum(InternPool.Index.manyptr_u8_type),
manyptr_const_u8_type = @intFromEnum(InternPool.Index.manyptr_const_u8_type),
manyptr_const_u8_sentinel_0_type = @intFromEnum(InternPool.Index.manyptr_const_u8_sentinel_0_type),
single_const_pointer_to_comptime_int_type = @intFromEnum(InternPool.Index.single_const_pointer_to_comptime_int_type),
slice_const_u8_type = @intFromEnum(InternPool.Index.slice_const_u8_type),
slice_const_u8_sentinel_0_type = @intFromEnum(InternPool.Index.slice_const_u8_sentinel_0_type),
optional_noreturn_type = @intFromEnum(InternPool.Index.optional_noreturn_type),
anyerror_void_error_union_type = @intFromEnum(InternPool.Index.anyerror_void_error_union_type),
generic_poison_type = @intFromEnum(InternPool.Index.generic_poison_type),
unknown_type = @intFromEnum(InternPool.Index.unknown_type),
/// `undefined` (untyped)
undef,
/// `0` (comptime_int)
zero,
/// `1` (comptime_int)
one,
/// `{}`
void_value,
/// `unreachable` (noreturn type)
unreachable_value,
/// `null` (untyped)
null_value,
/// `true`
bool_true,
/// `false`
bool_false,
/// `.{}` (untyped)
empty_struct,
/// `0` (usize)
zero_usize,
/// `1` (usize)
one_usize,
/// `std.builtin.CallingConvention.C`
calling_convention_c,
/// `std.builtin.CallingConvention.Inline`
calling_convention_inline,
/// Used for generic parameters where the type and value
/// is not known until generic function instantiation.
generic_poison,
unknown,
undef = @intFromEnum(InternPool.Index.undefined_value),
zero = @intFromEnum(InternPool.Index.zero_comptime_int),
zero_u8 = @intFromEnum(InternPool.Index.zero_u8),
one = @intFromEnum(InternPool.Index.one_comptime_int),
one_u8 = @intFromEnum(InternPool.Index.one_u8),
void_value = @intFromEnum(InternPool.Index.void_value),
unreachable_value = @intFromEnum(InternPool.Index.unreachable_value),
null_value = @intFromEnum(InternPool.Index.null_value),
bool_true = @intFromEnum(InternPool.Index.bool_true),
bool_false = @intFromEnum(InternPool.Index.bool_false),
empty_aggregate = @intFromEnum(InternPool.Index.empty_aggregate),
zero_usize = @intFromEnum(InternPool.Index.zero_usize),
one_usize = @intFromEnum(InternPool.Index.one_usize),
the_only_possible_value = @intFromEnum(InternPool.Index.the_only_possible_value),
generic_poison = @intFromEnum(InternPool.Index.generic_poison),
unknown_unknown = @intFromEnum(InternPool.Index.unknown_unknown),
ref_start_index,
/// This Ref does not correspond to any ZIR instruction or constant
/// value and may instead be used as a sentinel to indicate null.
none = std.math.maxInt(u32),
none = @intFromEnum(InternPool.Index.none),
_,
};
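With this rework the fixed members of `Ref` are pinned to the matching `InternPool.Index` values instead of forming an independent numbering, so converting between the two is a plain integer cast. A minimal sketch of the invariant, assuming it sits in this file where `std` and `InternPool` are already imported and `Ref` is `Inst.Ref`:

comptime {
    // The shared encoding keeps the two enums numerically in sync.
    std.debug.assert(@intFromEnum(Inst.Ref.u8_type) == @intFromEnum(InternPool.Index.u8_type));
    std.debug.assert(@intFromEnum(Inst.Ref.none) == @intFromEnum(InternPool.Index.none));
}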
@ -2290,10 +2223,6 @@ pub const Inst = struct {
operand: Ref,
payload_index: u32,
},
switch_capture: struct {
switch_inst: Index,
prong_index: u32,
},
dbg_stmt: LineColumn,
/// Used for unary operators which reference an inst,
/// with an AST node source location.
@ -2363,7 +2292,6 @@ pub const Inst = struct {
bool_br,
@"unreachable",
@"break",
switch_capture,
dbg_stmt,
inst_node,
str_op,
@ -2579,6 +2507,19 @@ pub const Inst = struct {
};
};
/// Stored inside extra, with trailing arguments according to `args_len`.
/// Implicit 0. arg_0_start: u32, // always same as `args_len`
/// 1. arg_end: u32, // for each `args_len`
/// arg_N_start is the same as arg_N-1_end
pub const FieldCall = struct {
// Note: Flags *must* come first so that unusedResultExpr
// can find it when it goes to modify them.
flags: Call.Flags,
obj_ptr: Ref,
/// Offset into `string_bytes`.
field_name_start: u32,
};
pub const TypeOfPeer = struct {
src_node: i32,
body_len: u32,
@ -2658,6 +2599,14 @@ pub const Inst = struct {
sentinel: Ref,
};
pub const SliceLength = struct {
lhs: Ref,
start: Ref,
len: Ref,
sentinel: Ref,
start_src_node_offset: i32,
};
/// The meaning of these operands depends on the corresponding `Tag`.
pub const Bin = struct {
lhs: Ref,
@ -2681,37 +2630,53 @@ pub const Inst = struct {
};
/// 0. multi_cases_len: u32 // If has_multi_cases is set.
/// 1. else_body { // If has_else or has_under is set.
/// body_len: u32,
/// body member Index for every body_len
/// 1. tag_capture_inst: u32 // If any_has_tag_capture is set. Index of instruction prongs use to refer to the inline tag capture.
/// 2. else_body { // If has_else or has_under is set.
/// info: ProngInfo,
/// body member Index for every info.body_len
/// }
/// 2. scalar_cases: { // for every scalar_cases_len
/// 3. scalar_cases: { // for every scalar_cases_len
/// item: Ref,
/// body_len: u32,
/// body member Index for every body_len
/// info: ProngInfo,
/// body member Index for every info.body_len
/// }
/// 3. multi_cases: { // for every multi_cases_len
/// 4. multi_cases: { // for every multi_cases_len
/// items_len: u32,
/// ranges_len: u32,
/// body_len: u32,
/// info: ProngInfo,
/// item: Ref // for every items_len
/// ranges: { // for every ranges_len
/// item_first: Ref,
/// item_last: Ref,
/// }
/// body member Index for every body_len
/// body member Index for every info.body_len
/// }
///
/// When analyzing a case body, the switch instruction itself refers to the
/// captured payload. Whether this is captured by reference or by value
/// depends on whether the `byref` bit is set for the corresponding body.
pub const SwitchBlock = struct {
/// This is always a `switch_cond` or `switch_cond_ref` instruction.
/// If it is a `switch_cond_ref` instruction, bits.is_ref is always true.
/// If it is a `switch_cond` instruction, bits.is_ref is always false.
/// Both `switch_cond` and `switch_cond_ref` return a value, not a pointer,
/// that is useful for the case items, but cannot be used for capture values.
/// For the capture values, Sema is expected to find the operand of this operand
/// and use that.
/// The operand passed to the `switch` expression. If this is a
/// `switch_block`, this is the operand value; if `switch_block_ref` it
/// is a pointer to the operand. `switch_block_ref` is always used if
/// any prong has a byref capture.
operand: Ref,
bits: Bits,
/// These are stored in trailing data in `extra` for each prong.
pub const ProngInfo = packed struct(u32) {
body_len: u28,
capture: Capture,
is_inline: bool,
has_tag_capture: bool,
pub const Capture = enum(u2) {
none,
by_val,
by_ref,
};
};
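`ProngInfo` packs each prong's body length and capture kind into one `u32` (28 + 2 + 1 + 1 bits). A standalone copy of the layout, for illustration only, to show that the bit budget adds up:

const std = @import("std");

const ProngInfo = packed struct(u32) {
    body_len: u28,
    capture: enum(u2) { none, by_val, by_ref },
    is_inline: bool,
    has_tag_capture: bool,
};

comptime {
    std.debug.assert(@bitSizeOf(ProngInfo) == 32); // 28 + 2 + 1 + 1
}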
pub const Bits = packed struct {
/// If true, one or more prongs have multiple items.
has_multi_cases: bool,
@ -2719,9 +2684,11 @@ pub const Inst = struct {
has_else: bool,
/// If true, there is an underscore prong. This is mutually exclusive with `has_else`.
has_under: bool,
/// If true, at least one prong has an inline tag capture.
any_has_tag_capture: bool,
scalar_cases_len: ScalarCasesLen,
pub const ScalarCasesLen = u29;
pub const ScalarCasesLen = u28;
pub fn specialProng(bits: Bits) SpecialProng {
const has_else: u2 = @intFromBool(bits.has_else);
@ -2735,103 +2702,10 @@ pub const Inst = struct {
}
};
pub const ScalarProng = struct {
item: Ref,
body: []const Index,
};
/// TODO performance optimization: instead of having this helper method
/// change the definition of switch_capture instruction to store extra_index
/// instead of prong_index. This way, Sema won't be doing O(N^2) iterations
/// over the switch prongs.
pub fn getScalarProng(
self: SwitchBlock,
zir: Zir,
extra_end: usize,
prong_index: usize,
) ScalarProng {
var extra_index: usize = extra_end;
if (self.bits.has_multi_cases) {
extra_index += 1;
}
if (self.bits.specialProng() != .none) {
const body_len = @truncate(u31, zir.extra[extra_index]);
extra_index += 1;
const body = zir.extra[extra_index..][0..body_len];
extra_index += body.len;
}
var scalar_i: usize = 0;
while (true) : (scalar_i += 1) {
const item = @enumFromInt(Ref, zir.extra[extra_index]);
extra_index += 1;
const body_len = @truncate(u31, zir.extra[extra_index]);
extra_index += 1;
const body = zir.extra[extra_index..][0..body_len];
extra_index += body.len;
if (scalar_i < prong_index) continue;
return .{
.item = item,
.body = body,
};
}
}
pub const MultiProng = struct {
items: []const Ref,
body: []const Index,
};
pub fn getMultiProng(
self: SwitchBlock,
zir: Zir,
extra_end: usize,
prong_index: usize,
) MultiProng {
// +1 for self.bits.has_multi_cases == true
var extra_index: usize = extra_end + 1;
if (self.bits.specialProng() != .none) {
const body_len = @truncate(u31, zir.extra[extra_index]);
extra_index += 1;
const body = zir.extra[extra_index..][0..body_len];
extra_index += body.len;
}
var scalar_i: usize = 0;
while (scalar_i < self.bits.scalar_cases_len) : (scalar_i += 1) {
extra_index += 1;
const body_len = @truncate(u31, zir.extra[extra_index]);
extra_index += 1;
extra_index += body_len;
}
var multi_i: u32 = 0;
while (true) : (multi_i += 1) {
const items_len = zir.extra[extra_index];
extra_index += 1;
const ranges_len = zir.extra[extra_index];
extra_index += 1;
const body_len = @truncate(u31, zir.extra[extra_index]);
extra_index += 1;
const items = zir.refSlice(extra_index, items_len);
extra_index += items_len;
// Each range has a start and an end.
extra_index += 2 * ranges_len;
const body = zir.extra[extra_index..][0..body_len];
extra_index += body_len;
if (multi_i < prong_index) continue;
return .{
.items = items,
.body = body,
};
}
}
};
pub const Field = struct {
@ -2845,12 +2719,6 @@ pub const Inst = struct {
field_name: Ref,
};
pub const FieldNamedNode = struct {
node: i32,
lhs: Ref,
field_name: Ref,
};
pub const As = struct {
dest_type: Ref,
operand: Ref,
@ -3832,6 +3700,7 @@ pub fn indexToRef(inst: Inst.Index) Inst.Ref {
}
pub fn refToIndex(inst: Inst.Ref) ?Inst.Index {
assert(inst != .none);
const ref_int = @intFromEnum(inst);
if (ref_int >= ref_start_index) {
return ref_int - ref_start_index;
@ -3839,3 +3708,8 @@ pub fn refToIndex(inst: Inst.Ref) ?Inst.Index {
return null;
}
}
pub fn refToIndexAllowNone(inst: Inst.Ref) ?Inst.Index {
if (inst == .none) return null;
return refToIndex(inst);
}
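`refToIndexAllowNone` is a convenience over `refToIndex`, which asserts that its argument is not `.none` (now encoded as `InternPool.Index.none` rather than `maxInt(u32)`). A hedged usage sketch; the import path is an assumption:

const Zir = @import("Zir.zig"); // assumed path

fn maybeInstruction(ref: Zir.Inst.Ref) ?Zir.Inst.Index {
    // Null for `.none` and for Refs that name constants rather than
    // ZIR instructions; an instruction index otherwise.
    return Zir.refToIndexAllowNone(ref);
}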

View File

@ -236,12 +236,10 @@ pub fn translate(
const body_size = @sizeOf(std.zig.Server.Message.EmitBinPath);
if (header.bytes_len <= body_size) return error.InvalidResponse;
const trailing_size = header.bytes_len - body_size;
_ = try zcs.receiveEmitBinPath();
const result_path = try zcs.receiveBytes(allocator, trailing_size);
defer allocator.free(result_path);
const trailing_size = header.bytes_len - body_size;
const result_path = zcs.pooler.fifo(.in).readableSliceOfLen(trailing_size);
return Result{ .success = try URI.fromPath(allocator, std.mem.sliceTo(result_path, '\n')) };
},

View File

@ -509,6 +509,17 @@ test "completion - block" {
, &.{
.{ .label = "blk", .kind = .Text }, // idk what kind this should be
});
try testCompletion(
\\const S = struct { alpha: u32 };
\\const foo: S = undefined;
\\const bar = blk: {
\\ break :blk foo;
\\};
\\const baz = bar.<cursor>
, &.{
.{ .label = "alpha", .kind = .Field, .detail = "alpha: u32" },
});
}
fn testCompletion(source: []const u8, expected_completions: []const Completion) !void {

View File

@ -48,6 +48,23 @@ test "inlayhints - function self parameter" {
\\const foo: Foo = .{};
\\const _ = foo.bar(<alpha>5,<beta>"");
);
try testInlayHints(
\\const Foo = struct { pub fn bar(self: Foo, alpha: u32, beta: anytype) void {} };
\\const foo: Foo = .{};
\\const _ = foo.bar(<alpha>5,<beta>4);
);
try testInlayHints(
\\const Foo = struct { pub fn bar(self: Foo, alpha: u32, beta: []const u8) void {} };
\\const _ = Foo.bar(<self>undefined,<alpha>5,<beta>"");
);
try testInlayHints(
\\const Foo = struct {
\\ pub fn bar(self: Foo, alpha: u32, beta: []const u8) void {}
\\ pub fn foo() void {
\\ bar(<self>undefined,<alpha>5,<beta>"");
\\ }
\\};
);
}
test "inlayhints - builtin call" {