various intern pool & comptime interpreter changes (#1179)

* intern_pool: add getUnsignedInt

* add an empty struct literal

* resolve array length as usize in comptime interpreter

* use only one global intern pool

* store analysis errors in `DocumentStore.Handle`

* add typed undefined value

* add typed null value
Techatrix 2023-05-09 06:25:26 +02:00 committed by GitHub
parent f6c808a4b3
commit 029f5094ff
12 changed files with 146 additions and 71 deletions
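
The central change is that a single InternPool is now owned by the Analyser and shared by every ComptimeInterpreter, instead of each interpreter allocating its own pool. Below is a minimal sketch of the new wiring; the import paths, the helper name, and the surrounding setup (gpa, document_store, uri) are assumptions, while the signatures themselves come from the hunks that follow.

const std = @import("std");
const DocumentStore = @import("DocumentStore.zig");
const Analyser = @import("analysis.zig"); // path assumed
const InternPool = @import("analyser/InternPool.zig");

fn sharedPoolSketch(gpa: std.mem.Allocator, document_store: *DocumentStore, uri: DocumentStore.Uri) !void {
    var analyser = Analyser.init(gpa, document_store);
    defer analyser.deinit(); // deinits analyser.ip as well

    // Created once per server, and only when comptime interpretation is
    // enabled (see the Server.create hunk below):
    analyser.ip = try InternPool.init(gpa);

    // Interpreters now borrow a pointer to the shared pool instead of
    // creating their own:
    _ = try document_store.ensureInterpreterExists(uri, &analyser.ip.?);
}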

View File

@ -20,24 +20,16 @@ pub const ComptimeInterpreter = @This();
const log = std.log.scoped(.zls_comptime_interpreter);
allocator: std.mem.Allocator,
ip: InternPool,
ip: *InternPool,
document_store: *DocumentStore,
uri: DocumentStore.Uri,
namespaces: std.MultiArrayList(Namespace) = .{},
/// Interpreter diagnostic errors
errors: std.AutoArrayHashMapUnmanaged(Ast.Node.Index, InterpreterError) = .{},
pub fn getHandle(interpreter: *ComptimeInterpreter) *const DocumentStore.Handle {
// This interpreter is loaded from a known-valid handle so a valid handle must exist
return interpreter.document_store.getHandle(interpreter.uri).?;
}
pub const InterpreterError = struct {
code: []const u8,
message: []const u8,
};
pub fn recordError(
interpreter: *ComptimeInterpreter,
node_idx: Ast.Node.Index,
@ -47,25 +39,21 @@ pub fn recordError(
) error{OutOfMemory}!void {
const message = try std.fmt.allocPrint(interpreter.allocator, fmt, args);
errdefer interpreter.allocator.free(message);
const previous = try interpreter.errors.fetchPut(interpreter.allocator, node_idx, .{
const handle = interpreter.document_store.handles.get(interpreter.uri).?;
try handle.analysis_errors.append(interpreter.document_store.allocator, .{
.loc = offsets.nodeToLoc(handle.tree, node_idx),
.code = code,
.message = message,
});
if (previous) |p| interpreter.allocator.free(p.value.message);
}
pub fn deinit(interpreter: *ComptimeInterpreter) void {
for (interpreter.errors.values()) |err| {
interpreter.allocator.free(err.message);
}
interpreter.errors.deinit(interpreter.allocator);
interpreter.ip.deinit(interpreter.allocator);
var i: usize = 0;
while (i < interpreter.namespaces.len) : (i += 1) {
interpreter.namespaces.items(.decls)[i].deinit(interpreter.allocator);
interpreter.namespaces.items(.usingnamespaces)[i].deinit(interpreter.allocator);
for (
interpreter.namespaces.items(.decls),
interpreter.namespaces.items(.usingnamespaces),
) |*decls, *usingnamespaces| {
decls.deinit(interpreter.allocator);
usingnamespaces.deinit(interpreter.allocator);
}
interpreter.namespaces.deinit(interpreter.allocator);
}
@ -240,7 +228,7 @@ pub fn interpret(
container_field.ast.type_expr,
"expected_type",
"expected type 'type', found '{}'",
.{init_value_ty.fmt(interpreter.ip)},
.{init_value_ty.fmt(interpreter.ip.*)},
);
continue;
}
@ -470,7 +458,7 @@ pub fn interpret(
const can_have_fields: bool = switch (inner_ty) {
.simple_type => |simple| switch (simple) {
.type => blk: {
if (interpreter.huntItDown(val.getNamespace(interpreter.ip), field_name, options)) |decl_index| {
if (interpreter.huntItDown(val.getNamespace(interpreter.ip.*), field_name, options)) |decl_index| {
const decl = interpreter.ip.getDecl(decl_index);
return InterpretResult{ .value = Value{
.interpreter = interpreter,
@ -560,7 +548,7 @@ pub fn interpret(
.interpreter = interpreter,
.node_idx = data[node_idx].rhs,
.index = try interpreter.ip.get(interpreter.allocator, .{ .int_u64_value = .{
.ty = .comptime_int_type,
.ty = .usize_type,
.int = array_info.len,
} }),
} };
@ -575,7 +563,7 @@ pub fn interpret(
node_idx,
"null_unwrap",
"tried to unwrap optional of type `{}` which was null",
.{optional_info.payload_type.fmt(interpreter.ip)},
.{optional_info.payload_type.fmt(interpreter.ip.*)},
);
return error.InvalidOperation;
}
@ -645,6 +633,8 @@ pub fn interpret(
.slice,
.aggregate,
.union_value,
.null_value,
.undefined_value,
.unknown_value,
=> unreachable,
};
@ -655,14 +645,14 @@ pub fn interpret(
node_idx,
"undeclared_identifier",
"`{}` has no member '{s}'",
.{ accessed_ty.fmt(interpreter.ip), field_name },
.{ accessed_ty.fmt(interpreter.ip.*), field_name },
);
} else {
try interpreter.recordError(
node_idx,
"invalid_field_access",
"`{}` does not support field access",
.{accessed_ty.fmt(interpreter.ip)},
.{accessed_ty.fmt(interpreter.ip.*)},
);
}
return error.InvalidOperation;
@ -702,7 +692,7 @@ pub fn interpret(
if_info.ast.cond_expr,
"invalid_if_condition",
"expected `bool` but found `{}`",
.{condition_ty.fmt(interpreter.ip)},
.{condition_ty.fmt(interpreter.ip.*)},
);
return error.InvalidOperation;
},
@ -850,7 +840,7 @@ pub fn interpret(
const val = interpreter.ip.indexToKey(ir_value.index);
const ty = val.typeOf();
try writer.print("@as({}, {})", .{ ty.fmt(interpreter.ip), val.fmt(interpreter.ip) });
try writer.print("@as({}, {})", .{ ty.fmt(interpreter.ip.*), val.fmt(interpreter.ip.*) });
if (index != params.len - 1)
try writer.writeAll(", ");
}
@ -897,7 +887,7 @@ pub fn interpret(
defer interpreter.allocator.free(import_uri);
var handle = interpreter.document_store.getOrLoadHandle(import_uri) orelse return error.ImportFailure;
_ = try interpreter.document_store.ensureInterpreterExists(handle.uri);
_ = try interpreter.document_store.ensureInterpreterExists(handle.uri, interpreter.ip);
return InterpretResult{
.value = Value{
@ -927,7 +917,7 @@ pub fn interpret(
try writer.writeAll("incompatible types: ");
for (types, 0..) |ty, i| {
if (i != 0) try writer.writeAll(", ");
try writer.print("`{}`", .{ty.fmt(interpreter.ip)});
try writer.print("`{}`", .{ty.fmt(interpreter.ip.*)});
}
try interpreter.recordError(node_idx, "invalid_typeof", "{s}", .{output.items});
@ -952,7 +942,7 @@ pub fn interpret(
if (ty != .type_type) return error.InvalidBuiltin;
const value_namespace = interpreter.ip.indexToKey(ty).getNamespace(interpreter.ip);
const value_namespace = interpreter.ip.indexToKey(ty).getNamespace(interpreter.ip.*);
if (value_namespace == .none) return error.InvalidBuiltin;
const name = interpreter.ip.indexToKey(field_name.index).bytes; // TODO add checks
@ -1137,7 +1127,7 @@ pub fn interpret(
node_idx,
"invalid_deref",
"expected type `bool` but got `{}`",
.{ty.fmt(interpreter.ip)},
.{ty.fmt(interpreter.ip.*)},
);
return error.InvalidOperation;
}
@ -1259,7 +1249,7 @@ pub fn call(
param.type_expr,
"expected_type",
"expected type 'type', found '{}'",
.{tex_ty.fmt(interpreter.ip)},
.{tex_ty.fmt(interpreter.ip.*)},
);
return error.InvalidCast;
}

View File

@ -15,6 +15,7 @@ const translate_c = @import("translate_c.zig");
const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
const AstGen = @import("stage2/AstGen.zig");
const Zir = @import("stage2/Zir.zig");
const InternPool = @import("analyser/InternPool.zig");
const DocumentStore = @This();
@ -73,6 +74,9 @@ pub const Handle = struct {
/// Contains one entry for every cimport in the document
cimports: std.MultiArrayList(CImportHandle) = .{},
/// error messages from comptime_interpreter or astgen_analyser
analysis_errors: std.ArrayListUnmanaged(ErrorMessage) = .{},
/// `DocumentStore.build_files` is guaranteed to contain this uri
/// uri memory managed by its build_file
associated_build_file: ?Uri = null,
@ -97,9 +101,20 @@ pub const Handle = struct {
allocator.free(source);
}
self.cimports.deinit(allocator);
for (self.analysis_errors.items) |err| {
allocator.free(err.message);
}
self.analysis_errors.deinit(allocator);
}
};
pub const ErrorMessage = struct {
loc: offsets.Loc,
code: []const u8,
message: []const u8,
};
allocator: std.mem.Allocator,
config: *const Config,
runtime_zig_version: *const ?ZigVersionWrapper,
@ -209,7 +224,7 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();
const handle = self.handles.get(uri) orelse unreachable;
const handle = self.handles.get(uri).?;
// TODO: Handle interpreter cross reference
if (handle.interpreter) |int| {
@ -255,6 +270,12 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
handle.cimports.deinit(self.allocator);
handle.cimports = new_cimports;
for (handle.analysis_errors.items) |err| {
self.allocator.free(err.message);
}
handle.analysis_errors.deinit(self.allocator);
handle.analysis_errors = .{};
if (old_import_count != new_import_count or
old_cimport_count != new_cimport_count)
{
@ -1125,7 +1146,7 @@ pub fn wantZir(self: DocumentStore) bool {
return !can_run_ast_check;
}
pub fn ensureInterpreterExists(self: *DocumentStore, uri: Uri) !*ComptimeInterpreter {
pub fn ensureInterpreterExists(self: *DocumentStore, uri: Uri, ip: *InternPool) !*ComptimeInterpreter {
var handle = self.handles.get(uri).?;
if (handle.interpreter != null) return handle.interpreter.?;
@ -1133,9 +1154,6 @@ pub fn ensureInterpreterExists(self: *DocumentStore, uri: Uri) !*ComptimeInterpr
var interpreter = try self.allocator.create(ComptimeInterpreter);
errdefer self.allocator.destroy(interpreter);
var ip = try ComptimeInterpreter.InternPool.init(self.allocator);
errdefer ip.deinit(self.allocator);
interpreter.* = ComptimeInterpreter{
.allocator = self.allocator,
.ip = ip,

View File

@ -1571,6 +1571,10 @@ pub fn create(
try configuration.configChanged(config, &server.runtime_zig_version, allocator, config_path);
if (config.dangerous_comptime_experiments_do_not_enable) {
server.analyser.ip = try analyser.InternPool.init(allocator);
}
return server;
}

View File

@ -178,6 +178,14 @@ pub const UnionValue = packed struct {
val: Index,
};
pub const NullValue = packed struct {
ty: Index,
};
pub const UndefinedValue = packed struct {
ty: Index,
};
pub const UnknownValue = packed struct {
ty: Index,
};
@ -236,6 +244,8 @@ pub const Key = union(enum) {
slice: Slice,
aggregate: Aggregate,
union_value: UnionValue,
null_value: NullValue,
undefined_value: UndefinedValue,
unknown_value: UnknownValue,
// error
@ -288,6 +298,8 @@ pub const Key = union(enum) {
.slice => .slice,
.aggregate => .aggregate,
.union_value => .union_value,
.null_value => .null_value,
.undefined_value => .undefined_value,
.unknown_value => .unknown_value,
};
}
@ -326,6 +338,7 @@ pub const Key = union(enum) {
.anyerror => .ErrorSet,
.noreturn => .NoReturn,
.anyframe_type => .AnyFrame,
.empty_struct_literal => .Struct,
.null_type => .Null,
.undefined_type => .Undefined,
.enum_literal_type => .EnumLiteral,
@ -377,6 +390,8 @@ pub const Key = union(enum) {
.slice,
.aggregate,
.union_value,
.null_value,
.undefined_value,
.unknown_value,
=> unreachable,
};
@ -426,6 +441,8 @@ pub const Key = union(enum) {
.slice => |slice_info| slice_info.ty,
.aggregate => |aggregate_info| aggregate_info.ty,
.union_value => |union_info| union_info.ty,
.null_value => |null_info| null_info.ty,
.undefined_value => |undefined_info| undefined_info.ty,
.unknown_value => |unknown_info| unknown_info.ty,
};
}
@ -617,6 +634,7 @@ pub const Key = union(enum) {
.enum_literal_type,
=> Index.none,
.empty_struct_literal => Index.empty_aggregate,
.void => Index.void_value,
.noreturn => Index.unreachable_value,
.null_type => Index.null_value,
@ -710,6 +728,8 @@ pub const Key = union(enum) {
.slice,
.aggregate,
.union_value,
.null_value,
.undefined_value,
.unknown_value,
=> unreachable,
};
@ -728,6 +748,7 @@ pub const Key = union(enum) {
.int_i64_value => |int_value| int_value.int == 0,
.int_big_value => |int_value| int_value.int.orderAgainstScalar(0).compare(.eq),
.null_value => true,
.optional_value => false,
.unknown_value => unreachable,
@ -735,6 +756,25 @@ pub const Key = union(enum) {
};
}
/// If the value fits in a u64, return it, otherwise null.
/// Asserts not undefined.
pub fn getUnsignedInt(val: Key) !?u64 {
return switch (val) {
.simple_value => |simple| switch (simple) {
.null_value => 0,
.bool_true => 1,
.bool_false => 0,
.the_only_possible_value => 0,
else => null,
},
.int_u64_value => |int_value| int_value.int,
.int_i64_value => |int_value| @intCast(u64, int_value.int),
.int_big_value => |int_value| int_value.int.to(u64) catch null,
.null_value => 0,
else => null,
};
}
pub const FormatContext = struct {
key: Key,
options: FormatOptions = .{},
@ -795,6 +835,7 @@ pub const Key = union(enum) {
.null_type => try writer.writeAll("@TypeOf(null)"),
.undefined_type => try writer.writeAll("@TypeOf(undefined)"),
.empty_struct_literal => try writer.writeAll("@TypeOf(.{})"),
.enum_literal_type => try writer.writeAll("@TypeOf(.enum_literal)"),
.atomic_order => try writer.writeAll("std.builtin.AtomicOrder"),
@ -998,6 +1039,8 @@ pub const Key = union(enum) {
union_value.val.fmt(ip),
});
},
.null_value => try writer.print("null", .{}),
.undefined_value => try writer.print("undefined", .{}),
.unknown_value => try writer.print("(unknown value)", .{}),
}
return null;
@ -1061,6 +1104,7 @@ pub const Index = enum(u32) {
comptime_float_type,
noreturn_type,
anyframe_type,
empty_struct_literal,
null_type,
undefined_type,
enum_literal_type,
@ -1263,6 +1307,12 @@ pub const Tag = enum(u8) {
/// A union value.
/// data is index to UnionValue.
union_value,
/// A null value.
/// data is index to type which may be unknown.
null_value,
/// An undefined value.
/// data is index to type which may be unknown.
undefined_value,
/// An unknown value.
/// data is index to type which may also be unknown.
unknown_value,
@ -1295,6 +1345,7 @@ pub const SimpleType = enum(u32) {
comptime_float,
noreturn,
anyframe_type,
empty_struct_literal,
null_type,
undefined_type,
enum_literal_type,
@ -1373,6 +1424,7 @@ pub fn init(gpa: Allocator) Allocator.Error!InternPool {
.{ .index = .comptime_float_type, .key = .{ .simple_type = .comptime_float } },
.{ .index = .noreturn_type, .key = .{ .simple_type = .noreturn } },
.{ .index = .anyframe_type, .key = .{ .simple_type = .anyframe_type } },
.{ .index = .empty_struct_literal, .key = .{ .simple_type = .empty_struct_literal } },
.{ .index = .null_type, .key = .{ .simple_type = .null_type } },
.{ .index = .undefined_type, .key = .{ .simple_type = .undefined_type } },
.{ .index = .enum_literal_type, .key = .{ .simple_type = .enum_literal_type } },
@ -1409,7 +1461,7 @@ pub fn init(gpa: Allocator) Allocator.Error!InternPool {
.{ .index = .bool_true, .key = .{ .simple_value = .bool_true } },
.{ .index = .bool_false, .key = .{ .simple_value = .bool_false } },
.{ .index = .empty_aggregate, .key = .{ .aggregate = .{ .ty = .none, .values = &.{} } } },
.{ .index = .empty_aggregate, .key = .{ .aggregate = .{ .ty = .empty_struct_literal, .values = &.{} } } },
.{ .index = .zero_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 0 } } },
.{ .index = .one_usize, .key = .{ .int_u64_value = .{ .ty = .usize_type, .int = 1 } } },
.{ .index = .the_only_possible_value, .key = .{ .simple_value = .the_only_possible_value } },
@ -1515,6 +1567,8 @@ pub fn indexToKey(ip: InternPool, index: Index) Key {
.slice => .{ .slice = ip.extraData(Slice, data) },
.aggregate => .{ .aggregate = ip.extraData(Aggregate, data) },
.union_value => .{ .union_value = ip.extraData(UnionValue, data) },
.null_value => .{ .null_value = .{ .ty = @intToEnum(Index, data) } },
.undefined_value => .{ .undefined_value = .{ .ty = @intToEnum(Index, data) } },
.unknown_value => .{ .unknown_value = .{ .ty = @intToEnum(Index, data) } },
};
}
@ -1545,6 +1599,8 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
}),
.float_16_value => |float_val| @bitCast(u16, float_val),
.float_32_value => |float_val| @bitCast(u32, float_val),
.null_value => |null_val| @enumToInt(null_val.ty),
.undefined_value => |undefined_val| @enumToInt(undefined_val.ty),
.unknown_value => |unknown_val| @enumToInt(unknown_val.ty),
inline else => |data| try ip.addExtra(gpa, data), // TODO sad stage1 noises :(
};

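Taken together, the new InternPool pieces can be exercised roughly like the test-style sketch below. The import path and the choice of .usize_type are assumptions; the Key variants, get, indexToKey, and getUnsignedInt are the ones added or shown above.

const std = @import("std");
const InternPool = @import("analyser/InternPool.zig");

test "typed null/undefined values and getUnsignedInt" {
    const gpa = std.testing.allocator;
    var ip = try InternPool.init(gpa);
    defer ip.deinit(gpa);

    // null and undefined now remember the type they were coerced to:
    const typed_null = try ip.get(gpa, .{ .null_value = .{ .ty = .usize_type } });
    const typed_undef = try ip.get(gpa, .{ .undefined_value = .{ .ty = .usize_type } });
    try std.testing.expect(typed_null != typed_undef);

    // array lengths are now interned as usize and can be read back as u64:
    const len = try ip.get(gpa, .{ .int_u64_value = .{ .ty = .usize_type, .int = 3 } });
    try std.testing.expectEqual(@as(?u64, 3), try ip.indexToKey(len).getUnsignedInt());
}
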
View File

@ -175,6 +175,8 @@ pub fn dotCompletions(
.slice,
.aggregate,
.union_value,
.null_value,
.undefined_value,
.unknown_value,
=> unreachable,
}

View File

@ -16,6 +16,7 @@ const Analyser = @This();
gpa: std.mem.Allocator,
arena: std.heap.ArenaAllocator,
store: *DocumentStore,
ip: ?InternPool,
bound_type_params: std.AutoHashMapUnmanaged(Ast.full.FnProto.Param, TypeWithHandle) = .{},
using_trail: std.AutoHashMapUnmanaged(Ast.Node.Index, void) = .{},
resolved_nodes: std.HashMapUnmanaged(NodeWithUri, ?TypeWithHandle, NodeWithUri.Context, std.hash_map.default_max_load_percentage) = .{},
@ -25,6 +26,7 @@ pub fn init(gpa: std.mem.Allocator, store: *DocumentStore) Analyser {
.gpa = gpa,
.arena = std.heap.ArenaAllocator.init(gpa),
.store = store,
.ip = null,
};
}
@ -32,6 +34,7 @@ pub fn deinit(self: *Analyser) void {
self.bound_type_params.deinit(self.gpa);
self.using_trail.deinit(self.gpa);
self.resolved_nodes.deinit(self.gpa);
if (self.ip) |*intern_pool| intern_pool.deinit(self.gpa);
self.arena.deinit();
}
@ -823,7 +826,7 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
log.info("Invoking interpreter!", .{});
const interpreter = analyser.store.ensureInterpreterExists(handle.uri) catch |err| {
const interpreter = analyser.store.ensureInterpreterExists(handle.uri, &analyser.ip.?) catch |err| {
log.err("Failed to interpret file: {s}", .{@errorName(err)});
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);

View File

@ -82,7 +82,7 @@ fn typeToCompletion(
.@"comptime" => |co| try analyser.completions.dotCompletions(
allocator,
list,
&co.interpreter.ip,
co.interpreter.ip,
co.value.index,
type_handle.type.is_type_val,
co.value.node_idx,

View File

@ -164,22 +164,16 @@ pub fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{
}
}
if (handle.interpreter) |int| {
try diagnostics.ensureUnusedCapacity(allocator, int.errors.count());
var err_it = int.errors.iterator();
while (err_it.next()) |err| {
diagnostics.appendAssumeCapacity(.{
.range = offsets.nodeToRange(tree, err.key_ptr.*, server.offset_encoding),
.severity = .Error,
.code = .{ .string = err.value_ptr.code },
.source = "zls",
.message = err.value_ptr.message,
});
}
try diagnostics.ensureUnusedCapacity(allocator, handle.analysis_errors.items.len);
for (handle.analysis_errors.items) |err| {
diagnostics.appendAssumeCapacity(.{
.range = offsets.locToRange(handle.tree.source, err.loc, server.offset_encoding),
.severity = .Error,
.code = .{ .string = err.code },
.source = "zls",
.message = err.message,
});
}
// try diagnostics.appendSlice(allocator, handle.interpreter.?.diagnostics.items);
return .{
.uri = handle.uri,

View File

@ -68,7 +68,7 @@ pub fn hoverSymbol(server: *Server, decl_handle: Analyser.DeclWithHandle, markup
const resolved_type_str = if (resolved_type) |rt|
if (rt.type.is_type_val) switch (rt.type.data) {
.@"comptime" => |co| try std.fmt.allocPrint(server.arena.allocator(), "{}", .{co.value.index.fmt(co.interpreter.ip)}),
.@"comptime" => |co| try std.fmt.allocPrint(server.arena.allocator(), "{}", .{co.value.index.fmt(co.interpreter.ip.*)}),
else => "type",
} else switch (rt.type.data) { // TODO: Investigate random weird numbers like 897 that cause index out of bounds
.pointer,

View File

@ -63,11 +63,9 @@ pub const SrcLoc = struct {
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end, .main = start };
},
.node_offset => |traced_off| {
const node_off = traced_off.x;
.node_offset => |node_off| {
const tree = src_loc.handle.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.handle.tree_loaded);
return nodeToSpan(tree, node);
},
.node_offset_main_token => |node_off| {
@ -79,7 +77,6 @@ pub const SrcLoc = struct {
.node_offset_bin_op => |node_off| {
const tree = src_loc.handle.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.handle.tree_loaded);
return nodeToSpan(tree, node);
},
.node_offset_initializer => |node_off| {
@ -594,7 +591,7 @@ pub const SrcLoc = struct {
src_loc: SrcLoc,
node_off: i32,
arg_index: u32,
) !Span {
) Span {
const tree = src_loc.handle.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
@ -611,7 +608,7 @@ pub const SrcLoc = struct {
return nodeToSpan(tree, param);
}
pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
pub fn nodeToSpan(tree: Ast, node: u32) Span {
return tokensToSpan(
tree,
tree.firstToken(node),
@ -620,7 +617,7 @@ pub const SrcLoc = struct {
);
}
fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
fn tokensToSpan(tree: Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
const token_starts = tree.tokens.items(.start);
var start_tok = start;
var end_tok = end;

View File

@ -2096,6 +2096,7 @@ pub const Inst = struct {
comptime_float_type,
noreturn_type,
anyframe_type,
empty_struct_literal,
null_type,
undefined_type,
enum_literal_type,

View File

@ -134,7 +134,7 @@ test "ComptimeInterpreter - variable lookup" {
defer context.deinit();
const result = try context.interpret(context.findVar("bar"));
try expectEqualKey(context.interpreter.ip, .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 3 } }, result.val);
try expectEqualKey(context.interpreter.ip.*, .{ .int_u64_value = .{ .ty = .comptime_int_type, .int = 3 } }, result.val);
}
test "ComptimeInterpreter - field access" {
@ -294,6 +294,7 @@ const KV = struct {
const Context = struct {
config: *zls.Config,
document_store: *zls.DocumentStore,
ip: *InternPool,
interpreter: *ComptimeInterpreter,
pub fn init(source: []const u8) !Context {
@ -306,6 +307,12 @@ const Context = struct {
var interpreter = try allocator.create(ComptimeInterpreter);
errdefer allocator.destroy(interpreter);
var ip = try allocator.create(InternPool);
errdefer allocator.destroy(ip);
ip.* = try InternPool.init(allocator);
errdefer ip.deinit(allocator);
config.* = .{};
document_store.* = .{
.allocator = allocator,
@ -325,7 +332,7 @@ const Context = struct {
interpreter.* = .{
.allocator = allocator,
.ip = try InternPool.init(allocator),
.ip = ip,
.document_store = document_store,
.uri = handle.uri,
};
@ -337,6 +344,7 @@ const Context = struct {
return .{
.config = config,
.document_store = document_store,
.ip = ip,
.interpreter = interpreter,
};
}
@ -344,10 +352,12 @@ const Context = struct {
pub fn deinit(self: *Context) void {
self.interpreter.deinit();
self.document_store.deinit();
self.ip.deinit(allocator);
allocator.destroy(self.config);
allocator.destroy(self.document_store);
allocator.destroy(self.interpreter);
allocator.destroy(self.ip);
}
pub fn call(self: *Context, func_node: Ast.Node.Index, arguments: []const KV) !KV {
@ -428,8 +438,8 @@ fn testCall(
const result = try context.call(context.findFn("Foo"), arguments);
try expectEqualKey(context.interpreter.ip, Key{ .simple_type = .type }, result.ty);
try expectEqualKey(context.interpreter.ip, expected_ty, result.val);
try expectEqualKey(context.interpreter.ip.*, Key{ .simple_type = .type }, result.ty);
try expectEqualKey(context.interpreter.ip.*, expected_ty, result.val);
}
fn testExpr(
@ -446,7 +456,7 @@ fn testExpr(
const result = try context.interpret(context.findVar("foobarbaz"));
try expectEqualKey(context.interpreter.ip, expected, result.val);
try expectEqualKey(context.interpreter.ip.*, expected, result.val);
}
fn expectEqualKey(ip: InternPool, expected: Key, actual: ?Key) !void {