Use new ast helper functions (#939)
* use new ast helper functions
* use new ast helper functions
* update min build version
* fix merge conflicts
This commit is contained in:
parent
d2586f79a1
commit
fe54fb7cfa
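For orientation: the commit migrates zls away from its local src/ast.zig wrapper helpers (ast.varDecl, ast.fnProto, ast.callFull, ast.declMembers, ...) to the full* accessor methods that std.zig.Ast gained in 0.11.0-dev.1254 (tree.fullVarDecl, tree.fullFnProto, tree.fullCall, tree.fullContainerDecl, ...). A minimal sketch of the call-site pattern, assuming a current std.zig.Ast API; the varDeclName helper and the sample source string are illustrative and not part of the commit:

const std = @import("std");
const Ast = std.zig.Ast;

// Illustrative helper (not from the commit): where zls previously called a
// local wrapper such as ast.varDecl(tree, node), it now calls the upstream
// accessor tree.fullVarDecl(node), which returns ?Ast.full.VarDecl directly.
fn varDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
    const var_decl = tree.fullVarDecl(node) orelse return null;
    // The token after the const/var keyword is the declaration name.
    return tree.tokenSlice(var_decl.ast.mut_token + 1);
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // Note: older 0.11.0-dev builds took parse(gpa, source) without the mode argument.
    var tree = try Ast.parse(allocator, "const answer = 42;", .zig);
    defer tree.deinit(allocator);

    for (tree.rootDecls()) |decl| {
        if (varDeclName(tree, decl)) |name| {
            std.debug.print("top-level declaration: {s}\n", .{name});
        }
    }
}

Helpers with no direct std equivalent (fullPtrType, fullIf, fullWhile) remain in zls's src/ast.zig, as seen later in this diff.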
@@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
const min_zig = std.SemanticVersion.parse("0.11.0-dev.874+40ed6ae84") catch return; // Changes to builtin.Type API
const min_zig = std.SemanticVersion.parse("0.11.0-dev.1254+1f8f79cd5") catch return; // add helper functions to std.zig.Ast
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
@@ -325,7 +325,7 @@ pub const Declaration = struct {
.aligned_var_decl,
.simple_var_decl,
=> {
const var_decl = ast.varDecl(tree, decl.node_idx).?;
const var_decl = tree.fullVarDecl(decl.node_idx).?;
if (var_decl.ast.init_node == 0)
return error.CriticalAstFailure;
@@ -361,7 +361,7 @@ pub const Declaration = struct {
.aligned_var_decl,
.simple_var_decl,
=> {
return tree.tokenSlice(ast.varDecl(tree, declaration.node_idx).?.ast.mut_token).len != 3;
return tree.tokenSlice(tree.fullVarDecl(declaration.node_idx).?.ast.mut_token).len != 3;
},
else => false,
};
@@ -496,24 +496,7 @@ pub const InterpreterScope = struct {
pub const ScopeKind = enum { container, block, function };
pub fn scopeKind(scope: InterpreterScope) ScopeKind {
const tree = scope.interpreter.getHandle().tree;
return switch (tree.nodes.items(.tag)[scope.node_idx]) {
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
.error_set_decl,
=> .container,
else => .block,
};
return if (ast.isContainer(tree, scope.node_idx)) .container else .block;
}

pub fn getLabel(scope: InterpreterScope) ?Ast.TokenIndex {
@@ -601,11 +584,10 @@ pub const InterpretResult = union(enum) {

fn getDeclCount(tree: Ast, node_idx: Ast.Node.Index) usize {
var buffer: [2]Ast.Node.Index = undefined;
const members = ast.declMembers(tree, node_idx, &buffer);
const container_decl = tree.fullContainerDecl(&buffer, node_idx).?;

var count: usize = 0;

for (members) |member| {
for (container_decl.ast.members) |member| {
switch (tree.nodes.items(.tag)[member]) {
.global_var_decl,
.local_var_decl,
@@ -638,9 +620,9 @@ pub fn huntItDown(
log.info("Order-independent evaluating {s}...", .{decl_name});

var buffer: [2]Ast.Node.Index = undefined;
const members = ast.declMembers(tree, pscope.node_idx, &buffer);
const container_decl = tree.fullContainerDecl(&buffer, pscope.node_idx).?;

for (members) |member| {
for (container_decl.ast.members) |member| {
switch (tags[member]) {
.global_var_decl,
.local_var_decl,
@@ -751,7 +733,6 @@ pub fn interpret(
// .tagged_union_enum_tag,
// .tagged_union_enum_tag_trailing,
.root,
.error_set_decl,
=> {
var container_scope = try interpreter.newScope(scope, node_idx);
var type_info = TypeInfo{
@ -764,55 +745,47 @@ pub fn interpret(
|
||||
if (node_idx == 0) interpreter.root_type = cont_type;
|
||||
|
||||
var buffer: [2]Ast.Node.Index = undefined;
|
||||
const members = ast.declMembers(tree, node_idx, &buffer);
|
||||
const container_decl = tree.fullContainerDecl(&buffer, node_idx).?;
|
||||
|
||||
var field_idx: usize = 0;
|
||||
for (members) |member| {
|
||||
const maybe_container_field: ?zig.Ast.full.ContainerField = switch (tags[member]) {
|
||||
.container_field => tree.containerField(member),
|
||||
.container_field_align => tree.containerFieldAlign(member),
|
||||
.container_field_init => tree.containerFieldInit(member),
|
||||
else => null,
|
||||
for (container_decl.ast.members) |member| {
|
||||
const container_field = tree.fullContainerField(member) orelse {
|
||||
_ = try interpreter.interpret(member, container_scope, options);
|
||||
continue;
|
||||
};
|
||||
|
||||
if (maybe_container_field) |field_info| {
|
||||
var init_type_value = try (try interpreter.interpret(field_info.ast.type_expr, container_scope, .{})).getValue();
|
||||
var default_value = if (field_info.ast.value_expr == 0)
|
||||
null
|
||||
else
|
||||
try (try interpreter.interpret(field_info.ast.value_expr, container_scope, .{})).getValue();
|
||||
var init_type_value = try (try interpreter.interpret(container_field.ast.type_expr, container_scope, .{})).getValue();
|
||||
var default_value = if (container_field.ast.value_expr == 0)
|
||||
null
|
||||
else
|
||||
try (try interpreter.interpret(container_field.ast.value_expr, container_scope, .{})).getValue();
|
||||
|
||||
if (init_type_value.type.getTypeInfo() != .type) {
|
||||
try interpreter.recordError(
|
||||
field_info.ast.type_expr,
|
||||
"expected_type",
|
||||
try std.fmt.allocPrint(interpreter.allocator, "expected type 'type', found '{s}'", .{interpreter.formatTypeInfo(init_type_value.type.getTypeInfo())}),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const name = if (field_info.ast.tuple_like)
|
||||
&[0]u8{}
|
||||
else
|
||||
tree.tokenSlice(field_info.ast.main_token);
|
||||
const field = FieldDefinition{
|
||||
.node_idx = member,
|
||||
.name = name,
|
||||
.type = init_type_value.value_data.type,
|
||||
.default_value = default_value,
|
||||
// TODO: Default values
|
||||
// .@"type" = T: {
|
||||
// var value = (try interpreter.interpret(field_info.ast.type_expr, scope_idx, true)).?.value;
|
||||
// break :T @ptrCast(*Type, @alignCast(@alignOf(*Type), value)).*;
|
||||
// },
|
||||
// .value = null,
|
||||
};
|
||||
|
||||
try cont_type.getTypeInfoMutable().@"struct".fields.put(interpreter.allocator, name, field);
|
||||
field_idx += 1;
|
||||
} else {
|
||||
_ = try interpreter.interpret(member, container_scope, options);
|
||||
if (init_type_value.type.getTypeInfo() != .type) {
|
||||
try interpreter.recordError(
|
||||
container_field.ast.type_expr,
|
||||
"expected_type",
|
||||
try std.fmt.allocPrint(interpreter.allocator, "expected type 'type', found '{s}'", .{interpreter.formatTypeInfo(init_type_value.type.getTypeInfo())}),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const name = if (container_field.ast.tuple_like)
|
||||
&[0]u8{}
|
||||
else
|
||||
tree.tokenSlice(container_field.ast.main_token);
|
||||
const field = FieldDefinition{
|
||||
.node_idx = member,
|
||||
.name = name,
|
||||
.type = init_type_value.value_data.type,
|
||||
.default_value = default_value,
|
||||
// TODO: Default values
|
||||
// .@"type" = T: {
|
||||
// var value = (try interpreter.interpret(container_field.ast.type_expr, scope_idx, true)).?.value;
|
||||
// break :T @ptrCast(*Type, @alignCast(@alignOf(*Type), value)).*;
|
||||
// },
|
||||
// .value = null,
|
||||
};
|
||||
|
||||
try cont_type.getTypeInfoMutable().@"struct".fields.put(interpreter.allocator, name, field);
|
||||
}
|
||||
|
||||
return InterpretResult{ .value = Value{
|
||||
@@ -822,6 +795,9 @@ pub fn interpret(
.value_data = try interpreter.createValueData(.{ .type = cont_type }),
} };
},
.error_set_decl => {
return InterpretResult{ .nothing = {} };
},
.global_var_decl,
.local_var_decl,
.aligned_var_decl,
@@ -832,7 +808,7 @@ pub fn interpret(
if (scope.?.declarations.contains(name))
return InterpretResult{ .nothing = {} };

const decl = ast.varDecl(tree, node_idx).?;
const decl = tree.fullVarDecl(node_idx).?;
if (decl.ast.init_node == 0)
return InterpretResult{ .nothing = {} };
@ -1009,16 +985,18 @@ pub fn interpret(
|
||||
else
|
||||
InterpretResult{ .return_with_value = try (try interpreter.interpret(data[node_idx].lhs, scope, options)).getValue() };
|
||||
},
|
||||
.@"if", .if_simple => {
|
||||
const iff = ast.ifFull(tree, node_idx);
|
||||
.@"if",
|
||||
.if_simple,
|
||||
=> {
|
||||
const if_node = ast.fullIf(tree, node_idx).?;
|
||||
// TODO: Don't evaluate runtime ifs
|
||||
// if (options.observe_values) {
|
||||
const ir = try interpreter.interpret(iff.ast.cond_expr, scope, options);
|
||||
const ir = try interpreter.interpret(if_node.ast.cond_expr, scope, options);
|
||||
if ((try ir.getValue()).value_data.bool) {
|
||||
return try interpreter.interpret(iff.ast.then_expr, scope, options);
|
||||
return try interpreter.interpret(if_node.ast.then_expr, scope, options);
|
||||
} else {
|
||||
if (iff.ast.else_expr != 0) {
|
||||
return try interpreter.interpret(iff.ast.else_expr, scope, options);
|
||||
if (if_node.ast.else_expr != 0) {
|
||||
return try interpreter.interpret(if_node.ast.else_expr, scope, options);
|
||||
} else return InterpretResult{ .nothing = {} };
|
||||
}
|
||||
},
|
||||
@@ -1254,7 +1232,7 @@ pub fn interpret(
// .fn_proto_simple,
.fn_decl => {
// var buf: [1]Ast.Node.Index = undefined;
// const func = ast.fnProto(tree, node_idx, &buf).?;
// const func = tree.fullFnProto(node_idx, &buf).?;

// TODO: Add params
@@ -1315,7 +1293,7 @@ pub fn interpret(
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
const call_full = ast.callFull(tree, node_idx, &params) orelse unreachable;
const call_full = tree.fullCall(&params, node_idx) orelse unreachable;

var args = try std.ArrayListUnmanaged(Value).initCapacity(interpreter.allocator, call_full.ast.params.len);
defer args.deinit(interpreter.allocator);
@@ -1433,7 +1411,7 @@ pub fn call(
var fn_scope = try interpreter.newScope(scope, func_node_idx);

var buf: [1]Ast.Node.Index = undefined;
var proto = ast.fnProto(tree, func_node_idx, &buf).?;
var proto = tree.fullFnProto(&buf, func_node_idx).?;

var arg_it = proto.iterate(&tree);
var arg_index: usize = 0;
@@ -273,7 +273,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
.fn_decl,
=> blk: {
var buf: [1]Ast.Node.Index = undefined;
const func = ast.fnProto(tree, decl_idx, &buf).?;
const func = tree.fullFnProto(&buf, decl_idx).?;
if (func.extern_export_inline_token != null) break :blk;

if (func.name_token) |name_token| {
@@ -689,7 +689,7 @@ fn nodeToCompletion(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const func = ast.fnProto(tree, node, &buf).?;
const func = tree.fullFnProto(&buf, node).?;
if (func.name_token) |name_token| {
const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets;
const insert_text = if (use_snippets) blk: {
@@ -715,7 +715,7 @@ fn nodeToCompletion(
.aligned_var_decl,
.simple_var_decl,
=> {
const var_decl = ast.varDecl(tree, node).?;
const var_decl = tree.fullVarDecl(node).?;
const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const;

if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, node_handle)) |result| {
@ -740,7 +740,7 @@ fn nodeToCompletion(
|
||||
.container_field_align,
|
||||
.container_field_init,
|
||||
=> {
|
||||
const field = ast.containerField(tree, node).?;
|
||||
const field = tree.fullContainerField(node).?;
|
||||
try list.append(allocator, .{
|
||||
.label = handle.tree.tokenSlice(field.ast.main_token),
|
||||
.kind = if (field.ast.tuple_like) .Enum else .Field,
|
||||
@ -766,7 +766,7 @@ fn nodeToCompletion(
|
||||
.ptr_type_bit_range,
|
||||
.ptr_type_sentinel,
|
||||
=> {
|
||||
const ptr_type = ast.ptrType(tree, node).?;
|
||||
const ptr_type = ast.fullPtrType(tree, node).?;
|
||||
|
||||
switch (ptr_type.size) {
|
||||
.One, .C, .Many => if (server.config.operator_completions) {
|
||||
@ -900,11 +900,11 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
|
||||
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
|
||||
if (ast.varDecl(tree, node)) |var_decl| {
|
||||
if (tree.fullVarDecl(node)) |var_decl| {
|
||||
break :def analysis.getVariableSignature(tree, var_decl);
|
||||
} else if (ast.fnProto(tree, node, &buf)) |fn_proto| {
|
||||
} else if (tree.fullFnProto(&buf, node)) |fn_proto| {
|
||||
break :def analysis.getFunctionSignature(tree, fn_proto);
|
||||
} else if (ast.containerField(tree, node)) |field| {
|
||||
} else if (tree.fullContainerField(node)) |field| {
|
||||
break :def analysis.getContainerFieldSignature(tree, field);
|
||||
} else {
|
||||
break :def analysis.nodeToString(tree, node) orelse return null;
|
||||
@ -2621,8 +2621,10 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
|
||||
.root => continue,
|
||||
// only fold the expression pertaining to the if statement, and the else statement, each respectively.
|
||||
// TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures.
|
||||
.@"if", .if_simple => {
|
||||
const if_full = ast.ifFull(handle.tree, node);
|
||||
.@"if",
|
||||
.if_simple,
|
||||
=> {
|
||||
const if_full = ast.fullIf(handle.tree, node).?;
|
||||
|
||||
const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr);
|
||||
const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr);
|
||||
@ -2643,7 +2645,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
|
||||
.while_cont,
|
||||
.while_simple,
|
||||
=> {
|
||||
const loop_full = ast.whileAst(handle.tree, node).?;
|
||||
const loop_full = ast.fullWhile(handle.tree, node).?;
|
||||
|
||||
const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr);
|
||||
const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr);
|
||||
@ -2685,8 +2687,8 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
|
||||
}
|
||||
|
||||
// Function prototype folding regions
|
||||
var fn_proto_buffer: [1]Node.Index = undefined;
|
||||
const fn_proto = ast.fnProto(handle.tree, node, fn_proto_buffer[0..]) orelse
|
||||
var buffer: [1]Node.Index = undefined;
|
||||
const fn_proto = handle.tree.fullFnProto(&buffer, node) orelse
|
||||
break :decl_node_blk;
|
||||
|
||||
const list_start_tok: Ast.TokenIndex = fn_proto.lparen;
|
||||
|
src/analysis.zig
@ -194,7 +194,6 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
|
||||
if (param.type_expr == 0) return false;
|
||||
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
const token_data = tree.nodes.items(.data);
|
||||
const in_container = innermostContainer(handle, token_starts[func.ast.fn_token]);
|
||||
|
||||
if (try resolveTypeOfNode(document_store, arena, .{
|
||||
@ -205,9 +204,9 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
|
||||
return true;
|
||||
}
|
||||
|
||||
if (ast.isPtrType(tree, param.type_expr)) {
|
||||
if (ast.fullPtrType(tree, param.type_expr)) |ptr_type| {
|
||||
if (try resolveTypeOfNode(document_store, arena, .{
|
||||
.node = token_data[param.type_expr].rhs,
|
||||
.node = ptr_type.ast.child_type,
|
||||
.handle = handle,
|
||||
})) |resolved_prefix_op| {
|
||||
if (std.meta.eql(in_container, resolved_prefix_op))
|
||||
@ -280,7 +279,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
|
||||
.global_var_decl,
|
||||
.simple_var_decl,
|
||||
.aligned_var_decl,
|
||||
=> ast.varDecl(tree, node).?.ast.mut_token + 1,
|
||||
=> tree.fullVarDecl(node).?.ast.mut_token + 1,
|
||||
// function declaration names
|
||||
.fn_proto,
|
||||
.fn_proto_multi,
|
||||
@ -289,7 +288,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
|
||||
.fn_decl,
|
||||
=> blk: {
|
||||
var params: [1]Ast.Node.Index = undefined;
|
||||
break :blk ast.fnProto(tree, node, &params).?.name_token;
|
||||
break :blk tree.fullFnProto(&params, node).?.name_token;
|
||||
},
|
||||
|
||||
// containers
|
||||
@ -297,7 +296,7 @@ pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
|
||||
.container_field_init,
|
||||
.container_field_align,
|
||||
=> {
|
||||
const field = ast.containerField(tree, node).?.ast;
|
||||
const field = tree.fullContainerField(node).?.ast;
|
||||
return field.main_token;
|
||||
},
|
||||
|
||||
@ -381,7 +380,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
|
||||
if (ast.varDecl(handle.tree, decl)) |var_decl| {
|
||||
if (handle.tree.fullVarDecl(decl)) |var_decl| {
|
||||
if (var_decl.ast.init_node == 0) return null;
|
||||
const base_exp = var_decl.ast.init_node;
|
||||
if (token_tags[var_decl.ast.mut_token] != .keyword_const) return null;
|
||||
@ -411,8 +410,9 @@ fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.N
|
||||
if (node_tags[child_idx] == .@"return") {
|
||||
if (datas[child_idx].lhs != 0) {
|
||||
const lhs = datas[child_idx].lhs;
|
||||
if (ast.isCall(tree, lhs)) {
|
||||
const call_name = getDeclName(tree, datas[lhs].lhs);
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
if (tree.fullCall(&buf, lhs)) |call| {
|
||||
const call_name = getDeclName(tree, call.ast.fn_expr);
|
||||
if (call_name) |name| {
|
||||
if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) {
|
||||
continue;
|
||||
@ -528,8 +528,7 @@ fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, dere
|
||||
const main_token = tree.nodes.items(.main_token)[deref_node];
|
||||
const token_tag = tree.tokens.items(.tag)[main_token];
|
||||
|
||||
if (ast.isPtrType(tree, deref_node)) {
|
||||
const ptr_type = ast.ptrType(tree, deref_node).?;
|
||||
if (ast.fullPtrType(tree, deref_node)) |ptr_type| {
|
||||
switch (token_tag) {
|
||||
.asterisk => {
|
||||
return ((try resolveTypeOfNodeInternal(store, arena, .{
|
||||
@ -566,7 +565,7 @@ fn resolveBracketAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocat
|
||||
.type = .{ .data = .{ .slice = data.rhs }, .is_type_val = false },
|
||||
.handle = lhs.handle,
|
||||
};
|
||||
} else if (ast.ptrType(tree, lhs_node)) |ptr_type| {
|
||||
} else if (ast.fullPtrType(tree, lhs_node)) |ptr_type| {
|
||||
if (ptr_type.size == .Slice) {
|
||||
if (rhs == .Single) {
|
||||
return ((try resolveTypeOfNodeInternal(store, arena, .{
|
||||
@ -647,7 +646,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
.simple_var_decl,
|
||||
.aligned_var_decl,
|
||||
=> {
|
||||
const var_decl = ast.varDecl(tree, node).?;
|
||||
const var_decl = tree.fullVarDecl(node).?;
|
||||
if (var_decl.ast.type_node != 0) {
|
||||
const decl_type = .{ .node = var_decl.ast.type_node, .handle = handle };
|
||||
if (try resolveTypeOfNodeInternal(store, arena, decl_type, bound_type_params)) |typ|
|
||||
@ -679,7 +678,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
switch (child.decl.*) {
|
||||
.ast_node => |n| {
|
||||
if (n == node) return null;
|
||||
if (ast.varDecl(child.handle.tree, n)) |var_decl| {
|
||||
if (child.handle.tree.fullVarDecl(n)) |var_decl| {
|
||||
if (var_decl.ast.init_node == node)
|
||||
return null;
|
||||
}
|
||||
@ -700,7 +699,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
.async_call_one_comma,
|
||||
=> {
|
||||
var params: [1]Ast.Node.Index = undefined;
|
||||
const call = ast.callFull(tree, node, &params) orelse unreachable;
|
||||
const call = tree.fullCall(&params, node) orelse unreachable;
|
||||
|
||||
const callee = .{ .node = call.ast.fn_expr, .handle = handle };
|
||||
const decl = (try resolveTypeOfNodeInternal(store, arena, callee, bound_type_params)) orelse
|
||||
@ -712,7 +711,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
else => return null,
|
||||
};
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const func_maybe = ast.fnProto(decl.handle.tree, decl_node, &buf);
|
||||
const func_maybe = decl.handle.tree.fullFnProto(&buf, decl_node);
|
||||
|
||||
if (func_maybe) |fn_decl| {
|
||||
var expected_params = fn_decl.ast.params.len;
|
||||
@ -979,7 +978,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
const decl = root_scope_decls.get("Type") orelse return null;
|
||||
if (decl != .ast_node) return null;
|
||||
|
||||
const var_decl = ast.varDecl(new_handle.tree, decl.ast_node) orelse return null;
|
||||
const var_decl = new_handle.tree.fullVarDecl(decl.ast_node) orelse return null;
|
||||
|
||||
return TypeWithHandle{
|
||||
.type = .{
|
||||
@ -1019,7 +1018,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
|
||||
=> {
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
// This is a function type
|
||||
if (ast.fnProto(tree, node, &buf).?.name_token == null) {
|
||||
if (tree.fullFnProto(&buf, node).?.name_token == null) {
|
||||
return TypeWithHandle.typeVal(node_handle);
|
||||
}
|
||||
|
||||
@ -1107,11 +1106,10 @@ pub const TypeWithHandle = struct {
|
||||
const tree = self.handle.tree;
|
||||
const node = self.type.data.other;
|
||||
const tags = tree.nodes.items(.tag);
|
||||
if (ast.isContainer(tree, node)) {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
for (ast.declMembers(tree, node, &buf)) |child| {
|
||||
if (tags[child].isContainerField()) return false;
|
||||
}
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const full = tree.fullContainerDecl(&buf, node) orelse return true;
|
||||
for (full.ast.members) |member| {
|
||||
if (tags[member].isContainerField()) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -1132,7 +1130,7 @@ pub const TypeWithHandle = struct {
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const tree = self.handle.tree;
|
||||
return switch (self.type.data) {
|
||||
.other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: {
|
||||
.other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: {
|
||||
break :blk isTypeFunction(tree, fn_proto);
|
||||
} else false,
|
||||
else => false,
|
||||
@ -1143,7 +1141,7 @@ pub const TypeWithHandle = struct {
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const tree = self.handle.tree;
|
||||
return switch (self.type.data) {
|
||||
.other => |n| if (ast.fnProto(tree, n, &buf)) |fn_proto| blk: {
|
||||
.other => |n| if (tree.fullFnProto(&buf, n)) |fn_proto| blk: {
|
||||
break :blk isGenericFunction(tree, fn_proto);
|
||||
} else false,
|
||||
else => false,
|
||||
@ -1343,7 +1341,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
|
||||
if (current_type.?.type.is_type_val) return null;
|
||||
const cur_tree = current_type.?.handle.tree;
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
if (ast.fnProto(cur_tree, current_type_node, &buf)) |func| {
|
||||
if (cur_tree.fullFnProto(&buf, current_type_node)) |func| {
|
||||
// Check if the function has a body and if so, pass it
|
||||
// so the type can be resolved if it's a generic function returning
|
||||
// an anonymous struct
|
||||
@ -1408,13 +1406,13 @@ pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool {
|
||||
.local_var_decl,
|
||||
.simple_var_decl,
|
||||
.aligned_var_decl,
|
||||
=> ast.varDecl(tree, node).?.visib_token != null,
|
||||
=> tree.fullVarDecl(node).?.visib_token != null,
|
||||
.fn_proto,
|
||||
.fn_proto_multi,
|
||||
.fn_proto_one,
|
||||
.fn_proto_simple,
|
||||
.fn_decl,
|
||||
=> ast.fnProto(tree, node, &buf).?.visib_token != null,
|
||||
=> tree.fullFnProto(&buf, node).?.visib_token != null,
|
||||
else => true,
|
||||
};
|
||||
}
|
||||
@ -1428,7 +1426,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
|
||||
.container_field_init,
|
||||
.container_field_align,
|
||||
=> {
|
||||
const field = ast.containerField(tree, node).?.ast;
|
||||
const field = tree.fullContainerField(node).?.ast;
|
||||
return if (field.tuple_like) null else tree.tokenSlice(field.main_token);
|
||||
},
|
||||
.error_value => tree.tokenSlice(data[node].rhs),
|
||||
@ -1438,7 +1436,7 @@ pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
|
||||
.fn_proto_one,
|
||||
.fn_proto_simple,
|
||||
.fn_decl,
|
||||
=> if (ast.fnProto(tree, node, &buf).?.name_token) |name| tree.tokenSlice(name) else null,
|
||||
=> if (tree.fullFnProto(&buf, node).?.name_token) |name| tree.tokenSlice(name) else null,
|
||||
.field_access => tree.tokenSlice(data[node].rhs),
|
||||
.call,
|
||||
.call_comma,
|
||||
@ -1465,16 +1463,16 @@ fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize)
|
||||
|
||||
pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
if (ast.isContainer(tree, node)) {
|
||||
const decls = ast.declMembers(tree, node, &buf);
|
||||
for (decls) |decl_idx| {
|
||||
if (tree.fullContainerDecl(&buf, node)) |container_decl| {
|
||||
for (container_decl.ast.members) |decl_idx| {
|
||||
if (getImportStr(tree, decl_idx, source_index)) |name| {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
} else if (ast.varDecl(tree, node)) |var_decl| {
|
||||
} else if (tree.fullVarDecl(node)) |var_decl| {
|
||||
return getImportStr(tree, var_decl.ast.init_node, source_index);
|
||||
} else if (node_tags[node] == .@"usingnamespace") {
|
||||
return getImportStr(tree, tree.nodes.items(.data)[node].lhs, source_index);
|
||||
@ -1868,7 +1866,8 @@ fn addOutlineNodes(allocator: std.mem.Allocator, tree: Ast, child: Ast.Node.Inde
|
||||
.tagged_union_two_trailing,
|
||||
=> {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
for (ast.declMembers(tree, child, &buf)) |member|
|
||||
const members = tree.fullContainerDecl(&buf, child).?.ast.members;
|
||||
for (members) |member|
|
||||
try addOutlineNodes(allocator, tree, member, context);
|
||||
return;
|
||||
},
|
||||
@ -1927,20 +1926,18 @@ fn getDocumentSymbolsInternal(allocator: std.mem.Allocator, tree: Ast, node: Ast
|
||||
.encoding = context.encoding,
|
||||
};
|
||||
|
||||
if (ast.isContainer(tree, node)) {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
for (ast.declMembers(tree, node, &buf)) |child|
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
if (tree.fullContainerDecl(&buf, node)) |container_decl| {
|
||||
for (container_decl.ast.members) |child| {
|
||||
try addOutlineNodes(allocator, tree, child, &child_context);
|
||||
}
|
||||
|
||||
if (ast.varDecl(tree, node)) |var_decl| {
|
||||
}
|
||||
} else if (tree.fullVarDecl(node)) |var_decl| {
|
||||
if (var_decl.ast.init_node != 0)
|
||||
try addOutlineNodes(allocator, tree, var_decl.ast.init_node, &child_context);
|
||||
}
|
||||
if (tags[node] == .fn_decl) fn_ch: {
|
||||
} else if (tags[node] == .fn_decl) fn_ch: {
|
||||
const fn_decl = tree.nodes.items(.data)[node];
|
||||
var params: [1]Ast.Node.Index = undefined;
|
||||
const fn_proto = ast.fnProto(tree, fn_decl.lhs, ¶ms) orelse break :fn_ch;
|
||||
const fn_proto = tree.fullFnProto(¶ms, fn_decl.lhs).?;
|
||||
if (!isTypeFunction(tree, fn_proto)) break :fn_ch;
|
||||
const ret_stmt = findReturnStatement(tree, fn_proto, fn_decl.rhs) orelse break :fn_ch;
|
||||
const type_decl = tree.nodes.items(.data)[ret_stmt].lhs;
|
||||
@ -2098,7 +2095,7 @@ pub const DeclWithHandle = struct {
|
||||
|
||||
switch (candidate.value_ptr.*) {
|
||||
.ast_node => |node| {
|
||||
if (ast.containerField(switch_expr_type.handle.tree, node)) |container_field| {
|
||||
if (switch_expr_type.handle.tree.fullContainerField(node)) |container_field| {
|
||||
if (container_field.ast.type_expr != 0) {
|
||||
return ((try resolveTypeOfNodeInternal(
|
||||
store,
|
||||
@ -2576,10 +2573,6 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
|
||||
const scopes = context.scopes;
|
||||
const tree = context.tree;
|
||||
const tags = tree.nodes.items(.tag);
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
const data = tree.nodes.items(.data);
|
||||
const main_tokens = tree.nodes.items(.main_token);
|
||||
const node_tag = tags[node_idx];
|
||||
|
||||
try scopes.append(allocator, .{
|
||||
.loc = offsets.nodeToLoc(tree, node_idx),
|
||||
@ -2587,32 +2580,9 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
|
||||
});
|
||||
const scope_index = scopes.len - 1;
|
||||
|
||||
if (node_tag == .error_set_decl) {
|
||||
// All identifiers in main_token..data.lhs are error fields.
|
||||
var i = main_tokens[node_idx];
|
||||
while (i < data[node_idx].rhs) : (i += 1) {
|
||||
if (token_tags[i] == .identifier) {
|
||||
const name = offsets.tokenToSlice(tree, i);
|
||||
if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| {
|
||||
// TODO Record a redefinition error.
|
||||
}
|
||||
const gop = try context.errors.getOrPut(allocator, .{
|
||||
.label = name,
|
||||
.kind = .Constant,
|
||||
//.detail =
|
||||
.insertText = name,
|
||||
.insertTextFormat = .PlainText,
|
||||
});
|
||||
if (!gop.found_existing) {
|
||||
gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const ast_decls = ast.declMembers(tree, node_idx, &buf);
|
||||
for (ast_decls) |decl| {
|
||||
const container_decl = tree.fullContainerDecl(&buf, node_idx).?;
|
||||
for (container_decl.ast.members) |decl| {
|
||||
if (tags[decl] == .@"usingnamespace") {
|
||||
try scopes.items(.uses)[scope_index].append(allocator, decl);
|
||||
continue;
|
||||
@ -2630,9 +2600,6 @@ fn makeInnerScope(allocator: std.mem.Allocator, context: ScopeContext, node_idx:
|
||||
// TODO Record a redefinition error.
|
||||
}
|
||||
|
||||
var buffer: [2]Ast.Node.Index = undefined;
|
||||
const container_decl = ast.containerDecl(tree, node_idx, &buffer) orelse continue;
|
||||
|
||||
if (container_decl.ast.enum_token != null) {
|
||||
if (std.mem.eql(u8, name, "_")) return;
|
||||
const Documentation = @TypeOf(@as(types.CompletionItem, undefined).documentation);
|
||||
@ -2684,10 +2651,37 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.tagged_union_enum_tag,
|
||||
.tagged_union_enum_tag_trailing,
|
||||
.root,
|
||||
.error_set_decl,
|
||||
=> {
|
||||
try makeInnerScope(allocator, context, node_idx);
|
||||
},
|
||||
.error_set_decl => {
|
||||
try scopes.append(allocator, .{
|
||||
.loc = offsets.nodeToLoc(tree, node_idx),
|
||||
.data = .{ .container = node_idx },
|
||||
});
|
||||
const scope_index = scopes.len - 1;
|
||||
|
||||
// All identifiers in main_token..data.lhs are error fields.
|
||||
var i = main_tokens[node_idx];
|
||||
while (i < data[node_idx].rhs) : (i += 1) {
|
||||
if (token_tags[i] == .identifier) {
|
||||
const name = offsets.tokenToSlice(tree, i);
|
||||
if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .error_token = i })) |_| {
|
||||
// TODO Record a redefinition error.
|
||||
}
|
||||
const gop = try context.errors.getOrPut(allocator, .{
|
||||
.label = name,
|
||||
.kind = .Constant,
|
||||
//.detail =
|
||||
.insertText = name,
|
||||
.insertTextFormat = .PlainText,
|
||||
});
|
||||
if (!gop.found_existing) {
|
||||
gop.key_ptr.detail = try std.fmt.allocPrint(allocator, "error.{s}", .{name});
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
.array_type_sentinel => {
|
||||
// TODO: ???
|
||||
return;
|
||||
@ -2699,7 +2693,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.fn_decl,
|
||||
=> |fn_tag| {
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const func = ast.fnProto(tree, node_idx, &buf).?;
|
||||
const func = tree.fullFnProto(&buf, node_idx).?;
|
||||
|
||||
try scopes.append(allocator, .{
|
||||
.loc = offsets.nodeToLoc(tree, node_idx),
|
||||
@ -2779,7 +2773,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
}
|
||||
|
||||
try makeScopeInternal(allocator, context, idx);
|
||||
if (ast.varDecl(tree, idx)) |var_decl| {
|
||||
if (tree.fullVarDecl(idx)) |var_decl| {
|
||||
const name = tree.tokenSlice(var_decl.ast.mut_token + 1);
|
||||
if (try scopes.items(.decls)[scope_index].fetchPut(allocator, name, .{ .ast_node = idx })) |existing| {
|
||||
_ = existing;
|
||||
@ -2793,7 +2787,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.@"if",
|
||||
.if_simple,
|
||||
=> {
|
||||
const if_node = ast.ifFull(tree, node_idx);
|
||||
const if_node = ast.fullIf(tree, node_idx).?;
|
||||
|
||||
if (if_node.payload_token) |payload| {
|
||||
try scopes.append(allocator, .{
|
||||
@ -2867,7 +2861,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.@"for",
|
||||
.for_simple,
|
||||
=> {
|
||||
const while_node = ast.whileAst(tree, node_idx).?;
|
||||
const while_node = ast.fullWhile(tree, node_idx).?;
|
||||
const is_for = node_tag == .@"for" or node_tag == .for_simple;
|
||||
|
||||
if (while_node.label_token) |label| {
|
||||
@ -2955,11 +2949,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
const cases = tree.extra_data[extra.start..extra.end];
|
||||
|
||||
for (cases) |case| {
|
||||
const switch_case: Ast.full.SwitchCase = switch (tags[case]) {
|
||||
.switch_case => tree.switchCase(case),
|
||||
.switch_case_one => tree.switchCaseOne(case),
|
||||
else => continue,
|
||||
};
|
||||
const switch_case: Ast.full.SwitchCase = tree.fullSwitchCase(case).?;
|
||||
|
||||
if (switch_case.payload_token) |payload| {
|
||||
try scopes.append(allocator, .{
|
||||
@ -3000,7 +2990,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.aligned_var_decl,
|
||||
.simple_var_decl,
|
||||
=> {
|
||||
const var_decl = ast.varDecl(tree, node_idx).?;
|
||||
const var_decl = tree.fullVarDecl(node_idx).?;
|
||||
if (var_decl.ast.type_node != 0) {
|
||||
try makeScopeInternal(allocator, context, var_decl.ast.type_node);
|
||||
}
|
||||
@ -3019,7 +3009,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.async_call_one_comma,
|
||||
=> {
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const call = ast.callFull(tree, node_idx, &buf).?;
|
||||
const call = tree.fullCall(&buf, node_idx).?;
|
||||
|
||||
try makeScopeInternal(allocator, context, call.ast.fn_expr);
|
||||
for (call.ast.params) |param|
|
||||
@ -3035,13 +3025,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.struct_init_one_comma,
|
||||
=> {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const struct_init: Ast.full.StructInit = switch (node_tag) {
|
||||
.struct_init, .struct_init_comma => tree.structInit(node_idx),
|
||||
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node_idx),
|
||||
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node_idx),
|
||||
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node_idx),
|
||||
else => unreachable,
|
||||
};
|
||||
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node_idx).?;
|
||||
|
||||
if (struct_init.ast.type_expr != 0)
|
||||
try makeScopeInternal(allocator, context, struct_init.ast.type_expr);
|
||||
@ -3060,13 +3044,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.array_init_one_comma,
|
||||
=> {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const array_init: Ast.full.ArrayInit = switch (node_tag) {
|
||||
.array_init, .array_init_comma => tree.arrayInit(node_idx),
|
||||
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node_idx),
|
||||
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node_idx),
|
||||
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node_idx),
|
||||
else => unreachable,
|
||||
};
|
||||
const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node_idx).?;
|
||||
|
||||
if (array_init.ast.type_expr != 0)
|
||||
try makeScopeInternal(allocator, context, array_init.ast.type_expr);
|
||||
@ -3078,7 +3056,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.container_field_align,
|
||||
.container_field_init,
|
||||
=> {
|
||||
const field = ast.containerField(tree, node_idx).?;
|
||||
const field = tree.fullContainerField(node_idx).?;
|
||||
|
||||
try makeScopeInternal(allocator, context, field.ast.type_expr);
|
||||
try makeScopeInternal(allocator, context, field.ast.align_expr);
|
||||
@ -3101,7 +3079,7 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.ptr_type_bit_range,
|
||||
.ptr_type_sentinel,
|
||||
=> {
|
||||
const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node_idx).?;
|
||||
const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node_idx).?;
|
||||
|
||||
try makeScopeInternal(allocator, context, ptr_type.ast.sentinel);
|
||||
try makeScopeInternal(allocator, context, ptr_type.ast.align_node);
|
||||
@ -3111,12 +3089,8 @@ fn makeScopeInternal(allocator: std.mem.Allocator, context: ScopeContext, node_i
|
||||
.slice_open,
|
||||
.slice_sentinel,
|
||||
=> {
|
||||
const slice: Ast.full.Slice = switch (node_tag) {
|
||||
.slice => tree.slice(node_idx),
|
||||
.slice_open => tree.sliceOpen(node_idx),
|
||||
.slice_sentinel => tree.sliceSentinel(node_idx),
|
||||
else => unreachable,
|
||||
};
|
||||
const slice: Ast.full.Slice = tree.fullSlice(node_idx).?;
|
||||
|
||||
try makeScopeInternal(allocator, context, slice.ast.sliced);
|
||||
try makeScopeInternal(allocator, context, slice.ast.start);
|
||||
try makeScopeInternal(allocator, context, slice.ast.end);
|
||||
|
src/ast.zig
@ -7,7 +7,7 @@ const Ast = std.zig.Ast;
|
||||
const Node = Ast.Node;
|
||||
const full = Ast.full;
|
||||
|
||||
fn fullPtrType(tree: Ast, info: full.PtrType.Components) full.PtrType {
|
||||
fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
const size: std.builtin.Type.Pointer.Size = switch (token_tags[info.main_token]) {
|
||||
.asterisk,
|
||||
@ -57,7 +57,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type);
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
const extra = tree.extraData(data.lhs, Node.PtrType);
|
||||
return fullPtrType(tree, .{
|
||||
return fullPtrTypeComponents(tree, .{
|
||||
.main_token = tree.nodes.items(.main_token)[node],
|
||||
.align_node = extra.align_node,
|
||||
.addrspace_node = extra.addrspace_node,
|
||||
@ -71,7 +71,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
|
||||
pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
return fullPtrType(tree, .{
|
||||
return fullPtrTypeComponents(tree, .{
|
||||
.main_token = tree.nodes.items(.main_token)[node],
|
||||
.align_node = 0,
|
||||
.addrspace_node = 0,
|
||||
@ -85,7 +85,7 @@ pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
|
||||
pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
return fullPtrType(tree, .{
|
||||
return fullPtrTypeComponents(tree, .{
|
||||
.main_token = tree.nodes.items(.main_token)[node],
|
||||
.align_node = data.lhs,
|
||||
.addrspace_node = 0,
|
||||
@ -100,7 +100,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
|
||||
return fullPtrType(tree, .{
|
||||
return fullPtrTypeComponents(tree, .{
|
||||
.main_token = tree.nodes.items(.main_token)[node],
|
||||
.align_node = extra.align_node,
|
||||
.addrspace_node = extra.addrspace_node,
|
||||
@ -111,7 +111,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
|
||||
});
|
||||
}
|
||||
|
||||
fn fullIf(tree: Ast, info: full.If.Components) full.If {
|
||||
fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
var result: full.If = .{
|
||||
.ast = info,
|
||||
@ -137,27 +137,29 @@ fn fullIf(tree: Ast, info: full.If.Components) full.If {
|
||||
}
|
||||
|
||||
pub fn ifFull(tree: Ast, node: Node.Index) full.If {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .@"if");
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
if (tree.nodes.items(.tag)[node] == .@"if") {
|
||||
const extra = tree.extraData(data.rhs, Node.If);
|
||||
return fullIf(tree, .{
|
||||
.cond_expr = data.lhs,
|
||||
.then_expr = extra.then_expr,
|
||||
.else_expr = extra.else_expr,
|
||||
.if_token = tree.nodes.items(.main_token)[node],
|
||||
});
|
||||
} else {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
|
||||
return fullIf(tree, .{
|
||||
.cond_expr = data.lhs,
|
||||
.then_expr = data.rhs,
|
||||
.else_expr = 0,
|
||||
.if_token = tree.nodes.items(.main_token)[node],
|
||||
});
|
||||
}
|
||||
const extra = tree.extraData(data.rhs, Node.If);
|
||||
return fullIfComponents(tree, .{
|
||||
.cond_expr = data.lhs,
|
||||
.then_expr = extra.then_expr,
|
||||
.else_expr = extra.else_expr,
|
||||
.if_token = tree.nodes.items(.main_token)[node],
|
||||
});
|
||||
}
|
||||
|
||||
fn fullWhile(tree: Ast, info: full.While.Components) full.While {
|
||||
pub fn ifSimple(tree: Ast, node: Node.Index) full.If {
|
||||
std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
return fullIfComponents(tree, .{
|
||||
.cond_expr = data.lhs,
|
||||
.then_expr = data.rhs,
|
||||
.else_expr = 0,
|
||||
.if_token = tree.nodes.items(.main_token)[node],
|
||||
});
|
||||
}
|
||||
|
||||
fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
var result: full.While = .{
|
||||
.ast = info,
|
||||
@ -194,7 +196,7 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While {
|
||||
|
||||
pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
return fullWhile(tree, .{
|
||||
return fullWhileComponents(tree, .{
|
||||
.while_token = tree.nodes.items(.main_token)[node],
|
||||
.cond_expr = data.lhs,
|
||||
.cont_expr = 0,
|
||||
@ -206,7 +208,7 @@ pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
|
||||
pub fn whileCont(tree: Ast, node: Node.Index) full.While {
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
const extra = tree.extraData(data.rhs, Node.WhileCont);
|
||||
return fullWhile(tree, .{
|
||||
return fullWhileComponents(tree, .{
|
||||
.while_token = tree.nodes.items(.main_token)[node],
|
||||
.cond_expr = data.lhs,
|
||||
.cont_expr = extra.cont_expr,
|
||||
@ -218,7 +220,7 @@ pub fn whileCont(tree: Ast, node: Node.Index) full.While {
|
||||
pub fn whileFull(tree: Ast, node: Node.Index) full.While {
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
const extra = tree.extraData(data.rhs, Node.While);
|
||||
return fullWhile(tree, .{
|
||||
return fullWhileComponents(tree, .{
|
||||
.while_token = tree.nodes.items(.main_token)[node],
|
||||
.cond_expr = data.lhs,
|
||||
.cont_expr = extra.cont_expr,
|
||||
@ -229,7 +231,7 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While {
|
||||
|
||||
pub fn forSimple(tree: Ast, node: Node.Index) full.While {
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
return fullWhile(tree, .{
|
||||
return fullWhileComponents(tree, .{
|
||||
.while_token = tree.nodes.items(.main_token)[node],
|
||||
.cond_expr = data.lhs,
|
||||
.cont_expr = 0,
|
||||
@ -241,7 +243,7 @@ pub fn forSimple(tree: Ast, node: Node.Index) full.While {
|
||||
pub fn forFull(tree: Ast, node: Node.Index) full.While {
|
||||
const data = tree.nodes.items(.data)[node];
|
||||
const extra = tree.extraData(data.rhs, Node.If);
|
||||
return fullWhile(tree, .{
|
||||
return fullWhileComponents(tree, .{
|
||||
.while_token = tree.nodes.items(.main_token)[node],
|
||||
.cond_expr = data.lhs,
|
||||
.cont_expr = 0,
|
||||
@@ -250,6 +252,35 @@ pub fn forFull(tree: Ast, node: Node.Index) full.While {
});
}

pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
return switch (tree.nodes.items(.tag)[node]) {
.ptr_type_aligned => tree.ptrTypeAligned(node),
.ptr_type_sentinel => tree.ptrTypeSentinel(node),
.ptr_type => tree.ptrType(node),
.ptr_type_bit_range => tree.ptrTypeBitRange(node),
else => null,
};
}

pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
return switch (tree.nodes.items(.tag)[node]) {
.if_simple => tree.ifSimple(node),
.@"if" => tree.ifFull(node),
else => null,
};
}

pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
return switch (tree.nodes.items(.tag)[node]) {
.while_simple => tree.whileSimple(node),
.while_cont => tree.whileCont(node),
.@"while" => tree.whileFull(node),
.for_simple => tree.forSimple(node),
.@"for" => tree.forFull(node),
else => null,
};
}

pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
const TokenIndex = Ast.TokenIndex;
const tags = tree.nodes.items(.tag);
@ -911,36 +942,6 @@ pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
|
||||
return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
|
||||
}
|
||||
|
||||
pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.container_field => tree.containerField(node),
|
||||
.container_field_init => tree.containerFieldInit(node),
|
||||
.container_field_align => tree.containerFieldAlign(node),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn ptrType(tree: Ast, node: Ast.Node.Index) ?Ast.full.PtrType {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.ptr_type => ptrTypeSimple(tree, node),
|
||||
.ptr_type_aligned => ptrTypeAligned(tree, node),
|
||||
.ptr_type_bit_range => ptrTypeBitRange(tree, node),
|
||||
.ptr_type_sentinel => ptrTypeSentinel(tree, node),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn whileAst(tree: Ast, node: Ast.Node.Index) ?Ast.full.While {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.@"while" => whileFull(tree, node),
|
||||
.while_simple => whileSimple(tree, node),
|
||||
.while_cont => whileCont(tree, node),
|
||||
.@"for" => forFull(tree, node),
|
||||
.for_simple => forSimple(tree, node),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.container_decl,
|
||||
@ -962,58 +963,6 @@ pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn containerDecl(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) ?full.ContainerDecl {
|
||||
return switch (tree.nodes.items(.tag)[node_idx]) {
|
||||
.container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
|
||||
.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
|
||||
.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx),
|
||||
.tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
|
||||
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
|
||||
.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
/// Returns the member indices of a given declaration container.
|
||||
/// Asserts given `tag` is a container node
|
||||
pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
|
||||
std.debug.assert(isContainer(tree, node_idx));
|
||||
return switch (tree.nodes.items(.tag)[node_idx]) {
|
||||
.container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
|
||||
.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members,
|
||||
.container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx).ast.members,
|
||||
.tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members,
|
||||
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members,
|
||||
.tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members,
|
||||
.root => tree.rootDecls(),
|
||||
.error_set_decl => &[_]Ast.Node.Index{},
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
/// Returns an `ast.full.VarDecl` for a given node index.
|
||||
/// Returns null if the tag doesn't match
|
||||
pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
|
||||
return switch (tree.nodes.items(.tag)[node_idx]) {
|
||||
.global_var_decl => tree.globalVarDecl(node_idx),
|
||||
.local_var_decl => tree.localVarDecl(node_idx),
|
||||
.aligned_var_decl => tree.alignedVarDecl(node_idx),
|
||||
.simple_var_decl => tree.simpleVarDecl(node_idx),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.ptr_type,
|
||||
.ptr_type_aligned,
|
||||
.ptr_type_bit_range,
|
||||
.ptr_type_sentinel,
|
||||
=> true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.builtin_call,
|
||||
@ -1051,45 +1000,6 @@ pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.fn_proto,
|
||||
.fn_proto_multi,
|
||||
.fn_proto_one,
|
||||
.fn_proto_simple,
|
||||
=> false,
|
||||
.fn_decl => true,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.fn_proto => tree.fnProto(node),
|
||||
.fn_proto_multi => tree.fnProtoMulti(node),
|
||||
.fn_proto_one => tree.fnProtoOne(buf, node),
|
||||
.fn_proto_simple => tree.fnProtoSimple(buf, node),
|
||||
.fn_decl => fnProto(tree, tree.nodes.items(.data)[node].lhs, buf),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
|
||||
return switch (tree.nodes.items(.tag)[node]) {
|
||||
.call,
|
||||
.call_comma,
|
||||
.async_call,
|
||||
.async_call_comma,
|
||||
=> tree.callFull(node),
|
||||
.call_one,
|
||||
.call_one_comma,
|
||||
.async_call_one,
|
||||
.async_call_one_comma,
|
||||
=> tree.callOne(buf, node),
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
/// returns a list of parameters
|
||||
pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
|
||||
const node_data = tree.nodes.items(.data);
|
||||
|
@ -84,7 +84,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
|
||||
switch (decl.*) {
|
||||
.ast_node => |fn_node| {
|
||||
var buffer: [1]Ast.Node.Index = undefined;
|
||||
if (ast.fnProto(decl_tree, fn_node, &buffer)) |fn_proto| {
|
||||
if (decl_tree.fullFnProto(&buffer, fn_node)) |fn_proto| {
|
||||
var i: usize = 0;
|
||||
var it = fn_proto.iterate(&decl_tree);
|
||||
|
||||
@ -282,7 +282,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.async_call_comma,
|
||||
=> {
|
||||
var params: [1]Ast.Node.Index = undefined;
|
||||
const call = ast.callFull(tree, node, &params).?;
|
||||
const call = tree.fullCall(&params, node).?;
|
||||
try writeCallNodeHint(builder, arena, store, call);
|
||||
|
||||
for (call.ast.params) |param| {
|
||||
@ -351,7 +351,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.ptr_type,
|
||||
.ptr_type_bit_range,
|
||||
=> {
|
||||
const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node).?;
|
||||
const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node).?;
|
||||
|
||||
if (ptr_type.ast.sentinel != 0) {
|
||||
return try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.sentinel, range });
|
||||
@ -458,12 +458,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.slice,
|
||||
.slice_sentinel,
|
||||
=> {
|
||||
const slice: Ast.full.Slice = switch (tag) {
|
||||
.slice => tree.slice(node),
|
||||
.slice_open => tree.sliceOpen(node),
|
||||
.slice_sentinel => tree.sliceSentinel(node),
|
||||
else => unreachable,
|
||||
};
|
||||
const slice: Ast.full.Slice = tree.fullSlice(node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sliced, range });
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.start, range });
|
||||
@ -481,13 +476,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.array_init_comma,
|
||||
=> {
|
||||
var buffer: [2]Ast.Node.Index = undefined;
|
||||
const array_init: Ast.full.ArrayInit = switch (tag) {
|
||||
.array_init, .array_init_comma => tree.arrayInit(node),
|
||||
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buffer[0..1], node),
|
||||
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
|
||||
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buffer, node),
|
||||
else => unreachable,
|
||||
};
|
||||
const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buffer, node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_init.ast.type_expr, range });
|
||||
for (array_init.ast.elements) |elem| {
|
||||
@ -505,13 +494,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.struct_init_comma,
|
||||
=> {
|
||||
var buffer: [2]Ast.Node.Index = undefined;
|
||||
const struct_init: Ast.full.StructInit = switch (tag) {
|
||||
.struct_init, .struct_init_comma => tree.structInit(node),
|
||||
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
|
||||
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buffer[0..1], node),
|
||||
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buffer, node),
|
||||
else => unreachable,
|
||||
};
|
||||
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buffer, node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, struct_init.ast.type_expr, range });
|
||||
|
||||
@ -546,7 +529,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.switch_case_inline_one,
|
||||
.switch_case_inline,
|
||||
=> {
|
||||
const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
|
||||
const switch_case = tree.fullSwitchCase(node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, switch_case.ast.target_expr, range });
|
||||
},
|
||||
@ -557,7 +540,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.for_simple,
|
||||
.@"for",
|
||||
=> {
|
||||
const while_node = ast.whileAst(tree, node).?;
|
||||
const while_node = ast.fullWhile(tree, node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cond_expr, range });
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cont_expr, range });
|
||||
@ -571,7 +554,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.if_simple,
|
||||
.@"if",
|
||||
=> {
|
||||
const if_node = ast.ifFull(tree, node);
|
||||
const if_node = ast.fullIf(tree, node).?;
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.cond_expr, range });
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.then_expr, range });
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.else_expr, range });
|
||||
@ -584,7 +567,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.fn_decl,
|
||||
=> {
|
||||
var buffer: [1]Ast.Node.Index = undefined;
|
||||
const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buffer).?;
|
||||
const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buffer, node).?;
|
||||
|
||||
var it = fn_proto.iterate(&tree);
|
||||
while (ast.nextFnParam(&it)) |param_decl| {
|
||||
@ -617,7 +600,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.tagged_union_enum_tag_trailing,
|
||||
=> {
|
||||
var buffer: [2]Ast.Node.Index = undefined;
|
||||
const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buffer).?;
|
||||
const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buffer, node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, decl.ast.arg, range });
|
||||
|
||||
@ -634,7 +617,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.container_field_align,
|
||||
.container_field,
|
||||
=> {
|
||||
const container_field = ast.containerField(tree, node).?;
|
||||
const container_field = tree.fullContainerField(node).?;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.value_expr, range });
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.align_expr, range });
|
||||
@ -666,11 +649,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
|
||||
.asm_output,
|
||||
.asm_input,
|
||||
=> {
|
||||
const asm_node: Ast.full.Asm = switch (tag) {
|
||||
.@"asm" => tree.asmFull(node),
|
||||
.asm_simple => tree.asmSimple(node),
|
||||
else => return,
|
||||
};
|
||||
const asm_node: Ast.full.Asm = tree.fullAsm(node) orelse return;
|
||||
|
||||
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, asm_node.ast.template, range });
|
||||
},
|
||||
@ -700,8 +679,7 @@ pub fn writeRangeInlayHint(
|
||||
.encoding = encoding,
|
||||
};
|
||||
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
for (ast.declMembers(handle.tree, 0, &buf)) |child| {
|
||||
for (handle.tree.rootDecls()) |child| {
|
||||
if (!isNodeInRange(handle.tree, child, range)) continue;
|
||||
try writeNodeInlayHint(&builder, arena, store, child, range);
|
||||
}
|
||||
|
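
For reference, a minimal sketch of how the tree.fullCall helper used above is typically consumed; countCallArgs is a made-up name, and the snippet assumes the Zig 0.11-dev std.zig.Ast API shown in this diff:

const std = @import("std");
const Ast = std.zig.Ast;

// Count the arguments of any call-like node (call, call_one, async_call, ...).
// fullCall folds every call variant into Ast.full.Call, so no per-tag switch
// is needed.
fn countCallArgs(tree: Ast, node: Ast.Node.Index) ?usize {
    var buf: [1]Ast.Node.Index = undefined;
    const call = tree.fullCall(&buf, node) orelse return null;
    return call.ast.params.len;
}
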
@ -118,18 +118,19 @@ fn symbolReferencesInternal(
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
.error_set_decl,
=> {
var buf: [2]Ast.Node.Index = undefined;
for (ast.declMembers(tree, node, &buf)) |member|
const container_decl = tree.fullContainerDecl(&buf, node).?;
for (container_decl.ast.members) |member|
try symbolReferencesInternal(builder, member, handle, false);
},
.error_set_decl => {},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.varDecl(tree, node).?;
const var_decl = tree.fullVarDecl(node).?;
try symbolReferencesInternal(builder, var_decl.ast.type_node, handle, false);
try symbolReferencesInternal(builder, var_decl.ast.init_node, handle, false);
},
@ -137,7 +138,7 @@ fn symbolReferencesInternal(
.container_field_align,
.container_field_init,
=> {
const field = ast.containerField(tree, node).?;
const field = tree.fullContainerField(node).?;
try symbolReferencesInternal(builder, field.ast.type_expr, handle, false);
try symbolReferencesInternal(builder, field.ast.value_expr, handle, false);
},
@ -152,7 +153,7 @@ fn symbolReferencesInternal(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fnProto(tree, node, &buf).?;
const fn_proto = tree.fullFnProto(&buf, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param| {
try symbolReferencesInternal(builder, param.type_expr, handle, false);
@ -179,16 +180,10 @@ fn symbolReferencesInternal(
},
.switch_case_one,
.switch_case_inline_one,
=> {
const case_one = tree.switchCaseOne(node);
try symbolReferencesInternal(builder, case_one.ast.target_expr, handle, false);
for (case_one.ast.values) |val|
try symbolReferencesInternal(builder, val, handle, false);
},
.switch_case,
.switch_case_inline,
=> {
const case = tree.switchCase(node);
const case = tree.fullSwitchCase(node).?;
try symbolReferencesInternal(builder, case.ast.target_expr, handle, false);
for (case.ast.values) |val|
try symbolReferencesInternal(builder, val, handle, false);
@ -199,7 +194,7 @@ fn symbolReferencesInternal(
.for_simple,
.@"for",
=> {
const loop = ast.whileAst(tree, node).?;
const loop = ast.fullWhile(tree, node).?;
try symbolReferencesInternal(builder, loop.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.cont_expr, handle, false);
@ -208,7 +203,7 @@ fn symbolReferencesInternal(
.@"if",
.if_simple,
=> {
const if_node = ast.ifFull(tree, node);
const if_node = ast.fullIf(tree, node).?;
try symbolReferencesInternal(builder, if_node.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.else_expr, handle, false);
@ -218,7 +213,7 @@ fn symbolReferencesInternal(
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = ast.ptrType(tree, node).?;
const ptr_type = ast.fullPtrType(tree, node).?;

if (ptr_type.ast.align_node != 0) {
try symbolReferencesInternal(builder, ptr_type.ast.align_node, handle, false);
@ -239,15 +234,10 @@ fn symbolReferencesInternal(
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
=> |tag| {
=> {
var buf: [2]Ast.Node.Index = undefined;
const array_init = switch (tag) {
.array_init, .array_init_comma => tree.arrayInit(node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
else => unreachable,
};
const array_init = tree.fullArrayInit(&buf, node).?;

try symbolReferencesInternal(builder, array_init.ast.type_expr, handle, false);
for (array_init.ast.elements) |e|
try symbolReferencesInternal(builder, e, handle, false);
@ -260,15 +250,10 @@ fn symbolReferencesInternal(
.struct_init_dot_two_comma,
.struct_init_one,
.struct_init_one_comma,
=> |tag| {
=> {
var buf: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = switch (tag) {
.struct_init, .struct_init_comma => tree.structInit(node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
else => unreachable,
};
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;

try symbolReferencesInternal(builder, struct_init.ast.type_expr, handle, false);
for (struct_init.ast.fields) |field|
try symbolReferencesInternal(builder, field, handle, false);
@ -283,7 +268,7 @@ fn symbolReferencesInternal(
.async_call_one_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
const call = ast.callFull(tree, node, &buf).?;
const call = tree.fullCall(&buf, node).?;

try symbolReferencesInternal(builder, call.ast.fn_expr, handle, false);

@ -294,13 +279,8 @@ fn symbolReferencesInternal(
.slice,
.slice_sentinel,
.slice_open,
=> |tag| {
const slice: Ast.full.Slice = switch (tag) {
.slice => tree.slice(node),
.slice_open => tree.sliceOpen(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
=> {
const slice: Ast.full.Slice = tree.fullSlice(node).?;

try symbolReferencesInternal(builder, slice.ast.sliced, handle, false);
try symbolReferencesInternal(builder, slice.ast.start, handle, false);
@ -520,7 +500,7 @@ pub fn symbolReferences(
const proto = scope_data.function;

var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?;
const fn_proto = curr_handle.tree.fullFnProto(&buf, proto).?;

var it = fn_proto.iterate(&curr_handle.tree);
while (ast.nextFnParam(&it)) |candidate| {
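
A minimal sketch of the fullVarDecl helper that replaces ast.varDecl above; varDeclName is a hypothetical helper, and the snippet relies on the name token directly following the `const`/`var` keyword:

const std = @import("std");
const Ast = std.zig.Ast;

// Fetch the declared identifier of a var/const declaration. fullVarDecl
// accepts global_var_decl, local_var_decl, simple_var_decl and
// aligned_var_decl alike.
fn varDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
    const var_decl = tree.fullVarDecl(node) orelse return null;
    // the identifier token immediately follows the `const`/`var` token
    return tree.tokenSlice(var_decl.ast.mut_token + 1);
}
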
@ -339,7 +339,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.varDecl(tree, node).?;
const var_decl = tree.fullVarDecl(node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
try writeDocComments(builder, tree, comment_idx);

@ -386,7 +386,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buf).?;
const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buf, node).?;

try writeToken(builder, decl.layout_token, .keyword);
try writeToken(builder, decl.ast.main_token, .keyword);
@ -446,7 +446,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buf).?;
const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
try writeDocComments(builder, tree, docs);

@ -523,7 +523,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.switch_case_inline_one,
.switch_case_inline,
=> {
const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
const switch_case = tree.fullSwitchCase(node).?;
try writeToken(builder, switch_case.inline_token, .keyword);
for (switch_case.ast.values) |item_node| try callWriteNodeTokens(allocator, .{ builder, item_node });
// check if it's 'else'
@ -541,7 +541,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.for_simple,
.@"for",
=> {
const while_node = ast.whileAst(tree, node).?;
const while_node = ast.fullWhile(tree, node).?;
try writeToken(builder, while_node.label_token, .label);
try writeToken(builder, while_node.inline_token, .keyword);
try writeToken(builder, while_node.ast.while_token, .keyword);
@ -575,7 +575,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.@"if",
.if_simple,
=> {
const if_node = ast.ifFull(tree, node);
const if_node = ast.fullIf(tree, node).?;

try writeToken(builder, if_node.ast.if_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr });
@ -609,13 +609,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const array_init: Ast.full.ArrayInit = switch (tag) {
.array_init, .array_init_comma => tree.arrayInit(node),
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
else => unreachable,
};
const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node).?;

try callWriteNodeTokens(allocator, .{ builder, array_init.ast.type_expr });
for (array_init.ast.elements) |elem| try callWriteNodeTokens(allocator, .{ builder, elem });
@ -630,13 +624,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.struct_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = switch (tag) {
.struct_init, .struct_init_comma => tree.structInit(node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
else => unreachable,
};
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;

var field_token_type: ?TokenType = null;

@ -674,7 +662,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
const call = ast.callFull(tree, node, &params).?;
const call = tree.fullCall(&params, node).?;

try writeToken(builder, call.async_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr });
@ -690,12 +678,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.slice_open,
.slice_sentinel,
=> {
const slice: Ast.full.Slice = switch (tag) {
.slice => tree.slice(node),
.slice_open => tree.sliceOpen(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
const slice: Ast.full.Slice = tree.fullSlice(node).?;

try callWriteNodeTokens(allocator, .{ builder, slice.ast.sliced });
try callWriteNodeTokens(allocator, .{ builder, slice.ast.start });
@ -772,11 +755,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.asm_input,
.asm_simple,
=> {
const asm_node: Ast.full.Asm = switch (tag) {
.@"asm" => tree.asmFull(node),
.asm_simple => tree.asmSimple(node),
else => return, // TODO Inputs, outputs
};
const asm_node: Ast.full.Asm = tree.fullAsm(node).?;

try writeToken(builder, main_token, .keyword);
try writeToken(builder, asm_node.volatile_token, .keyword);
@ -920,7 +899,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = ast.ptrType(tree, node).?;
const ptr_type = ast.fullPtrType(tree, node).?;

if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
main_token == main_tokens[ptr_type.ast.child_type])
@ -955,10 +934,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_type,
.array_type_sentinel,
=> {
const array_type: Ast.full.ArrayType = if (tag == .array_type)
tree.arrayType(node)
else
tree.arrayTypeSentinel(node);
const array_type: Ast.full.ArrayType = tree.fullArrayType(node).?;

try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_count });
try callWriteNodeTokens(allocator, .{ builder, array_type.ast.sentinel });
@ -988,7 +964,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr

fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void {
const tree = builder.handle.tree;
const container_field = ast.containerField(tree, node).?;
const container_field = tree.fullContainerField(node).?;
const base = tree.nodes.items(.main_token)[node];
const tokens = tree.tokens.items(.tag);

@ -1033,8 +1009,7 @@ pub fn writeAllSemanticTokens(
var builder = Builder.init(arena, store, handle, encoding);

// reverse the ast from the root declarations
var buf: [2]Ast.Node.Index = undefined;
for (ast.declMembers(handle.tree, 0, &buf)) |child| {
for (handle.tree.rootDecls()) |child| {
writeNodeTokens(&builder, child) catch |err| switch (err) {
error.MovedBackwards => break,
else => |e| return e,
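
A minimal sketch of the rootDecls call that replaces declMembers(tree, 0, &buf) in the two loops above; countRootDecls is a hypothetical helper:

const std = @import("std");
const Ast = std.zig.Ast;

// The inlay-hint and semantic-token passes now start from tree.rootDecls();
// the slice of top-level declaration nodes comes straight from the parsed
// tree, no scratch buffer required.
fn countRootDecls(tree: Ast) usize {
    return tree.rootDecls().len;
}
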
@ -275,7 +275,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
};

var buf: [1]Ast.Node.Index = undefined;
if (ast.fnProto(type_handle.handle.tree, node, &buf)) |proto| {
if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,
@ -327,7 +327,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
}
}

if (ast.fnProto(res_handle.tree, node, &buf)) |proto| {
if (res_handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,
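
A minimal sketch of the fullFnProto helper used above; fnName is a made-up helper, and name_token is assumed to be the optional identifier token on Ast.full.FnProto:

const std = @import("std");
const Ast = std.zig.Ast;

// fullFnProto covers the fn_proto* tags as well as fn_decl, so one optional
// unwrap replaces the old ast.fnProto wrapper.
fn fnName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
    var buf: [1]Ast.Node.Index = undefined;
    const fn_proto = tree.fullFnProto(&buf, node) orelse return null;
    const name_token = fn_proto.name_token orelse return null;
    return tree.tokenSlice(name_token);
}
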
@ -69,8 +69,8 @@ fn convertCIncludeInternal(
var writer = output.writer(allocator);

var buffer: [2]Ast.Node.Index = undefined;
if (ast.isBlock(tree, node)) {
for (ast.blockStatements(tree, node, &buffer).?) |statement| {
if (ast.blockStatements(tree, node, &buffer)) |statements| {
for (statements) |statement| {
try callConvertCIncludeInternal(stack_allocator, .{ allocator, stack_allocator, tree, statement, output });
}
} else if (ast.builtinCallParams(tree, node, &buffer)) |params| {
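
The translate_c change swaps an isBlock pre-check plus forced unwrap for a single optional capture; a minimal sketch of that shape with hypothetical data, not tied to any zls API:

// The same shape in miniature: capture the optional result once instead of
// pre-checking a predicate and then force-unwrapping with `.?`.
fn sumOrZero(maybe_items: ?[]const u32) u32 {
    const items = maybe_items orelse return 0;
    var total: u32 = 0;
    for (items) |item| total += item;
    return total;
}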