Refactor makeScopeInternal

Jonathan Hähne 2021-04-06 00:04:27 +02:00
parent 45c7f9671c
commit b59db79a05
4 changed files with 262 additions and 376 deletions
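The refactor replaces makeScopeInternal's six-parameter signature with an allocator, a small ScopeContext struct (the scope list, the error and enum completion sets, and the tree) and the node index; the old container-handling branch is split out into makeInnerScope, and a module-level had_root flag keeps the root node (index 0) from being re-entered, which in turn lets most call sites drop their `!= 0` guards. Below is a minimal, self-contained sketch of the parameter-object pattern only, with hypothetical Context/visit names standing in for the real ScopeContext/makeScopeInternal types; it is not the zls code itself. Passing the context by value stays cheap because it is just a few pointers.

const std = @import("std");

// Hypothetical stand-ins for the real Scope list and CompletionSet state.
const Context = struct {
    scopes: *std.ArrayListUnmanaged(u32),
    tree: []const u8,
};

fn visit(allocator: *std.mem.Allocator, context: Context, node_idx: u32) error{OutOfMemory}!void {
    // The struct is copied by value; one argument replaces the four pointers
    // that were previously threaded through every recursive call.
    try context.scopes.append(allocator, node_idx);
    if (node_idx > 0) try visit(allocator, context, node_idx - 1);
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = &gpa.allocator;

    var scopes = std.ArrayListUnmanaged(u32){};
    defer scopes.deinit(allocator);

    try visit(allocator, .{ .scopes = &scopes, .tree = "" }, 3);
    std.debug.print("visited {d} nodes\n", .{scopes.items.len});
}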


@ -2495,7 +2495,13 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen
         enum_completions.deinit(allocator);
     }
     // pass root node index ('0')
-    try makeScopeInternal(allocator, &scopes, &error_completions, &enum_completions, tree, 0);
+    had_root = false;
+    try makeScopeInternal(allocator, .{
+        .scopes = &scopes,
+        .errors = &error_completions,
+        .enums = &enum_completions,
+        .tree = tree,
+    }, 0);
     return DocumentScope{
         .scopes = scopes.toOwnedSlice(allocator),
         .error_completions = error_completions,
@ -2513,136 +2519,176 @@ fn nodeSourceRange(tree: ast.Tree, node: ast.Node.Index) SourceRange {
}; };
} }
fn makeScopeInternal( const ScopeContext = struct {
allocator: *std.mem.Allocator,
scopes: *std.ArrayListUnmanaged(Scope), scopes: *std.ArrayListUnmanaged(Scope),
error_completions: *CompletionSet, enums: *CompletionSet,
enum_completions: *CompletionSet, errors: *CompletionSet,
tree: ast.Tree, tree: ast.Tree,
};
fn makeInnerScope(
allocator: *std.mem.Allocator,
context: ScopeContext,
node_idx: ast.Node.Index, node_idx: ast.Node.Index,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
const scopes = context.scopes;
const tree = context.tree;
const tags = tree.nodes.items(.tag); const tags = tree.nodes.items(.tag);
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);
const data = tree.nodes.items(.data); const data = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token); const main_tokens = tree.nodes.items(.main_token);
const node_tag = tags[node_idx]; const node_tag = tags[node_idx];
if (isContainer(tree, node_idx)) { var buf: [2]ast.Node.Index = undefined;
var buf: [2]ast.Node.Index = undefined; const ast_decls = declMembers(tree, node_idx, &buf);
const ast_decls = declMembers(tree, node_idx, &buf);
(try scopes.addOne(allocator)).* = .{ (try scopes.addOne(allocator)).* = .{
.range = nodeSourceRange(tree, node_idx), .range = nodeSourceRange(tree, node_idx),
.decls = std.StringHashMap(Declaration).init(allocator), .decls = std.StringHashMap(Declaration).init(allocator),
.uses = &.{}, .uses = &.{},
.tests = &.{}, .tests = &.{},
.data = .{ .container = node_idx }, .data = .{ .container = node_idx },
}; };
const scope_idx = scopes.items.len - 1; const scope_idx = scopes.items.len - 1;
var uses = std.ArrayListUnmanaged(*const ast.Node.Index){}; var uses = std.ArrayListUnmanaged(*const ast.Node.Index){};
var tests = std.ArrayListUnmanaged(ast.Node.Index){}; var tests = std.ArrayListUnmanaged(ast.Node.Index){};
errdefer { errdefer {
scopes.items[scope_idx].decls.deinit(); scopes.items[scope_idx].decls.deinit();
uses.deinit(allocator); uses.deinit(allocator);
tests.deinit(allocator); tests.deinit(allocator);
} }
if (node_tag == .error_set_decl) { if (node_tag == .error_set_decl) {
// All identifiers in main_token..data.lhs are error fields. // All identifiers in main_token..data.lhs are error fields.
var i = main_tokens[node_idx]; var i = main_tokens[node_idx];
while (i < data[node_idx].rhs) : (i += 1) { while (i < data[node_idx].rhs) : (i += 1) {
if (token_tags[i] == .identifier) { if (token_tags[i] == .identifier) {
try error_completions.put(allocator, .{ try context.errors.put(allocator, .{
.label = tree.tokenSlice(i), .label = tree.tokenSlice(i),
.kind = .Constant, .kind = .Constant,
.insertText = tree.tokenSlice(i), .insertText = tree.tokenSlice(i),
.insertTextFormat = .PlainText, .insertTextFormat = .PlainText,
}, {}); }, {});
}
} }
} }
}
const container_decl = switch (node_tag) { const container_decl = switch (node_tag) {
.container_decl, .container_decl_trailing => tree.containerDecl(node_idx), .container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx), .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
.container_decl_two, .container_decl_two_trailing => blk: { .container_decl_two, .container_decl_two_trailing => blk: {
var buffer: [2]ast.Node.Index = undefined; var buffer: [2]ast.Node.Index = undefined;
break :blk tree.containerDeclTwo(&buffer, node_idx); break :blk tree.containerDeclTwo(&buffer, node_idx);
}, },
.tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx), .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx), .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
.tagged_union_two, .tagged_union_two_trailing => blk: { .tagged_union_two, .tagged_union_two_trailing => blk: {
var buffer: [2]ast.Node.Index = undefined; var buffer: [2]ast.Node.Index = undefined;
break :blk tree.taggedUnionTwo(&buffer, node_idx); break :blk tree.taggedUnionTwo(&buffer, node_idx);
}, },
else => null,
};
// Only tagged unions and enums should pass this
const can_have_enum_completions = if (container_decl) |container| blk: {
const kind = token_tags[container.ast.main_token];
break :blk kind != .keyword_struct and
(kind != .keyword_union or container.ast.enum_token != null or container.ast.arg != 0);
} else false;
for (ast_decls) |*ptr_decl| {
const decl = ptr_decl.*;
if (tags[decl] == .@"usingnamespace") {
try uses.append(allocator, ptr_decl);
continue;
}
try makeScopeInternal(allocator, context, decl);
const name = getDeclName(tree, decl) orelse continue;
if (tags[decl] == .test_decl) {
try tests.append(allocator, decl);
continue;
}
if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = decl })) |existing| {
// TODO Record a redefinition error.
}
if (!can_have_enum_completions)
continue;
const container_field = switch (tags[decl]) {
.container_field => tree.containerField(decl),
.container_field_align => tree.containerFieldAlign(decl),
.container_field_init => tree.containerFieldInit(decl),
else => null, else => null,
}; };
// Only tagged unions and enums should pass this if (container_field) |field| {
const can_have_enum_completions = if (container_decl) |container| blk: { if (!std.mem.eql(u8, name, "_")) {
const kind = token_tags[container.ast.main_token]; try context.enums.put(allocator, .{
break :blk kind != .keyword_struct and .label = name,
(kind != .keyword_union or container.ast.enum_token != null or container.ast.arg != 0); .kind = .Constant,
} else false; .insertText = name,
.insertTextFormat = .PlainText,
for (ast_decls) |*ptr_decl| { .documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs|
const decl = ptr_decl.*; .{ .kind = .Markdown, .value = docs }
if (tags[decl] == .@"usingnamespace") { else
try uses.append(allocator, ptr_decl); null,
continue; }, {});
}
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
decl,
);
const name = getDeclName(tree, decl) orelse continue;
if (tags[decl] == .test_decl) {
try tests.append(allocator, decl);
continue;
}
if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = decl })) |existing| {
// TODO Record a redefinition error.
}
if (!can_have_enum_completions)
continue;
const container_field = switch (tags[decl]) {
.container_field => tree.containerField(decl),
.container_field_align => tree.containerFieldAlign(decl),
.container_field_init => tree.containerFieldInit(decl),
else => null,
};
if (container_field) |field| {
if (!std.mem.eql(u8, name, "_")) {
try enum_completions.put(allocator, .{
.label = name,
.kind = .Constant,
.insertText = name,
.insertTextFormat = .PlainText,
.documentation = if (try getDocComments(allocator, tree, decl, .Markdown)) |docs| .{
.kind = .Markdown,
.value = docs,
} else null,
}, {});
}
} }
} }
}
scopes.items[scope_idx].tests = tests.toOwnedSlice(allocator); scopes.items[scope_idx].tests = tests.toOwnedSlice(allocator);
scopes.items[scope_idx].uses = uses.toOwnedSlice(allocator); scopes.items[scope_idx].uses = uses.toOwnedSlice(allocator);
return; }
// Whether we have already visited the root node.
var had_root = true;
fn makeScopeInternal(
allocator: *std.mem.Allocator,
context: ScopeContext,
node_idx: ast.Node.Index,
) error{OutOfMemory}!void {
const scopes = context.scopes;
const tree = context.tree;
const tags = tree.nodes.items(.tag);
const token_tags = tree.tokens.items(.tag);
const data = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const node_tag = tags[node_idx];
if (node_idx == 0) {
if (had_root)
return
else
had_root = true;
} }
switch (node_tag) { switch (node_tag) {
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
.error_set_decl,
=> {
try makeInnerScope(allocator, context, node_idx);
},
.array_type_sentinel => {
// TODO: ???
return;
},
.fn_proto, .fn_proto,
.fn_proto_one, .fn_proto_one,
.fn_proto_simple, .fn_proto_simple,
@ -2675,53 +2721,22 @@ fn makeScopeInternal(
} }
// Visit parameter types to pick up any error sets and enum // Visit parameter types to pick up any error sets and enum
// completions // completions
if (param.type_expr != 0) try makeScopeInternal(allocator, context, param.type_expr);
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
param.type_expr,
);
} }
if (fn_tag == .fn_decl) blk: { if (fn_tag == .fn_decl) blk: {
if (data[node_idx].lhs == 0) break :blk; if (data[node_idx].lhs == 0) break :blk;
const return_type_node = data[data[node_idx].lhs].rhs; const return_type_node = data[data[node_idx].lhs].rhs;
if (return_type_node == 0) break :blk;
// Visit the return type // Visit the return type
try makeScopeInternal( try makeScopeInternal(allocator, context, return_type_node);
allocator,
scopes,
error_completions,
enum_completions,
tree,
return_type_node,
);
} }
if (data[node_idx].rhs == 0) return;
// Visit the function body // Visit the function body
try makeScopeInternal( try makeScopeInternal(allocator, context, data[node_idx].rhs);
allocator,
scopes,
error_completions,
enum_completions,
tree,
data[node_idx].rhs,
);
}, },
.test_decl => { .test_decl => {
return try makeScopeInternal( return try makeScopeInternal(allocator, context, data[node_idx].rhs);
allocator,
scopes,
error_completions,
enum_completions,
tree,
data[node_idx].rhs,
);
}, },
.block, .block,
.block_semicolon, .block_semicolon,
@ -2785,7 +2800,7 @@ fn makeScopeInternal(
continue; continue;
} }
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, idx); try makeScopeInternal(allocator, context, idx);
if (varDecl(tree, idx)) |var_decl| { if (varDecl(tree, idx)) |var_decl| {
const name = tree.tokenSlice(var_decl.ast.mut_token + 1); const name = tree.tokenSlice(var_decl.ast.mut_token + 1);
if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| { if (try scopes.items[scope_idx].decls.fetchPut(name, .{ .ast_node = idx })) |existing| {
@ -2800,10 +2815,7 @@ fn makeScopeInternal(
.@"if", .@"if",
.if_simple, .if_simple,
=> { => {
const if_node: ast.full.If = if (node_tag == .@"if") const if_node = ifFull(tree, node_idx);
ifFull(tree, node_idx)
else
ifSimple(tree, node_idx);
if (if_node.payload_token) |payload| { if (if_node.payload_token) |payload| {
var scope = try scopes.addOne(allocator); var scope = try scopes.addOne(allocator);
@ -2831,14 +2843,7 @@ fn makeScopeInternal(
}); });
} }
try makeScopeInternal( try makeScopeInternal(allocator, context, if_node.ast.then_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
if_node.ast.then_expr,
);
if (if_node.ast.else_expr != 0) { if (if_node.ast.else_expr != 0) {
if (if_node.error_token) |err_token| { if (if_node.error_token) |err_token| {
@ -2859,16 +2864,13 @@ fn makeScopeInternal(
const name = tree.tokenSlice(err_token); const name = tree.tokenSlice(err_token);
try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr }); try scope.decls.putNoClobber(name, .{ .ast_node = if_node.ast.else_expr });
} }
try makeScopeInternal( try makeScopeInternal(allocator, context, if_node.ast.else_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
if_node.ast.else_expr,
);
} }
}, },
.@"catch" => {
// TODO: ???
return;
},
.@"while", .@"while",
.while_simple, .while_simple,
.while_cont, .while_cont,
@ -2941,14 +2943,7 @@ fn makeScopeInternal(
} }
} }
} }
try makeScopeInternal( try makeScopeInternal(allocator, context, while_node.ast.then_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
while_node.ast.then_expr,
);
if (while_node.ast.else_expr != 0) { if (while_node.ast.else_expr != 0) {
if (while_node.error_token) |err_token| { if (while_node.error_token) |err_token| {
@ -2969,14 +2964,7 @@ fn makeScopeInternal(
const name = tree.tokenSlice(err_token); const name = tree.tokenSlice(err_token);
try scope.decls.putNoClobber(name, .{ .ast_node = while_node.ast.else_expr }); try scope.decls.putNoClobber(name, .{ .ast_node = while_node.ast.else_expr });
} }
try makeScopeInternal( try makeScopeInternal(allocator, context, while_node.ast.else_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
while_node.ast.else_expr,
);
} }
}, },
.@"switch", .@"switch",
@ -3020,16 +3008,15 @@ fn makeScopeInternal(
}); });
} }
try makeScopeInternal( try makeScopeInternal(allocator, context, switch_case.ast.target_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
switch_case.ast.target_expr,
);
} }
}, },
.switch_case,
.switch_case_one,
.switch_range,
=> {
return;
},
.global_var_decl, .global_var_decl,
.local_var_decl, .local_var_decl,
.aligned_var_decl, .aligned_var_decl,
@ -3037,25 +3024,11 @@ fn makeScopeInternal(
=> { => {
const var_decl = varDecl(tree, node_idx).?; const var_decl = varDecl(tree, node_idx).?;
if (var_decl.ast.type_node != 0) { if (var_decl.ast.type_node != 0) {
try makeScopeInternal( try makeScopeInternal(allocator, context, var_decl.ast.type_node);
allocator,
scopes,
error_completions,
enum_completions,
tree,
var_decl.ast.type_node,
);
} }
if (var_decl.ast.init_node != 0) { if (var_decl.ast.init_node != 0) {
try makeScopeInternal( try makeScopeInternal(allocator, context, var_decl.ast.init_node);
allocator,
scopes,
error_completions,
enum_completions,
tree,
var_decl.ast.init_node,
);
} }
}, },
.call, .call,
@ -3070,9 +3043,9 @@ fn makeScopeInternal(
var buf: [1]ast.Node.Index = undefined; var buf: [1]ast.Node.Index = undefined;
const call = callFull(tree, node_idx, &buf).?; const call = callFull(tree, node_idx, &buf).?;
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, call.ast.fn_expr); try makeScopeInternal(allocator, context, call.ast.fn_expr);
for (call.ast.params) |param| for (call.ast.params) |param|
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, param); try makeScopeInternal(allocator, context, param);
}, },
.struct_init, .struct_init,
.struct_init_comma, .struct_init_comma,
@ -3093,17 +3066,10 @@ fn makeScopeInternal(
}; };
if (struct_init.ast.type_expr != 0) if (struct_init.ast.type_expr != 0)
try makeScopeInternal( try makeScopeInternal(allocator, context, struct_init.ast.type_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
struct_init.ast.type_expr,
);
for (struct_init.ast.fields) |field| { for (struct_init.ast.fields) |field| {
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, field); try makeScopeInternal(allocator, context, field);
} }
}, },
.array_init, .array_init,
@ -3125,16 +3091,9 @@ fn makeScopeInternal(
}; };
if (array_init.ast.type_expr != 0) if (array_init.ast.type_expr != 0)
try makeScopeInternal( try makeScopeInternal(allocator, context, array_init.ast.type_expr);
allocator,
scopes,
error_completions,
enum_completions,
tree,
array_init.ast.type_expr,
);
for (array_init.ast.elements) |elem| { for (array_init.ast.elements) |elem| {
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, elem); try makeScopeInternal(allocator, context, elem);
} }
}, },
.container_field, .container_field,
@ -3143,33 +3102,9 @@ fn makeScopeInternal(
=> { => {
const field = containerField(tree, node_idx).?; const field = containerField(tree, node_idx).?;
if (field.ast.type_expr != 0) try makeScopeInternal(allocator, context, field.ast.type_expr);
try makeScopeInternal( try makeScopeInternal(allocator, context, field.ast.align_expr);
allocator, try makeScopeInternal(allocator, context, field.ast.value_expr);
scopes,
error_completions,
enum_completions,
tree,
field.ast.type_expr,
);
if (field.ast.align_expr != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
field.ast.align_expr,
);
if (field.ast.value_expr != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
field.ast.value_expr,
);
}, },
.builtin_call, .builtin_call,
.builtin_call_comma, .builtin_call_comma,
@ -3189,7 +3124,7 @@ fn makeScopeInternal(
}; };
for (params) |param| { for (params) |param| {
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, param); try makeScopeInternal(allocator, context, param);
} }
}, },
.ptr_type, .ptr_type,
@ -3198,33 +3133,10 @@ fn makeScopeInternal(
.ptr_type_sentinel, .ptr_type_sentinel,
=> { => {
const ptr_type: ast.full.PtrType = ptrType(tree, node_idx).?; const ptr_type: ast.full.PtrType = ptrType(tree, node_idx).?;
if (ptr_type.ast.sentinel != 0)
try makeScopeInternal( try makeScopeInternal(allocator, context, ptr_type.ast.sentinel);
allocator, try makeScopeInternal(allocator, context, ptr_type.ast.align_node);
scopes, try makeScopeInternal(allocator, context, ptr_type.ast.child_type);
error_completions,
enum_completions,
tree,
ptr_type.ast.sentinel,
);
if (ptr_type.ast.align_node != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
ptr_type.ast.align_node,
);
if (ptr_type.ast.child_type != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
ptr_type.ast.child_type,
);
}, },
.slice, .slice,
.slice_open, .slice_open,
@ -3236,43 +3148,10 @@ fn makeScopeInternal(
.slice_sentinel => tree.sliceSentinel(node_idx), .slice_sentinel => tree.sliceSentinel(node_idx),
else => unreachable, else => unreachable,
}; };
try makeScopeInternal(allocator, context, slice.ast.sliced);
if (slice.ast.sliced != 0) try makeScopeInternal(allocator, context, slice.ast.start);
try makeScopeInternal( try makeScopeInternal(allocator, context, slice.ast.end);
allocator, try makeScopeInternal(allocator, context, slice.ast.sentinel);
scopes,
error_completions,
enum_completions,
tree,
slice.ast.sliced,
);
if (slice.ast.start != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
slice.ast.start,
);
if (slice.ast.end != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
slice.ast.end,
);
if (slice.ast.sentinel != 0)
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
slice.ast.sentinel,
);
}, },
.@"errdefer" => { .@"errdefer" => {
const expr = data[node_idx].rhs; const expr = data[node_idx].rhs;
@ -3295,10 +3174,9 @@ fn makeScopeInternal(
try scope.decls.putNoClobber(name, .{ .ast_node = expr }); try scope.decls.putNoClobber(name, .{ .ast_node = expr });
} }
try makeScopeInternal(allocator, scopes, error_completions, enum_completions, tree, expr); try makeScopeInternal(allocator, context, expr);
}, },
// no scope
.@"asm", .@"asm",
.asm_simple, .asm_simple,
.asm_output, .asm_output,
@ -3322,17 +3200,9 @@ fn makeScopeInternal(
.@"continue", .@"continue",
=> {}, => {},
.@"break", .@"defer" => { .@"break", .@"defer" => {
if (data[node_idx].rhs != 0) try makeScopeInternal(allocator, context, data[node_idx].rhs);
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
data[node_idx].rhs,
);
}, },
// all lhs kind of nodes
.@"return", .@"return",
.@"resume", .@"resume",
.field_access, .field_access,
@ -3352,36 +3222,54 @@ fn makeScopeInternal(
.unwrap_optional, .unwrap_optional,
.@"usingnamespace", .@"usingnamespace",
=> { => {
if (data[node_idx].lhs != 0) { try makeScopeInternal(allocator, context, data[node_idx].lhs);
try makeScopeInternal(
allocator,
scopes,
error_completions,
enum_completions,
tree,
data[node_idx].lhs,
);
}
}, },
else => {
if (data[node_idx].lhs != 0) .equal_equal,
try makeScopeInternal( .bang_equal,
allocator, .less_than,
scopes, .greater_than,
error_completions, .less_or_equal,
enum_completions, .greater_or_equal,
tree, .assign_mul,
data[node_idx].lhs, .assign_div,
); .assign_mod,
if (data[node_idx].rhs != 0) .assign_add,
try makeScopeInternal( .assign_sub,
allocator, .assign_bit_shift_left,
scopes, .assign_bit_shift_right,
error_completions, .assign_bit_and,
enum_completions, .assign_bit_xor,
tree, .assign_bit_or,
data[node_idx].rhs, .assign_mul_wrap,
); .assign_add_wrap,
.assign_sub_wrap,
.assign,
.merge_error_sets,
.mul,
.div,
.mod,
.array_mult,
.mul_wrap,
.add,
.sub,
.array_cat,
.add_wrap,
.sub_wrap,
.bit_shift_left,
.bit_shift_right,
.bit_and,
.bit_xor,
.bit_or,
.@"orelse",
.bool_and,
.bool_or,
.array_type,
.array_access,
.error_union,
=> {
try makeScopeInternal(allocator, context, data[node_idx].lhs);
try makeScopeInternal(allocator, context, data[node_idx].rhs);
}, },
} }
} }
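Throughout the hunks above, the explicit `if (x != 0)` guards before recursive calls disappear: in the Zig AST, index 0 doubles as the root node and as the sentinel for an absent optional node, so once the root has been visited, makeScopeInternal can simply return early whenever it sees index 0. That is what the module-level `had_root` flag (reset by makeDocumentScope before the root call) is for. A small sketch of that guard follows, with a hypothetical shouldVisit helper in place of the inlined check; it only illustrates the idea.

const std = @import("std");

// Mirrors the `had_root` flag introduced above (simplified to a free function).
var had_root = true;

fn shouldVisit(node_idx: u32) bool {
    if (node_idx == 0) {
        // After the root has been handled once, an index of 0 means
        // "no node here", so callers can recurse without `!= 0` checks.
        if (had_root) return false;
        had_root = true;
    }
    return true;
}

pub fn main() void {
    had_root = false; // reset per document, as makeDocumentScope now does
    std.debug.assert(shouldVisit(0)); // the real root node
    std.debug.assert(!shouldVisit(0)); // later zeros are absent optional nodes
    std.debug.assert(shouldVisit(42)); // ordinary nodes are always visited
}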


@ -138,26 +138,24 @@ fn fullIf(tree: Tree, info: full.If.Ast) full.If {
 }
 
 pub fn ifFull(tree: Tree, node: Node.Index) full.If {
-    assert(tree.nodes.items(.tag)[node] == .@"if");
     const data = tree.nodes.items(.data)[node];
-    const extra = tree.extraData(data.rhs, Node.If);
-    return fullIf(tree, .{
-        .cond_expr = data.lhs,
-        .then_expr = extra.then_expr,
-        .else_expr = extra.else_expr,
-        .if_token = tree.nodes.items(.main_token)[node],
-    });
-}
-
-pub fn ifSimple(tree: Tree, node: Node.Index) full.If {
-    assert(tree.nodes.items(.tag)[node] == .if_simple);
-    const data = tree.nodes.items(.data)[node];
-    return fullIf(tree, .{
-        .cond_expr = data.lhs,
-        .then_expr = data.rhs,
-        .else_expr = 0,
-        .if_token = tree.nodes.items(.main_token)[node],
-    });
+    if (tree.nodes.items(.tag)[node] == .@"if") {
+        const extra = tree.extraData(data.rhs, Node.If);
+        return fullIf(tree, .{
+            .cond_expr = data.lhs,
+            .then_expr = extra.then_expr,
+            .else_expr = extra.else_expr,
+            .if_token = tree.nodes.items(.main_token)[node],
+        });
+    } else {
+        assert(tree.nodes.items(.tag)[node] == .if_simple);
+        return fullIf(tree, .{
+            .cond_expr = data.lhs,
+            .then_expr = data.rhs,
+            .else_expr = 0,
+            .if_token = tree.nodes.items(.main_token)[node],
+        });
+    }
 }
 
 fn fullWhile(tree: Tree, info: full.While.Ast) full.While {
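With this change ifFull handles both `.@"if"` and `.if_simple` nodes itself (asserting the tag in the else branch) and the separate ifSimple accessor is removed, so the call sites in the two files below no longer pick between two functions. A tiny sketch of the same fold, using hypothetical NodeTag/IfInfo types rather than the real ast API:

const std = @import("std");

const NodeTag = enum { @"if", if_simple };
const IfInfo = struct { has_else: bool };

// One accessor branches on the tag internally, so callers never have to.
fn ifInfo(tag: NodeTag) IfInfo {
    if (tag == .@"if") {
        return .{ .has_else = true };
    } else {
        std.debug.assert(tag == .if_simple);
        return .{ .has_else = false };
    }
}

pub fn main() void {
    std.debug.assert(ifInfo(.@"if").has_else);
    std.debug.assert(!ifInfo(.if_simple).has_else);
}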


@ -244,7 +244,7 @@ fn symbolReferencesInternal(
.@"if", .@"if",
.if_simple, .if_simple,
=> { => {
const if_node: ast.full.If = if (node_tags[node] == .@"if") ifFull(tree, node) else ifSimple(tree, node); const if_node = ifFull(tree, node);
try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler); try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.cond_expr, .handle = handle }, decl, encoding, context, handler);
try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.then_expr, .handle = handle }, decl, encoding, context, handler); try symbolReferencesInternal(arena, store, .{ .node = if_node.ast.then_expr, .handle = handle }, decl, encoding, context, handler);


@ -696,7 +696,7 @@ fn writeNodeTokens(
.@"if", .@"if",
.if_simple, .if_simple,
=> { => {
const if_node: ast.full.If = if (tag == .@"if") ifFull(tree, node) else ifSimple(tree, node); const if_node = ifFull(tree, node);
try writeToken(builder, if_node.ast.if_token, .keyword); try writeToken(builder, if_node.ast.if_token, .keyword);
try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr }); try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, if_node.ast.cond_expr });