fix semantic token generation crash on continue expression

Techarix 2023-05-10 15:05:29 +02:00
parent 029f5094ff
commit 1bbdcab3ac


@@ -701,14 +701,17 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!void
         .grouped_expression => {
             try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
         },
-        .@"break",
-        .@"continue",
-        => {
+        .@"break" => {
             try writeToken(builder, main_token, .keyword);
             if (node_data[node].lhs != 0)
                 try writeToken(builder, node_data[node].lhs, .label);
             try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs });
         },
+        .@"continue" => {
+            try writeToken(builder, main_token, .keyword);
+            if (node_data[node].lhs != 0)
+                try writeToken(builder, node_data[node].lhs, .label);
+        },
         .@"suspend", .@"return" => {
             try writeToken(builder, main_token, .keyword);
             try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs });
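Before this change, `.@"continue"` shared a branch with `.@"break"`, whose last statement recurses into `node_data[node].rhs`. That is correct for `break`, which may carry an operand (`break :blk value`), but `continue` never has one, so walking its `rhs` is what crashed semantic token generation. The new branch keeps the keyword and optional label handling and simply drops the `rhs` walk. A minimal, hypothetical reproducer (any file containing a labeled `continue` should exercise the fixed path):

```zig
pub fn main() void {
    var i: u32 = 0;
    outer: while (i < 3) : (i += 1) {
        var j: u32 = 0;
        while (j < 3) : (j += 1) {
            // Labeled continue: the node's lhs is the `outer` label token
            // (highlighted as .label), but there is no operand expression,
            // so the token walker must not descend into rhs here.
            if (j == 1) continue :outer;
        }
    }
}
```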