Fix some crashes & find all @imports

If there are parse errors, an AST can contain uninitialized nodes.
Walking the tree in this case can lead to horribly nasty crashes.
This commit is contained in:
Jonathan Hähne 2021-03-30 10:33:21 +02:00
parent a5dbb56945
commit aab9ca18f2
5 changed files with 64 additions and 63 deletions

View File

@ -222,6 +222,8 @@ pub fn isPascalCase(name: []const u8) bool {
pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex { pub fn getDeclNameToken(tree: ast.Tree, node: ast.Node.Index) ?ast.TokenIndex {
const tags = tree.nodes.items(.tag); const tags = tree.nodes.items(.tag);
const main_token = tree.nodes.items(.main_token)[node]; const main_token = tree.nodes.items(.main_token)[node];
if (tree.errors.len > 0)
return null;
return switch (tags[node]) { return switch (tags[node]) {
// regular declaration names. + 1 to mut token because name comes after 'const'/'var' // regular declaration names. + 1 to mut token because name comes after 'const'/'var'
.local_var_decl => tree.localVarDecl(node).ast.mut_token + 1, .local_var_decl => tree.localVarDecl(node).ast.mut_token + 1,
@ -346,6 +348,8 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);
const main_tokes = tree.nodes.items(.main_token); const main_tokes = tree.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag); const node_tags = tree.nodes.items(.tag);
if (tree.errors.len > 0)
return null;
if (varDecl(handle.tree, decl)) |var_decl| { if (varDecl(handle.tree, decl)) |var_decl| {
if (var_decl.ast.init_node == 0) return null; if (var_decl.ast.init_node == 0) return null;
@ -1182,64 +1186,29 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator,
return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params); return resolveTypeOfNodeInternal(store, arena, node_handle, &bound_type_params);
} }
/// If `builtin_call` is an `@import("...")` call with exactly one
/// string-literal argument, appends the import path (surrounding quotes
/// stripped) to `arr`. Otherwise does nothing.
/// The appended slice aliases the tree's source buffer, so it is only
/// valid for as long as `tree` is alive.
fn maybeCollectImport(tree: ast.Tree, builtin_call: ast.Node.Index, arr: *std.ArrayList([]const u8)) !void {
    const tags = tree.nodes.items(.tag);
    const datas = tree.nodes.items(.data);
    const main_tokens = tree.nodes.items(.main_token);

    const builtin_tag = tags[builtin_call];
    const data = datas[builtin_call];

    std.debug.assert(isBuiltinCall(tree, builtin_call));
    // BUG FIX: tokenSlice expects a TokenIndex, but the original passed the
    // *node* index `builtin_call` (both are u32, so it compiled silently and
    // sliced an unrelated token). The builtin's name lives at the node's
    // main_token.
    if (!std.mem.eql(u8, tree.tokenSlice(main_tokens[builtin_call]), "@import")) return;

    // Gather the call's argument nodes; the two-arg node forms encode
    // 0, 1 or 2 params directly in lhs/rhs (0 meaning "absent").
    const params = switch (builtin_tag) {
        .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
        .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
            &[_]ast.Node.Index{}
        else if (data.rhs == 0)
            &[_]ast.Node.Index{data.lhs}
        else
            &[_]ast.Node.Index{ data.lhs, data.rhs },
        else => unreachable,
    };

    // Only `@import("string literal")` with a single argument is an import.
    if (params.len != 1) return;
    if (tags[params[0]] != .string_literal) return;

    const import_str = tree.tokenSlice(main_tokens[params[0]]);
    // Strip the surrounding quote characters before storing.
    try arr.append(import_str[1 .. import_str.len - 1]);
}
/// Collects all imports we can find into a slice of import paths (without quotes). /// Collects all imports we can find into a slice of import paths (without quotes).
/// The import paths are valid as long as the tree is.
pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !void { pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: ast.Tree) !void {
// TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;` const tags = tree.tokens.items(.tag);
const tags = tree.nodes.items(.tag);
for (tree.rootDecls()) |decl_idx| {
const var_decl_maybe: ?ast.full.VarDecl = switch (tags[decl_idx]) {
.global_var_decl => tree.globalVarDecl(decl_idx),
.local_var_decl => tree.localVarDecl(decl_idx),
.simple_var_decl => tree.simpleVarDecl(decl_idx),
else => null,
};
const var_decl = var_decl_maybe orelse continue;
if (var_decl.ast.init_node == 0) continue;
const init_node = var_decl.ast.init_node; while (i < tags.len) : (i += 1) {
const init_node_tag = tags[init_node]; if (tags[i] != .builtin)
switch (init_node_tag) { continue;
.builtin_call, const text = tree.tokenSlice(i);
.builtin_call_comma, log.debug("Found {}", .{ text });
.builtin_call_two,
.builtin_call_two_comma, if (std.mem.eql(u8, text, "@import")) {
=> try maybeCollectImport(tree, init_node, import_arr), if (i + 3 >= tags.len)
.field_access => { break;
const lhs = tree.nodes.items(.data)[init_node].lhs; if (tags[i + 1] != .l_paren)
if (isBuiltinCall(tree, lhs)) { continue;
try maybeCollectImport(tree, lhs, import_arr); if (tags[i + 2] != .string_literal)
} continue;
}, if (tags[i + 3] != .r_paren)
else => {}, continue;
const str = tree.tokenSlice(i + 2);
try import_arr.append(str[1..str.len-1]);
} }
} }
} }
@ -1265,6 +1234,8 @@ pub fn getFieldAccessType(
.node = undefined, .node = undefined,
.handle = handle, .handle = handle,
}); });
if (handle.tree.errors.len > 0)
return null;
// TODO Actually bind params here when calling functions instead of just skipping args. // TODO Actually bind params here when calling functions instead of just skipping args.
var bound_type_params = BoundTypeParams.init(&arena.allocator); var bound_type_params = BoundTypeParams.init(&arena.allocator);
@ -1910,6 +1881,8 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: ast.Tree, nod
pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: ast.Tree, encoding: offsets.Encoding) ![]types.DocumentSymbol { pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: ast.Tree, encoding: offsets.Encoding) ![]types.DocumentSymbol {
var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.rootDecls().len); var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.rootDecls().len);
if (tree.errors.len > 0)
return symbols.toOwnedSlice(); // NOTE(review): `return 0` cannot coerce to []types.DocumentSymbol; return the still-empty symbol list instead
var context = GetDocumentSymbolsContext{ var context = GetDocumentSymbolsContext{
.symbols = &symbols, .symbols = &symbols,
@ -2467,6 +2440,12 @@ pub const DocumentScope = struct {
error_completions: CompletionSet, error_completions: CompletionSet,
enum_completions: CompletionSet, enum_completions: CompletionSet,
pub const none = DocumentScope{
.scopes = &[0]Scope{},
.error_completions = CompletionSet{},
.enum_completions = CompletionSet{},
};
pub fn debugPrint(self: DocumentScope) void { pub fn debugPrint(self: DocumentScope) void {
for (self.scopes) |scope| { for (self.scopes) |scope| {
log.debug( log.debug(
@ -2530,6 +2509,9 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: ast.Tree) !Documen
var error_completions = CompletionSet{}; var error_completions = CompletionSet{};
var enum_completions = CompletionSet{}; var enum_completions = CompletionSet{};
if (tree.errors.len > 0)
return DocumentScope.none;
errdefer { errdefer {
scopes.deinit(allocator); scopes.deinit(allocator);
for (error_completions.entries.items) |entry| { for (error_completions.entries.items) |entry| {
@ -2748,6 +2730,7 @@ fn makeScopeInternal(
=> |fn_tag| { => |fn_tag| {
var buf: [1]ast.Node.Index = undefined; var buf: [1]ast.Node.Index = undefined;
const func = fnProto(tree, node_idx, &buf).?; const func = fnProto(tree, node_idx, &buf).?;
// log.debug("Alive 3.1", .{});
(try scopes.addOne(allocator)).* = .{ (try scopes.addOne(allocator)).* = .{
.range = nodeSourceRange(tree, node_idx), .range = nodeSourceRange(tree, node_idx),
@ -2781,7 +2764,10 @@ fn makeScopeInternal(
param.type_expr, param.type_expr,
); );
} }
const a = data[node_idx];
const left = data[a.lhs];
const right = data[a.rhs];
// log.debug("Alive 3.2 - {}- {}- {}-{} {}- {}-{}", .{tags[node_idx], tags[a.lhs], tags[left.lhs], tags[left.rhs], tags[a.rhs], tags[right.lhs], tags[right.rhs]});
// Visit the return type // Visit the return type
try makeScopeInternal( try makeScopeInternal(
allocator, allocator,
@ -2795,6 +2781,7 @@ fn makeScopeInternal(
else else
data[node_idx].rhs, data[node_idx].rhs,
); );
log.debug("Alive 3.3", .{});
// Visit the function body // Visit the function body
if (fn_tag == .fn_decl) { if (fn_tag == .fn_decl) {
try makeScopeInternal( try makeScopeInternal(
@ -3256,7 +3243,7 @@ fn makeScopeInternal(
=> { => {
const field = containerField(tree, node_idx).?; const field = containerField(tree, node_idx).?;
if (field.ast.type_expr != 0) if (field.ast.type_expr != 0) {
try makeScopeInternal( try makeScopeInternal(
allocator, allocator,
scopes, scopes,
@ -3265,7 +3252,8 @@ fn makeScopeInternal(
tree, tree,
field.ast.type_expr, field.ast.type_expr,
); );
if (field.ast.align_expr != 0) }
if (field.ast.align_expr != 0) {
try makeScopeInternal( try makeScopeInternal(
allocator, allocator,
scopes, scopes,
@ -3274,7 +3262,8 @@ fn makeScopeInternal(
tree, tree,
field.ast.align_expr, field.ast.align_expr,
); );
if (field.ast.value_expr != 0) }
if (field.ast.value_expr != 0) {
try makeScopeInternal( try makeScopeInternal(
allocator, allocator,
scopes, scopes,
@ -3283,6 +3272,7 @@ fn makeScopeInternal(
tree, tree,
field.ast.value_expr, field.ast.value_expr,
); );
}
}, },
.builtin_call, .builtin_call,
.builtin_call_comma, .builtin_call_comma,

View File

@ -422,13 +422,16 @@ pub fn applyChanges(
if (change.Object.get("range")) |range| { if (change.Object.get("range")) |range| {
std.debug.assert(document.text.ptr == document.mem.ptr); std.debug.assert(document.text.ptr == document.mem.ptr);
// TODO: add tests and validate the JSON
const start_obj = range.Object.get("start").?.Object;
const start_pos = types.Position{ const start_pos = types.Position{
.line = range.Object.get("start").?.Object.get("line").?.Integer, .line = start_obj.get("line").?.Integer,
.character = range.Object.get("start").?.Object.get("character").?.Integer, .character = start_obj.get("character").?.Integer,
}; };
const end_obj = range.Object.get("end").?.Object;
const end_pos = types.Position{ const end_pos = types.Position{
.line = range.Object.get("end").?.Object.get("line").?.Integer, .line = end_obj.get("line").?.Integer,
.character = range.Object.get("end").?.Object.get("character").?.Integer, .character = end_obj.get("character").?.Integer,
}; };
const change_text = change.Object.get("text").?.String; const change_text = change.Object.get("text").?.String;

View File

@ -356,6 +356,8 @@ fn nodeToCompletion(
const node_tags = tree.nodes.items(.tag); const node_tags = tree.nodes.items(.tag);
const datas = tree.nodes.items(.data); const datas = tree.nodes.items(.data);
const token_tags = tree.tokens.items(.tag); const token_tags = tree.tokens.items(.tag);
if (tree.errors.len > 0)
return;
const doc_kind: types.MarkupContent.Kind = if (client_capabilities.completion_doc_supports_md) const doc_kind: types.MarkupContent.Kind = if (client_capabilities.completion_doc_supports_md)
.Markdown .Markdown

View File

@ -85,6 +85,9 @@ fn symbolReferencesInternal(
const main_tokens = tree.nodes.items(.main_token); const main_tokens = tree.nodes.items(.main_token);
const starts = tree.tokens.items(.start); const starts = tree.tokens.items(.start);
if (tree.errors.len > 0)
return;
switch (node_tags[node]) { switch (node_tags[node]) {
.block, .block_semicolon, .block_two, .block_two_semicolon => { .block, .block_semicolon, .block_two, .block_two_semicolon => {
const statements: []const ast.Node.Index = switch (node_tags[node]) { const statements: []const ast.Node.Index = switch (node_tags[node]) {

View File

@ -1145,6 +1145,9 @@ fn writeNodeTokens(
// TODO Range version, edit version. // TODO Range version, edit version.
pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 { pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentStore, handle: *DocumentStore.Handle, encoding: offsets.Encoding) ![]u32 {
var builder = Builder.init(arena.child_allocator, handle, encoding); var builder = Builder.init(arena.child_allocator, handle, encoding);
if (handle.tree.errors.len > 0) {
return builder.toOwnedSlice();
}
// reverse the ast from the root declarations // reverse the ast from the root declarations
var gap_highlighter = GapHighlighter.init(&builder, 0); var gap_highlighter = GapHighlighter.init(&builder, 0);