Renamed import context to analysis context, added arena for temporary allocations while analyzing

This commit is contained in:
Alexandros Naskos 2020-05-14 12:23:20 +03:00
parent 7a8a4e1ec5
commit 6db3c74550
3 changed files with 39 additions and 43 deletions
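In short: each analysis request now owns a std.heap.ArenaAllocator and hands it to the renamed AnalysisContext, so temporary allocations made while analyzing are released by a single arena.deinit() instead of individual frees. A rough caller-side sketch of the pattern (hypothetical call site inside an error-returning function; some_slice is a placeholder, the other names come from the diffs below):

    var arena = std.heap.ArenaAllocator.init(allocator);
    defer arena.deinit(); // frees every temporary analysis allocation at once

    var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse return;
    defer analysis_ctx.deinit(); // still frees the tree; the tree is not arena-owned

    // Inside analysis code, temporaries are duped into the arena instead of the
    // store allocator, so no matching free() call is needed:
    const copy = std.mem.dupe(&analysis_ctx.arena.allocator, u8, some_slice) catch return;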

View File

@@ -1,5 +1,5 @@
const std = @import("std");
-const ImportContext = @import("document_store.zig").ImportContext;
+const AnalysisContext = @import("document_store.zig").AnalysisContext;
const ast = std.zig.ast;
/// REALLY BAD CODE, PLEASE DON'T USE THIS!!!!!!! (only for testing)
@@ -178,21 +178,21 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node {
}
/// Resolves the type of a node
-pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node {
+pub fn resolveTypeOfNode(analysis_ctx: *AnalysisContext, node: *ast.Node) ?*ast.Node {
switch (node.id) {
.VarDecl => {
const vari = node.cast(ast.Node.VarDecl).?;
-return resolveTypeOfNode(import_ctx, vari.type_node orelse vari.init_node.?) orelse null;
+return resolveTypeOfNode(analysis_ctx, vari.type_node orelse vari.init_node.?) orelse null;
},
.FnProto => {
const func = node.cast(ast.Node.FnProto).?;
switch (func.return_type) {
-.Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(import_ctx, return_type),
+.Explicit, .InferErrorSet => |return_type| return resolveTypeOfNode(analysis_ctx, return_type),
}
},
.Identifier => {
-if (getChild(import_ctx.tree, &import_ctx.tree.root_node.base, import_ctx.tree.getNodeSource(node))) |child| {
+if (getChild(analysis_ctx.tree, &analysis_ctx.tree.root_node.base, analysis_ctx.tree.getNodeSource(node))) |child| {
-return resolveTypeOfNode(import_ctx, child);
+return resolveTypeOfNode(analysis_ctx, child);
} else return null;
},
.ContainerDecl => {
@@ -200,13 +200,13 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node
},
.ContainerField => {
const field = node.cast(ast.Node.ContainerField).?;
-return resolveTypeOfNode(import_ctx, field.type_expr.?);
+return resolveTypeOfNode(analysis_ctx, field.type_expr.?);
},
.SuffixOp => {
const suffix_op = node.cast(ast.Node.SuffixOp).?;
switch (suffix_op.op) {
.Call => {
-return resolveTypeOfNode(import_ctx, suffix_op.lhs.node);
+return resolveTypeOfNode(analysis_ctx, suffix_op.lhs.node);
},
else => {}
}
@@ -217,12 +217,11 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node
.Period => {
// Save the child string from this tree since the tree may switch when processing
// an import lhs.
-var rhs_str = nodeToString(import_ctx.tree, infix_op.rhs) orelse return null;
+var rhs_str = nodeToString(analysis_ctx.tree, infix_op.rhs) orelse return null;
-// @TODO: This is hackish, pass an explicit allocator or smth
+// Use the analysis context temporary arena to store the rhs string.
-rhs_str = std.mem.dupe(import_ctx.store.allocator, u8, rhs_str) catch return null;
+rhs_str = std.mem.dupe(&analysis_ctx.arena.allocator, u8, rhs_str) catch return null;
-defer import_ctx.store.allocator.free(rhs_str);
-const left = resolveTypeOfNode(import_ctx, infix_op.lhs) orelse return null;
+const left = resolveTypeOfNode(analysis_ctx, infix_op.lhs) orelse return null;
-return getChild(import_ctx.tree, left, rhs_str);
+return getChild(analysis_ctx.tree, left, rhs_str);
},
else => {}
}
@@ -231,21 +230,21 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node
const prefix_op = node.cast(ast.Node.PrefixOp).?;
switch (prefix_op.op) {
.PtrType => {
-return resolveTypeOfNode(import_ctx, prefix_op.rhs);
+return resolveTypeOfNode(analysis_ctx, prefix_op.rhs);
},
else => {}
}
},
.BuiltinCall => {
const builtin_call = node.cast(ast.Node.BuiltinCall).?;
-if (!std.mem.eql(u8, import_ctx.tree.tokenSlice(builtin_call.builtin_token), "@import")) return null;
+if (!std.mem.eql(u8, analysis_ctx.tree.tokenSlice(builtin_call.builtin_token), "@import")) return null;
if (builtin_call.params.len > 1) return null;
const import_param = builtin_call.params.at(0).*;
if (import_param.id != .StringLiteral) return null;
-const import_str = import_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
+const import_str = analysis_ctx.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
-return import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: {
+return analysis_ctx.onImport(import_str[1 .. import_str.len - 1]) catch |err| block: {
std.debug.warn("Error {} while proessing import {}\n", .{err, import_str});
break :block null;
};
@@ -257,8 +256,8 @@ pub fn resolveTypeOfNode(import_ctx: *ImportContext, node: *ast.Node) ?*ast.Node
return null;
}
-pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node {
+pub fn getFieldAccessTypeNode(analysis_ctx: *AnalysisContext, tokenizer: *std.zig.Tokenizer) ?*ast.Node {
-var current_node = &import_ctx.tree.root_node.base;
+var current_node = &analysis_ctx.tree.root_node.base;
while (true) {
var next = tokenizer.next();
@@ -269,8 +268,8 @@ pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.To
.Identifier => {
// var root = current_node.cast(ast.Node.Root).?;
// current_node.
-if (getChild(import_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| {
+if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[next.start..next.end])) |child| {
-if (resolveTypeOfNode(import_ctx, child)) |node_type| {
+if (resolveTypeOfNode(analysis_ctx, child)) |node_type| {
current_node = node_type;
} else return null;
} else return null;
@@ -280,8 +279,8 @@ pub fn getFieldAccessTypeNode(import_ctx: *ImportContext, tokenizer: *std.zig.To
if (after_period.id == .Eof) {
return current_node;
} else if (after_period.id == .Identifier) {
-if (getChild(import_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| {
+if (getChild(analysis_ctx.tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| {
-if (resolveTypeOfNode(import_ctx, child)) |child_type| {
+if (resolveTypeOfNode(analysis_ctx, child)) |child_type| {
current_node = child_type;
} else return null;
} else return null;
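The .Period case above is the one spot in this file that needed the new arena: the rhs string must outlive a possible tree switch while an import lhs is resolved, and it previously did so through a manual dupe/free pair on the store allocator. A before/after sketch of just that allocation (surrounding switch omitted):

    // Before: explicit copy on the store allocator plus a deferred free.
    rhs_str = std.mem.dupe(import_ctx.store.allocator, u8, rhs_str) catch return null;
    defer import_ctx.store.allocator.free(rhs_str);

    // After: the copy lives in the per-request arena and is freed wholesale by the
    // caller's arena.deinit(), so the defer free() disappears.
    rhs_str = std.mem.dupe(&analysis_ctx.arena.allocator, u8, rhs_str) catch return null;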

View File

@@ -217,18 +217,15 @@ pub fn applyChanges(self: *DocumentStore, handle: *Handle, content_changes: std.
// @TODO: Make this hold a single tree, remove tree param
// from analysis functions that take an import_context.
// (can we reset-reuse it or do we need to deinit-init a new one?)
-pub const ImportContext = struct {
+pub const AnalysisContext = struct {
store: *DocumentStore,
handle: *Handle,
+// This arena is used for temporary allocations while analyzing,
+// not for the tree allocations.
+arena: *std.heap.ArenaAllocator,
tree: *std.zig.ast.Tree,
-// @TODO RemoveMe
-// pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree {
-// if (self.trees.items.len == 0) return null;
-// return self.trees.items[self.trees.items.len - 1];
-// }
-pub fn onImport(self: *ImportContext, import_str: []const u8) !?*std.zig.ast.Node {
+pub fn onImport(self: *AnalysisContext, import_str: []const u8) !?*std.zig.ast.Node {
const allocator = self.store.allocator;
const final_uri = if (std.mem.eql(u8, import_str, "std"))
@@ -326,17 +323,18 @@ pub const ImportContext = struct {
return null;
}
-pub fn deinit(self: *ImportContext) void {
+pub fn deinit(self: *AnalysisContext) void {
self.tree.deinit();
}
};
-pub fn importContext(self: *DocumentStore, handle: *Handle) !?ImportContext {
+pub fn analysisContext(self: *DocumentStore, handle: *Handle, arena: *std.heap.ArenaAllocator) !?AnalysisContext {
const tree = (try handle.saneTree(self.allocator)) orelse return null;
-return ImportContext{
+return AnalysisContext{
.store = self,
.handle = handle,
+.arena = arena,
.tree = tree,
};
}
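Note the ownership split the diff above implies: the arena belongs to the caller, while the tree is still owned by the context. Sketched out (the comments are an interpretation, not code from the commit):

    pub fn deinit(self: *AnalysisContext) void {
        // The tree was parsed with the store's allocator, so the context frees it here.
        self.tree.deinit();
        // The arena is deliberately not deinited here: it is caller-owned, and the
        // caller's `defer arena.deinit()` releases all temporary analysis allocations.
    }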

View File

@@ -254,7 +254,10 @@ fn completeGlobal(id: i64, handle: DocumentStore.Handle, config: Config) !void {
}
fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.Position, config: Config) !void {
-var import_ctx = (try document_store.importContext(handle)) orelse {
+var arena = std.heap.ArenaAllocator.init(allocator);
+defer arena.deinit();
+var analysis_ctx = (try document_store.analysisContext(handle, &arena)) orelse {
return send(types.Response{
.id = .{.Integer = id},
.result = .{
@@ -265,21 +268,17 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
},
});
};
-defer import_ctx.deinit();
+defer analysis_ctx.deinit();
-// We use a local arena allocator to deallocate all temporary data without iterating
-var arena = std.heap.ArenaAllocator.init(allocator);
var completions = std.ArrayList(types.CompletionItem).init(&arena.allocator);
-// Deallocate all temporary data.
-defer arena.deinit();
var line = try handle.document.getLine(@intCast(usize, position.line));
var tokenizer = std.zig.Tokenizer.init(line);
-if (analysis.getFieldAccessTypeNode(&import_ctx, &tokenizer)) |node| {
+if (analysis.getFieldAccessTypeNode(&analysis_ctx, &tokenizer)) |node| {
var index: usize = 0;
while (node.iterate(index)) |child_node| {
-if (try nodeToCompletion(&arena.allocator, import_ctx.tree, child_node, config)) |completion| {
+if (try nodeToCompletion(&arena.allocator, analysis_ctx.tree, child_node, config)) |completion| {
try completions.append(completion);
}
index += 1;