Fixed relative URI code, get latest tree in analysis

Alexandros Naskos 2020-05-14 05:14:37 +03:00
parent 86d264f488
commit 95b6e6cd6a
3 changed files with 28 additions and 21 deletions

View File

@@ -179,20 +179,22 @@ pub fn getChild(tree: *ast.Tree, node: *ast.Node, name: []const u8) ?*ast.Node {
 /// Resolves the type of a node
 pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportContext) ?*ast.Node {
+    var latest_tree = import_ctx.lastTree() orelse tree;
     switch (node.id) {
         .VarDecl => {
             const vari = node.cast(ast.Node.VarDecl).?;
-            return resolveTypeOfNode(tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null;
+            return resolveTypeOfNode(latest_tree, vari.type_node orelse vari.init_node.?, import_ctx) orelse null;
         },
         .FnProto => {
             const func = node.cast(ast.Node.FnProto).?;
             switch (func.return_type) {
-                .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(tree, return_type, import_ctx);}
+                .Explicit, .InferErrorSet => |return_type| {return resolveTypeOfNode(latest_tree, return_type, import_ctx);}
             }
         },
         .Identifier => {
-            if (getChild(tree, &tree.root_node.base, tree.getNodeSource(node))) |child| {
-                return resolveTypeOfNode(tree, child, import_ctx);
+            if (getChild(latest_tree, &latest_tree.root_node.base, latest_tree.getNodeSource(node))) |child| {
+                return resolveTypeOfNode(latest_tree, child, import_ctx);
             } else return null;
         },
         .ContainerDecl => {
@@ -200,13 +202,13 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo
         },
         .ContainerField => {
             const field = node.cast(ast.Node.ContainerField).?;
-            return resolveTypeOfNode(tree, field.type_expr.?, import_ctx);
+            return resolveTypeOfNode(latest_tree, field.type_expr.?, import_ctx);
         },
         .SuffixOp => {
             const suffix_op = node.cast(ast.Node.SuffixOp).?;
             switch (suffix_op.op) {
                 .Call => {
-                    return resolveTypeOfNode(tree, suffix_op.lhs.node, import_ctx);
+                    return resolveTypeOfNode(latest_tree, suffix_op.lhs.node, import_ctx);
                 },
                 else => {}
             }
@@ -215,9 +217,9 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo
             const infix_op = node.cast(ast.Node.InfixOp).?;
             switch (infix_op.op) {
                 .Period => {
-                    var left = resolveTypeOfNode(tree, infix_op.lhs, import_ctx) orelse return null;
-                    if (nodeToString(tree, infix_op.rhs)) |string| {
-                        return getChild(tree, left, string);
+                    var left = resolveTypeOfNode(latest_tree, infix_op.lhs, import_ctx) orelse return null;
+                    if (nodeToString(latest_tree, infix_op.rhs)) |string| {
+                        return getChild(latest_tree, left, string);
                     } else return null;
                 },
                 else => {}
@@ -227,21 +229,21 @@ pub fn resolveTypeOfNode(tree: *ast.Tree, node: *ast.Node, import_ctx: *ImportCo
             const prefix_op = node.cast(ast.Node.PrefixOp).?;
             switch (prefix_op.op) {
                 .PtrType => {
-                    return resolveTypeOfNode(tree, prefix_op.rhs, import_ctx);
+                    return resolveTypeOfNode(latest_tree, prefix_op.rhs, import_ctx);
                 },
                 else => {}
             }
         },
         .BuiltinCall => {
             const builtin_call = node.cast(ast.Node.BuiltinCall).?;
-            if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return null;
+            if (!std.mem.eql(u8, latest_tree.tokenSlice(builtin_call.builtin_token), "@import")) return null;
             if (builtin_call.params.len > 1) return null;
             const import_param = builtin_call.params.at(0).*;
             if (import_param.id != .StringLiteral) return null;
-            const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
-            // @TODO: Handle error better.
+            const import_str = latest_tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
+            // @TODO: Handle this error better.
             return (import_ctx.onImport(import_str[1 .. import_str.len - 1]) catch unreachable);
         },
         else => {
@@ -255,6 +257,7 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im
     var current_node = &tree.root_node.base;
     while (true) {
+        var latest_tree = import_ctx.lastTree() orelse tree;
         var next = tokenizer.next();
         switch (next.id) {
             .Eof => {
@@ -263,8 +266,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im
             .Identifier => {
                 // var root = current_node.cast(ast.Node.Root).?;
                 // current_node.
-                if (getChild(tree, current_node, tokenizer.buffer[next.start..next.end])) |child| {
-                    if (resolveTypeOfNode(tree, child, import_ctx)) |node_type| {
+                if (getChild(latest_tree, current_node, tokenizer.buffer[next.start..next.end])) |child| {
+                    if (resolveTypeOfNode(latest_tree, child, import_ctx)) |node_type| {
                         current_node = node_type;
                     } else return null;
                 } else return null;
@@ -274,8 +277,8 @@ pub fn getFieldAccessTypeNode(tree: *ast.Tree, tokenizer: *std.zig.Tokenizer, im
                 if (after_period.id == .Eof) {
                     return current_node;
                 } else if (after_period.id == .Identifier) {
-                    if (getChild(tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| {
-                        if (resolveTypeOfNode(tree, child, import_ctx)) |child_type| {
+                    if (getChild(latest_tree, current_node, tokenizer.buffer[after_period.start..after_period.end])) |child| {
+                        if (resolveTypeOfNode(latest_tree, child, import_ctx)) |child_type| {
                             current_node = child_type;
                         } else return null;
                     } else return null;
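
The recurring change in this file is the fallback import_ctx.lastTree() orelse tree: once an @import has been resolved, later lookups in resolveTypeOfNode and getFieldAccessTypeNode run against the most recently parsed tree rather than the tree the request started in, and fall back to the original tree while nothing has been imported yet. A minimal, self-contained sketch of that pattern, using a hypothetical stand-in for the import context (the real one stores *std.zig.ast.Tree values):

const std = @import("std");

// Hypothetical stand-in for ImportContext, only to illustrate the
// `lastTree() orelse tree` fallback used throughout the analysis code.
const FakeImportContext = struct {
    trees: std.ArrayList(u32),

    // Mirrors the optional-returning lastTree(): null when nothing has
    // been imported yet, otherwise the most recently parsed tree.
    fn lastTree(self: *FakeImportContext) ?u32 {
        if (self.trees.items.len == 0) return null;
        return self.trees.items[self.trees.items.len - 1];
    }
};

pub fn main() !void {
    var ctx = FakeImportContext{ .trees = std.ArrayList(u32).init(std.heap.page_allocator) };
    defer ctx.trees.deinit();

    const original_tree: u32 = 1;
    // No imports resolved yet: fall back to the tree the request started in.
    std.debug.warn("before import: {}\n", .{ctx.lastTree() orelse original_tree});

    // After an @import is resolved, its tree becomes the one to search.
    try ctx.trees.append(2);
    std.debug.warn("after import: {}\n", .{ctx.lastTree() orelse original_tree});
}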

View File

@@ -209,8 +209,8 @@ pub const ImportContext = struct {
     handle: *Handle,
     trees: std.ArrayList(*std.zig.ast.Tree),
-    pub fn lastTree(self: *ImportContext) *std.zig.ast.Tree {
-        std.debug.assert(self.trees.items.len > 0);
+    pub fn lastTree(self: *ImportContext) ?*std.zig.ast.Tree {
+        if (self.trees.items.len == 0) return null;
         return self.trees.items[self.trees.items.len - 1];
     }
@@ -233,9 +233,13 @@ pub const ImportContext = struct {
                 dir_path, import_str
             });
-            break :b import_path;
+            defer allocator.free(import_path);
+            break :b (try URI.fromPath(allocator, import_path));
         };
+        std.debug.warn("Import final URI: {}\n", .{final_uri});
         // @TODO Clean up code, lots of repetition
         {
             errdefer allocator.free(final_uri);
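
The second hunk above changes the labeled block so it yields a URI instead of a raw filesystem path: the joined path becomes an intermediate value, freed with defer once URI.fromPath has produced the string that final_uri keeps, and the resulting URI is logged for debugging. A rough, self-contained sketch of that shape (uriFromPath here is only a placeholder for the repository's URI.fromPath, and the paths are made up):

const std = @import("std");

// Placeholder for URI.fromPath; the real path-to-URI conversion lives in the repo.
fn uriFromPath(allocator: *std.mem.Allocator, path: []const u8) ![]const u8 {
    return std.fmt.allocPrint(allocator, "file://{}", .{path});
}

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    const final_uri = b: {
        // Join the importing file's directory with the import string...
        const import_path = try std.fs.path.join(allocator, &[_][]const u8{ "/project/src", "other.zig" });
        // ...and free that intermediate path once the URI has been built from it.
        defer allocator.free(import_path);
        break :b (try uriFromPath(allocator, import_path));
    };
    defer allocator.free(final_uri);

    std.debug.warn("Import final URI: {}\n", .{final_uri});
}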

View File

@@ -282,7 +282,7 @@ fn completeFieldAccess(id: i64, handle: *DocumentStore.Handle, position: types.P
     if (analysis.getFieldAccessTypeNode(tree, &tokenizer, &import_ctx)) |node| {
         var index: usize = 0;
         while (node.iterate(index)) |child_node| {
-            if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree(), child_node, config)) |completion| {
+            if (try nodeToCompletion(&arena.allocator, import_ctx.lastTree() orelse tree, child_node, config)) |completion| {
                 try completions.append(completion);
             }
             index += 1;