const std = @import("std");
const DocumentStore = @import("document_store.zig");
const ast = std.zig.ast;
const types = @import("types.zig");

/// Get a declaration's doc comment node
fn getDocCommentNode(tree: *ast.Tree, node: *ast.Node) ?*ast.Node.DocComment {
    if (node.cast(ast.Node.FnProto)) |func| {
        return func.doc_comments;
    } else if (node.cast(ast.Node.VarDecl)) |var_decl| {
        return var_decl.doc_comments;
    } else if (node.cast(ast.Node.ContainerField)) |field| {
        return field.doc_comments;
    } else if (node.cast(ast.Node.ErrorTag)) |tag| {
        return tag.doc_comments;
    }
    return null;
}

/// Gets a declaration's doc comments, caller must free memory when a value is returned
/// Like:
///```zig
///var comments = try getDocComments(allocator, tree, node);
///defer if (comments) |comments_pointer| allocator.free(comments_pointer);
///```
pub fn getDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) !?[]const u8 {
    if (getDocCommentNode(tree, node)) |doc_comment_node| {
        return try collectDocComments(allocator, tree, doc_comment_node);
    }
    return null;
}

fn collectDocComments(allocator: *std.mem.Allocator, tree: *ast.Tree, doc_comments: *ast.Node.DocComment) ![]const u8 {
    var lines = std.ArrayList([]const u8).init(allocator);
    defer lines.deinit();

    var curr_line_tok = doc_comments.first_line;
    while (true) : (curr_line_tok += 1) {
        switch (tree.token_ids[curr_line_tok]) {
            .LineComment => continue,
            .DocComment, .ContainerDocComment => {
                try lines.append(std.fmt.trim(tree.tokenSlice(curr_line_tok)[3..]));
            },
            else => break,
        }
    }

    return try std.mem.join(allocator, "\\\n", lines.items);
}
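
// For example (illustrative): the doc comment lines
//     /// Foo
//     /// Bar
// collect to "Foo" and "Bar" and are joined as "Foo\\\nBar", i.e. each line keeps a
// trailing backslash so markdown renderers treat it as a hard line break.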

/// Gets a function signature (keywords, name, return value)
pub fn getFunctionSignature(tree: *ast.Tree, func: *ast.Node.FnProto) []const u8 {
    const start = tree.token_locs[func.firstToken()].start;
    const end = tree.token_locs[switch (func.return_type) {
        .Explicit, .InferErrorSet => |node| node.lastToken(),
        .Invalid => |r_paren| r_paren,
    }].end;
    return tree.source[start..end];
}
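
// For example (illustrative): given `pub fn parse(src: []const u8) !Ast`, the returned
// slice spans from `pub` through `Ast`, i.e. the full prototype without the body.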

/// Gets a function snippet insert text
pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: *ast.Tree, func: *ast.Node.FnProto, skip_self_param: bool) ![]const u8 {
    const name_tok = func.name_token orelse unreachable;

    var buffer = std.ArrayList(u8).init(allocator);
    try buffer.ensureCapacity(128);

    try buffer.appendSlice(tree.tokenSlice(name_tok));
    try buffer.append('(');

    var buf_stream = buffer.outStream();

    for (func.paramsConst()) |param, param_num| {
        if (skip_self_param and param_num == 0) continue;
        if (param_num != @boolToInt(skip_self_param)) try buffer.appendSlice(", ${") else try buffer.appendSlice("${");

        try buf_stream.print("{}:", .{param_num + 1});

        if (param.comptime_token) |_| {
            try buffer.appendSlice("comptime ");
        }

        if (param.noalias_token) |_| {
            try buffer.appendSlice("noalias ");
        }

        if (param.name_token) |name_token| {
            try buffer.appendSlice(tree.tokenSlice(name_token));
            try buffer.appendSlice(": ");
        }

        switch (param.param_type) {
            .var_args => try buffer.appendSlice("..."),
            .var_type => try buffer.appendSlice("var"),
            .type_expr => |type_expr| {
                var curr_tok = type_expr.firstToken();
                var end_tok = type_expr.lastToken();
                while (curr_tok <= end_tok) : (curr_tok += 1) {
                    const id = tree.token_ids[curr_tok];
                    const is_comma = id == .Comma;

                    if (curr_tok == end_tok and is_comma) continue;

                    try buffer.appendSlice(tree.tokenSlice(curr_tok));
                    if (is_comma or id == .Keyword_const) try buffer.append(' ');
                }
            },
        }

        try buffer.append('}');
    }
    try buffer.append(')');

    return buffer.toOwnedSlice();
}
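
// For example (illustrative): for `fn add(a: i32, b: i32) i32` with skip_self_param == false,
// the produced snippet text is "add(${1:a: i32}, ${2:b: i32})" (LSP snippet tab stops).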

/// Gets a variable signature (keywords, name, init expression)
pub fn getVariableSignature(tree: *ast.Tree, var_decl: *ast.Node.VarDecl) []const u8 {
    const start = tree.token_locs[var_decl.firstToken()].start;
    const end = tree.token_locs[var_decl.semicolon_token].start;
    return tree.source[start..end];
}

pub fn isTypeFunction(tree: *ast.Tree, func: *ast.Node.FnProto) bool {
    switch (func.return_type) {
        .Explicit => |node| return if (node.cast(std.zig.ast.Node.Identifier)) |ident|
            std.mem.eql(u8, tree.tokenSlice(ident.token), "type")
        else
            false,
        .InferErrorSet, .Invalid => return false,
    }
}

// STYLE

pub fn isCamelCase(name: []const u8) bool {
    return !std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name, "_") == null;
}

pub fn isPascalCase(name: []const u8) bool {
    return std.ascii.isUpper(name[0]) and std.mem.indexOf(u8, name, "_") == null;
}
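
// A minimal sanity-check sketch for the style helpers (illustrative only):
// std.debug.assert(isCamelCase("fooBar"));
// std.debug.assert(!isCamelCase("foo_bar"));
// std.debug.assert(isPascalCase("FooBar"));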

// ANALYSIS ENGINE

pub fn getDeclNameToken(tree: *ast.Tree, node: *ast.Node) ?ast.TokenIndex {
    switch (node.id) {
        .VarDecl => {
            const vari = node.cast(ast.Node.VarDecl).?;
            return vari.name_token;
        },
        .FnProto => {
            const func = node.cast(ast.Node.FnProto).?;
            if (func.name_token == null) return null;
            return func.name_token.?;
        },
        .ContainerField => {
            const field = node.cast(ast.Node.ContainerField).?;
            return field.name_token;
        },
        // We need identifier for captures
        .Identifier => {
            const ident = node.cast(ast.Node.Identifier).?;
            return ident.token;
        },
        .TestDecl => {
            const decl = node.cast(ast.Node.TestDecl).?;
            return (decl.name.cast(ast.Node.StringLiteral) orelse return null).token;
        },
        else => {},
    }

    return null;
}

fn getDeclName(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
    const name = tree.tokenSlice(getDeclNameToken(tree, node) orelse return null);
    return switch (node.id) {
        .TestDecl => name[1 .. name.len - 1],
        else => name,
    };
}
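
// For example (illustrative): for `test "resolves imports" { ... }`, getDeclName returns
// `resolves imports` with the surrounding quotes stripped.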

fn findReturnStatementInternal(
    tree: *ast.Tree,
    fn_decl: *ast.Node.FnProto,
    base_node: *ast.Node,
    already_found: *bool,
) ?*ast.Node.ControlFlowExpression {
    var result: ?*ast.Node.ControlFlowExpression = null;
    var child_idx: usize = 0;
    while (base_node.iterate(child_idx)) |child_node| : (child_idx += 1) {
        switch (child_node.id) {
            .ControlFlowExpression => {
                const cfe = child_node.cast(ast.Node.ControlFlowExpression).?;
                if (cfe.kind == .Return) {
                    // If we are calling ourselves recursively, ignore this return.
                    if (cfe.rhs) |rhs| {
                        if (rhs.cast(ast.Node.Call)) |call_node| {
                            if (call_node.lhs.id == .Identifier) {
                                if (std.mem.eql(u8, getDeclName(tree, call_node.lhs).?, getDeclName(tree, &fn_decl.base).?)) {
                                    continue;
                                }
                            }
                        }
                    }

                    if (already_found.*) return null;
                    already_found.* = true;
                    result = cfe;
                    continue;
                }
            },
            else => {},
        }

        result = findReturnStatementInternal(tree, fn_decl, child_node, already_found);
    }
    return result;
}

fn findReturnStatement(tree: *ast.Tree, fn_decl: *ast.Node.FnProto) ?*ast.Node.ControlFlowExpression {
    var already_found = false;
    return findReturnStatementInternal(tree, fn_decl, fn_decl.body_node.?, &already_found);
}

/// Resolves the return type of a function
fn resolveReturnType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, fn_decl: *ast.Node.FnProto, handle: *DocumentStore.Handle) !?NodeWithHandle {
    if (isTypeFunction(handle.tree, fn_decl) and fn_decl.body_node != null) {
        // If this is a type function and it only contains a single return statement that returns
        // a container declaration, we will return that declaration.
        const ret = findReturnStatement(handle.tree, fn_decl) orelse return null;
        if (ret.rhs) |rhs|
            if (try resolveTypeOfNode(store, arena, .{ .node = rhs, .handle = handle })) |res_rhs| switch (res_rhs.node.id) {
                .ContainerDecl => {
                    return res_rhs;
                },
                else => return null,
            };

        return null;
    }

    return switch (fn_decl.return_type) {
        .Explicit, .InferErrorSet => |return_type| try resolveTypeOfNode(store, arena, .{ .node = return_type, .handle = handle }),
        .Invalid => null,
    };
}

/// Resolves the child type of an optional type
fn resolveUnwrapOptionalType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, opt: NodeWithHandle) !?NodeWithHandle {
    if (opt.node.cast(ast.Node.PrefixOp)) |prefix_op| {
        if (prefix_op.op == .OptionalType) {
            return try resolveTypeOfNode(store, arena, .{ .node = prefix_op.rhs, .handle = opt.handle });
        }
    }

    return null;
}

/// Resolves the child type of a dereferenced pointer type
fn resolveDerefType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, deref: NodeWithHandle) !?NodeWithHandle {
    if (deref.node.cast(ast.Node.PrefixOp)) |pop| {
        if (pop.op == .PtrType) {
            const op_token_id = deref.handle.tree.token_ids[pop.op_token];
            switch (op_token_id) {
                .Asterisk => return try resolveTypeOfNode(store, arena, .{ .node = pop.rhs, .handle = deref.handle }),
                .LBracket, .AsteriskAsterisk => return null,
                else => unreachable,
            }
        }
    }
    return null;
}

fn makeSliceType(arena: *std.heap.ArenaAllocator, child_type: *ast.Node) ?*ast.Node {
    // TODO: Better values for fields, better way to do this?
    var slice_type = arena.allocator.create(ast.Node.PrefixOp) catch return null;
    slice_type.* = .{
        .op_token = child_type.firstToken(),
        .op = .{
            .SliceType = .{
                .allowzero_token = null,
                .align_info = null,
                .const_token = null,
                .volatile_token = null,
                .sentinel = null,
            },
        },
        .rhs = child_type,
    };

    return &slice_type.base;
}

/// Resolves bracket access type (both slicing and array access)
fn resolveBracketAccessType(
    store: *DocumentStore,
    arena: *std.heap.ArenaAllocator,
    lhs: NodeWithHandle,
    rhs: enum { Single, Range },
) !?NodeWithHandle {
    if (lhs.node.cast(ast.Node.PrefixOp)) |pop| {
        switch (pop.op) {
            .SliceType => {
                if (rhs == .Single) return try resolveTypeOfNode(store, arena, .{ .node = pop.rhs, .handle = lhs.handle });
                return lhs;
            },
            .ArrayType => {
                if (rhs == .Single) return try resolveTypeOfNode(store, arena, .{ .node = pop.rhs, .handle = lhs.handle });
                return NodeWithHandle{ .node = makeSliceType(arena, pop.rhs) orelse return null, .handle = lhs.handle };
            },
            .PtrType => {
                if (pop.rhs.cast(std.zig.ast.Node.PrefixOp)) |child_pop| {
                    switch (child_pop.op) {
                        .ArrayType => {
                            if (rhs == .Single) {
                                return try resolveTypeOfNode(store, arena, .{ .node = child_pop.rhs, .handle = lhs.handle });
                            }
                            return lhs;
                        },
                        else => {},
                    }
                }
            },
            else => {},
        }
    }
    return null;
}

/// Called to remove one level of pointerness before a field access
fn resolveFieldAccessLhsType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, lhs: NodeWithHandle) !NodeWithHandle {
    return (try resolveDerefType(store, arena, lhs)) orelse lhs;
}

/// Resolves the type of a node
pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator, node_handle: NodeWithHandle) error{OutOfMemory}!?NodeWithHandle {
    const node = node_handle.node;
    const handle = node_handle.handle;

    switch (node.id) {
        .VarDecl => {
            const vari = node.cast(ast.Node.VarDecl).?;
            return try resolveTypeOfNode(store, arena, .{ .node = vari.type_node orelse vari.init_node.?, .handle = handle });
        },
        .Identifier => {
            if (try lookupSymbolGlobal(store, handle, handle.tree.getNodeSource(node), handle.tree.token_locs[node.firstToken()].start)) |child| {
                return try child.resolveType(store, arena);
            }
            return null;
        },
        .ContainerField => {
            const field = node.cast(ast.Node.ContainerField).?;
            return try resolveTypeOfNode(store, arena, .{ .node = field.type_expr orelse return null, .handle = handle });
        },
        .Call => {
            const call = node.cast(ast.Node.Call).?;

            const decl = (try resolveTypeOfNode(store, arena, .{ .node = call.lhs, .handle = handle })) orelse return null;
            if (decl.node.cast(ast.Node.FnProto)) |fn_decl| {
                // TODO Use some type of ParamDecl -> NodeWithHandle map while resolving, and associate type params here.
                return try resolveReturnType(store, arena, fn_decl, decl.handle);
            }
            return decl;
        },
        .StructInitializer => {
            const struct_init = node.cast(ast.Node.StructInitializer).?;
            return try resolveTypeOfNode(store, arena, .{ .node = struct_init.lhs, .handle = handle });
        },
        .ErrorSetDecl => {
            const set = node.cast(ast.Node.ErrorSetDecl).?;
            var i: usize = 0;
            while (set.iterate(i)) |decl| : (i += 1) {
                try store.error_completions.add(handle.tree, decl);
            }
            return node_handle;
        },
        .SuffixOp => {
            const suffix_op = node.cast(ast.Node.SuffixOp).?;
            const left_type = (try resolveTypeOfNode(store, arena, .{ .node = suffix_op.lhs, .handle = handle })) orelse return null;
            return switch (suffix_op.op) {
                .UnwrapOptional => try resolveUnwrapOptionalType(store, arena, left_type),
                .Deref => try resolveDerefType(store, arena, left_type),
                .ArrayAccess => try resolveBracketAccessType(store, arena, left_type, .Single),
                .Slice => try resolveBracketAccessType(store, arena, left_type, .Range),
                else => null,
            };
        },
        .InfixOp => {
            const infix_op = node.cast(ast.Node.InfixOp).?;
            switch (infix_op.op) {
                .Period => {
                    const rhs_str = nodeToString(handle.tree, infix_op.rhs) orelse return null;
                    // If we are accessing a pointer type, remove one pointerness level :)
                    const left_type = try resolveFieldAccessLhsType(
                        store,
                        arena,
                        (try resolveTypeOfNode(store, arena, .{
                            .node = infix_op.lhs,
                            .handle = handle,
                        })) orelse return null,
                    );

                    // @TODO Error sets
                    if (left_type.node.id != .ContainerDecl and left_type.node.id != .Root) return null;

                    if (try lookupSymbolContainer(store, left_type, rhs_str, true)) |child| {
                        return try child.resolveType(store, arena);
                    } else return null;
                },
                .UnwrapOptional => {
                    const left_type = (try resolveTypeOfNode(store, arena, .{
                        .node = infix_op.lhs,
                        .handle = handle,
                    })) orelse return null;
                    return try resolveUnwrapOptionalType(store, arena, left_type);
                },
                else => return null,
            }
        },
        .PrefixOp => {
            const prefix_op = node.cast(ast.Node.PrefixOp).?;
            switch (prefix_op.op) {
                .SliceType,
                .ArrayType,
                .OptionalType,
                .PtrType,
                => return node_handle,
                .Try => {
                    const rhs_type = (try resolveTypeOfNode(store, arena, .{ .node = prefix_op.rhs, .handle = handle })) orelse return null;
                    switch (rhs_type.node.id) {
                        .InfixOp => {
                            const infix_op = rhs_type.node.cast(ast.Node.InfixOp).?;
                            if (infix_op.op == .ErrorUnion) return NodeWithHandle{
                                .node = infix_op.rhs,
                                .handle = rhs_type.handle,
                            };
                        },
                        else => {},
                    }
                    return rhs_type;
                },
                else => {},
            }
        },
        .BuiltinCall => {
            const builtin_call = node.cast(ast.Node.BuiltinCall).?;
            const call_name = handle.tree.tokenSlice(builtin_call.builtin_token);
            if (std.mem.eql(u8, call_name, "@This")) {
                if (builtin_call.params_len != 0) return null;
                return innermostContainer(handle, handle.tree.token_locs[builtin_call.firstToken()].start);
            }

            // TODO: https://github.com/ziglang/zig/issues/4335
            const cast_map = std.ComptimeStringMap(void, .{
                .{ .@"0" = "@as" },
                .{ .@"0" = "@bitCast" },
                .{ .@"0" = "@fieldParentPtr" },
                .{ .@"0" = "@floatCast" },
                .{ .@"0" = "@floatToInt" },
                .{ .@"0" = "@intCast" },
                .{ .@"0" = "@intToEnum" },
                .{ .@"0" = "@intToFloat" },
                .{ .@"0" = "@intToPtr" },
                .{ .@"0" = "@truncate" },
                .{ .@"0" = "@ptrCast" },
            });
            if (cast_map.has(call_name)) {
                if (builtin_call.params_len < 1) return null;
                return try resolveTypeOfNode(store, arena, .{ .node = builtin_call.paramsConst()[0], .handle = handle });
            }

            if (!std.mem.eql(u8, call_name, "@import")) return null;
            if (builtin_call.params_len < 1) return null;

            const import_param = builtin_call.paramsConst()[0];
            if (import_param.id != .StringLiteral) return null;

            const import_str = handle.tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
            const new_handle = (store.resolveImport(handle, import_str[1 .. import_str.len - 1]) catch |err| block: {
                std.debug.warn("Error {} while processing import {}\n", .{ err, import_str });
                return null;
            }) orelse return null;

            return NodeWithHandle{ .node = &new_handle.tree.root_node.base, .handle = new_handle };
        },
        .ContainerDecl => {
            const container = node.cast(ast.Node.ContainerDecl).?;
            const kind = handle.tree.token_ids[container.kind_token];

            if (kind == .Keyword_struct or (kind == .Keyword_union and container.init_arg_expr == .None)) {
                return node_handle;
            }

            var i: usize = 0;
            while (container.iterate(i)) |decl| : (i += 1) {
                if (decl.id != .ContainerField) continue;
                try store.enum_completions.add(handle.tree, decl);
            }
            return node_handle;
        },
        .MultilineStringLiteral, .StringLiteral, .FnProto => return node_handle,
        else => std.debug.warn("Type resolution case not implemented; {}\n", .{node.id}),
    }
    return null;
}

fn maybeCollectImport(tree: *ast.Tree, builtin_call: *ast.Node.BuiltinCall, arr: *std.ArrayList([]const u8)) !void {
    if (!std.mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@import")) return;
    if (builtin_call.params_len > 1) return;

    const import_param = builtin_call.paramsConst()[0];
    if (import_param.id != .StringLiteral) return;

    const import_str = tree.tokenSlice(import_param.cast(ast.Node.StringLiteral).?.token);
    try arr.append(import_str[1 .. import_str.len - 1]);
}

/// Collects all imports we can find into a slice of import paths (without quotes).
/// The import paths are valid as long as the tree is.
pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: *ast.Tree) !void {
    // TODO: Currently only detects `const smth = @import("string literal")<.SomeThing>;`
    for (tree.root_node.decls()) |decl| {
        if (decl.id != .VarDecl) continue;
        const var_decl = decl.cast(ast.Node.VarDecl).?;
        if (var_decl.init_node == null) continue;

        switch (var_decl.init_node.?.id) {
            .BuiltinCall => {
                const builtin_call = var_decl.init_node.?.cast(ast.Node.BuiltinCall).?;
                try maybeCollectImport(tree, builtin_call, import_arr);
            },
            .InfixOp => {
                const infix_op = var_decl.init_node.?.cast(ast.Node.InfixOp).?;

                switch (infix_op.op) {
                    .Period => {},
                    else => continue,
                }
                if (infix_op.lhs.id != .BuiltinCall) continue;
                try maybeCollectImport(tree, infix_op.lhs.cast(ast.Node.BuiltinCall).?, import_arr);
            },
            else => {},
        }
    }
}
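
// Example caller sketch (hypothetical; `allocator` and a parsed `tree` are assumed to exist):
// var imports = std.ArrayList([]const u8).init(allocator);
// defer imports.deinit();
// try collectImports(&imports, tree);
// for (imports.items) |path| std.debug.warn("import: {}\n", .{path});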

pub const NodeWithHandle = struct {
    node: *ast.Node,
    handle: *DocumentStore.Handle,
};

pub fn getFieldAccessTypeNode(
    store: *DocumentStore,
    arena: *std.heap.ArenaAllocator,
    handle: *DocumentStore.Handle,
    tokenizer: *std.zig.Tokenizer,
) !?NodeWithHandle {
    var current_node = NodeWithHandle{
        .node = &handle.tree.root_node.base,
        .handle = handle,
    };

    while (true) {
        const tok = tokenizer.next();
        switch (tok.id) {
            .Eof => return try resolveFieldAccessLhsType(store, arena, current_node),
            .Identifier => {
                if (try lookupSymbolGlobal(store, current_node.handle, tokenizer.buffer[tok.loc.start..tok.loc.end], tok.loc.start)) |child| {
                    current_node = (try child.resolveType(store, arena)) orelse return null;
                } else return null;
            },
            .Period => {
                const after_period = tokenizer.next();
                switch (after_period.id) {
                    .Eof => return try resolveFieldAccessLhsType(store, arena, current_node),
                    .Identifier => {
                        if (after_period.loc.end == tokenizer.buffer.len) return try resolveFieldAccessLhsType(store, arena, current_node);

                        current_node = try resolveFieldAccessLhsType(store, arena, current_node);
                        // @TODO Error sets
                        if (current_node.node.id != .ContainerDecl and current_node.node.id != .Root) {
                            // @TODO Is this ok?
                            return null;
                        }

                        if (try lookupSymbolContainer(store, current_node, tokenizer.buffer[after_period.loc.start..after_period.loc.end], true)) |child| {
                            current_node = (try child.resolveType(store, arena)) orelse return null;
                        } else return null;
                    },
                    .QuestionMark => {
                        current_node = (try resolveUnwrapOptionalType(store, arena, current_node)) orelse return null;
                    },
                    else => {
                        std.debug.warn("Unrecognized token {} after period.\n", .{after_period.id});
                        return null;
                    },
                }
            },
            .PeriodAsterisk => {
                current_node = (try resolveDerefType(store, arena, current_node)) orelse return null;
            },
            .LParen => {
                switch (current_node.node.id) {
                    .FnProto => {
                        const func = current_node.node.cast(ast.Node.FnProto).?;
                        if (try resolveReturnType(store, arena, func, current_node.handle)) |ret| {
                            current_node = ret;
                            // Skip to the right paren
                            var paren_count: usize = 1;
                            var next = tokenizer.next();
                            while (next.id != .Eof) : (next = tokenizer.next()) {
                                if (next.id == .RParen) {
                                    paren_count -= 1;
                                    if (paren_count == 0) break;
                                } else if (next.id == .LParen) {
                                    paren_count += 1;
                                }
                            } else return null;
                        } else return null;
                    },
                    else => {},
                }
            },
            .LBracket => {
                var brack_count: usize = 1;
                var next = tokenizer.next();
                var is_range = false;
                while (next.id != .Eof) : (next = tokenizer.next()) {
                    if (next.id == .RBracket) {
                        brack_count -= 1;
                        if (brack_count == 0) break;
                    } else if (next.id == .LBracket) {
                        brack_count += 1;
                    } else if (next.id == .Ellipsis2 and brack_count == 1) {
                        is_range = true;
                    }
                } else return null;

                current_node = (try resolveBracketAccessType(store, arena, current_node, if (is_range) .Range else .Single)) orelse return null;
            },
            else => {
                std.debug.warn("Unimplemented token: {}\n", .{tok.id});
                return null;
            },
        }
    }

    return try resolveFieldAccessLhsType(store, arena, current_node);
}

pub fn isNodePublic(tree: *ast.Tree, node: *ast.Node) bool {
    switch (node.id) {
        .VarDecl => {
            const var_decl = node.cast(ast.Node.VarDecl).?;
            return var_decl.visib_token != null;
        },
        .FnProto => {
            const func = node.cast(ast.Node.FnProto).?;
            return func.visib_token != null;
        },
        else => return true,
    }
}

pub fn nodeToString(tree: *ast.Tree, node: *ast.Node) ?[]const u8 {
    switch (node.id) {
        .ContainerField => {
            const field = node.cast(ast.Node.ContainerField).?;
            return tree.tokenSlice(field.name_token);
        },
        .ErrorTag => {
            const tag = node.cast(ast.Node.ErrorTag).?;
            return tree.tokenSlice(tag.name_token);
        },
        .Identifier => {
            const field = node.cast(ast.Node.Identifier).?;
            return tree.tokenSlice(field.token);
        },
        .FnProto => {
            const func = node.cast(ast.Node.FnProto).?;
            if (func.name_token) |name_token| {
                return tree.tokenSlice(name_token);
            }
        },
        else => {
            std.debug.warn("INVALID: {}\n", .{node.id});
        },
    }

    return null;
}

fn nodeContainsSourceIndex(tree: *ast.Tree, node: *ast.Node, source_index: usize) bool {
    const first_token = tree.token_locs[node.firstToken()];
    const last_token = tree.token_locs[node.lastToken()];
    return source_index >= first_token.start and source_index <= last_token.end;
}

pub fn getImportStr(tree: *ast.Tree, source_index: usize) ?[]const u8 {
    var node = &tree.root_node.base;

    var child_idx: usize = 0;
    while (node.iterate(child_idx)) |child| {
        if (!nodeContainsSourceIndex(tree, child, source_index)) {
            child_idx += 1;
            continue;
        }
        if (child.cast(ast.Node.BuiltinCall)) |builtin_call| blk: {
            const call_name = tree.tokenSlice(builtin_call.builtin_token);

            if (!std.mem.eql(u8, call_name, "@import")) break :blk;
            if (builtin_call.params_len != 1) break :blk;

            const import_param = builtin_call.paramsConst()[0];
            const import_str_node = import_param.cast(ast.Node.StringLiteral) orelse break :blk;
            const import_str = tree.tokenSlice(import_str_node.token);
            return import_str[1 .. import_str.len - 1];
        }
        node = child;
        child_idx = 0;
    }
    return null;
}
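
// For example (illustrative): with the source `const std = @import("std");` and a
// source_index inside the call, getImportStr returns `std` (quotes stripped).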

pub const SourceRange = std.zig.Token.Loc;

pub const PositionContext = union(enum) {
    builtin: SourceRange,
    comment,
    string_literal: SourceRange,
    field_access: SourceRange,
    var_access: SourceRange,
    global_error_set,
    enum_literal,
    other,
    empty,

    pub fn range(self: PositionContext) ?SourceRange {
        return switch (self) {
            .builtin => |r| r,
            .comment => null,
            .string_literal => |r| r,
            .field_access => |r| r,
            .var_access => |r| r,
            .enum_literal => null,
            .other => null,
            .empty => null,
            .global_error_set => null,
        };
    }
};

const StackState = struct {
    ctx: PositionContext,
    stack_id: enum { Paren, Bracket, Global },
};

fn peek(arr: *std.ArrayList(StackState)) !*StackState {
    if (arr.items.len == 0) {
        try arr.append(.{ .ctx = .empty, .stack_id = .Global });
    }
    return &arr.items[arr.items.len - 1];
}

fn tokenRangeAppend(prev: SourceRange, token: std.zig.Token) SourceRange {
    return .{
        .start = prev.start,
        .end = token.loc.end,
    };
}

pub fn documentPositionContext(allocator: *std.mem.Allocator, document: types.TextDocument, position: types.Position) !PositionContext {
    const line = try document.getLine(@intCast(usize, position.line));
    const pos_char = @intCast(usize, position.character) + 1;
    const idx = if (pos_char > line.len) line.len else pos_char;

    var arena = std.heap.ArenaAllocator.init(allocator);
    defer arena.deinit();

    var tokenizer = std.zig.Tokenizer.init(line[0..idx]);
    var stack = try std.ArrayList(StackState).initCapacity(&arena.allocator, 8);

    while (true) {
        const tok = tokenizer.next();
        // Early exits.
        switch (tok.id) {
            .Invalid, .Invalid_ampersands => {
                // Single '@' do not return a builtin token so we check this on our own.
                if (line[idx - 1] == '@') {
                    return PositionContext{
                        .builtin = .{
                            .start = idx - 1,
                            .end = idx,
                        },
                    };
                }
                return .other;
            },
            .LineComment, .DocComment, .ContainerDocComment => return .comment,
            .Eof => break,
            else => {},
        }

        // State changes
        var curr_ctx = try peek(&stack);
        switch (tok.id) {
            .StringLiteral, .MultilineStringLiteralLine => curr_ctx.ctx = .{ .string_literal = tok.loc },
            .Identifier => switch (curr_ctx.ctx) {
                .empty => curr_ctx.ctx = .{ .var_access = tok.loc },
                else => {},
            },
            .Builtin => switch (curr_ctx.ctx) {
                .empty => curr_ctx.ctx = .{ .builtin = tok.loc },
                else => {},
            },
            .Period, .PeriodAsterisk => switch (curr_ctx.ctx) {
                .empty => curr_ctx.ctx = .enum_literal,
                .enum_literal => curr_ctx.ctx = .empty,
                .field_access => {},
                .other => {},
                .global_error_set => {},
                else => curr_ctx.ctx = .{
                    .field_access = tokenRangeAppend(curr_ctx.ctx.range().?, tok),
                },
            },
            .QuestionMark => switch (curr_ctx.ctx) {
                .field_access => {},
                else => curr_ctx.ctx = .empty,
            },
            .LParen => try stack.append(.{ .ctx = .empty, .stack_id = .Paren }),
            .LBracket => try stack.append(.{ .ctx = .empty, .stack_id = .Bracket }),
            .RParen => {
                _ = stack.pop();
                if (curr_ctx.stack_id != .Paren) {
                    (try peek(&stack)).ctx = .empty;
                }
            },
            .RBracket => {
                _ = stack.pop();
                if (curr_ctx.stack_id != .Bracket) {
                    (try peek(&stack)).ctx = .empty;
                }
            },
            .Keyword_error => curr_ctx.ctx = .global_error_set,
            else => curr_ctx.ctx = .empty,
        }

        switch (curr_ctx.ctx) {
            .field_access => |r| curr_ctx.ctx = .{
                .field_access = tokenRangeAppend(r, tok),
            },
            else => {},
        }
    }

    return block: {
        if (stack.popOrNull()) |state| break :block state.ctx;
        break :block .empty;
    };
}
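
// For example (illustrative): with the cursor at the end of `foo.bar.`, the result is
// `.field_access` covering "foo.bar."; at the end of `@imp` it is `.builtin`; inside a
// line comment it is `.comment`.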

fn addOutlineNodes(allocator: *std.mem.Allocator, children: *std.ArrayList(types.DocumentSymbol), tree: *ast.Tree, child: *ast.Node) anyerror!void {
    switch (child.id) {
        .StringLiteral, .IntegerLiteral, .BuiltinCall, .Call, .Identifier, .InfixOp, .PrefixOp, .SuffixOp, .ControlFlowExpression, .ArrayInitializerDot, .SwitchElse, .SwitchCase, .For, .EnumLiteral, .PointerIndexPayload, .StructInitializerDot, .PointerPayload, .While, .Switch, .Else, .BoolLiteral, .NullLiteral, .Defer, .StructInitializer, .FieldInitializer, .If, .MultilineStringLiteral, .UndefinedLiteral, .VarType, .Block, .ErrorSetDecl => return,

        .ContainerDecl => {
            const decl = child.cast(ast.Node.ContainerDecl).?;

            for (decl.fieldsAndDecls()) |cchild|
                try addOutlineNodes(allocator, children, tree, cchild);
            return;
        },
        else => {},
    }
    _ = try children.append(try getDocumentSymbolsInternal(allocator, tree, child));
}

fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: *ast.Tree, node: *ast.Node) anyerror!types.DocumentSymbol {
    // const symbols = std.ArrayList(types.DocumentSymbol).init(allocator);
    const start_loc = tree.tokenLocation(0, node.firstToken());
    const end_loc = tree.tokenLocation(0, node.lastToken());
    const range = types.Range{
        .start = .{
            .line = @intCast(i64, start_loc.line),
            .character = @intCast(i64, start_loc.column),
        },
        .end = .{
            .line = @intCast(i64, end_loc.line),
            .character = @intCast(i64, end_loc.column),
        },
    };

    if (getDeclName(tree, node) == null) {
        std.debug.warn("NULL NAME: {}\n", .{node.id});
    }

    const maybe_name = if (getDeclName(tree, node)) |name|
        name
    else
        "";

    // TODO: Get my lazy bum to fix detail newlines
    return types.DocumentSymbol{
        .name = if (maybe_name.len == 0) switch (node.id) {
            .TestDecl => "Nameless Test",
            else => "no_name",
        } else maybe_name,
        // .detail = (try getDocComments(allocator, tree, node)) orelse "",
        .detail = "",
        .kind = switch (node.id) {
            .FnProto => .Function,
            .VarDecl => .Variable,
            .ContainerField => .Field,
            else => .Variable,
        },
        .range = range,
        .selectionRange = range,
        .children = ch: {
            var children = std.ArrayList(types.DocumentSymbol).init(allocator);

            var index: usize = 0;
            while (node.iterate(index)) |child| : (index += 1) {
                try addOutlineNodes(allocator, &children, tree, child);
            }

            break :ch children.items;
        },
    };

    // return symbols.items;
}

pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: *ast.Tree) ![]types.DocumentSymbol {
    var symbols = std.ArrayList(types.DocumentSymbol).init(allocator);

    for (tree.root_node.decls()) |node| {
        _ = try symbols.append(try getDocumentSymbolsInternal(allocator, tree, node));
    }

    return symbols.items;
}
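
// Example caller sketch (hypothetical; `allocator` and a parsed `tree` are assumed to exist,
// and the allocator is expected to outlive the returned slice):
// const symbols = try getDocumentSymbols(allocator, tree);
// for (symbols) |symbol| std.debug.warn("symbol: {}\n", .{symbol.name});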

pub const Declaration = union(enum) {
    ast_node: *ast.Node,
    param_decl: *ast.Node.FnProto.ParamDecl,
    pointer_payload: struct {
        node: *ast.Node.PointerPayload,
        condition: *ast.Node,
    },
    array_payload: struct {
        identifier: *ast.Node,
        array_expr: *ast.Node,
    },
    switch_payload: struct {
        node: *ast.Node.PointerPayload,
        items: []const *ast.Node,
    },
};

pub const DeclWithHandle = struct {
    decl: *Declaration,
    handle: *DocumentStore.Handle,

    pub fn location(self: DeclWithHandle) ast.Tree.Location {
        const tree = self.handle.tree;
        return switch (self.decl.*) {
            .ast_node => |n| block: {
                const name_token = getDeclNameToken(tree, n).?;
                break :block tree.tokenLocation(0, name_token);
            },
            .param_decl => |p| tree.tokenLocation(0, p.name_token.?),
            .pointer_payload => |pp| tree.tokenLocation(0, pp.node.value_symbol.firstToken()),
            .array_payload => |ap| tree.tokenLocation(0, ap.identifier.firstToken()),
            .switch_payload => |sp| tree.tokenLocation(0, sp.node.value_symbol.firstToken()),
        };
    }

    fn resolveType(self: DeclWithHandle, store: *DocumentStore, arena: *std.heap.ArenaAllocator) !?NodeWithHandle {
        return switch (self.decl.*) {
            .ast_node => |node| try resolveTypeOfNode(store, arena, .{ .node = node, .handle = self.handle }),
            .param_decl => |param_decl| switch (param_decl.param_type) {
                .type_expr => |type_node| try resolveTypeOfNode(store, arena, .{ .node = type_node, .handle = self.handle }),
                else => null,
            },
            .pointer_payload => |pay| try resolveUnwrapOptionalType(
                store,
                arena,
                (try resolveTypeOfNode(store, arena, .{
                    .node = pay.condition,
                    .handle = self.handle,
                })) orelse return null,
            ),
            .array_payload => |pay| try resolveBracketAccessType(
                store,
                arena,
                .{
                    .node = pay.array_expr,
                    .handle = self.handle,
                },
                .Single,
            ),
            // TODO Resolve switch payload types
            .switch_payload => |pay| return null,
        };
    }
};

pub fn iterateSymbolsGlobal(
    store: *DocumentStore,
    handle: *DocumentStore.Handle,
    source_index: usize,
    comptime callback: var,
    context: var,
) !void {
    for (handle.document_scope.scopes) |scope| {
        if (source_index >= scope.range.start and source_index < scope.range.end) {
            var decl_it = scope.decls.iterator();
            while (decl_it.next()) |entry| {
                try callback(context, DeclWithHandle{ .decl = &entry.value, .handle = handle });
            }

            for (scope.uses) |use| {
                // @TODO Resolve uses, iterate over their symbols.
            }
        }

        if (scope.range.start >= source_index) return;
    }
}

pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) NodeWithHandle {
    var current = handle.document_scope.scopes[0].data.container;
    if (handle.document_scope.scopes.len == 1) return .{ .node = current, .handle = handle };

    for (handle.document_scope.scopes[1..]) |scope| {
        if (source_index >= scope.range.start and source_index < scope.range.end) {
            switch (scope.data) {
                .container => |node| current = node,
                else => {},
            }
        }
        if (scope.range.start > source_index) break;
    }
    return .{ .node = current, .handle = handle };
}

pub fn lookupSymbolGlobal(store: *DocumentStore, handle: *DocumentStore.Handle, symbol: []const u8, source_index: usize) !?DeclWithHandle {
    for (handle.document_scope.scopes) |scope| {
        if (source_index >= scope.range.start and source_index < scope.range.end) {
            if (scope.decls.get(symbol)) |candidate| {
                switch (candidate.value) {
                    .ast_node => |node| {
                        if (node.id == .ContainerField) continue;
                    },
                    else => {},
                }
                return DeclWithHandle{
                    .decl = &candidate.value,
                    .handle = handle,
                };
            }

            for (scope.uses) |use| {
                // @TODO Resolve use, lookup symbol in resulting scope.
            }
        }

        if (scope.range.start > source_index) return null;
    }

    return null;
}

pub fn lookupSymbolContainer(store: *DocumentStore, container_handle: NodeWithHandle, symbol: []const u8, accept_fields: bool) !?DeclWithHandle {
    const container = container_handle.node;
    const handle = container_handle.handle;
    std.debug.assert(container.id == .ContainerDecl or container.id == .Root);
    // Find the container scope.
    var maybe_container_scope: ?*Scope = null;
    for (handle.document_scope.scopes) |*scope| {
        switch (scope.*.data) {
            .container => |node| if (node == container) {
                maybe_container_scope = scope;
                break;
            },
            else => {},
        }
    }

    if (maybe_container_scope) |container_scope| {
        if (container_scope.decls.get(symbol)) |candidate| {
            switch (candidate.value) {
                .ast_node => |node| {
                    if (node.id == .ContainerDecl and !accept_fields) return null;
                },
                else => {},
            }
            return DeclWithHandle{ .decl = &candidate.value, .handle = handle };
        }

        for (container_scope.uses) |use| {
            // @TODO Resolve use, lookup symbol in resulting scope.
        }
        return null;
    }
    unreachable;
}

pub const DocumentScope = struct {
    scopes: []Scope,

    pub fn debugPrint(self: DocumentScope) void {
        for (self.scopes) |scope| {
            std.debug.warn(
                \\--------------------------
                \\Scope {}, range: [{}, {})
                \\ {} usingnamespaces
                \\Decls:
            , .{
                scope.data,
                scope.range.start,
                scope.range.end,
                scope.uses.len,
            });

            var decl_it = scope.decls.iterator();
            var idx: usize = 0;
            while (decl_it.next()) |name_decl| : (idx += 1) {
                if (idx != 0) std.debug.warn(", ", .{});
                std.debug.warn("{}", .{name_decl.key});
            }
            std.debug.warn("\n--------------------------\n", .{});
        }
    }

    pub fn deinit(self: DocumentScope, allocator: *std.mem.Allocator) void {
        for (self.scopes) |scope| {
            scope.decls.deinit();
            allocator.free(scope.uses);
        }
        allocator.free(self.scopes);
    }
};

pub const Scope = struct {
    pub const Data = union(enum) {
        container: *ast.Node, // .id is ContainerDecl or Root
        function: *ast.Node, // .id is FnProto
        block: *ast.Node, // .id is Block
        other,
    };

    range: SourceRange,
    decls: std.StringHashMap(Declaration),
    tests: []const *ast.Node,
    uses: []const *ast.Node.Use,

    data: Data,
};

pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: *ast.Tree) !DocumentScope {
    var scopes = std.ArrayList(Scope).init(allocator);
    errdefer scopes.deinit();

    try makeScopeInternal(allocator, &scopes, tree, &tree.root_node.base);
    return DocumentScope{
        .scopes = scopes.toOwnedSlice(),
    };
}
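
// Example caller sketch (hypothetical; `allocator` and a parsed `tree` are assumed to exist):
// var doc_scope = try makeDocumentScope(allocator, tree);
// defer doc_scope.deinit(allocator);
// doc_scope.debugPrint();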

fn nodeSourceRange(tree: *ast.Tree, node: *ast.Node) SourceRange {
    return SourceRange{
        .start = tree.token_locs[node.firstToken()].start,
        .end = tree.token_locs[node.lastToken()].end,
    };
}
|
|
|
|
|
2020-06-10 17:01:44 +01:00
|
|
|
// TODO Make enum and error stores per-document
|
|
|
|
// CLear the doc ones before calling this and
|
|
|
|
// rebuild them here.
|
2020-06-10 20:05:11 +01:00
|
|
|
// TODO Possibly collect all imports to diff them on changes
|
|
|
|
// as well
|
2020-06-10 14:12:00 +01:00
|
|
|
fn makeScopeInternal(
|
|
|
|
allocator: *std.mem.Allocator,
|
|
|
|
scopes: *std.ArrayList(Scope),
|
|
|
|
tree: *ast.Tree,
|
|
|
|
node: *ast.Node,
|
|
|
|
) error{OutOfMemory}!void {
|
|
|
|
if (node.id == .Root or node.id == .ContainerDecl) {
|
|
|
|
const ast_decls = switch (node.id) {
|
|
|
|
.ContainerDecl => node.cast(ast.Node.ContainerDecl).?.fieldsAndDeclsConst(),
|
|
|
|
.Root => node.cast(ast.Node.Root).?.declsConst(),
|
|
|
|
else => unreachable,
|
|
|
|
};
|
|
|
|
|
|
|
|
(try scopes.addOne()).* = .{
|
|
|
|
.range = nodeSourceRange(tree, node),
|
|
|
|
.decls = std.StringHashMap(Declaration).init(allocator),
|
|
|
|
.uses = &[0]*ast.Node.Use{},
|
2020-06-10 17:01:44 +01:00
|
|
|
.tests = &[0]*ast.Node{},
|
2020-06-10 14:12:00 +01:00
|
|
|
.data = .{ .container = node },
|
|
|
|
};
|
|
|
|
var scope_idx = scopes.items.len - 1;
|
|
|
|
var uses = std.ArrayList(*ast.Node.Use).init(allocator);
|
2020-06-10 17:01:44 +01:00
|
|
|
var tests = std.ArrayList(*ast.Node).init(allocator);
|
2020-06-10 14:12:00 +01:00
|
|
|
|
|
|
|
errdefer {
|
|
|
|
scopes.items[scope_idx].decls.deinit();
|
|
|
|
uses.deinit();
|
2020-06-10 17:01:44 +01:00
|
|
|
tests.deinit();
|
2020-06-10 14:12:00 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
for (ast_decls) |decl| {
|
|
|
|
if (decl.cast(ast.Node.Use)) |use| {
|
|
|
|
try uses.append(use);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
try makeScopeInternal(allocator, scopes, tree, decl);
|
|
|
|
const name = getDeclName(tree, decl) orelse continue;
|
2020-06-10 17:01:44 +01:00
|
|
|
if (decl.id == .TestDecl) {
|
|
|
|
try tests.append(decl);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2020-06-10 14:12:00 +01:00
|
|
|
if (try scopes.items[scope_idx].decls.put(name, .{ .ast_node = decl })) |existing| {
|
|
|
|
// TODO Record a redefinition error.
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
scopes.items[scope_idx].uses = uses.toOwnedSlice();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
switch (node.id) {
|
|
|
|
        .FnProto => {
            const func = node.cast(ast.Node.FnProto).?;

            (try scopes.addOne()).* = .{
                .range = nodeSourceRange(tree, node),
                .decls = std.StringHashMap(Declaration).init(allocator),
                .uses = &[0]*ast.Node.Use{},
                .tests = &[0]*ast.Node{},
                .data = .{ .function = node },
            };
            var scope_idx = scopes.items.len - 1;
            errdefer scopes.items[scope_idx].decls.deinit();

            for (func.params()) |*param| {
                if (param.name_token) |name_tok| {
                    if (try scopes.items[scope_idx].decls.put(tree.tokenSlice(name_tok), .{ .param_decl = param })) |existing| {
                        // TODO Record a redefinition error
                    }
                }
            }

            if (func.body_node) |body| {
                try makeScopeInternal(allocator, scopes, tree, body);
            }

            return;
        },
        .TestDecl => {
            return try makeScopeInternal(allocator, scopes, tree, node.cast(ast.Node.TestDecl).?.body_node);
        },
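        // Blocks: a scope for the block's extent; usingnamespace statements
        // and variable declarations inside the block become part of the scope
        // while the remaining children are walked recursively.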
        .Block => {
            (try scopes.addOne()).* = .{
                .range = nodeSourceRange(tree, node),
                .decls = std.StringHashMap(Declaration).init(allocator),
                .uses = &[0]*ast.Node.Use{},
                .tests = &[0]*ast.Node{},
                .data = .{ .block = node },
            };
            var scope_idx = scopes.items.len - 1;
            var uses = std.ArrayList(*ast.Node.Use).init(allocator);

            errdefer {
                scopes.items[scope_idx].decls.deinit();
                uses.deinit();
            }

            var child_idx: usize = 0;
            while (node.iterate(child_idx)) |child_node| : (child_idx += 1) {
                if (child_node.cast(ast.Node.Use)) |use| {
                    try uses.append(use);
                    continue;
                }

                try makeScopeInternal(allocator, scopes, tree, child_node);
                if (child_node.cast(ast.Node.VarDecl)) |var_decl| {
                    const name = tree.tokenSlice(var_decl.name_token);
                    if (try scopes.items[scope_idx].decls.put(name, .{ .ast_node = child_node })) |existing| {
                        // TODO Record a redefinition error.
                    }
                }
            }

            scopes.items[scope_idx].uses = uses.toOwnedSlice();
            return;
        },
        .Comptime => {
            return try makeScopeInternal(allocator, scopes, tree, node.cast(ast.Node.Comptime).?.expr);
        },
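        // `if` statements: an `if (...) |value|` capture introduces a scope
        // covering the payload and the then-branch, and an `else |err|` capture
        // does the same for the else-branch.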
        .If => {
            const if_node = node.cast(ast.Node.If).?;

            if (if_node.payload) |payload| {
                std.debug.assert(payload.id == .PointerPayload);
                var scope = try scopes.addOne();
                scope.* = .{
                    .range = .{
                        .start = tree.token_locs[payload.firstToken()].start,
                        .end = tree.token_locs[if_node.body.lastToken()].end,
                    },
                    .decls = std.StringHashMap(Declaration).init(allocator),
                    .uses = &[0]*ast.Node.Use{},
                    .tests = &[0]*ast.Node{},
                    .data = .other,
                };
                errdefer scope.decls.deinit();

                const ptr_payload = payload.cast(ast.Node.PointerPayload).?;
                std.debug.assert(ptr_payload.value_symbol.id == .Identifier);
                const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken());
                try scope.decls.putNoClobber(name, .{
                    .pointer_payload = .{
                        .node = ptr_payload,
                        .condition = if_node.condition,
                    },
                });
            }
            try makeScopeInternal(allocator, scopes, tree, if_node.body);

            if (if_node.@"else") |else_node| {
                if (else_node.payload) |payload| {
                    std.debug.assert(payload.id == .Payload);
                    var scope = try scopes.addOne();
                    scope.* = .{
                        .range = .{
                            .start = tree.token_locs[payload.firstToken()].start,
                            .end = tree.token_locs[else_node.body.lastToken()].end,
                        },
                        .decls = std.StringHashMap(Declaration).init(allocator),
                        .uses = &[0]*ast.Node.Use{},
                        .tests = &[0]*ast.Node{},
                        .data = .other,
                    };
                    errdefer scope.decls.deinit();

                    const err_payload = payload.cast(ast.Node.Payload).?;
                    std.debug.assert(err_payload.error_symbol.id == .Identifier);
                    const name = tree.tokenSlice(err_payload.error_symbol.firstToken());
                    try scope.decls.putNoClobber(name, .{ .ast_node = payload });
                }
                try makeScopeInternal(allocator, scopes, tree, else_node.body);
            }
        },
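        // `while` loops mirror the `if` handling: the loop payload scopes the
        // body, and an `else |err|` capture scopes the else-branch.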
        .While => {
            const while_node = node.cast(ast.Node.While).?;
            if (while_node.payload) |payload| {
                std.debug.assert(payload.id == .PointerPayload);
                var scope = try scopes.addOne();
                scope.* = .{
                    .range = .{
                        .start = tree.token_locs[payload.firstToken()].start,
                        .end = tree.token_locs[while_node.body.lastToken()].end,
                    },
                    .decls = std.StringHashMap(Declaration).init(allocator),
                    .uses = &[0]*ast.Node.Use{},
                    .tests = &[0]*ast.Node{},
                    .data = .other,
                };
                errdefer scope.decls.deinit();

                const ptr_payload = payload.cast(ast.Node.PointerPayload).?;
                std.debug.assert(ptr_payload.value_symbol.id == .Identifier);
                const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken());
                try scope.decls.putNoClobber(name, .{
                    .pointer_payload = .{
                        .node = ptr_payload,
                        .condition = while_node.condition,
                    },
                });
            }
            try makeScopeInternal(allocator, scopes, tree, while_node.body);

            if (while_node.@"else") |else_node| {
                if (else_node.payload) |payload| {
                    std.debug.assert(payload.id == .Payload);
                    var scope = try scopes.addOne();
                    scope.* = .{
                        .range = .{
                            .start = tree.token_locs[payload.firstToken()].start,
                            .end = tree.token_locs[else_node.body.lastToken()].end,
                        },
                        .decls = std.StringHashMap(Declaration).init(allocator),
                        .uses = &[0]*ast.Node.Use{},
                        .tests = &[0]*ast.Node{},
                        .data = .other,
                    };
                    errdefer scope.decls.deinit();

                    const err_payload = payload.cast(ast.Node.Payload).?;
                    std.debug.assert(err_payload.error_symbol.id == .Identifier);
                    const name = tree.tokenSlice(err_payload.error_symbol.firstToken());
                    try scope.decls.putNoClobber(name, .{ .ast_node = payload });
                }
                try makeScopeInternal(allocator, scopes, tree, else_node.body);
            }
        },
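        // `for` loops: the capture always provides a value symbol (and
        // optionally an index symbol); both become declarations scoped to the
        // loop body.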
        .For => {
            const for_node = node.cast(ast.Node.For).?;
            std.debug.assert(for_node.payload.id == .PointerIndexPayload);
            const ptr_idx_payload = for_node.payload.cast(ast.Node.PointerIndexPayload).?;
            std.debug.assert(ptr_idx_payload.value_symbol.id == .Identifier);

            var scope = try scopes.addOne();
            scope.* = .{
                .range = .{
                    .start = tree.token_locs[ptr_idx_payload.firstToken()].start,
                    .end = tree.token_locs[for_node.body.lastToken()].end,
                },
                .decls = std.StringHashMap(Declaration).init(allocator),
                .uses = &[0]*ast.Node.Use{},
                .tests = &[0]*ast.Node{},
                .data = .other,
            };
            errdefer scope.decls.deinit();

            const value_name = tree.tokenSlice(ptr_idx_payload.value_symbol.firstToken());
            try scope.decls.putNoClobber(value_name, .{
                .array_payload = .{
                    .identifier = ptr_idx_payload.value_symbol,
                    .array_expr = for_node.array_expr,
                },
            });

            if (ptr_idx_payload.index_symbol) |index_symbol| {
                std.debug.assert(index_symbol.id == .Identifier);
                const index_name = tree.tokenSlice(index_symbol.firstToken());
                if (try scope.decls.put(index_name, .{ .ast_node = index_symbol })) |existing| {
                    // TODO Record a redefinition error
                }
            }

            try makeScopeInternal(allocator, scopes, tree, for_node.body);
            if (for_node.@"else") |else_node| {
                std.debug.assert(else_node.payload == null);
                try makeScopeInternal(allocator, scopes, tree, else_node.body);
            }
        },
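        // `switch` statements: a case capture (`=> |value|`) gets a scope that
        // covers the payload and the case expression, carrying the matched
        // items alongside the capture node.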
        .Switch => {
            const switch_node = node.cast(ast.Node.Switch).?;
            for (switch_node.casesConst()) |case| {
                if (case.*.cast(ast.Node.SwitchCase)) |case_node| {
                    if (case_node.payload) |payload| {
                        std.debug.assert(payload.id == .PointerPayload);
                        var scope = try scopes.addOne();
                        scope.* = .{
                            .range = .{
                                .start = tree.token_locs[payload.firstToken()].start,
                                .end = tree.token_locs[case_node.expr.lastToken()].end,
                            },
                            .decls = std.StringHashMap(Declaration).init(allocator),
                            .uses = &[0]*ast.Node.Use{},
                            .tests = &[0]*ast.Node{},
                            .data = .other,
                        };
                        errdefer scope.decls.deinit();

                        const ptr_payload = payload.cast(ast.Node.PointerPayload).?;
                        std.debug.assert(ptr_payload.value_symbol.id == .Identifier);
                        const name = tree.tokenSlice(ptr_payload.value_symbol.firstToken());
                        try scope.decls.putNoClobber(name, .{
                            .switch_payload = .{
                                .node = ptr_payload,
                                .items = case_node.itemsConst(),
                            },
                        });
                    }
                    try makeScopeInternal(allocator, scopes, tree, case_node.expr);
                }
            }
        },
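        // Variable declarations introduce no scope of their own, but their
        // type and initializer expressions may still contain scope-introducing
        // nodes.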
        .VarDecl => {
            const var_decl = node.cast(ast.Node.VarDecl).?;
            if (var_decl.type_node) |type_node| {
                try makeScopeInternal(allocator, scopes, tree, type_node);
            }
            if (var_decl.init_node) |init_node| {
                try makeScopeInternal(allocator, scopes, tree, init_node);
            }
        },
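        // Anything else: no new scope, just recurse into every child node.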
        else => {
            var child_idx: usize = 0;
            while (node.iterate(child_idx)) |child_node| : (child_idx += 1) {
                try makeScopeInternal(allocator, scopes, tree, child_node);
            }
        },
    }
}