finish rename of usage of std.zig.Ast

parent dc8affe122
commit 9e0f201283

src/analysis.zig (208 lines changed)
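
In short, this is a mechanical alias rename: the file-level binding for std.zig.Ast goes from lowercase `ast` to `Ast`, and every reference through that alias (`ast.Node.Index`, `ast.TokenIndex`, `ast.full.*`) is updated to match. A minimal sketch of how call sites read after the change (the `mainTokenOf` helper is hypothetical, shown only to illustrate the renamed types):

    const std = @import("std");
    const Ast = std.zig.Ast; // previously: const ast = std.zig.Ast;

    // Hypothetical helper: fetch a node's main token, using the new alias.
    fn mainTokenOf(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
        return tree.nodes.items(.main_token)[node];
    }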
@@ -1,6 +1,6 @@
 const std = @import("std");
 const DocumentStore = @import("./document_store.zig");
-const ast = std.zig.Ast;
+const Ast = std.zig.Ast;
 const types = @import("./types.zig");
 const offsets = @import("./offsets.zig");
 const log = std.log.scoped(.analysis);
@@ -19,7 +19,7 @@ pub fn deinit() void {
 }

 /// Gets a declaration's doc comments. Caller owns returned memory.
-pub fn getDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: ast.Node.Index, format: types.MarkupContent.Kind) !?[]const u8 {
+pub fn getDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: Ast.Node.Index, format: types.MarkupContent.Kind) !?[]const u8 {
 const base = tree.nodes.items(.main_token)[node];
 const base_kind = tree.nodes.items(.tag)[node];
 const tokens = tree.tokens.items(.tag);
@@ -48,7 +48,7 @@ pub fn getDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, node:
 }

 /// Get the first doc comment of a declaration.
-pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.TokenIndex) ?ast.TokenIndex {
+pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: Ast.TokenIndex) ?Ast.TokenIndex {
 var idx = base_token;
 if (idx == 0) return null;
 idx -= 1;
@@ -67,7 +67,7 @@ pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: ast.Toke
 } else idx + 1;
 }

-pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, doc_comments: ast.TokenIndex, format: types.MarkupContent.Kind, container_doc: bool) ![]const u8 {
+pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, doc_comments: Ast.TokenIndex, format: types.MarkupContent.Kind, container_doc: bool) ![]const u8 {
 var lines = std.ArrayList([]const u8).init(allocator);
 defer lines.deinit();
 const tokens = tree.tokens.items(.tag);
@@ -84,7 +84,7 @@ pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, do
 }

 /// Gets a function's keyword, name, arguments and return value.
-pub fn getFunctionSignature(tree: Analysis.Tree, func: ast.full.FnProto) []const u8 {
+pub fn getFunctionSignature(tree: Analysis.Tree, func: Ast.full.FnProto) []const u8 {
 const start = offsets.tokenLocation(tree, func.ast.fn_token);

 const end = if (func.ast.return_type != 0)
@@ -95,7 +95,7 @@ pub fn getFunctionSignature(tree: Analysis.Tree, func: ast.full.FnProto) []const
 }

 /// Creates snippet insert text for a function. Caller owns returned memory.
-pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Analysis.Tree, func: ast.full.FnProto, skip_self_param: bool) ![]const u8 {
+pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Analysis.Tree, func: Ast.full.FnProto, skip_self_param: bool) ![]const u8 {
 const name_index = func.name_token.?;

 var buffer = std.ArrayList(u8).init(allocator);
@@ -156,7 +156,7 @@ pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Analysis.Tree, fu
 return buffer.toOwnedSlice();
 }

-pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentStore, handle: *DocumentStore.Handle, func: ast.full.FnProto) !bool {
+pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentStore, handle: *DocumentStore.Handle, func: Ast.full.FnProto) !bool {
 // Non-decl prototypes cannot have a self parameter.
 if (func.name_token == null) return false;
 if (func.ast.params.len == 0) return false;
@@ -190,13 +190,13 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
 return false;
 }

-pub fn getVariableSignature(tree: Analysis.Tree, var_decl: ast.full.VarDecl) []const u8 {
+pub fn getVariableSignature(tree: Analysis.Tree, var_decl: Ast.full.VarDecl) []const u8 {
 const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
 const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, var_decl.ast.init_node)).end;
 return tree.source[start..end];
 }

-pub fn getContainerFieldSignature(tree: Analysis.Tree, field: ast.full.ContainerField) []const u8 {
+pub fn getContainerFieldSignature(tree: Analysis.Tree, field: Ast.full.ContainerField) []const u8 {
 const start = offsets.tokenLocation(tree, field.ast.name_token).start;
 const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
 const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, end_node)).end;
@@ -204,18 +204,18 @@ pub fn getContainerFieldSignature(tree: Analysis.Tree, field: ast.full.Container
 }

 /// The node is the meta-type `type`
-fn isMetaType(tree: Analysis.Tree, node: ast.Node.Index) bool {
+fn isMetaType(tree: Analysis.Tree, node: Ast.Node.Index) bool {
 if (tree.nodes.items(.tag)[node] == .identifier) {
 return std.mem.eql(u8, tree.tokenSlice(tree.nodes.items(.main_token)[node]), "type");
 }
 return false;
 }

-pub fn isTypeFunction(tree: Analysis.Tree, func: ast.full.FnProto) bool {
+pub fn isTypeFunction(tree: Analysis.Tree, func: Ast.full.FnProto) bool {
 return isMetaType(tree, func.ast.return_type);
 }

-pub fn isGenericFunction(tree: Analysis.Tree, func: ast.full.FnProto) bool {
+pub fn isGenericFunction(tree: Analysis.Tree, func: Ast.full.FnProto) bool {
 var it = func.iterate(tree);
 while (it.next()) |param| {
 if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) {
@@ -241,7 +241,7 @@ pub fn isSnakeCase(name: []const u8) bool {

 // ANALYSIS ENGINE

-pub fn getDeclNameToken(tree: Analysis.Tree, node: ast.Node.Index) ?ast.TokenIndex {
+pub fn getDeclNameToken(tree: Analysis.Tree, node: Ast.Node.Index) ?Ast.TokenIndex {
 const tags = tree.nodes.items(.tag);
 const main_token = tree.nodes.items(.main_token)[node];
 return switch (tags[node]) {
@@ -257,7 +257,7 @@ pub fn getDeclNameToken(tree: Analysis.Tree, node: ast.Node.Index) ?ast.TokenInd
 .fn_proto_simple,
 .fn_decl,
 => blk: {
-var params: [1]ast.Node.Index = undefined;
+var params: [1]Ast.Node.Index = undefined;
 break :blk Analysis.fnProto(tree, node, &params).?.name_token;
 },

@@ -278,7 +278,7 @@ pub fn getDeclNameToken(tree: Analysis.Tree, node: ast.Node.Index) ?ast.TokenInd
 };
 }

-fn getDeclName(tree: Analysis.Tree, node: ast.Node.Index) ?[]const u8 {
+fn getDeclName(tree: Analysis.Tree, node: Ast.Node.Index) ?[]const u8 {
 const name = tree.tokenSlice(getDeclNameToken(tree, node) orelse return null);
 return switch (tree.nodes.items(.tag)[node]) {
 .test_decl => name[1 .. name.len - 1],
@@ -369,7 +369,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
 return null;
 }

-fn isBlock(tree: Analysis.Tree, node: ast.Node.Index) bool {
+fn isBlock(tree: Analysis.Tree, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .block,
 .block_semicolon,
@@ -380,18 +380,18 @@ fn isBlock(tree: Analysis.Tree, node: ast.Node.Index) bool {
 };
 }

-fn findReturnStatementInternal(tree: Analysis.Tree, fn_decl: ast.full.FnProto, body: ast.Node.Index, already_found: *bool) ?ast.Node.Index {
-var result: ?ast.Node.Index = null;
+fn findReturnStatementInternal(tree: Analysis.Tree, fn_decl: Ast.full.FnProto, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index {
+var result: ?Ast.Node.Index = null;

 const node_tags = tree.nodes.items(.tag);
 const datas = tree.nodes.items(.data);

 if (!isBlock(tree, body)) return null;

-const statements: []const ast.Node.Index = switch (node_tags[body]) {
+const statements: []const Ast.Node.Index = switch (node_tags[body]) {
 .block, .block_semicolon => tree.extra_data[datas[body].lhs..datas[body].rhs],
 .block_two, .block_two_semicolon => blk: {
-const statements = &[_]ast.Node.Index{ datas[body].lhs, datas[body].rhs };
+const statements = &[_]Ast.Node.Index{ datas[body].lhs, datas[body].rhs };
 const len: usize = if (datas[body].lhs == 0)
 @as(usize, 0)
 else if (datas[body].rhs == 0)
@@ -429,12 +429,12 @@ fn findReturnStatementInternal(tree: Analysis.Tree, fn_decl: ast.full.FnProto, b
 return result;
 }

-fn findReturnStatement(tree: Analysis.Tree, fn_decl: ast.full.FnProto, body: ast.Node.Index) ?ast.Node.Index {
+fn findReturnStatement(tree: Analysis.Tree, fn_decl: Ast.full.FnProto, body: Ast.Node.Index) ?Ast.Node.Index {
 var already_found = false;
 return findReturnStatementInternal(tree, fn_decl, body, &already_found);
 }

-pub fn resolveReturnType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, fn_decl: ast.full.FnProto, handle: *DocumentStore.Handle, bound_type_params: *BoundTypeParams, fn_body: ?ast.Node.Index) !?TypeWithHandle {
+pub fn resolveReturnType(store: *DocumentStore, arena: *std.heap.ArenaAllocator, fn_decl: Ast.full.FnProto, handle: *DocumentStore.Handle, bound_type_params: *BoundTypeParams, fn_body: ?Ast.Node.Index) !?TypeWithHandle {
 const tree = handle.tree;
 if (isTypeFunction(tree, fn_decl) and fn_body != null) {
 // If this is a type function and it only contains a single return statement that returns
@@ -507,7 +507,7 @@ fn resolveUnwrapErrorType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 return null;
 }

-pub fn isPtrType(tree: Analysis.Tree, node: ast.Node.Index) bool {
+pub fn isPtrType(tree: Analysis.Tree, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .ptr_type,
 .ptr_type_aligned,
@@ -593,7 +593,7 @@ pub fn resolveFieldAccessLhsType(store: *DocumentStore, arena: *std.heap.ArenaAl
 return (try resolveDerefType(store, arena, lhs, bound_type_params)) orelse lhs;
 }

-pub const BoundTypeParams = std.AutoHashMap(ast.full.FnProto.Param, TypeWithHandle);
+pub const BoundTypeParams = std.AutoHashMap(Ast.full.FnProto.Param, TypeWithHandle);

 fn allDigits(str: []const u8) bool {
 for (str) |c| {
@@ -602,7 +602,7 @@ fn allDigits(str: []const u8) bool {
 return true;
 }

-pub fn isTypeIdent(tree: Analysis.Tree, token_idx: ast.TokenIndex) bool {
+pub fn isTypeIdent(tree: Analysis.Tree, token_idx: Ast.TokenIndex) bool {
 const PrimitiveTypes = std.ComptimeStringMap(void, .{
 .{"isize"}, .{"usize"},
 .{"c_short"}, .{"c_ushort"},
@@ -703,7 +703,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 .async_call_one,
 .async_call_one_comma,
 => {
-var params: [1]ast.Node.Index = undefined;
+var params: [1]Ast.Node.Index = undefined;
 const call = Analysis.callFull(tree, node, &params) orelse unreachable;

 const callee = .{ .node = call.ast.fn_expr, .handle = handle };
@@ -715,7 +715,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 .other => |n| n,
 else => return null,
 };
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const func_maybe = Analysis.fnProto(decl.handle.tree, decl_node, &buf);

 if (func_maybe) |fn_decl| {
@@ -875,11 +875,11 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 const params = switch (node_tags[node]) {
 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]ast.Node.Index{}
+&[_]Ast.Node.Index{}
 else if (data.rhs == 0)
-&[_]ast.Node.Index{data.lhs}
+&[_]Ast.Node.Index{data.lhs}
 else
-&[_]ast.Node.Index{ data.lhs, data.rhs },
+&[_]Ast.Node.Index{ data.lhs, data.rhs },
 else => unreachable,
 };

@@ -945,7 +945,7 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 .fn_proto_simple,
 .fn_decl,
 => {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 // This is a function type
 if (Analysis.fnProto(tree, node, &buf).?.name_token == null) {
 return TypeWithHandle.typeVal(node_handle);
@@ -971,10 +971,10 @@ pub fn resolveTypeOfNodeInternal(store: *DocumentStore, arena: *std.heap.ArenaAl
 // TODO Make this better, nested levels of type vals
 pub const Type = struct {
 data: union(enum) {
-pointer: ast.Node.Index,
-slice: ast.Node.Index,
-error_union: ast.Node.Index,
-other: ast.Node.Index,
+pointer: Ast.Node.Index,
+slice: Ast.Node.Index,
+error_union: Ast.Node.Index,
+other: Ast.Node.Index,
 primitive,
 },
 /// If true, the type `type`, the attached data is the value of the type value.
@@ -1031,7 +1031,7 @@ pub const TypeWithHandle = struct {
 const node = self.type.data.other;
 const tags = tree.nodes.items(.tag);
 if (Analysis.isContainer(tree, node)) {
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 for (Analysis.declMembers(tree, node, &buf)) |child| {
 if (tags[child].isContainerField()) return false;
 }
@@ -1052,7 +1052,7 @@ pub const TypeWithHandle = struct {
 }

 pub fn isTypeFunc(self: TypeWithHandle) bool {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const tree = self.handle.tree;
 return switch (self.type.data) {
 .other => |n| if (Analysis.fnProto(tree, n, &buf)) |fn_proto| blk: {
@@ -1063,7 +1063,7 @@ pub const TypeWithHandle = struct {
 }

 pub fn isGenericFunc(self: TypeWithHandle) bool {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const tree = self.handle.tree;
 return switch (self.type.data) {
 .other => |n| if (Analysis.fnProto(tree, n, &buf)) |fn_proto| blk: {
@@ -1123,7 +1123,7 @@ pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Analysis.Tre
 }

 pub const NodeWithHandle = struct {
-node: ast.Node.Index,
+node: Ast.Node.Index,
 handle: *DocumentStore.Handle,
 };

@@ -1228,7 +1228,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 // Can't call a function type, we need a function type instance.
 if (current_type.type.is_type_val) return null;
 const cur_tree = current_type.handle.tree;
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 if (Analysis.fnProto(cur_tree, current_type_node, &buf)) |func| {
 // Check if the function has a body and if so, pass it
 // so the type can be resolved if it's a generic function returning
@@ -1283,8 +1283,8 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 };
 }

-pub fn isNodePublic(tree: Analysis.Tree, node: ast.Node.Index) bool {
-var buf: [1]ast.Node.Index = undefined;
+pub fn isNodePublic(tree: Analysis.Tree, node: Ast.Node.Index) bool {
+var buf: [1]Ast.Node.Index = undefined;
 return switch (tree.nodes.items(.tag)[node]) {
 .global_var_decl,
 .local_var_decl,
@@ -1301,10 +1301,10 @@ pub fn isNodePublic(tree: Analysis.Tree, node: ast.Node.Index) bool {
 };
 }

-pub fn nodeToString(tree: Analysis.Tree, node: ast.Node.Index) ?[]const u8 {
+pub fn nodeToString(tree: Analysis.Tree, node: Ast.Node.Index) ?[]const u8 {
 const data = tree.nodes.items(.data);
 const main_token = tree.nodes.items(.main_token)[node];
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 switch (tree.nodes.items(.tag)[node]) {
 .container_field => return tree.tokenSlice(tree.containerField(node).ast.name_token),
 .container_field_init => return tree.tokenSlice(tree.containerFieldInit(node).ast.name_token),
@@ -1337,15 +1337,15 @@ pub fn nodeToString(tree: Analysis.Tree, node: ast.Node.Index) ?[]const u8 {
 return null;
 }

-fn nodeContainsSourceIndex(tree: Analysis.Tree, node: ast.Node.Index, source_index: usize) bool {
+fn nodeContainsSourceIndex(tree: Analysis.Tree, node: Ast.Node.Index, source_index: usize) bool {
 const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start;
 const last_token = offsets.tokenLocation(tree, Analysis.lastToken(tree, node)).end;
 return source_index >= first_token and source_index <= last_token;
 }

-pub fn getImportStr(tree: Analysis.Tree, node: ast.Node.Index, source_index: usize) ?[]const u8 {
+pub fn getImportStr(tree: Analysis.Tree, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
 const node_tags = tree.nodes.items(.tag);
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 if (Analysis.isContainer(tree, node)) {
 const decls = Analysis.declMembers(tree, node, &buf);
 for (decls) |decl_idx| {
@@ -1373,11 +1373,11 @@ pub fn getImportStr(tree: Analysis.Tree, node: ast.Node.Index, source_index: usi
 const params = switch (node_tags[node]) {
 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]ast.Node.Index{}
+&[_]Ast.Node.Index{}
 else if (data.rhs == 0)
-&[_]ast.Node.Index{data.lhs}
+&[_]Ast.Node.Index{data.lhs}
 else
-&[_]ast.Node.Index{ data.lhs, data.rhs },
+&[_]Ast.Node.Index{ data.lhs, data.rhs },
 else => unreachable,
 };

@@ -1578,7 +1578,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 };
 }

-fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Analysis.Tree, child: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
+fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Analysis.Tree, child: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
 switch (tree.nodes.items(.tag)[child]) {
 .string_literal,
 .integer_literal,
@@ -1709,7 +1709,7 @@ fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Analysis.Tree, child: as
 .tagged_union_two,
 .tagged_union_two_trailing,
 => {
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 for (Analysis.declMembers(tree, child, &buf)) |member|
 try addOutlineNodes(allocator, tree, member, context);
 return;
@@ -1729,7 +1729,7 @@ const GetDocumentSymbolsContext = struct {
 encoding: offsets.Encoding,
 };

-fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
+fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
 const name = getDeclName(tree, node) orelse return;
 if (name.len == 0)
 return;
@@ -1799,7 +1799,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Analysis.Tree
 };

 if (Analysis.isContainer(tree, node)) {
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 for (Analysis.declMembers(tree, node, &buf)) |child|
 try addOutlineNodes(allocator, tree, child, &child_context);
 }
@@ -1830,31 +1830,31 @@ pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: Analysis.Tree, en

 pub const Declaration = union(enum) {
 /// Index of the ast node
-ast_node: ast.Node.Index,
+ast_node: Ast.Node.Index,
 /// Function parameter
-param_decl: ast.full.FnProto.Param,
+param_decl: Ast.full.FnProto.Param,
 pointer_payload: struct {
-name: ast.TokenIndex,
-condition: ast.Node.Index,
+name: Ast.TokenIndex,
+condition: Ast.Node.Index,
 },
 array_payload: struct {
-identifier: ast.TokenIndex,
-array_expr: ast.Node.Index,
+identifier: Ast.TokenIndex,
+array_expr: Ast.Node.Index,
 },
-array_index: ast.TokenIndex,
+array_index: Ast.TokenIndex,
 switch_payload: struct {
-node: ast.TokenIndex,
-switch_expr: ast.Node.Index,
-items: []const ast.Node.Index,
+node: Ast.TokenIndex,
+switch_expr: Ast.Node.Index,
+items: []const Ast.Node.Index,
 },
-label_decl: ast.TokenIndex,
+label_decl: Ast.TokenIndex,
 };

 pub const DeclWithHandle = struct {
 decl: *Declaration,
 handle: *DocumentStore.Handle,

-pub fn nameToken(self: DeclWithHandle) ast.TokenIndex {
+pub fn nameToken(self: DeclWithHandle) Ast.TokenIndex {
 const tree = self.handle.tree;
 return switch (self.decl.*) {
 .ast_node => |n| getDeclNameToken(tree, n).?,
@@ -1988,7 +1988,7 @@ fn findContainerScope(container_handle: NodeWithHandle) ?*Scope {
 } else null;
 }

-fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool, use_trail: *std.ArrayList(*const ast.Node.Index)) error{OutOfMemory}!void {
+fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool, use_trail: *std.ArrayList(*const Ast.Node.Index)) error{OutOfMemory}!void {
 const container = container_handle.node;
 const handle = container_handle.handle;

@@ -2029,7 +2029,7 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
 const use_token = tree.nodes.items(.main_token)[use.*];
 const is_pub = use_token > 0 and token_tags[use_token - 1] == .keyword_pub;
 if (handle != orig_handle and !is_pub) continue;
-if (std.mem.indexOfScalar(*const ast.Node.Index, use_trail.items, use) != null) continue;
+if (std.mem.indexOfScalar(*const Ast.Node.Index, use_trail.items, use) != null) continue;
 try use_trail.append(use);

 const lhs = tree.nodes.items(.data)[use.*].lhs;
@@ -2056,7 +2056,7 @@ fn iterateSymbolsContainerInternal(store: *DocumentStore, arena: *std.heap.Arena
 }

 pub fn iterateSymbolsContainer(store: *DocumentStore, arena: *std.heap.ArenaAllocator, container_handle: NodeWithHandle, orig_handle: *DocumentStore.Handle, comptime callback: anytype, context: anytype, instance_access: bool) error{OutOfMemory}!void {
-var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator);
+var use_trail = std.ArrayList(*const Ast.Node.Index).init(&arena.allocator);
 return try iterateSymbolsContainerInternal(store, arena, container_handle, orig_handle, callback, context, instance_access, &use_trail);
 }

@@ -2076,7 +2076,7 @@ pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptim
 }
 }

-fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(*const ast.Node.Index)) error{OutOfMemory}!void {
+fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype, use_trail: *std.ArrayList(*const Ast.Node.Index)) error{OutOfMemory}!void {
 for (handle.document_scope.scopes) |scope| {
 if (source_index >= scope.range.start and source_index <= scope.range.end) {
 var decl_it = scope.decls.iterator();
@@ -2088,7 +2088,7 @@ fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAll
 }

 for (scope.uses) |use| {
-if (std.mem.indexOfScalar(*const ast.Node.Index, use_trail.items, use) != null) continue;
+if (std.mem.indexOfScalar(*const Ast.Node.Index, use_trail.items, use) != null) continue;
 try use_trail.append(use);

 const use_expr = (try resolveTypeOfNode(
@@ -2118,7 +2118,7 @@ fn iterateSymbolsGlobalInternal(store: *DocumentStore, arena: *std.heap.ArenaAll
 }

 pub fn iterateSymbolsGlobal(store: *DocumentStore, arena: *std.heap.ArenaAllocator, handle: *DocumentStore.Handle, source_index: usize, comptime callback: anytype, context: anytype) error{OutOfMemory}!void {
-var use_trail = std.ArrayList(*const ast.Node.Index).init(&arena.allocator);
+var use_trail = std.ArrayList(*const Ast.Node.Index).init(&arena.allocator);
 return try iterateSymbolsGlobalInternal(store, arena, handle, source_index, callback, context, &use_trail);
 }

@@ -2138,7 +2138,7 @@ pub fn innermostBlockScopeIndex(handle: DocumentStore.Handle, source_index: usiz
 return current;
 }

-pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) ast.Node.Index {
+pub fn innermostBlockScope(handle: DocumentStore.Handle, source_index: usize) Ast.Node.Index {
 return handle.document_scope.scopes[innermostBlockScopeIndex(handle, source_index)].toNodeIndex().?;
 }

@@ -2158,7 +2158,7 @@ pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) Ty
 return TypeWithHandle.typeVal(.{ .node = current, .handle = handle });
 }

-fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const *const ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle {
+fn resolveUse(store: *DocumentStore, arena: *std.heap.ArenaAllocator, uses: []const *const Ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle {
 // If we were asked to resolve this symbol before,
 // it is self-referential and we cannot resolve it.
 if (std.mem.indexOfScalar([*]const u8, using_trail.items, symbol.ptr) != null)
@@ -2349,20 +2349,20 @@ pub const DocumentScope = struct {

 pub const Scope = struct {
 pub const Data = union(enum) {
-container: ast.Node.Index, // .tag is ContainerDecl or Root or ErrorSetDecl
-function: ast.Node.Index, // .tag is FnProto
-block: ast.Node.Index, // .tag is Block
+container: Ast.Node.Index, // .tag is ContainerDecl or Root or ErrorSetDecl
+function: Ast.Node.Index, // .tag is FnProto
+block: Ast.Node.Index, // .tag is Block
 other,
 };

 range: SourceRange,
 decls: std.StringHashMap(Declaration),
-tests: []const ast.Node.Index = &.{},
-uses: []const *const ast.Node.Index = &.{},
+tests: []const Ast.Node.Index = &.{},
+uses: []const *const Ast.Node.Index = &.{},

 data: Data,

-pub fn toNodeIndex(self: Scope) ?ast.Node.Index {
+pub fn toNodeIndex(self: Scope) ?Ast.Node.Index {
 return switch (self.data) {
 .container, .function, .block => |idx| idx,
 else => null,
@@ -2401,7 +2401,7 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: Analysis.Tree) !Do
 };
 }

-fn nodeSourceRange(tree: Analysis.Tree, node: ast.Node.Index) SourceRange {
+fn nodeSourceRange(tree: Analysis.Tree, node: Ast.Node.Index) SourceRange {
 const loc_start = offsets.tokenLocation(tree, tree.firstToken(node));
 const loc_end = offsets.tokenLocation(tree, Analysis.lastToken(tree, node));

@@ -2418,7 +2418,7 @@ const ScopeContext = struct {
 tree: Analysis.Tree,
 };

-fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx: ast.Node.Index) error{OutOfMemory}!void {
+fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx: Ast.Node.Index) error{OutOfMemory}!void {
 const scopes = context.scopes;
 const tree = context.tree;
 const tags = tree.nodes.items(.tag);
@@ -2427,7 +2427,7 @@ fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx
 const main_tokens = tree.nodes.items(.main_token);
 const node_tag = tags[node_idx];

-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 const ast_decls = Analysis.declMembers(tree, node_idx, &buf);

 (try scopes.addOne(allocator)).* = .{
@@ -2436,8 +2436,8 @@ fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx
 .data = .{ .container = node_idx },
 };
 const scope_idx = scopes.items.len - 1;
-var uses = std.ArrayListUnmanaged(*const ast.Node.Index){};
-var tests = std.ArrayListUnmanaged(ast.Node.Index){};
+var uses = std.ArrayListUnmanaged(*const Ast.Node.Index){};
+var tests = std.ArrayListUnmanaged(Ast.Node.Index){};

 errdefer {
 scopes.items[scope_idx].decls.deinit();
@@ -2464,13 +2464,13 @@ fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx
 .container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
 .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
 .container_decl_two, .container_decl_two_trailing => blk: {
-var buffer: [2]ast.Node.Index = undefined;
+var buffer: [2]Ast.Node.Index = undefined;
 break :blk tree.containerDeclTwo(&buffer, node_idx);
 },
 .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
 .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
 .tagged_union_two, .tagged_union_two_trailing => blk: {
-var buffer: [2]ast.Node.Index = undefined;
+var buffer: [2]Ast.Node.Index = undefined;
 break :blk tree.taggedUnionTwo(&buffer, node_idx);
 },
 else => null,
@@ -2534,7 +2534,7 @@ fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx

 // Whether we have already visited the root node.
 var had_root = true;
-fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_idx: ast.Node.Index) error{OutOfMemory}!void {
+fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_idx: Ast.Node.Index) error{OutOfMemory}!void {
 const scopes = context.scopes;
 const tree = context.tree;
 const tags = tree.nodes.items(.tag);
@@ -2578,7 +2578,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .fn_proto_multi,
 .fn_decl,
 => |fn_tag| {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const func = Analysis.fnProto(tree, node_idx, &buf).?;

 (try scopes.addOne(allocator)).* = .{
@@ -2649,17 +2649,17 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .data = .{ .block = node_idx },
 };
 var scope_idx = scopes.items.len - 1;
-var uses = std.ArrayList(*const ast.Node.Index).init(allocator);
+var uses = std.ArrayList(*const Ast.Node.Index).init(allocator);

 errdefer {
 scopes.items[scope_idx].decls.deinit();
 uses.deinit();
 }

-const statements: []const ast.Node.Index = switch (node_tag) {
+const statements: []const Ast.Node.Index = switch (node_tag) {
 .block, .block_semicolon => tree.extra_data[data[node_idx].lhs..data[node_idx].rhs],
 .block_two, .block_two_semicolon => blk: {
-const statements = &[_]ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs };
+const statements = &[_]Ast.Node.Index{ data[node_idx].lhs, data[node_idx].rhs };
 const len: usize = if (data[node_idx].lhs == 0)
 @as(usize, 0)
 else if (data[node_idx].rhs == 0)
@@ -2860,11 +2860,11 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .switch_comma,
 => {
 const cond = data[node_idx].lhs;
-const extra = tree.extraData(data[node_idx].rhs, ast.Node.SubRange);
+const extra = tree.extraData(data[node_idx].rhs, Ast.Node.SubRange);
 const cases = tree.extra_data[extra.start..extra.end];

 for (cases) |case| {
-const switch_case: ast.full.SwitchCase = switch (tags[case]) {
+const switch_case: Ast.full.SwitchCase = switch (tags[case]) {
 .switch_case => tree.switchCase(case),
 .switch_case_one => tree.switchCaseOne(case),
 else => continue,
@@ -2927,7 +2927,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .async_call_one,
 .async_call_one_comma,
 => {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const call = Analysis.callFull(tree, node_idx, &buf).?;

 try makeScopeInternal(allocator, context, call.ast.fn_expr);
@@ -2943,8 +2943,8 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .struct_init_one,
 .struct_init_one_comma,
 => {
-var buf: [2]ast.Node.Index = undefined;
-const struct_init: ast.full.StructInit = switch (node_tag) {
+var buf: [2]Ast.Node.Index = undefined;
+const struct_init: Ast.full.StructInit = switch (node_tag) {
 .struct_init, .struct_init_comma => tree.structInit(node_idx),
 .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node_idx),
 .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node_idx),
@@ -2968,8 +2968,8 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .array_init_one,
 .array_init_one_comma,
 => {
-var buf: [2]ast.Node.Index = undefined;
-const array_init: ast.full.ArrayInit = switch (node_tag) {
+var buf: [2]Ast.Node.Index = undefined;
+const array_init: Ast.full.ArrayInit = switch (node_tag) {
 .array_init, .array_init_comma => tree.arrayInit(node_idx),
 .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node_idx),
 .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node_idx),
@@ -3002,11 +3002,11 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 const params = switch (node_tag) {
 .builtin_call, .builtin_call_comma => tree.extra_data[b_data.lhs..b_data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (b_data.lhs == 0)
-&[_]ast.Node.Index{}
+&[_]Ast.Node.Index{}
 else if (b_data.rhs == 0)
-&[_]ast.Node.Index{b_data.lhs}
+&[_]Ast.Node.Index{b_data.lhs}
 else
-&[_]ast.Node.Index{ b_data.lhs, b_data.rhs },
+&[_]Ast.Node.Index{ b_data.lhs, b_data.rhs },
 else => unreachable,
 };

@@ -3019,7 +3019,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .ptr_type_bit_range,
 .ptr_type_sentinel,
 => {
-const ptr_type: ast.full.PtrType = Analysis.ptrType(tree, node_idx).?;
+const ptr_type: Ast.full.PtrType = Analysis.ptrType(tree, node_idx).?;

 try makeScopeInternal(allocator, context, ptr_type.ast.sentinel);
 try makeScopeInternal(allocator, context, ptr_type.ast.align_node);
@@ -3029,7 +3029,7 @@ fn makeScopeInternal(allocator: *std.mem.Allocator, context: ScopeContext, node_
 .slice_open,
 .slice_sentinel,
 => {
-const slice: ast.full.Slice = switch (node_tag) {
+const slice: Ast.full.Slice = switch (node_tag) {
 .slice => tree.slice(node_idx),
 .slice_open => tree.sliceOpen(node_idx),
 .slice_sentinel => tree.sliceSentinel(node_idx),

src/ast.zig (34 lines changed)
@@ -3,10 +3,10 @@
 //! when there are parser errors.

 const std = @import("std");
-const ast = std.zig.Ast;
-pub const Tree = ast;
-const Node = ast.Node;
-const full = ast.full;
+const Ast = std.zig.Ast;
+pub const Tree = Ast;
+const Node = Ast.Node;
+const full = Ast.full;
 const assert = std.debug.assert;

 fn fullPtrType(tree: Tree, info: full.PtrType.Components) full.PtrType {
@@ -255,8 +255,8 @@ pub fn forFull(tree: Tree, node: Node.Index) full.While {
 });
 }

-pub fn lastToken(tree: Tree, node: ast.Node.Index) ast.TokenIndex {
-const TokenIndex = ast.TokenIndex;
+pub fn lastToken(tree: Tree, node: Ast.Node.Index) Ast.TokenIndex {
+const TokenIndex = Ast.TokenIndex;
 const tags = tree.nodes.items(.tag);
 const datas = tree.nodes.items(.data);
 const main_tokens = tree.nodes.items(.main_token);
@@ -875,7 +875,7 @@ pub fn lastToken(tree: Tree, node: ast.Node.Index) ast.TokenIndex {
 };
 }

-pub fn containerField(tree: Tree, node: ast.Node.Index) ?ast.full.ContainerField {
+pub fn containerField(tree: Tree, node: Ast.Node.Index) ?Ast.full.ContainerField {
 return switch (tree.nodes.items(.tag)[node]) {
 .container_field => tree.containerField(node),
 .container_field_init => tree.containerFieldInit(node),
@@ -884,7 +884,7 @@ pub fn containerField(tree: Tree, node: ast.Node.Index) ?ast.full.ContainerField
 };
 }

-pub fn ptrType(tree: Tree, node: ast.Node.Index) ?ast.full.PtrType {
+pub fn ptrType(tree: Tree, node: Ast.Node.Index) ?Ast.full.PtrType {
 return switch (tree.nodes.items(.tag)[node]) {
 .ptr_type => ptrTypeSimple(tree, node),
 .ptr_type_aligned => ptrTypeAligned(tree, node),
@@ -894,7 +894,7 @@ pub fn ptrType(tree: Tree, node: ast.Node.Index) ?ast.full.PtrType {
 };
 }

-pub fn whileAst(tree: Tree, node: ast.Node.Index) ?ast.full.While {
+pub fn whileAst(tree: Tree, node: Ast.Node.Index) ?Ast.full.While {
 return switch (tree.nodes.items(.tag)[node]) {
 .@"while" => whileFull(tree, node),
 .while_simple => whileSimple(tree, node),
@@ -905,7 +905,7 @@ pub fn whileAst(tree: Tree, node: ast.Node.Index) ?ast.full.While {
 };
 }

-pub fn isContainer(tree: Tree, node: ast.Node.Index) bool {
+pub fn isContainer(tree: Tree, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .container_decl,
 .container_decl_trailing,
@@ -928,7 +928,7 @@ pub fn isContainer(tree: Tree, node: ast.Node.Index) bool {

 /// Returns the member indices of a given declaration container.
 /// Asserts given `tag` is a container node
-pub fn declMembers(tree: Tree, node_idx: ast.Node.Index, buffer: *[2]ast.Node.Index) []const ast.Node.Index {
+pub fn declMembers(tree: Tree, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
 std.debug.assert(isContainer(tree, node_idx));
 return switch (tree.nodes.items(.tag)[node_idx]) {
 .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
@@ -938,14 +938,14 @@ pub fn declMembers(tree: Tree, node_idx: ast.Node.Index, buffer: *[2]ast.Node.In
 .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members,
 .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members,
 .root => tree.rootDecls(),
-.error_set_decl => &[_]ast.Node.Index{},
+.error_set_decl => &[_]Ast.Node.Index{},
 else => unreachable,
 };
 }

 /// Returns an `ast.full.VarDecl` for a given node index.
 /// Returns null if the tag doesn't match
-pub fn varDecl(tree: Tree, node_idx: ast.Node.Index) ?ast.full.VarDecl {
+pub fn varDecl(tree: Tree, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
 return switch (tree.nodes.items(.tag)[node_idx]) {
 .global_var_decl => tree.globalVarDecl(node_idx),
 .local_var_decl => tree.localVarDecl(node_idx),
@@ -955,7 +955,7 @@ pub fn varDecl(tree: Tree, node_idx: ast.Node.Index) ?ast.full.VarDecl {
 };
 }

-pub fn isBuiltinCall(tree: Tree, node: ast.Node.Index) bool {
+pub fn isBuiltinCall(tree: Tree, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .builtin_call,
 .builtin_call_comma,
@@ -966,7 +966,7 @@ pub fn isBuiltinCall(tree: Tree, node: ast.Node.Index) bool {
 };
 }

-pub fn isCall(tree: Tree, node: ast.Node.Index) bool {
+pub fn isCall(tree: Tree, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .call,
 .call_comma,
@@ -981,7 +981,7 @@ pub fn isCall(tree: Tree, node: ast.Node.Index) bool {
 };
 }

-pub fn fnProto(tree: Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?ast.full.FnProto {
+pub fn fnProto(tree: Tree, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
 return switch (tree.nodes.items(.tag)[node]) {
 .fn_proto => tree.fnProto(node),
 .fn_proto_multi => tree.fnProtoMulti(node),
@@ -992,7 +992,7 @@ pub fn fnProto(tree: Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?ast.f
 };
 }

-pub fn callFull(tree: Tree, node: ast.Node.Index, buf: *[1]ast.Node.Index) ?ast.full.Call {
+pub fn callFull(tree: Tree, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
 return switch (tree.nodes.items(.tag)[node]) {
 .call,
 .call_comma,
12
src/main.zig
@@ -13,7 +13,7 @@ const rename = @import("./rename.zig");
 const offsets = @import("./offsets.zig");
 const setup = @import("./setup.zig");
 const semantic_tokens = @import("./semantic_tokens.zig");
-const ast = std.zig.Ast;
+const Ast = std.zig.Ast;
 const known_folders = @import("known-folders");
 const data = blk: {
 if (std.mem.eql(u8, build_options.data_version, "0.7.0")) break :blk @import("data/0.7.0.zig");
@@ -203,7 +203,7 @@ fn showMessage(message_type: types.MessageType, message: []const u8) !void {
 }
 
 // TODO: Is this correct or can we get a better end?
-fn astLocationToRange(loc: ast.Location) types.Range {
+fn astLocationToRange(loc: Ast.Location) types.Range {
 return .{
 .start = .{
 .line = @intCast(i64, loc.line),
@@ -249,7 +249,7 @@ fn publishDiagnostics(arena: *std.heap.ArenaAllocator, handle: DocumentStore.Han
 .fn_proto_simple,
 .fn_decl,
 => blk: {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const func = analysis.fnProto(tree, decl_idx, &buf).?;
 if (func.extern_export_inline_token != null) break :blk;
 
@@ -403,7 +403,7 @@ fn nodeToCompletion(arena: *std.heap.ArenaAllocator, list: *std.ArrayList(types.
 .fn_proto_simple,
 .fn_decl,
 => {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const func = analysis.fnProto(tree, node, &buf).?;
 if (func.name_token) |name_token| {
 const use_snippets = config.enable_snippets and client_capabilities.supports_snippets;
@@ -622,7 +622,7 @@ fn hoverSymbol(id: types.RequestId, arena: *std.heap.ArenaAllocator, decl_handle
 }
 doc_str = try analysis.getDocComments(&arena.allocator, tree, node, hover_kind);
 
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 
 if (analysis.varDecl(tree, node)) |var_decl| {
 break :def analysis.getVariableSignature(tree, var_decl);
@@ -891,7 +891,7 @@ fn referencesDefinitionLabel(arena: *std.heap.ArenaAllocator, id: types.RequestI
 });
 }
 
-fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+fn hasComment(tree: Ast.Tree, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
 const token_starts = tree.tokens.items(.start);
 
 const start = token_starts[start_token];
@@ -1,7 +1,7 @@
 const std = @import("std");
 const types = @import("./types.zig");
-const ast = std.zig.Ast;
-const Tree = ast;
+const Ast = std.zig.Ast;
+const Tree = Ast;
 
 pub const Encoding = enum {
 utf8,
@@ -140,7 +140,7 @@ pub fn tokenRelativeLocation(tree: Tree, start_index: usize, token_start: usize,
 }
 
 /// Asserts the token is comprised of valid utf8
-pub fn tokenLength(tree: Tree, token: ast.TokenIndex, encoding: Encoding) usize {
+pub fn tokenLength(tree: Tree, token: Ast.TokenIndex, encoding: Encoding) usize {
 const token_loc = tokenLocation(tree, token);
 if (encoding == .utf8)
 return token_loc.end - token_loc.start;
@@ -166,7 +166,7 @@ pub const Loc = struct {
 end: usize,
 };
 
-pub fn tokenLocation(tree: Tree, token_index: ast.TokenIndex) Loc {
+pub fn tokenLocation(tree: Tree, token_index: Ast.TokenIndex) Loc {
 const start = tree.tokens.items(.start)[token_index];
 const tag = tree.tokens.items(.tag)[token_index];
 
@@ -7,9 +7,9 @@ const log = std.log.scoped(.references);
 const Reference = @This();
 usingnamespace @import("./ast.zig");
 
-const ast = std.zig.Ast;
+const Ast = std.zig.Ast;
 
-fn tokenReference(handle: *DocumentStore.Handle, tok: ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
+fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
 const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return;
 try handler(context, types.Location{
 .uri = handle.uri(),
@@ -69,10 +69,10 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 
 switch (node_tags[node]) {
 .block, .block_semicolon, .block_two, .block_two_semicolon => {
-const statements: []const ast.Node.Index = switch (node_tags[node]) {
+const statements: []const Ast.Node.Index = switch (node_tags[node]) {
 .block, .block_semicolon => tree.extra_data[datas[node].lhs..datas[node].rhs],
 .block_two, .block_two_semicolon => blk: {
-const statements = &[_]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
+const statements = &[_]Ast.Node.Index{ datas[node].lhs, datas[node].rhs };
 const len: usize = if (datas[node].lhs == 0)
 @as(usize, 0)
 else if (datas[node].rhs == 0)
@@ -101,7 +101,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .root,
 .error_set_decl,
 => {
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 for (Reference.declMembers(tree, node, &buf)) |member|
 try symbolReferencesInternal(arena, store, .{ .node = member, .handle = handle }, decl, encoding, context, handler);
 },
@@ -146,7 +146,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .fn_proto_simple,
 .fn_decl,
 => {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const fn_proto = Reference.fnProto(tree, node, &buf).?;
 var it = fn_proto.iterate(tree);
 while (it.next()) |param| {
@@ -187,7 +187,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 => {
 // TODO When renaming a union(enum) field, also rename switch items that refer to it.
 try symbolReferencesInternal(arena, store, .{ .node = datas[node].lhs, .handle = handle }, decl, encoding, context, handler);
-const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
+const extra = tree.extraData(datas[node].rhs, Ast.Node.SubRange);
 const cases = tree.extra_data[extra.start..extra.end];
 for (cases) |case| {
 try symbolReferencesInternal(arena, store, .{ .node = case, .handle = handle }, decl, encoding, context, handler);
@@ -278,7 +278,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .array_init_dot_two,
 .array_init_dot_two_comma,
 => |n| {
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 const array_init = switch (n) {
 .array_init, .array_init_comma => tree.arrayInit(node),
 .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
@@ -300,8 +300,8 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .struct_init_one,
 .struct_init_one_comma,
 => |n| {
-var buf: [2]ast.Node.Index = undefined;
-const struct_init: ast.full.StructInit = switch (n) {
+var buf: [2]Ast.Node.Index = undefined;
+const struct_init: Ast.full.StructInit = switch (n) {
 .struct_init, .struct_init_comma => tree.structInit(node),
 .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
 .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
@@ -322,8 +322,8 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .async_call_one,
 .async_call_one_comma,
 => |c| {
-var buf: [1]ast.Node.Index = undefined;
-const call: ast.full.Call = switch (c) {
+var buf: [1]Ast.Node.Index = undefined;
+const call: Ast.full.Call = switch (c) {
 .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
 .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&buf, node),
 else => unreachable,
@@ -339,7 +339,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .slice_sentinel,
 .slice_open,
 => |s| {
-const slice: ast.full.Slice = switch (s) {
+const slice: Ast.full.Slice = switch (s) {
 .slice => tree.slice(node),
 .slice_open => tree.sliceOpen(node),
 .slice_sentinel => tree.sliceSentinel(node),
@@ -387,11 +387,11 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 const params = switch (builtin_tag) {
 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]ast.Node.Index{}
+&[_]Ast.Node.Index{}
 else if (data.rhs == 0)
-&[_]ast.Node.Index{data.lhs}
+&[_]Ast.Node.Index{data.lhs}
 else
-&[_]ast.Node.Index{ data.lhs, data.rhs },
+&[_]Ast.Node.Index{ data.lhs, data.rhs },
 else => unreachable,
 };
 
@@ -401,7 +401,7 @@ fn symbolReferencesInternal(arena: *std.heap.ArenaAllocator, store: *DocumentSto
 .@"asm",
 .asm_simple,
 => |a| {
-const _asm: ast.full.Asm = if (a == .@"asm") tree.asmFull(node) else tree.asmSimple(node);
+const _asm: Ast.full.Asm = if (a == .@"asm") tree.asmFull(node) else tree.asmSimple(node);
 if (_asm.ast.items.len == 0)
 try symbolReferencesInternal(arena, store, .{ .node = _asm.ast.template, .handle = handle }, decl, encoding, context, handler);
 
@@ -544,10 +544,10 @@ pub fn symbolReferences(arena: *std.heap.ArenaAllocator, store: *DocumentStore,
 },
 .param_decl => |param| {
 // Rename the param tok.
-const fn_node: ast.full.FnProto = loop: for (curr_handle.document_scope.scopes) |scope| {
+const fn_node: Ast.full.FnProto = loop: for (curr_handle.document_scope.scopes) |scope| {
 switch (scope.data) {
 .function => |proto| {
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 const fn_proto = Reference.fnProto(curr_handle.tree, proto, &buf).?;
 var it = fn_proto.iterate(curr_handle.tree);
 while (it.next()) |candidate| {
@@ -2,7 +2,7 @@ const std = @import("std");
 const offsets = @import("./offsets.zig");
 const DocumentStore = @import("./document_store.zig");
 const analysis = @import("./analysis.zig");
-const ast = std.zig.Ast;
+const Ast = std.zig.Ast;
 const log = std.log.scoped(.semantic_tokens);
 const SemanticToken = @This();
 usingnamespace @import("./ast.zig");
@@ -54,7 +54,7 @@ pub const TokenModifiers = packed struct {
 const Builder = struct {
 handle: *DocumentStore.Handle,
 previous_position: usize = 0,
-previous_token: ?ast.TokenIndex = null,
+previous_token: ?Ast.TokenIndex = null,
 arr: std.ArrayList(u32),
 encoding: offsets.Encoding,
 
@@ -66,7 +66,7 @@ const Builder = struct {
 };
 }
 
-fn add(self: *Builder, token: ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
+fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void {
 const tree = self.handle.tree;
 const starts = tree.tokens.items(.start);
 const next_start = starts[token];
@@ -103,7 +103,7 @@ const Builder = struct {
 }
 
 /// Highlight a token without semantic context.
-fn handleToken(self: *Builder, tok: ast.TokenIndex) !void {
+fn handleToken(self: *Builder, tok: Ast.TokenIndex) !void {
 const tree = self.handle.tree;
 // TODO More highlighting here
 const tok_id = tree.tokens.items(.tag)[tok];
@@ -177,17 +177,17 @@ const Builder = struct {
 }
 };
 
-inline fn writeToken(builder: *Builder, token_idx: ?ast.TokenIndex, tok_type: TokenType) !void {
+inline fn writeToken(builder: *Builder, token_idx: ?Ast.TokenIndex, tok_type: TokenType) !void {
 return try writeTokenMod(builder, token_idx, tok_type, .{});
 }
 
-inline fn writeTokenMod(builder: *Builder, token_idx: ?ast.TokenIndex, tok_type: TokenType, tok_mod: TokenModifiers) !void {
+inline fn writeTokenMod(builder: *Builder, token_idx: ?Ast.TokenIndex, tok_type: TokenType, tok_mod: TokenModifiers) !void {
 if (token_idx) |ti| {
 try builder.add(ti, tok_type, tok_mod);
 }
 }
 
-fn writeDocComments(builder: *Builder, tree: SemanticToken.Tree, doc: ast.TokenIndex) !void {
+fn writeDocComments(builder: *Builder, tree: SemanticToken.Tree, doc: Ast.TokenIndex) !void {
 const token_tags = tree.tokens.items(.tag);
 var tok_idx = doc;
 while (token_tags[tok_idx] == .doc_comment or
@@ -200,7 +200,7 @@ fn writeDocComments(builder: *Builder, tree: SemanticToken.Tree, doc: ast.TokenI
 }
 }
 
-fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType {
+fn fieldTokenType(container_decl: Ast.Node.Index, handle: *DocumentStore.Handle) ?TokenType {
 const main_token = handle.tree.nodes.items(.main_token)[container_decl];
 if (main_token > handle.tree.tokens.len) return null;
 return @as(?TokenType, switch (handle.tree.tokens.items(.tag)[main_token]) {
@@ -210,7 +210,7 @@ fn fieldTokenType(container_decl: ast.Node.Index, handle: *DocumentStore.Handle)
 });
 }
 
-fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: ast.TokenIndex, tok_mod: TokenModifiers) !void {
+fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHandle, target_tok: Ast.TokenIndex, tok_mod: TokenModifiers) !void {
 if (type_node.type.is_type_val) {
 var new_tok_mod = tok_mod;
 if (type_node.isNamespace())
@@ -249,7 +249,7 @@ const WriteTokensError = error{
 MovedBackwards,
 };
 
-fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?ast.Node.Index) WriteTokensError!void {
+fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, maybe_node: ?Ast.Node.Index) WriteTokensError!void {
 const node = maybe_node orelse return;
 
 const handle = builder.handle;
@@ -294,10 +294,10 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 try writeToken(builder, main_token - 2, .label);
 }
 
-const statements: []const ast.Node.Index = switch (tag) {
+const statements: []const Ast.Node.Index = switch (tag) {
 .block, .block_semicolon => tree.extra_data[node_data[node].lhs..node_data[node].rhs],
 .block_two, .block_two_semicolon => blk: {
-const statements = &[_]ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
+const statements = &[_]Ast.Node.Index{ node_data[node].lhs, node_data[node].rhs };
 const len: usize = if (node_data[node].lhs == 0)
 @as(usize, 0)
 else if (node_data[node].rhs == 0)
@@ -366,8 +366,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .tagged_union_two,
 .tagged_union_two_trailing,
 => {
-var buf: [2]ast.Node.Index = undefined;
-const decl: ast.full.ContainerDecl = switch (tag) {
+var buf: [2]Ast.Node.Index = undefined;
+const decl: Ast.full.ContainerDecl = switch (tag) {
 .container_decl, .container_decl_trailing => tree.containerDecl(node),
 .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(&buf, node),
 .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
@@ -430,8 +430,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .fn_proto_multi,
 .fn_decl,
 => {
-var buf: [1]ast.Node.Index = undefined;
-const fn_proto: ast.full.FnProto = SemanticToken.fnProto(tree, node, &buf).?;
+var buf: [1]Ast.Node.Index = undefined;
+const fn_proto: Ast.full.FnProto = SemanticToken.fnProto(tree, node, &buf).?;
 if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
 try writeDocComments(builder, tree, docs);
 
@@ -496,7 +496,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 => {
 try writeToken(builder, main_token, .keyword);
 try await @asyncCall(child_frame, {}, writeNodeTokens, .{ builder, arena, store, node_data[node].lhs });
-const extra = tree.extraData(node_data[node].rhs, ast.Node.SubRange);
+const extra = tree.extraData(node_data[node].rhs, Ast.Node.SubRange);
 const cases = tree.extra_data[extra.start..extra.end];
 
 for (cases) |case_node| {
@@ -590,8 +590,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .array_init_dot_two,
 .array_init_dot_two_comma,
 => {
-var buf: [2]ast.Node.Index = undefined;
-const array_init: ast.full.ArrayInit = switch (tag) {
+var buf: [2]Ast.Node.Index = undefined;
+const array_init: Ast.full.ArrayInit = switch (tag) {
 .array_init, .array_init_comma => tree.arrayInit(node),
 .array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
 .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
@@ -611,8 +611,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .struct_init_dot_two,
 .struct_init_dot_two_comma,
 => {
-var buf: [2]ast.Node.Index = undefined;
-const struct_init: ast.full.StructInit = switch (tag) {
+var buf: [2]Ast.Node.Index = undefined;
+const struct_init: Ast.full.StructInit = switch (tag) {
 .struct_init, .struct_init_comma => tree.structInit(node),
 .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
 .struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
@@ -654,8 +654,8 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .async_call_one,
 .async_call_one_comma,
 => {
-var params: [1]ast.Node.Index = undefined;
-const call: ast.full.Call = switch (tag) {
+var params: [1]Ast.Node.Index = undefined;
+const call: Ast.full.Call = switch (tag) {
 .call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
 .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&params, node),
 else => unreachable,
@@ -675,7 +675,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .slice_open,
 .slice_sentinel,
 => {
-const slice: ast.full.Slice = switch (tag) {
+const slice: Ast.full.Slice = switch (tag) {
 .slice => tree.slice(node),
 .slice_open => tree.sliceOpen(node),
 .slice_sentinel => tree.sliceSentinel(node),
@@ -734,11 +734,11 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 const params = switch (tag) {
 .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
 .builtin_call_two, .builtin_call_two_comma => if (data.lhs == 0)
-&[_]ast.Node.Index{}
+&[_]Ast.Node.Index{}
 else if (data.rhs == 0)
-&[_]ast.Node.Index{data.lhs}
+&[_]Ast.Node.Index{data.lhs}
 else
-&[_]ast.Node.Index{ data.lhs, data.rhs },
+&[_]Ast.Node.Index{ data.lhs, data.rhs },
 else => unreachable,
 };
 
@@ -768,7 +768,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .asm_input,
 .asm_simple,
 => {
-const asm_node: ast.full.Asm = switch (tag) {
+const asm_node: Ast.full.Asm = switch (tag) {
 .@"asm" => tree.asmFull(node),
 .asm_simple => tree.asmSimple(node),
 else => return, // TODO Inputs, outputs
@@ -950,7 +950,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 .array_type,
 .array_type_sentinel,
 => {
-const array_type: ast.full.ArrayType = if (tag == .array_type)
+const array_type: Ast.full.ArrayType = if (tag == .array_type)
 tree.arrayType(node)
 else
 tree.arrayTypeSentinel(node);
@@ -981,7 +981,7 @@ fn writeNodeTokens(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *D
 }
 }
 
-fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, node: ast.Node.Index, field_token_type: ?TokenType, child_frame: anytype) !void {
+fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *DocumentStore, node: Ast.Node.Index, field_token_type: ?TokenType, child_frame: anytype) !void {
 const tree = builder.handle.tree;
 const container_field = SemanticToken.containerField(tree, node).?;
 const base = tree.nodes.items(.main_token)[node];
@@ -1002,7 +1002,7 @@ fn writeContainerField(builder: *Builder, arena: *std.heap.ArenaAllocator, store
 }
 
 if (container_field.ast.value_expr != 0) block: {
-const eq_tok: ast.TokenIndex = if (container_field.ast.align_expr != 0)
+const eq_tok: Ast.TokenIndex = if (container_field.ast.align_expr != 0)
 SemanticToken.lastToken(tree, container_field.ast.align_expr) + 2
 else if (container_field.ast.type_expr != 0)
 SemanticToken.lastToken(tree, container_field.ast.type_expr) + 1
@@ -1020,7 +1020,7 @@ pub fn writeAllSemanticTokens(arena: *std.heap.ArenaAllocator, store: *DocumentS
 errdefer builder.arr.deinit();
 
 // reverse the ast from the root declarations
-var buf: [2]ast.Node.Index = undefined;
+var buf: [2]Ast.Node.Index = undefined;
 for (SemanticToken.declMembers(handle.tree, 0, &buf)) |child| {
 writeNodeTokens(&builder, arena, store, child) catch |err| switch (err) {
 error.MovedBackwards => break,
@@ -3,21 +3,13 @@ const analysis = @import("./analysis.zig");
 const offsets = @import("./offsets.zig");
 const DocumentStore = @import("./document_store.zig");
 const types = @import("./types.zig");
-const ast = std.zig.Ast;
+const Ast = std.zig.Ast;
 const Token = std.zig.Token;
 const identifierFromPosition = @import("./main.zig").identifierFromPosition;
 const SignatureHelp = @This();
 usingnamespace @import("./ast.zig");
 
-fn fnProtoToSignatureInfo(
-document_store: *DocumentStore,
-arena: *std.heap.ArenaAllocator,
-commas: u32,
-skip_self_param: bool,
-handle: *DocumentStore.Handle,
-fn_node: ast.Node.Index,
-proto: ast.full.FnProto,
-) !types.SignatureInformation {
+fn fnProtoToSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAllocator, commas: u32, skip_self_param: bool, handle: *DocumentStore.Handle, fn_node: Ast.Node.Index, proto: Ast.full.FnProto) !types.SignatureInformation {
 const ParameterInformation = types.SignatureInformation.ParameterInformation;
 
 const tree = handle.tree;
@@ -287,7 +279,7 @@ pub fn getSignatureInfo(
 },
 };
 
-var buf: [1]ast.Node.Index = undefined;
+var buf: [1]Ast.Node.Index = undefined;
 if (SignatureHelp.fnProto(type_handle.handle.tree, node, &buf)) |proto| {
 return try fnProtoToSignatureInfo(
 document_store,
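
For readers following the rename, here is a minimal, illustrative sketch (not part of this change set) of how the `Ast = std.zig.Ast` binding introduced above is typically used; it assumes the `std.zig.parse`, `Ast.deinit`, `rootDecls`, and `*std.mem.Allocator` APIs of the Zig version zls targeted at the time, which differ in later releases.

const std = @import("std");
const Ast = std.zig.Ast;

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = &gpa.allocator; // allocators were still passed as *std.mem.Allocator in this era

    // Parse a tiny source buffer into an Ast, then walk its root declarations;
    // these are the same Ast.Node.Index values the helpers in this diff switch on.
    var tree = try std.zig.parse(allocator, "const x = 1;\npub fn f() void {}\n");
    defer tree.deinit(allocator);

    for (tree.rootDecls()) |decl| {
        const tag = tree.nodes.items(.tag)[decl];
        std.debug.print("root decl: {s}\n", .{@tagName(tag)});
    }
}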
|
Loading…
Reference in New Issue
Block a user