remove Tree when it was the same as Ast

parent 3c1152a536
commit b63339e350
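In short, this change drops the `Tree` aliases for `std.zig.Ast` (the `pub const Tree = Ast;` re-export in src/ast.zig and the per-file `const Tree` declarations) so signatures take `Ast` directly. A minimal sketch of the resulting style, written against the pre-0.9 Zig API this code base targets (`*std.mem.Allocator`, `std.zig.parse`); `tokenCount` and the `main` wrapper below are illustrative only, not functions from this repository:

const std = @import("std");
const Ast = std.zig.Ast;

// Helpers now take the parse tree as `Ast` directly instead of going through
// a `Tree` alias. This helper merely counts tokens, for demonstration.
fn tokenCount(tree: Ast) usize {
    return tree.tokens.len;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = &gpa.allocator;

    // std.zig.parse expects a null-terminated source slice and returns an Ast.
    var tree = try std.zig.parse(allocator, "pub fn main() void {}");
    defer tree.deinit(allocator);

    std.debug.print("token count: {d}\n", .{tokenCount(tree)});
}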
@@ -19,7 +19,7 @@ pub fn deinit() void {
 }
 
 /// Gets a declaration's doc comments. Caller owns returned memory.
-pub fn getDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: Ast.Node.Index, format: types.MarkupContent.Kind) !?[]const u8 {
+pub fn getDocComments(allocator: *std.mem.Allocator, tree: Ast, node: Ast.Node.Index, format: types.MarkupContent.Kind) !?[]const u8 {
 const base = tree.nodes.items(.main_token)[node];
 const base_kind = tree.nodes.items(.tag)[node];
 const tokens = tree.tokens.items(.tag);
@@ -67,7 +67,7 @@ pub fn getDocCommentTokenIndex(tokens: []std.zig.Token.Tag, base_token: Ast.Toke
 } else idx + 1;
 }
 
-pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, doc_comments: Ast.TokenIndex, format: types.MarkupContent.Kind, container_doc: bool) ![]const u8 {
+pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Ast, doc_comments: Ast.TokenIndex, format: types.MarkupContent.Kind, container_doc: bool) ![]const u8 {
 var lines = std.ArrayList([]const u8).init(allocator);
 defer lines.deinit();
 const tokens = tree.tokens.items(.tag);
@@ -84,7 +84,7 @@ pub fn collectDocComments(allocator: *std.mem.Allocator, tree: Analysis.Tree, do
 }
 
 /// Gets a function's keyword, name, arguments and return value.
-pub fn getFunctionSignature(tree: Analysis.Tree, func: Ast.full.FnProto) []const u8 {
+pub fn getFunctionSignature(tree: Ast, func: Ast.full.FnProto) []const u8 {
 const start = offsets.tokenLocation(tree, func.ast.fn_token);
 
 const end = if (func.ast.return_type != 0)
@@ -95,7 +95,7 @@ pub fn getFunctionSignature(tree: Analysis.Tree, func: Ast.full.FnProto) []const
 }
 
 /// Creates snippet insert text for a function. Caller owns returned memory.
-pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Analysis.Tree, func: Ast.full.FnProto, skip_self_param: bool) ![]const u8 {
+pub fn getFunctionSnippet(allocator: *std.mem.Allocator, tree: Ast, func: Ast.full.FnProto, skip_self_param: bool) ![]const u8 {
 const name_index = func.name_token.?;
 
 var buffer = std.ArrayList(u8).init(allocator);
@@ -190,13 +190,13 @@ pub fn hasSelfParam(arena: *std.heap.ArenaAllocator, document_store: *DocumentSt
 return false;
 }
 
-pub fn getVariableSignature(tree: Analysis.Tree, var_decl: Ast.full.VarDecl) []const u8 {
+pub fn getVariableSignature(tree: Ast, var_decl: Ast.full.VarDecl) []const u8 {
 const start = offsets.tokenLocation(tree, var_decl.ast.mut_token).start;
 const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, var_decl.ast.init_node)).end;
 return tree.source[start..end];
 }
 
-pub fn getContainerFieldSignature(tree: Analysis.Tree, field: Ast.full.ContainerField) []const u8 {
+pub fn getContainerFieldSignature(tree: Ast, field: Ast.full.ContainerField) []const u8 {
 const start = offsets.tokenLocation(tree, field.ast.name_token).start;
 const end_node = if (field.ast.value_expr != 0) field.ast.value_expr else field.ast.type_expr;
 const end = offsets.tokenLocation(tree, Analysis.lastToken(tree, end_node)).end;
@@ -204,18 +204,18 @@ pub fn getContainerFieldSignature(tree: Analysis.Tree, field: Ast.full.Container
 }
 
 /// The node is the meta-type `type`
-fn isMetaType(tree: Analysis.Tree, node: Ast.Node.Index) bool {
+fn isMetaType(tree: Ast, node: Ast.Node.Index) bool {
 if (tree.nodes.items(.tag)[node] == .identifier) {
 return std.mem.eql(u8, tree.tokenSlice(tree.nodes.items(.main_token)[node]), "type");
 }
 return false;
 }
 
-pub fn isTypeFunction(tree: Analysis.Tree, func: Ast.full.FnProto) bool {
+pub fn isTypeFunction(tree: Ast, func: Ast.full.FnProto) bool {
 return isMetaType(tree, func.ast.return_type);
 }
 
-pub fn isGenericFunction(tree: Analysis.Tree, func: Ast.full.FnProto) bool {
+pub fn isGenericFunction(tree: Ast, func: Ast.full.FnProto) bool {
 var it = func.iterate(tree);
 while (it.next()) |param| {
 if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) {
@@ -241,7 +241,7 @@ pub fn isSnakeCase(name: []const u8) bool {
 
 // ANALYSIS ENGINE
 
-pub fn getDeclNameToken(tree: Analysis.Tree, node: Ast.Node.Index) ?Ast.TokenIndex {
+pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex {
 const tags = tree.nodes.items(.tag);
 const main_token = tree.nodes.items(.main_token)[node];
 return switch (tags[node]) {
@@ -278,7 +278,7 @@ pub fn getDeclNameToken(tree: Analysis.Tree, node: Ast.Node.Index) ?Ast.TokenInd
 };
 }
 
-fn getDeclName(tree: Analysis.Tree, node: Ast.Node.Index) ?[]const u8 {
+fn getDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
 const name = tree.tokenSlice(getDeclNameToken(tree, node) orelse return null);
 return switch (tree.nodes.items(.tag)[node]) {
 .test_decl => name[1 .. name.len - 1],
@@ -369,7 +369,7 @@ pub fn resolveVarDeclAlias(store: *DocumentStore, arena: *std.heap.ArenaAllocato
 return null;
 }
 
-fn isBlock(tree: Analysis.Tree, node: Ast.Node.Index) bool {
+fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .block,
 .block_semicolon,
@@ -380,7 +380,7 @@ fn isBlock(tree: Analysis.Tree, node: Ast.Node.Index) bool {
 };
 }
 
-fn findReturnStatementInternal(tree: Analysis.Tree, fn_decl: Ast.full.FnProto, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index {
+fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index {
 var result: ?Ast.Node.Index = null;
 
 const node_tags = tree.nodes.items(.tag);
@@ -429,7 +429,7 @@ fn findReturnStatementInternal(tree: Analysis.Tree, fn_decl: Ast.full.FnProto, b
 return result;
 }
 
-fn findReturnStatement(tree: Analysis.Tree, fn_decl: Ast.full.FnProto, body: Ast.Node.Index) ?Ast.Node.Index {
+fn findReturnStatement(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Index) ?Ast.Node.Index {
 var already_found = false;
 return findReturnStatementInternal(tree, fn_decl, body, &already_found);
 }
@@ -507,7 +507,7 @@ fn resolveUnwrapErrorType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 return null;
 }
 
-pub fn isPtrType(tree: Analysis.Tree, node: Ast.Node.Index) bool {
+pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .ptr_type,
 .ptr_type_aligned,
@@ -602,7 +602,7 @@ fn allDigits(str: []const u8) bool {
 return true;
 }
 
-pub fn isTypeIdent(tree: Analysis.Tree, token_idx: Ast.TokenIndex) bool {
+pub fn isTypeIdent(tree: Ast, token_idx: Ast.TokenIndex) bool {
 const PrimitiveTypes = std.ComptimeStringMap(void, .{
 .{"isize"}, .{"usize"},
 .{"c_short"}, .{"c_ushort"},
@@ -1099,7 +1099,7 @@ pub fn resolveTypeOfNode(store: *DocumentStore, arena: *std.heap.ArenaAllocator,
 }
 
 /// Collects all imports we can find into a slice of import paths (without quotes).
-pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Analysis.Tree) !void {
+pub fn collectImports(import_arr: *std.ArrayList([]const u8), tree: Ast) !void {
 const tags = tree.tokens.items(.tag);
 
 var i: usize = 0;
@@ -1285,7 +1285,7 @@ pub fn getFieldAccessType(store: *DocumentStore, arena: *std.heap.ArenaAllocator
 };
 }
 
-pub fn isNodePublic(tree: Analysis.Tree, node: Ast.Node.Index) bool {
+pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool {
 var buf: [1]Ast.Node.Index = undefined;
 return switch (tree.nodes.items(.tag)[node]) {
 .global_var_decl,
@@ -1303,7 +1303,7 @@ pub fn isNodePublic(tree: Analysis.Tree, node: Ast.Node.Index) bool {
 };
 }
 
-pub fn nodeToString(tree: Analysis.Tree, node: Ast.Node.Index) ?[]const u8 {
+pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 {
 const data = tree.nodes.items(.data);
 const main_token = tree.nodes.items(.main_token)[node];
 var buf: [1]Ast.Node.Index = undefined;
@@ -1339,13 +1339,13 @@ pub fn nodeToString(tree: Analysis.Tree, node: Ast.Node.Index) ?[]const u8 {
 return null;
 }
 
-fn nodeContainsSourceIndex(tree: Analysis.Tree, node: Ast.Node.Index, source_index: usize) bool {
+fn nodeContainsSourceIndex(tree: Ast, node: Ast.Node.Index, source_index: usize) bool {
 const first_token = offsets.tokenLocation(tree, tree.firstToken(node)).start;
 const last_token = offsets.tokenLocation(tree, Analysis.lastToken(tree, node)).end;
 return source_index >= first_token and source_index <= last_token;
 }
 
-pub fn getImportStr(tree: Analysis.Tree, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
+pub fn getImportStr(tree: Ast, node: Ast.Node.Index, source_index: usize) ?[]const u8 {
 const node_tags = tree.nodes.items(.tag);
 var buf: [2]Ast.Node.Index = undefined;
 if (Analysis.isContainer(tree, node)) {
@@ -1580,7 +1580,7 @@ pub fn documentPositionContext(arena: *std.heap.ArenaAllocator, document: types.
 };
 }
 
-fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Analysis.Tree, child: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
+fn addOutlineNodes(allocator: *std.mem.Allocator, tree: Ast, child: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
 switch (tree.nodes.items(.tag)[child]) {
 .string_literal,
 .integer_literal,
@@ -1731,7 +1731,7 @@ const GetDocumentSymbolsContext = struct {
 encoding: offsets.Encoding,
 };
 
-fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Analysis.Tree, node: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
+fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Ast, node: Ast.Node.Index, context: *GetDocumentSymbolsContext) anyerror!void {
 const name = getDeclName(tree, node) orelse return;
 if (name.len == 0)
 return;
@@ -1815,7 +1815,7 @@ fn getDocumentSymbolsInternal(allocator: *std.mem.Allocator, tree: Analysis.Tree
 };
 }
 
-pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: Analysis.Tree, encoding: offsets.Encoding) ![]types.DocumentSymbol {
+pub fn getDocumentSymbols(allocator: *std.mem.Allocator, tree: Ast, encoding: offsets.Encoding) ![]types.DocumentSymbol {
 var symbols = try std.ArrayList(types.DocumentSymbol).initCapacity(allocator, tree.rootDecls().len);
 
 var context = GetDocumentSymbolsContext{
@@ -2372,7 +2372,7 @@ pub const Scope = struct {
 }
 };
 
-pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: Analysis.Tree) !DocumentScope {
+pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: Ast) !DocumentScope {
 var scopes = std.ArrayListUnmanaged(Scope){};
 var error_completions = CompletionSet{};
 var enum_completions = CompletionSet{};
@@ -2403,7 +2403,7 @@ pub fn makeDocumentScope(allocator: *std.mem.Allocator, tree: Analysis.Tree) !Do
 };
 }
 
-fn nodeSourceRange(tree: Analysis.Tree, node: Ast.Node.Index) SourceRange {
+fn nodeSourceRange(tree: Ast, node: Ast.Node.Index) SourceRange {
 const loc_start = offsets.tokenLocation(tree, tree.firstToken(node));
 const loc_end = offsets.tokenLocation(tree, Analysis.lastToken(tree, node));
 
@@ -2417,7 +2417,7 @@ const ScopeContext = struct {
 scopes: *std.ArrayListUnmanaged(Scope),
 enums: *CompletionSet,
 errors: *CompletionSet,
-tree: Analysis.Tree,
+tree: Ast,
 };
 
 fn makeInnerScope(allocator: *std.mem.Allocator, context: ScopeContext, node_idx: Ast.Node.Index) error{OutOfMemory}!void {
src/ast.zig
@@ -4,12 +4,11 @@
 
 const std = @import("std");
 const Ast = std.zig.Ast;
-pub const Tree = Ast;
 const Node = Ast.Node;
 const full = Ast.full;
 const assert = std.debug.assert;
 
-fn fullPtrType(tree: Tree, info: full.PtrType.Components) full.PtrType {
+fn fullPtrType(tree: Ast, info: full.PtrType.Components) full.PtrType {
 const token_tags = tree.tokens.items(.tag);
 // TODO: looks like stage1 isn't quite smart enough to handle enum
 // literals in some places here
@@ -58,7 +57,7 @@ fn fullPtrType(tree: Tree, info: full.PtrType.Components) full.PtrType {
 return result;
 }
 
-pub fn ptrTypeSimple(tree: Tree, node: Node.Index) full.PtrType {
+pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
 assert(tree.nodes.items(.tag)[node] == .ptr_type);
 const data = tree.nodes.items(.data)[node];
 const extra = tree.extraData(data.lhs, Node.PtrType);
@@ -73,7 +72,7 @@ pub fn ptrTypeSimple(tree: Tree, node: Node.Index) full.PtrType {
 });
 }
 
-pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
+pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
 assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
 const data = tree.nodes.items(.data)[node];
 return fullPtrType(tree, .{
@@ -87,7 +86,7 @@ pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
 });
 }
 
-pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
+pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType {
 assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
 const data = tree.nodes.items(.data)[node];
 return fullPtrType(tree, .{
@@ -101,7 +100,7 @@ pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
 });
 }
 
-pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
+pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
 assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
 const data = tree.nodes.items(.data)[node];
 const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
@@ -116,7 +115,7 @@ pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
 });
 }
 
-fn fullIf(tree: Tree, info: full.If.Components) full.If {
+fn fullIf(tree: Ast, info: full.If.Components) full.If {
 const token_tags = tree.tokens.items(.tag);
 var result: full.If = .{
 .ast = info,
@@ -141,7 +140,7 @@ fn fullIf(tree: Tree, info: full.If.Components) full.If {
 return result;
 }
 
-pub fn ifFull(tree: Tree, node: Node.Index) full.If {
+pub fn ifFull(tree: Ast, node: Node.Index) full.If {
 const data = tree.nodes.items(.data)[node];
 if (tree.nodes.items(.tag)[node] == .@"if") {
 const extra = tree.extraData(data.rhs, Node.If);
@@ -162,7 +161,7 @@ pub fn ifFull(tree: Tree, node: Node.Index) full.If {
 }
 }
 
-fn fullWhile(tree: Tree, info: full.While.Components) full.While {
+fn fullWhile(tree: Ast, info: full.While.Components) full.While {
 const token_tags = tree.tokens.items(.tag);
 var result: full.While = .{
 .ast = info,
@@ -197,7 +196,7 @@ fn fullWhile(tree: Tree, info: full.While.Components) full.While {
 return result;
 }
 
-pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
+pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
 const data = tree.nodes.items(.data)[node];
 return fullWhile(tree, .{
 .while_token = tree.nodes.items(.main_token)[node],
@@ -208,7 +207,7 @@ pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
 });
 }
 
-pub fn whileCont(tree: Tree, node: Node.Index) full.While {
+pub fn whileCont(tree: Ast, node: Node.Index) full.While {
 const data = tree.nodes.items(.data)[node];
 const extra = tree.extraData(data.rhs, Node.WhileCont);
 return fullWhile(tree, .{
@@ -220,7 +219,7 @@ pub fn whileCont(tree: Tree, node: Node.Index) full.While {
 });
 }
 
-pub fn whileFull(tree: Tree, node: Node.Index) full.While {
+pub fn whileFull(tree: Ast, node: Node.Index) full.While {
 const data = tree.nodes.items(.data)[node];
 const extra = tree.extraData(data.rhs, Node.While);
 return fullWhile(tree, .{
@@ -232,7 +231,7 @@ pub fn whileFull(tree: Tree, node: Node.Index) full.While {
 });
 }
 
-pub fn forSimple(tree: Tree, node: Node.Index) full.While {
+pub fn forSimple(tree: Ast, node: Node.Index) full.While {
 const data = tree.nodes.items(.data)[node];
 return fullWhile(tree, .{
 .while_token = tree.nodes.items(.main_token)[node],
@@ -243,7 +242,7 @@ pub fn forSimple(tree: Tree, node: Node.Index) full.While {
 });
 }
 
-pub fn forFull(tree: Tree, node: Node.Index) full.While {
+pub fn forFull(tree: Ast, node: Node.Index) full.While {
 const data = tree.nodes.items(.data)[node];
 const extra = tree.extraData(data.rhs, Node.If);
 return fullWhile(tree, .{
@@ -255,7 +254,7 @@ pub fn forFull(tree: Tree, node: Node.Index) full.While {
 });
 }
 
-pub fn lastToken(tree: Tree, node: Ast.Node.Index) Ast.TokenIndex {
+pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
 const TokenIndex = Ast.TokenIndex;
 const tags = tree.nodes.items(.tag);
 const datas = tree.nodes.items(.data);
@@ -875,7 +874,7 @@ pub fn lastToken(tree: Tree, node: Ast.Node.Index) Ast.TokenIndex {
 };
 }
 
-pub fn containerField(tree: Tree, node: Ast.Node.Index) ?Ast.full.ContainerField {
+pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
 return switch (tree.nodes.items(.tag)[node]) {
 .container_field => tree.containerField(node),
 .container_field_init => tree.containerFieldInit(node),
@@ -884,7 +883,7 @@ pub fn containerField(tree: Tree, node: Ast.Node.Index) ?Ast.full.ContainerField
 };
 }
 
-pub fn ptrType(tree: Tree, node: Ast.Node.Index) ?Ast.full.PtrType {
+pub fn ptrType(tree: Ast, node: Ast.Node.Index) ?Ast.full.PtrType {
 return switch (tree.nodes.items(.tag)[node]) {
 .ptr_type => ptrTypeSimple(tree, node),
 .ptr_type_aligned => ptrTypeAligned(tree, node),
@@ -894,7 +893,7 @@ pub fn ptrType(tree: Tree, node: Ast.Node.Index) ?Ast.full.PtrType {
 };
 }
 
-pub fn whileAst(tree: Tree, node: Ast.Node.Index) ?Ast.full.While {
+pub fn whileAst(tree: Ast, node: Ast.Node.Index) ?Ast.full.While {
 return switch (tree.nodes.items(.tag)[node]) {
 .@"while" => whileFull(tree, node),
 .while_simple => whileSimple(tree, node),
@@ -905,7 +904,7 @@ pub fn whileAst(tree: Tree, node: Ast.Node.Index) ?Ast.full.While {
 };
 }
 
-pub fn isContainer(tree: Tree, node: Ast.Node.Index) bool {
+pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .container_decl,
 .container_decl_trailing,
@@ -928,7 +927,7 @@ pub fn isContainer(tree: Tree, node: Ast.Node.Index) bool {
 
 /// Returns the member indices of a given declaration container.
 /// Asserts given `tag` is a container node
-pub fn declMembers(tree: Tree, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
+pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
 std.debug.assert(isContainer(tree, node_idx));
 return switch (tree.nodes.items(.tag)[node_idx]) {
 .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
@@ -945,7 +944,7 @@ pub fn declMembers(tree: Tree, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.In
 
 /// Returns an `ast.full.VarDecl` for a given node index.
 /// Returns null if the tag doesn't match
-pub fn varDecl(tree: Tree, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
+pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
 return switch (tree.nodes.items(.tag)[node_idx]) {
 .global_var_decl => tree.globalVarDecl(node_idx),
 .local_var_decl => tree.localVarDecl(node_idx),
@@ -955,7 +954,7 @@ pub fn varDecl(tree: Tree, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
 };
 }
 
-pub fn isBuiltinCall(tree: Tree, node: Ast.Node.Index) bool {
+pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .builtin_call,
 .builtin_call_comma,
@@ -966,7 +965,7 @@ pub fn isBuiltinCall(tree: Tree, node: Ast.Node.Index) bool {
 };
 }
 
-pub fn isCall(tree: Tree, node: Ast.Node.Index) bool {
+pub fn isCall(tree: Ast, node: Ast.Node.Index) bool {
 return switch (tree.nodes.items(.tag)[node]) {
 .call,
 .call_comma,
@@ -981,7 +980,7 @@ pub fn isCall(tree: Tree, node: Ast.Node.Index) bool {
 };
 }
 
-pub fn fnProto(tree: Tree, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
+pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
 return switch (tree.nodes.items(.tag)[node]) {
 .fn_proto => tree.fnProto(node),
 .fn_proto_multi => tree.fnProtoMulti(node),
@@ -992,7 +991,7 @@ pub fn fnProto(tree: Tree, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.f
 };
 }
 
-pub fn callFull(tree: Tree, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
+pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
 return switch (tree.nodes.items(.tag)[node]) {
 .call,
 .call_comma,
@@ -4,7 +4,7 @@ const URI = @import("./uri.zig");
 const analysis = @import("./analysis.zig");
 const offsets = @import("./offsets.zig");
 const log = std.log.scoped(.doc_store);
-const Tree = std.zig.Ast;
+const Ast = std.zig.Ast;
 const BuildAssociatedConfig = @import("./build_associated_config.zig");
 
 const DocumentStore = @This();
@@ -34,7 +34,7 @@ pub const Handle = struct {
 import_uris: []const []const u8,
 /// Items in this array list come from `import_uris`
 imports_used: std.ArrayListUnmanaged([]const u8),
-tree: Tree,
+tree: Ast,
 document_scope: analysis.DocumentScope,
 
 associated_build_file: ?*BuildFile,
@@ -1,6 +1,5 @@
 const std = @import("std");
 const build_options = @import("build_options");
-
 const Config = @import("./config.zig");
 const DocumentStore = @import("./document_store.zig");
 const readRequestHeader = @import("./header.zig").readRequestHeader;
@@ -1,7 +1,6 @@
 const std = @import("std");
 const types = @import("./types.zig");
 const Ast = std.zig.Ast;
-const Tree = Ast;
 
 pub const Encoding = enum {
 utf8,
@@ -63,7 +62,7 @@ pub fn documentPosition(doc: types.TextDocument, position: types.Position, encod
 }
 }
 
-pub fn lineSectionLength(tree: Tree, start_index: usize, end_index: usize, encoding: Encoding) !usize {
+pub fn lineSectionLength(tree: Ast, start_index: usize, end_index: usize, encoding: Encoding) !usize {
 const source = tree.source[start_index..];
 std.debug.assert(end_index >= start_index and source.len >= end_index - start_index);
 if (encoding == .utf8) {
@@ -104,7 +103,7 @@ pub const TokenLocation = struct {
 }
 };
 
-pub fn tokenRelativeLocation(tree: Tree, start_index: usize, token_start: usize, encoding: Encoding) !TokenLocation {
+pub fn tokenRelativeLocation(tree: Ast, start_index: usize, token_start: usize, encoding: Encoding) !TokenLocation {
 std.debug.assert(token_start >= start_index);
 var loc = TokenLocation{
 .line = 0,
@@ -140,7 +139,7 @@ pub fn tokenRelativeLocation(tree: Tree, start_index: usize, token_start: usize,
 }
 
 /// Asserts the token is comprised of valid utf8
-pub fn tokenLength(tree: Tree, token: Ast.TokenIndex, encoding: Encoding) usize {
+pub fn tokenLength(tree: Ast, token: Ast.TokenIndex, encoding: Encoding) usize {
 const token_loc = tokenLocation(tree, token);
 if (encoding == .utf8)
 return token_loc.end - token_loc.start;
@@ -166,7 +165,7 @@ pub const Loc = struct {
 end: usize,
 };
 
-pub fn tokenLocation(tree: Tree, token_index: Ast.TokenIndex) Loc {
+pub fn tokenLocation(tree: Ast, token_index: Ast.TokenIndex) Loc {
 const start = tree.tokens.items(.start)[token_index];
 const tag = tree.tokens.items(.tag)[token_index];
 
@@ -1,4 +1,5 @@
 const std = @import("std");
+const Ast = std.zig.Ast;
 const DocumentStore = @import("./document_store.zig");
 const analysis = @import("./analysis.zig");
 const types = @import("./types.zig");
@@ -7,8 +8,6 @@ const log = std.log.scoped(.references);
 const Reference = @This();
 usingnamespace @import("./ast.zig");
 
-const Ast = std.zig.Ast;
-
 fn tokenReference(handle: *DocumentStore.Handle, tok: Ast.TokenIndex, encoding: offsets.Encoding, context: anytype, comptime handler: anytype) !void {
 const loc = offsets.tokenRelativeLocation(handle.tree, 0, handle.tree.tokens.items(.start)[tok], encoding) catch return;
 try handler(context, types.Location{
@@ -187,7 +187,7 @@ inline fn writeTokenMod(builder: *Builder, token_idx: ?Ast.TokenIndex, tok_type:
 }
 }
 
-fn writeDocComments(builder: *Builder, tree: SemanticToken.Tree, doc: Ast.TokenIndex) !void {
+fn writeDocComments(builder: *Builder, tree: Ast, doc: Ast.TokenIndex) !void {
 const token_tags = tree.tokens.items(.tag);
 var tok_idx = doc;
 while (token_tags[tok_idx] == .doc_comment or