Merge branch 'master' into intern-pool

Techatrix 2023-01-26 16:16:40 +01:00
commit ca644d67c1
15 changed files with 971 additions and 693 deletions

.github/workflows/fuzz.yml (new file, 115 lines)

@@ -0,0 +1,115 @@
name: Sus Fuzzing
on:
  # pull_request_target can be dangerous but necessary here to access secrets.
  # I'm pretty comfortable using it because:
  # - We limit all permissions (including GITHUB_TOKEN) to read-only
  # - We limit runs to labelled PRs only which prevents random exploitation
  # - We don't expose secrets in environment variables which makes exploitation much more difficult
  # - The secrets that we reference aren't all that important anyways (they can only access our DigitalOcean Space)
  pull_request_target:
    types: [labeled, synchronize]
  push:
    paths:
      - "**.zig"
    branches:
      - master
  schedule:
    - cron: "0 0 * * *"
  workflow_dispatch:
    inputs:
      fuzzing_duration:
        type: string
        description: How long should fuzzing last? (sleep time argument)
        default: 15m
permissions: read-all
jobs:
  fuzz:
    if: github.repository_owner == 'zigtools' && (github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'pr:fuzz'))
    runs-on: ubuntu-latest
    steps:
      - name: Set Swap Space
        uses: pierotofy/set-swap-space@master
        with:
          swap-size-gb: 10
      - name: Default fuzzing duration
        if: github.event_name != 'pull_request_target'
        run: |
          echo "FUZZING_DURATION=${{ github.event.inputs.fuzzing_duration }}" >> $GITHUB_ENV
      - name: PR fuzzing duration
        if: github.event_name == 'pull_request_target'
        run: |
          echo "FUZZING_DURATION=15m" >> $GITHUB_ENV
      - name: Grab zig
        uses: goto-bus-stop/setup-zig@v1
        with:
          version: master
      - run: zig version
      - run: zig env
      - name: Checkout zig
        uses: actions/checkout@v3
        with:
          path: zig
          repository: "ziglang/zig"
          fetch-depth: 0
      - name: Checkout zls
        uses: actions/checkout@v3
        with:
          path: zls
          fetch-depth: 0
          submodules: true
      - name: Build zls
        run: |
          cd $GITHUB_WORKSPACE/zls
          pwd
          zig build
      - name: Checkout sus
        uses: actions/checkout@v3
        with:
          path: sus
          repository: "zigtools/sus"
          fetch-depth: 0
          submodules: recursive
      - name: Build sus
        run: |
          cd $GITHUB_WORKSPACE/sus
          pwd
          zig build -Drelease-fast
      - name: Run sus
        continue-on-error: true
        run: |
          cd $GITHUB_WORKSPACE/sus
          FUZZING_DURATION=${{ env.FUZZING_DURATION }}
          { sleep ${FUZZING_DURATION:-1h}; pkill -9 sus; } &
          ./zig-out/bin/sus $GITHUB_WORKSPACE/zls/zig-out/bin/zls markov $GITHUB_WORKSPACE/zig/lib/std
      - name: Upload saved logs
        uses: actions/upload-artifact@v3
        with:
          name: saved-logs
          path: sus/saved_logs/
      - uses: BetaHuhn/do-spaces-action@v2
        with:
          access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
          secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
          space_name: fuzzing-output
          space_region: nyc3
          source: sus/saved_logs/
          out_dir: ${{ github.event.pull_request.head.repo.full_name || github.repository }}/${{ github.head_ref || github.ref_name }}/${{ github.event.pull_request.head.sha || github.sha }}


@@ -122,19 +122,3 @@ jobs:
REMOTE_HOST: ${{ secrets.WEBSITE_DEPLOY_HOST }}
REMOTE_USER: ${{ secrets.WEBSITE_DEPLOY_USER }}
TARGET: ${{ secrets.WEBSITE_DEPLOY_FOLDER }}
- - name: Instruct fuzzing server to pull latest zls
-   if: ${{ matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/master' && github.repository_owner == 'zigtools' }}
-   uses: appleboy/ssh-action@v0.1.7
-   with:
-     host: fuzzing.zigtools.org
-     username: ${{ secrets.FUZZING_SSH_USERNAME }}
-     key: ${{ secrets.FUZZING_SSH_PRIVKEY }}
-     script: |
-       systemctl stop fuzzing
-       systemctl stop fuzzing-web
-       cd /root/sus
-       ./script/setup.sh
-       systemctl start fuzzing
-       sleep 5s
-       systemctl start fuzzing-web


@@ -7,7 +7,7 @@ const zls_version = std.builtin.Version{ .major = 0, .minor = 11, .patch = 0 };
pub fn build(b: *std.build.Builder) !void {
comptime {
const current_zig = builtin.zig_version;
- const min_zig = std.SemanticVersion.parse("0.11.0-dev.874+40ed6ae84") catch return; // Changes to builtin.Type API
+ const min_zig = std.SemanticVersion.parse("0.11.0-dev.1254+1f8f79cd5") catch return; // add helper functions to std.zig.Ast
if (current_zig.order(min_zig) == .lt) {
@compileError(std.fmt.comptimePrint("Your Zig version v{} does not meet the minimum build requirement of v{}", .{ current_zig, min_zig }));
}
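The bumped minimum Zig version pulls in the new full* accessor helpers on std.zig.Ast, which the rest of this commit migrates to in place of zls's own wrappers in src/ast.zig (ast.varDecl, ast.fnProto, ast.containerField, and so on). A minimal sketch of the before/after call pattern, assuming a parsed std.zig.Ast value tree and a node index node; illustrative only, not code from this commit:

const std = @import("std");
const Ast = std.zig.Ast;

fn describeNode(tree: Ast, node: Ast.Node.Index) void {
    // Before: zls-local helper returning an optional full view.
    //   const var_decl = ast.varDecl(tree, node) orelse return;
    // After: the equivalent helper lives on std.zig.Ast itself.
    if (tree.fullVarDecl(node)) |var_decl| {
        _ = var_decl.ast.init_node;
    }
    // Helpers that may need scratch space take a small buffer, as before.
    var buf: [1]Ast.Node.Index = undefined;
    if (tree.fullFnProto(&buf, node)) |fn_proto| {
        _ = fn_proto.name_token;
    }
}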


@@ -235,9 +235,10 @@ pub fn interpret(
defer fields.deinit(interpreter.allocator);
var buffer: [2]Ast.Node.Index = undefined;
- const members = ast.declMembers(tree, node_idx, &buffer);
- for (members) |member| {
- const container_field = ast.containerField(tree, member) orelse {
+ const container_decl = tree.fullContainerDecl(&buffer, node_idx).?;
+ for (container_decl.ast.members) |member| {
+ const container_field = tree.fullContainerField(member) orelse {
_ = try interpreter.interpret(member, container_namespace, options);
continue;
};
@@ -304,7 +305,7 @@ pub fn interpret(
if (decls.contains(name))
return InterpretResult{ .nothing = {} };
- const decl = ast.varDecl(tree, node_idx).?;
+ const decl = tree.fullVarDecl(node_idx).?;
const type_value = if (decl.ast.type_node != 0) (try interpreter.interpret(decl.ast.type_node, namespace, .{})).maybeGetValue() else null;
const init_value = if (decl.ast.init_node != 0) (try interpreter.interpret(decl.ast.init_node, namespace, .{})).maybeGetValue() else null;
@@ -651,7 +652,7 @@ pub fn interpret(
.@"if",
.if_simple,
=> {
- const if_info = ast.ifFull(tree, node_idx);
+ const if_info = ast.fullIf(tree, node_idx).?;
// TODO: Don't evaluate runtime ifs
// if (options.observe_values) {
const ir = try interpreter.interpret(if_info.ast.cond_expr, namespace, options);
@@ -942,7 +943,7 @@ pub fn interpret(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, node_idx, &buf).?;
+ const func = tree.fullFnProto(&buf, node_idx).?;
// TODO: Resolve function type
@@ -1008,7 +1009,7 @@ pub fn interpret(
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
- const call_full = ast.callFull(tree, node_idx, &params).?;
+ const call_full = tree.fullCall(&params, node_idx) orelse unreachable;
var args = try std.ArrayListUnmanaged(Value).initCapacity(interpreter.allocator, call_full.ast.params.len);
defer args.deinit(interpreter.allocator);
@@ -1127,7 +1128,7 @@ pub fn call(
const tree = interpreter.getHandle().tree;
var buf: [1]Ast.Node.Index = undefined;
- var proto = ast.fnProto(tree, func_node_idx, &buf) orelse return error.CriticalAstFailure;
+ var proto = tree.fullFnProto(&buf, func_node_idx) orelse return error.CriticalAstFailure;
// TODO: Make argument namespace to evaluate arguments in
try interpreter.namespaces.append(interpreter.allocator, .{


@@ -274,7 +274,7 @@ fn generateDiagnostics(server: *Server, handle: DocumentStore.Handle) error{OutO
.fn_decl,
=> blk: {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, decl_idx, &buf).?;
+ const func = tree.fullFnProto(&buf, decl_idx).?;
if (func.extern_export_inline_token != null) break :blk;
if (func.name_token) |name_token| {
@@ -666,7 +666,7 @@ fn nodeToCompletion(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const func = ast.fnProto(tree, node, &buf).?;
+ const func = tree.fullFnProto(&buf, node).?;
if (func.name_token) |name_token| {
const use_snippets = server.config.enable_snippets and server.client_capabilities.supports_snippets;
const insert_text = if (use_snippets) blk: {
@@ -692,7 +692,7 @@ fn nodeToCompletion(
.aligned_var_decl,
.simple_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
const is_const = token_tags[var_decl.ast.mut_token] == .keyword_const;
if (try analysis.resolveVarDeclAlias(&server.document_store, server.arena, node_handle)) |result| {
@@ -717,7 +717,7 @@ fn nodeToCompletion(
.container_field_align,
.container_field_init,
=> {
- const field = ast.containerField(tree, node).?;
+ const field = tree.fullContainerField(node).?;
try list.append(allocator, .{
.label = handle.tree.tokenSlice(field.ast.main_token),
.kind = if (field.ast.tuple_like) .Enum else .Field,
@@ -743,7 +743,7 @@ fn nodeToCompletion(
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
- const ptr_type = ast.ptrType(tree, node).?;
+ const ptr_type = ast.fullPtrType(tree, node).?;
switch (ptr_type.size) {
.One, .C, .Many => if (server.config.operator_completions) {
@@ -814,12 +814,12 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
if (pos_index + 1 >= handle.text.len) return "";
var start_idx = pos_index;
- while (start_idx > 0 and isSymbolChar(handle.text[start_idx - 1])) {
+ while (start_idx > 0 and analysis.isSymbolChar(handle.text[start_idx - 1])) {
start_idx -= 1;
}
var end_idx = pos_index;
- while (end_idx < handle.text.len and isSymbolChar(handle.text[end_idx])) {
+ while (end_idx < handle.text.len and analysis.isSymbolChar(handle.text[end_idx])) {
end_idx += 1;
}
@@ -827,10 +827,6 @@ pub fn identifierFromPosition(pos_index: usize, handle: DocumentStore.Handle) []
return handle.text[start_idx..end_idx];
}
- fn isSymbolChar(char: u8) bool {
-     return std.ascii.isAlphanumeric(char) or char == '_';
- }
fn gotoDefinitionSymbol(
server: *Server,
decl_handle: analysis.DeclWithHandle,
@@ -881,11 +877,11 @@ fn hoverSymbol(server: *Server, decl_handle: analysis.DeclWithHandle) error{OutO
var buf: [1]Ast.Node.Index = undefined;
- if (ast.varDecl(tree, node)) |var_decl| {
+ if (tree.fullVarDecl(node)) |var_decl| {
break :def analysis.getVariableSignature(tree, var_decl);
- } else if (ast.fnProto(tree, node, &buf)) |fn_proto| {
+ } else if (tree.fullFnProto(&buf, node)) |fn_proto| {
break :def analysis.getFunctionSignature(tree, fn_proto);
- } else if (ast.containerField(tree, node)) |field| {
+ } else if (tree.fullContainerField(node)) |field| {
break :def analysis.getContainerFieldSignature(tree, field);
} else {
break :def analysis.nodeToString(tree, node) orelse return null;
@@ -1303,16 +1299,15 @@ fn completeBuiltin(server: *Server) error{OutOfMemory}!?[]types.CompletionItem {
});
}
- var completions = try allocator.alloc(types.CompletionItem, builtin_completions.items.len);
+ var completions = try builtin_completions.clone(allocator);
if (server.client_capabilities.label_details_support) {
- for (builtin_completions.items) |item, i| {
-     completions[i] = item;
-     try formatDetailledLabel(&completions[i], allocator);
+ for (completions.items) |*item| {
+     try formatDetailledLabel(item, allocator);
}
}
- return completions;
+ return completions.items;
}
fn completeGlobal(server: *Server, pos_index: usize, handle: *const DocumentStore.Handle) error{OutOfMemory}![]types.CompletionItem {
@@ -2102,7 +2097,7 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
}
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, false);
const maybe_completions = switch (pos_context) {
.builtin => try server.completeBuiltin(),
@@ -2126,6 +2121,29 @@ fn completionHandler(server: *Server, request: types.CompletionParams) Error!?ty
const completions = maybe_completions orelse return null;
+ // The cursor is in the middle of a word or before a @, so we can replace
+ // the remaining identifier with the completion instead of just inserting.
+ // TODO Identify function call/struct init and replace the whole thing.
+ const lookahead_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
+ if (server.client_capabilities.supports_apply_edits and pos_context.loc() != null and lookahead_context.loc() != null and pos_context.loc().?.end != lookahead_context.loc().?.end) {
+     var end = lookahead_context.loc().?.end;
+     while (end < handle.text.len and (std.ascii.isAlphanumeric(handle.text[end]) or handle.text[end] == '"')) {
+         end += 1;
+     }
+     const replaceLoc = offsets.Loc{ .start = lookahead_context.loc().?.start, .end = end };
+     const replaceRange = offsets.locToRange(handle.text, replaceLoc, server.offset_encoding);
+     for (completions) |*item| {
+         item.textEdit = .{
+             .TextEdit = .{
+                 .newText = item.insertText orelse item.label,
+                 .range = replaceRange,
+             },
+         };
+     }
+ }
// truncate completions
for (completions) |*item| {
if (item.detail) |det| {
@@ -2183,7 +2201,7 @@ fn gotoHandler(server: *Server, request: types.TextDocumentPositionParams, resol
if (request.position.character == 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
return switch (pos_context) {
.var_access => try server.gotoDefinitionGlobal(source_index, handle, resolve_alias),
@@ -2223,7 +2241,7 @@ fn hoverHandler(server: *Server, request: types.HoverParams) Error!?types.Hover
if (request.position.character == 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position, server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
const response = switch (pos_context) {
.builtin => try server.hoverDefinitionBuiltin(source_index, handle),
@@ -2369,7 +2387,7 @@ fn generalReferencesHandler(server: *Server, request: GeneralReferencesRequest)
if (request.position().character <= 0) return null;
const source_index = offsets.positionToIndex(handle.text, request.position(), server.offset_encoding);
- const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index);
+ const pos_context = try analysis.getPositionContext(server.arena.allocator(), handle.text, source_index, true);
const decl = switch (pos_context) {
.var_access => try server.getSymbolGlobal(source_index, handle),
@@ -2486,6 +2504,9 @@ fn inlayHintHandler(server: *Server, request: types.InlayHintParams) Error!?[]ty
}
fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]types.CodeAction {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;
var builder = code_actions.Builder{
@@ -2514,6 +2535,9 @@ fn codeActionHandler(server: *Server, request: types.CodeActionParams) Error!?[]
}
fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error!?[]types.FoldingRange {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
const Token = std.zig.Token;
const Node = Ast.Node;
const allocator = server.arena.allocator();
@@ -2521,37 +2545,26 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const helper = struct {
const Inclusivity = enum { inclusive, exclusive };
- /// Returns true if added.
- fn maybeAddTokRange(
-     p_ranges: *std.ArrayList(types.FoldingRange),
-     tree: Ast,
-     start: Ast.TokenIndex,
-     end: Ast.TokenIndex,
-     end_reach: Inclusivity,
-     encoding: offsets.Encoding,
- ) std.mem.Allocator.Error!bool {
-     const can_add = start < end and !tree.tokensOnSameLine(start, end);
-     if (can_add) {
-         try addTokRange(p_ranges, tree, start, end, end_reach, encoding);
-     }
-     return can_add;
- }
fn addTokRange(
p_ranges: *std.ArrayList(types.FoldingRange),
tree: Ast,
start: Ast.TokenIndex,
end: Ast.TokenIndex,
end_reach: Inclusivity,
- encoding: offsets.Encoding,
) std.mem.Allocator.Error!void {
- std.debug.assert(!std.debug.runtime_safety or !tree.tokensOnSameLine(start, end));
- const start_line = offsets.tokenToPosition(tree, start, encoding).line;
- const end_line = offsets.tokenToPosition(tree, end, encoding).line;
+ if (tree.tokensOnSameLine(start, end)) return;
+ std.debug.assert(start <= end);
+ const start_index = offsets.tokenToIndex(tree, start);
+ const end_index = offsets.tokenToIndex(tree, end);
+ const start_line = std.mem.count(u8, tree.source[0..start_index], "\n");
+ const end_line = start_line + std.mem.count(u8, tree.source[start_index..end_index], "\n");
try p_ranges.append(.{
- .startLine = start_line,
- .endLine = end_line - @boolToInt(end_reach == .exclusive),
+ .startLine = @intCast(u32, start_line),
+ .endLine = @intCast(u32, end_line) - @boolToInt(end_reach == .exclusive),
});
}
};
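The rewritten addTokRange above derives folding line numbers by counting newlines in the source up to each token's byte offset, so the result no longer depends on the client's offset encoding (FoldingRange only needs line numbers). A small self-contained sketch of that arithmetic, using a made-up three-line source string rather than anything from this commit:

const std = @import("std");

test "line numbers by counting newlines" {
    const source = "const a = 1;\nconst b = 2;\nconst c = 3;\n";
    const start_index = std.mem.indexOf(u8, source, "b").?; // a byte offset on line 1 (0-based)
    const end_index = std.mem.indexOf(u8, source, "c").?; // a byte offset on line 2 (0-based)
    const start_line = std.mem.count(u8, source[0..start_index], "\n");
    const end_line = start_line + std.mem.count(u8, source[start_index..end_index], "\n");
    try std.testing.expectEqual(@as(usize, 1), start_line);
    try std.testing.expectEqual(@as(usize, 2), end_line);
}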
@@ -2582,21 +2595,24 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const node = @intCast(Node.Index, i);
switch (node_tag) {
+ .root => continue,
// only fold the expression pertaining to the if statement, and the else statement, each respectively.
// TODO: Should folding multiline condition expressions also be supported? Ditto for the other control flow structures.
- .@"if", .if_simple => {
-     const if_full = ast.ifFull(handle.tree, node);
+ .@"if",
+ .if_simple,
+ => {
+     const if_full = ast.fullIf(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, if_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, if_full.ast.then_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (if_full.ast.else_expr == 0) continue;
const start_tok_2 = if_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, if_full.ast.else_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
// same as if/else
@@ -2606,17 +2622,17 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
.while_cont,
.while_simple,
=> {
- const loop_full = ast.whileAst(handle.tree, node).?;
+ const loop_full = ast.fullWhile(handle.tree, node).?;
const start_tok_1 = ast.lastToken(handle.tree, loop_full.ast.cond_expr);
const end_tok_1 = ast.lastToken(handle.tree, loop_full.ast.then_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_1, end_tok_1, .inclusive);
if (loop_full.ast.else_expr == 0) continue;
const start_tok_2 = loop_full.else_token;
const end_tok_2 = ast.lastToken(handle.tree, loop_full.ast.else_expr);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok_2, end_tok_2, .inclusive);
},
.global_var_decl,
@@ -2644,22 +2660,19 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
start_doc_tok -= 1;
}
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_doc_tok, end_doc_tok, .inclusive);
}
// Function prototype folding regions
- var fn_proto_buffer: [1]Node.Index = undefined;
- const fn_proto = ast.fnProto(handle.tree, node, fn_proto_buffer[0..]) orelse
+ var buffer: [1]Node.Index = undefined;
+ const fn_proto = handle.tree.fullFnProto(&buffer, node) orelse
break :decl_node_blk;
const list_start_tok: Ast.TokenIndex = fn_proto.lparen;
const list_end_tok: Ast.TokenIndex = ast.lastToken(handle.tree, fn_proto.ast.proto_node);
if (handle.tree.tokensOnSameLine(list_start_tok, list_end_tok)) break :decl_node_blk;
- try ranges.ensureUnusedCapacity(1 + fn_proto.ast.params.len); // best guess, doesn't include anytype params
- helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive, server.offset_encoding) catch |err| switch (err) {
-     error.OutOfMemory => unreachable,
- };
+ try helper.addTokRange(&ranges, handle.tree, list_start_tok, list_end_tok, .exclusive);
var it = fn_proto.iterate(&handle.tree);
while (ast.nextFnParam(&it)) |param| {
@@ -2669,7 +2682,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
while (token_tags[doc_end_tok + 1] == .doc_comment)
doc_end_tok += 1;
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, doc_start_tok, doc_end_tok, .inclusive);
}
},
@@ -2681,7 +2694,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
=> {
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .inclusive);
},
// most other trivial cases can go through here.
@@ -2728,7 +2741,7 @@ fn foldingRangeHandler(server: *Server, request: types.FoldingRangeParams) Error
const start_tok = handle.tree.firstToken(node);
const end_tok = ast.lastToken(handle.tree, node);
- _ = try helper.maybeAddTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive, server.offset_encoding);
+ try helper.addTokRange(&ranges, handle.tree, start_tok, end_tok, .exclusive);
},
}
}
@@ -2777,6 +2790,9 @@ pub const SelectionRange = struct {
};
fn selectionRangeHandler(server: *Server, request: types.SelectionRangeParams) Error!?[]*SelectionRange {
+ const tracy_zone = tracy.trace(@src());
+ defer tracy_zone.end();
const allocator = server.arena.allocator();
const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null;

[File diff suppressed because it is too large]


@@ -7,7 +7,7 @@ const Ast = std.zig.Ast;
const Node = Ast.Node;
const full = Ast.full;
- fn fullPtrType(tree: Ast, info: full.PtrType.Components) full.PtrType {
+ fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType {
const token_tags = tree.tokens.items(.tag);
const size: std.builtin.Type.Pointer.Size = switch (token_tags[info.main_token]) {
.asterisk,
@@ -57,7 +57,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type);
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.lhs, Node.PtrType);
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
@@ -71,7 +71,7 @@ pub fn ptrTypeSimple(tree: Ast, node: Node.Index) full.PtrType {
pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
const data = tree.nodes.items(.data)[node];
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = 0,
.addrspace_node = 0,
@@ -85,7 +85,7 @@ pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
const data = tree.nodes.items(.data)[node];
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = data.lhs,
.addrspace_node = 0,
@@ -100,7 +100,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
std.debug.assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
- return fullPtrType(tree, .{
+ return fullPtrTypeComponents(tree, .{
.main_token = tree.nodes.items(.main_token)[node],
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
@@ -111,7 +111,7 @@ pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
});
}
- fn fullIf(tree: Ast, info: full.If.Components) full.If {
+ fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
const token_tags = tree.tokens.items(.tag);
var result: full.If = .{
.ast = info,
@@ -137,27 +137,29 @@ fn fullIf(tree: Ast, info: full.If.Components) full.If {
}
pub fn ifFull(tree: Ast, node: Node.Index) full.If {
+ std.debug.assert(tree.nodes.items(.tag)[node] == .@"if");
const data = tree.nodes.items(.data)[node];
- if (tree.nodes.items(.tag)[node] == .@"if") {
-     const extra = tree.extraData(data.rhs, Node.If);
-     return fullIf(tree, .{
-         .cond_expr = data.lhs,
-         .then_expr = extra.then_expr,
-         .else_expr = extra.else_expr,
-         .if_token = tree.nodes.items(.main_token)[node],
-     });
- } else {
-     std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
-     return fullIf(tree, .{
-         .cond_expr = data.lhs,
-         .then_expr = data.rhs,
-         .else_expr = 0,
-         .if_token = tree.nodes.items(.main_token)[node],
-     });
- }
+ const extra = tree.extraData(data.rhs, Node.If);
+ return fullIfComponents(tree, .{
+     .cond_expr = data.lhs,
+     .then_expr = extra.then_expr,
+     .else_expr = extra.else_expr,
+     .if_token = tree.nodes.items(.main_token)[node],
+ });
}
- fn fullWhile(tree: Ast, info: full.While.Components) full.While {
+ pub fn ifSimple(tree: Ast, node: Node.Index) full.If {
+     std.debug.assert(tree.nodes.items(.tag)[node] == .if_simple);
+     const data = tree.nodes.items(.data)[node];
+     return fullIfComponents(tree, .{
+         .cond_expr = data.lhs,
+         .then_expr = data.rhs,
+         .else_expr = 0,
+         .if_token = tree.nodes.items(.main_token)[node],
+     });
+ }
+ fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
const token_tags = tree.tokens.items(.tag);
var result: full.While = .{
.ast = info,
@@ -194,7 +196,7 @@ fn fullWhile(tree: Ast, info: full.While.Components) full.While {
pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -206,7 +208,7 @@ pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
pub fn whileCont(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.WhileCont);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = extra.cont_expr,
@@ -218,7 +220,7 @@ pub fn whileCont(tree: Ast, node: Node.Index) full.While {
pub fn whileFull(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.While);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = extra.cont_expr,
@@ -229,7 +231,7 @@ pub fn whileFull(tree: Ast, node: Node.Index) full.While {
pub fn forSimple(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -241,7 +243,7 @@ pub fn forSimple(tree: Ast, node: Node.Index) full.While {
pub fn forFull(tree: Ast, node: Node.Index) full.While {
const data = tree.nodes.items(.data)[node];
const extra = tree.extraData(data.rhs, Node.If);
- return fullWhile(tree, .{
+ return fullWhileComponents(tree, .{
.while_token = tree.nodes.items(.main_token)[node],
.cond_expr = data.lhs,
.cont_expr = 0,
@@ -250,6 +252,35 @@ pub fn forFull(tree: Ast, node: Node.Index) full.While {
});
}
+ pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
+     return switch (tree.nodes.items(.tag)[node]) {
+         .ptr_type_aligned => tree.ptrTypeAligned(node),
+         .ptr_type_sentinel => tree.ptrTypeSentinel(node),
+         .ptr_type => tree.ptrType(node),
+         .ptr_type_bit_range => tree.ptrTypeBitRange(node),
+         else => null,
+     };
+ }
+ pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
+     return switch (tree.nodes.items(.tag)[node]) {
+         .if_simple => tree.ifSimple(node),
+         .@"if" => tree.ifFull(node),
+         else => null,
+     };
+ }
+ pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
+     return switch (tree.nodes.items(.tag)[node]) {
+         .while_simple => tree.whileSimple(node),
+         .while_cont => tree.whileCont(node),
+         .@"while" => tree.whileFull(node),
+         .for_simple => tree.forSimple(node),
+         .@"for" => tree.forFull(node),
+         else => null,
+     };
+ }
pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex {
const TokenIndex = Ast.TokenIndex;
const tags = tree.nodes.items(.tag);
@@ -911,36 +942,6 @@ pub fn paramLastToken(tree: Ast, param: Ast.full.FnProto.Param) Ast.TokenIndex {
return param.anytype_ellipsis3 orelse tree.lastToken(param.type_expr);
}
- pub fn containerField(tree: Ast, node: Ast.Node.Index) ?Ast.full.ContainerField {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .container_field => tree.containerField(node),
-         .container_field_init => tree.containerFieldInit(node),
-         .container_field_align => tree.containerFieldAlign(node),
-         else => null,
-     };
- }
- pub fn ptrType(tree: Ast, node: Ast.Node.Index) ?Ast.full.PtrType {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .ptr_type => ptrTypeSimple(tree, node),
-         .ptr_type_aligned => ptrTypeAligned(tree, node),
-         .ptr_type_bit_range => ptrTypeBitRange(tree, node),
-         .ptr_type_sentinel => ptrTypeSentinel(tree, node),
-         else => null,
-     };
- }
- pub fn whileAst(tree: Ast, node: Ast.Node.Index) ?Ast.full.While {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .@"while" => whileFull(tree, node),
-         .while_simple => whileSimple(tree, node),
-         .while_cont => whileCont(tree, node),
-         .@"for" => forFull(tree, node),
-         .for_simple => forSimple(tree, node),
-         else => null,
-     };
- }
pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.container_decl,
@@ -962,58 +963,6 @@ pub fn isContainer(tree: Ast, node: Ast.Node.Index) bool {
};
}
- pub fn containerDecl(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) ?full.ContainerDecl {
-     return switch (tree.nodes.items(.tag)[node_idx]) {
-         .container_decl, .container_decl_trailing => tree.containerDecl(node_idx),
-         .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx),
-         .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx),
-         .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx),
-         .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx),
-         .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx),
-         else => null,
-     };
- }
- /// Returns the member indices of a given declaration container.
- /// Asserts given `tag` is a container node
- pub fn declMembers(tree: Ast, node_idx: Ast.Node.Index, buffer: *[2]Ast.Node.Index) []const Ast.Node.Index {
-     std.debug.assert(isContainer(tree, node_idx));
-     return switch (tree.nodes.items(.tag)[node_idx]) {
-         .container_decl, .container_decl_trailing => tree.containerDecl(node_idx).ast.members,
-         .container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node_idx).ast.members,
-         .container_decl_two, .container_decl_two_trailing => tree.containerDeclTwo(buffer, node_idx).ast.members,
-         .tagged_union, .tagged_union_trailing => tree.taggedUnion(node_idx).ast.members,
-         .tagged_union_enum_tag, .tagged_union_enum_tag_trailing => tree.taggedUnionEnumTag(node_idx).ast.members,
-         .tagged_union_two, .tagged_union_two_trailing => tree.taggedUnionTwo(buffer, node_idx).ast.members,
-         .root => tree.rootDecls(),
-         .error_set_decl => &[_]Ast.Node.Index{},
-         else => unreachable,
-     };
- }
- /// Returns an `ast.full.VarDecl` for a given node index.
- /// Returns null if the tag doesn't match
- pub fn varDecl(tree: Ast, node_idx: Ast.Node.Index) ?Ast.full.VarDecl {
-     return switch (tree.nodes.items(.tag)[node_idx]) {
-         .global_var_decl => tree.globalVarDecl(node_idx),
-         .local_var_decl => tree.localVarDecl(node_idx),
-         .aligned_var_decl => tree.alignedVarDecl(node_idx),
-         .simple_var_decl => tree.simpleVarDecl(node_idx),
-         else => null,
-     };
- }
- pub fn isPtrType(tree: Ast, node: Ast.Node.Index) bool {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .ptr_type,
-         .ptr_type_aligned,
-         .ptr_type_bit_range,
-         .ptr_type_sentinel,
-         => true,
-         else => false,
-     };
- }
pub fn isBuiltinCall(tree: Ast, node: Ast.Node.Index) bool {
return switch (tree.nodes.items(.tag)[node]) {
.builtin_call,
@@ -1051,45 +1000,6 @@ pub fn isBlock(tree: Ast, node: Ast.Node.Index) bool {
};
}
- pub fn fnProtoHasBody(tree: Ast, node: Ast.Node.Index) ?bool {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .fn_proto,
-         .fn_proto_multi,
-         .fn_proto_one,
-         .fn_proto_simple,
-         => false,
-         .fn_decl => true,
-         else => null,
-     };
- }
- pub fn fnProto(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.FnProto {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .fn_proto => tree.fnProto(node),
-         .fn_proto_multi => tree.fnProtoMulti(node),
-         .fn_proto_one => tree.fnProtoOne(buf, node),
-         .fn_proto_simple => tree.fnProtoSimple(buf, node),
-         .fn_decl => fnProto(tree, tree.nodes.items(.data)[node].lhs, buf),
-         else => null,
-     };
- }
- pub fn callFull(tree: Ast, node: Ast.Node.Index, buf: *[1]Ast.Node.Index) ?Ast.full.Call {
-     return switch (tree.nodes.items(.tag)[node]) {
-         .call,
-         .call_comma,
-         .async_call,
-         .async_call_comma,
-         => tree.callFull(node),
-         .call_one,
-         .call_one_comma,
-         .async_call_one,
-         .async_call_one_comma,
-         => tree.callOne(buf, node),
-         else => null,
-     };
- }
/// returns a list of parameters
pub fn builtinCallParams(tree: Ast, node: Ast.Node.Index, buf: *[2]Ast.Node.Index) ?[]const Node.Index {
const node_data = tree.nodes.items(.data);


@@ -43,8 +43,10 @@ pub fn printTree(tree: std.zig.Ast) void {
pub fn printDocumentScope(doc_scope: analysis.DocumentScope) void {
if (!std.debug.runtime_safety) @compileError("this function should only be used in debug mode!");
- for (doc_scope.scopes.items) |scope, i| {
-     if (i != 0) std.debug.print("\n\n", .{});
+ var index: usize = 0;
+ while (index < doc_scope.scopes.len) : (index += 1) {
+     const scope = doc_scope.scopes.get(index);
+     if (index != 0) std.debug.print("\n\n", .{});
std.debug.print(
\\[{d}, {d}] {}
\\usingnamespaces: {d}
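The loop above switches from slice-based iteration to indexed access through .len and .get(index), the access pattern of a struct-of-arrays container such as std.MultiArrayList; that is an inference from this hunk, since the scopes field itself lives in src/analysis.zig, whose diff is not shown here. A minimal sketch of that iteration pattern over a hypothetical Scope struct:

const std = @import("std");

const Scope = struct { start: u32, end: u32 };

test "iterate a MultiArrayList by index" {
    var scopes = std.MultiArrayList(Scope){};
    defer scopes.deinit(std.testing.allocator);
    try scopes.append(std.testing.allocator, .{ .start = 0, .end = 4 });
    try scopes.append(std.testing.allocator, .{ .start = 4, .end = 9 });

    var index: usize = 0;
    while (index < scopes.len) : (index += 1) {
        const scope = scopes.get(index); // materializes one full Scope element
        std.debug.print("[{d}, {d}]\n", .{ scope.start, scope.end });
    }
}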


@@ -84,7 +84,7 @@ fn writeCallHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store: *Doc
switch (decl.*) {
.ast_node => |fn_node| {
var buffer: [1]Ast.Node.Index = undefined;
- if (ast.fnProto(decl_tree, fn_node, &buffer)) |fn_proto| {
+ if (decl_tree.fullFnProto(&buffer, fn_node)) |fn_proto| {
var i: usize = 0;
var it = fn_proto.iterate(&decl_tree);
@@ -282,7 +282,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.async_call_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
- const call = ast.callFull(tree, node, &params).?;
+ const call = tree.fullCall(&params, node).?;
try writeCallNodeHint(builder, arena, store, call);
for (call.ast.params) |param| {
@@ -351,7 +351,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.ptr_type,
.ptr_type_bit_range,
=> {
- const ptr_type: Ast.full.PtrType = ast.ptrType(tree, node).?;
+ const ptr_type: Ast.full.PtrType = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.sentinel != 0) {
return try callWriteNodeInlayHint(allocator, .{ builder, arena, store, ptr_type.ast.sentinel, range });
@@ -458,12 +458,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.slice,
.slice_sentinel,
=> {
- const slice: Ast.full.Slice = switch (tag) {
-     .slice => tree.slice(node),
-     .slice_open => tree.sliceOpen(node),
-     .slice_sentinel => tree.sliceSentinel(node),
-     else => unreachable,
- };
+ const slice: Ast.full.Slice = tree.fullSlice(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.sliced, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, slice.ast.start, range });
@@ -481,13 +476,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.array_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const array_init: Ast.full.ArrayInit = switch (tag) {
-     .array_init, .array_init_comma => tree.arrayInit(node),
-     .array_init_one, .array_init_one_comma => tree.arrayInitOne(buffer[0..1], node),
-     .array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
-     .array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buffer, node),
-     else => unreachable,
- };
+ const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, array_init.ast.type_expr, range });
for (array_init.ast.elements) |elem| {
@@ -505,13 +494,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.struct_init_comma,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const struct_init: Ast.full.StructInit = switch (tag) {
-     .struct_init, .struct_init_comma => tree.structInit(node),
-     .struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
-     .struct_init_one, .struct_init_one_comma => tree.structInitOne(buffer[0..1], node),
-     .struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buffer, node),
-     else => unreachable,
- };
+ const struct_init: Ast.full.StructInit = tree.fullStructInit(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, struct_init.ast.type_expr, range });
@@ -546,7 +529,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.switch_case_inline_one,
.switch_case_inline,
=> {
- const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
+ const switch_case = tree.fullSwitchCase(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, switch_case.ast.target_expr, range });
},
@@ -557,7 +540,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.for_simple,
.@"for",
=> {
- const while_node = ast.whileAst(tree, node).?;
+ const while_node = ast.fullWhile(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, while_node.ast.cont_expr, range });
@@ -571,7 +554,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.if_simple,
.@"if",
=> {
- const if_node = ast.ifFull(tree, node);
+ const if_node = ast.fullIf(tree, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.cond_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.then_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, if_node.ast.else_expr, range });
@@ -584,7 +567,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.fn_decl,
=> {
var buffer: [1]Ast.Node.Index = undefined;
- const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buffer).?;
+ const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buffer, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param_decl| {
@@ -617,7 +600,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.tagged_union_enum_tag_trailing,
=> {
var buffer: [2]Ast.Node.Index = undefined;
- const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buffer).?;
+ const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buffer, node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, decl.ast.arg, range });
@@ -634,7 +617,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.container_field_align,
.container_field,
=> {
- const container_field = ast.containerField(tree, node).?;
+ const container_field = tree.fullContainerField(node).?;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.value_expr, range });
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, container_field.ast.align_expr, range });
@@ -666,11 +649,7 @@ fn writeNodeInlayHint(builder: *Builder, arena: *std.heap.ArenaAllocator, store:
.asm_output,
.asm_input,
=> {
- const asm_node: Ast.full.Asm = switch (tag) {
-     .@"asm" => tree.asmFull(node),
-     .asm_simple => tree.asmSimple(node),
-     else => return,
- };
+ const asm_node: Ast.full.Asm = tree.fullAsm(node) orelse return;
try callWriteNodeInlayHint(allocator, .{ builder, arena, store, asm_node.ast.template, range });
},
@@ -700,8 +679,7 @@ pub fn writeRangeInlayHint(
.encoding = encoding,
};
- var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(handle.tree, 0, &buf)) |child| {
+ for (handle.tree.rootDecls()) |child| {
if (!isNodeInRange(handle.tree, child, range)) continue;
try writeNodeInlayHint(&builder, arena, store, child, range);
}


@@ -118,18 +118,19 @@ fn symbolReferencesInternal(
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.root,
- .error_set_decl,
=> {
var buf: [2]Ast.Node.Index = undefined;
- for (ast.declMembers(tree, node, &buf)) |member|
+ const container_decl = tree.fullContainerDecl(&buf, node).?;
+ for (container_decl.ast.members) |member|
try symbolReferencesInternal(builder, member, handle, false);
},
+ .error_set_decl => {},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
- const var_decl = ast.varDecl(tree, node).?;
+ const var_decl = tree.fullVarDecl(node).?;
try symbolReferencesInternal(builder, var_decl.ast.type_node, handle, false);
try symbolReferencesInternal(builder, var_decl.ast.init_node, handle, false);
},
@@ -137,7 +138,7 @@ fn symbolReferencesInternal(
.container_field_align,
.container_field_init,
=> {
- const field = ast.containerField(tree, node).?;
+ const field = tree.fullContainerField(node).?;
try symbolReferencesInternal(builder, field.ast.type_expr, handle, false);
try symbolReferencesInternal(builder, field.ast.value_expr, handle, false);
},
@@ -152,7 +153,7 @@ fn symbolReferencesInternal(
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
- const fn_proto = ast.fnProto(tree, node, &buf).?;
+ const fn_proto = tree.fullFnProto(&buf, node).?;
var it = fn_proto.iterate(&tree);
while (ast.nextFnParam(&it)) |param| {
try symbolReferencesInternal(builder, param.type_expr, handle, false);
@@ -179,16 +180,10 @@ fn symbolReferencesInternal(
},
.switch_case_one,
.switch_case_inline_one,
- => {
-     const case_one = tree.switchCaseOne(node);
-     try symbolReferencesInternal(builder, case_one.ast.target_expr, handle, false);
-     for (case_one.ast.values) |val|
-         try symbolReferencesInternal(builder, val, handle, false);
- },
.switch_case,
.switch_case_inline,
=> {
- const case = tree.switchCase(node);
+ const case = tree.fullSwitchCase(node).?;
try symbolReferencesInternal(builder, case.ast.target_expr, handle, false);
for (case.ast.values) |val|
try symbolReferencesInternal(builder, val, handle, false);
@@ -199,7 +194,7 @@ fn symbolReferencesInternal(
.for_simple,
.@"for",
=> {
const loop = ast.whileAst(tree, node).?;
const loop = ast.fullWhile(tree, node).?;
try symbolReferencesInternal(builder, loop.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, loop.ast.cont_expr, handle, false);
@@ -208,7 +203,7 @@ fn symbolReferencesInternal(
.@"if",
.if_simple,
=> {
const if_node = ast.ifFull(tree, node);
const if_node = ast.fullIf(tree, node).?;
try symbolReferencesInternal(builder, if_node.ast.cond_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.then_expr, handle, false);
try symbolReferencesInternal(builder, if_node.ast.else_expr, handle, false);
@@ -218,7 +213,7 @@ fn symbolReferencesInternal(
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = ast.ptrType(tree, node).?;
const ptr_type = ast.fullPtrType(tree, node).?;
if (ptr_type.ast.align_node != 0) {
try symbolReferencesInternal(builder, ptr_type.ast.align_node, handle, false);
@@ -239,15 +234,10 @@ fn symbolReferencesInternal(
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
=> |tag| {
=> {
var buf: [2]Ast.Node.Index = undefined;
const array_init = switch (tag) {
const array_init = tree.fullArrayInit(&buf, node).?;
.array_init, .array_init_comma => tree.arrayInit(node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
else => unreachable,
};
try symbolReferencesInternal(builder, array_init.ast.type_expr, handle, false);
for (array_init.ast.elements) |e|
try symbolReferencesInternal(builder, e, handle, false);
@@ -260,15 +250,10 @@ fn symbolReferencesInternal(
.struct_init_dot_two_comma,
.struct_init_one,
.struct_init_one_comma,
=> |tag| {
=> {
var buf: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = switch (tag) {
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;
.struct_init, .struct_init_comma => tree.structInit(node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
else => unreachable,
};
try symbolReferencesInternal(builder, struct_init.ast.type_expr, handle, false);
for (struct_init.ast.fields) |field|
try symbolReferencesInternal(builder, field, handle, false);
@@ -283,7 +268,7 @@ fn symbolReferencesInternal(
.async_call_one_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
const call = ast.callFull(tree, node, &buf).?;
const call = tree.fullCall(&buf, node).?;
try symbolReferencesInternal(builder, call.ast.fn_expr, handle, false);
@@ -294,13 +279,8 @@ fn symbolReferencesInternal(
.slice,
.slice_sentinel,
.slice_open,
=> |tag| {
=> {
const slice: Ast.full.Slice = switch (tag) {
const slice: Ast.full.Slice = tree.fullSlice(node).?;
.slice => tree.slice(node),
.slice_open => tree.sliceOpen(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
try symbolReferencesInternal(builder, slice.ast.sliced, handle, false);
try symbolReferencesInternal(builder, slice.ast.start, handle, false);
@@ -514,13 +494,13 @@ pub fn symbolReferences(
.param_payload => |pay| blk: {
// Rename the param tok.
const param = pay.param;
for (curr_handle.document_scope.scopes.items) |scope| {
for (curr_handle.document_scope.scopes.items(.data)) |scope_data| {
if (scope.data != .function) continue;
if (scope_data != .function) continue;
const proto = scope.data.function;
const proto = scope_data.function;
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fnProto(curr_handle.tree, proto, &buf).?;
const fn_proto = curr_handle.tree.fullFnProto(&buf, proto).?;
var it = fn_proto.iterate(&curr_handle.tree);
while (ast.nextFnParam(&it)) |candidate| {
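Note: the hunks above swap zls's local ast.* wrappers (ast.varDecl, ast.fnProto, ast.declMembers, ...) for the full* accessors that the diff calls directly on the std.zig.Ast tree (tree.fullVarDecl, tree.fullFnProto, tree.fullContainerDecl, ...). A minimal sketch of the new pattern, assuming an already-parsed std.zig.Ast value; the function and variable names here are illustrative, not part of the change:

const std = @import("std");
const Ast = std.zig.Ast;

// Print the name of every top-level `var`/`const` declaration in a file.
fn listTopLevelVarDecls(tree: Ast) void {
    for (tree.rootDecls()) |decl| {
        // fullVarDecl returns null for nodes that are not variable declarations,
        // replacing the old zls helper ast.varDecl(tree, node).
        const var_decl = tree.fullVarDecl(decl) orelse continue;
        const name_token = var_decl.ast.mut_token + 1; // token right after `var`/`const`
        std.debug.print("{s}\n", .{tree.tokenSlice(name_token)});
    }
}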


@@ -339,7 +339,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.varDecl(tree, node).?;
const var_decl = tree.fullVarDecl(node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |comment_idx|
try writeDocComments(builder, tree, comment_idx);
@@ -386,7 +386,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
const decl: Ast.full.ContainerDecl = ast.containerDecl(tree, node, &buf).?;
const decl: Ast.full.ContainerDecl = tree.fullContainerDecl(&buf, node).?;
try writeToken(builder, decl.layout_token, .keyword);
try writeToken(builder, decl.ast.main_token, .keyword);
@@ -446,7 +446,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto: Ast.full.FnProto = ast.fnProto(tree, node, &buf).?;
const fn_proto: Ast.full.FnProto = tree.fullFnProto(&buf, node).?;
if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |docs|
try writeDocComments(builder, tree, docs);
@@ -523,7 +523,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.switch_case_inline_one,
.switch_case_inline,
=> {
const switch_case = if (tag == .switch_case or tag == .switch_case_inline) tree.switchCase(node) else tree.switchCaseOne(node);
const switch_case = tree.fullSwitchCase(node).?;
try writeToken(builder, switch_case.inline_token, .keyword);
for (switch_case.ast.values) |item_node| try callWriteNodeTokens(allocator, .{ builder, item_node });
// check it it's 'else'
@@ -541,7 +541,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.for_simple,
.@"for",
=> {
const while_node = ast.whileAst(tree, node).?;
const while_node = ast.fullWhile(tree, node).?;
try writeToken(builder, while_node.label_token, .label);
try writeToken(builder, while_node.inline_token, .keyword);
try writeToken(builder, while_node.ast.while_token, .keyword);
@@ -575,7 +575,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.@"if",
.if_simple,
=> {
const if_node = ast.ifFull(tree, node);
const if_node = ast.fullIf(tree, node).?;
try writeToken(builder, if_node.ast.if_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr });
@@ -609,13 +609,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const array_init: Ast.full.ArrayInit = switch (tag) {
const array_init: Ast.full.ArrayInit = tree.fullArrayInit(&buf, node).?;
.array_init, .array_init_comma => tree.arrayInit(node),
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buf[0..1], node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(&buf, node),
else => unreachable,
};
try callWriteNodeTokens(allocator, .{ builder, array_init.ast.type_expr });
for (array_init.ast.elements) |elem| try callWriteNodeTokens(allocator, .{ builder, elem });
@@ -630,13 +624,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.struct_init_dot_two_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const struct_init: Ast.full.StructInit = switch (tag) {
const struct_init: Ast.full.StructInit = tree.fullStructInit(&buf, node).?;
.struct_init, .struct_init_comma => tree.structInit(node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buf[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(&buf, node),
else => unreachable,
};
var field_token_type: ?TokenType = null;
@@ -674,7 +662,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.async_call_one_comma,
=> {
var params: [1]Ast.Node.Index = undefined;
const call = ast.callFull(tree, node, &params).?;
const call = tree.fullCall(&params, node).?;
try writeToken(builder, call.async_token, .keyword);
try callWriteNodeTokens(allocator, .{ builder, call.ast.fn_expr });
@@ -690,12 +678,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.slice_open,
.slice_sentinel,
=> {
const slice: Ast.full.Slice = switch (tag) {
const slice: Ast.full.Slice = tree.fullSlice(node).?;
.slice => tree.slice(node),
.slice_open => tree.sliceOpen(node),
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
try callWriteNodeTokens(allocator, .{ builder, slice.ast.sliced });
try callWriteNodeTokens(allocator, .{ builder, slice.ast.start });
@@ -772,11 +755,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.asm_input,
.asm_simple,
=> {
const asm_node: Ast.full.Asm = switch (tag) {
const asm_node: Ast.full.Asm = tree.fullAsm(node).?;
.@"asm" => tree.asmFull(node),
.asm_simple => tree.asmSimple(node),
else => return, // TODO Inputs, outputs
};
try writeToken(builder, main_token, .keyword);
try writeToken(builder, asm_node.volatile_token, .keyword);
@@ -920,7 +899,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.ptr_type_bit_range,
.ptr_type_sentinel,
=> {
const ptr_type = ast.ptrType(tree, node).?;
const ptr_type = ast.fullPtrType(tree, node).?;
if (ptr_type.size == .One and token_tags[main_token] == .asterisk_asterisk and
main_token == main_tokens[ptr_type.ast.child_type])
@@ -955,10 +934,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
.array_type,
.array_type_sentinel,
=> {
const array_type: Ast.full.ArrayType = if (tag == .array_type)
const array_type: Ast.full.ArrayType = tree.fullArrayType(node).?;
tree.arrayType(node)
else
tree.arrayTypeSentinel(node);
try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_count });
try callWriteNodeTokens(allocator, .{ builder, array_type.ast.sentinel });
@@ -988,7 +964,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) WriteTokensEr
fn writeContainerField(builder: *Builder, node: Ast.Node.Index, field_token_type: ?TokenType) !void {
const tree = builder.handle.tree;
const container_field = ast.containerField(tree, node).?;
const container_field = tree.fullContainerField(node).?;
const base = tree.nodes.items(.main_token)[node];
const tokens = tree.tokens.items(.tag);
@@ -1033,8 +1009,7 @@ pub fn writeAllSemanticTokens(
var builder = Builder.init(arena, store, handle, encoding);
// reverse the ast from the root declarations
var buf: [2]Ast.Node.Index = undefined;
for (handle.tree.rootDecls()) |child| {
for (ast.declMembers(handle.tree, 0, &buf)) |child| {
writeNodeTokens(&builder, child) catch |err| switch (err) {
error.MovedBackwards => break,
else => |e| return e,


@@ -275,7 +275,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
};
var buf: [1]Ast.Node.Index = undefined;
if (ast.fnProto(type_handle.handle.tree, node, &buf)) |proto| {
if (type_handle.handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,
@@ -327,7 +327,7 @@ pub fn getSignatureInfo(document_store: *DocumentStore, arena: *std.heap.ArenaAl
}
}
if (ast.fnProto(res_handle.tree, node, &buf)) |proto| {
if (res_handle.tree.fullFnProto(&buf, node)) |proto| {
return try fnProtoToSignatureInfo(
document_store,
arena,


@@ -69,8 +69,8 @@ fn convertCIncludeInternal(
var writer = output.writer(allocator);
var buffer: [2]Ast.Node.Index = undefined;
if (ast.isBlock(tree, node)) {
if (ast.blockStatements(tree, node, &buffer)) |statements| {
for (ast.blockStatements(tree, node, &buffer).?) |statement| {
for (statements) |statement| {
try callConvertCIncludeInternal(stack_allocator, .{ allocator, stack_allocator, tree, statement, output });
}
} else if (ast.builtinCallParams(tree, node, &buffer)) |params| {


@@ -28,15 +28,10 @@ test "definition - cursor is at the end of an identifier" {
}
test "definition - cursor is at the start of an identifier" {
testDefinition(
try testDefinition(
\\fn main() void { <>foo(); }
\\fn <def>foo</def>() void {}
) catch |err| switch (err) {
);
error.UnresolvedDefinition => {
// TODO: #891
},
else => return err,
};
}
fn testDefinition(source: []const u8) !void {


@@ -9,91 +9,307 @@ const allocator = std.testing.allocator;
test "position context - var access" {
try testContext(
\\const this_var = id<cursor>entifier;
\\const a_var =<cursor> identifier;
,
.var_access,
.empty,
"id",
null,
);
try testContext(
\\const this_var = identifier<cursor>;
\\const a_var = <cursor>identifier;
,
.var_access,
"i",
);
try testContext(
\\const a_var = iden<cursor>tifier;
,
.var_access,
"ident",
);
try testContext(
\\const a_var = identifier<cursor>;
,
.var_access,
"identifier",
);
try testContext(
\\const a_var = identifier;<cursor>
,
.empty,
null,
);
try testContext(
\\ fn foo() !<cursor>Str {
,
.var_access,
"S",
);
try testContext(
\\ fn foo() !St<cursor>r {
,
.var_access,
"Str",
);
try testContext(
\\ fn foo() !Str<cursor> {
,
.var_access,
"Str",
);
// TODO fix failing test!
try testContext(
\\ fn foo() !Str <cursor>{
,
.var_access,
"Str",
);
// TODO fix failing tests
// try testContext(
// \\ fn foo() <cursor>Err!void {
// ,
// .var_access,
// "E",
// );
// try testContext(
// \\ fn foo() Er<cursor>r!void {
// ,
// .var_access,
// "Err",
// );
// try testContext(
// \\ fn foo() Err<cursor>!void {
// ,
// .var_access,
// "Err",
// );
// try testContext(
// \\ fn foo() Err!<cursor>void {
// ,
// .var_access,
// "v",
// );
try testContext(
\\if (<cursor>bar.field == foo) {
,
.var_access,
"b",
);
try testContext(
\\if (ba<cursor>r.field == foo) {
,
.var_access,
"bar",
);
try testContext(
\\if (bar<cursor>.field == foo) {
,
.var_access,
"bar",
);
try testContext(
\\if (bar[0]<cursor>.field == foo) {
,
.var_access,
"bar",
);
}
test "position context - field access" {
try testContext(
\\if (foo.<cursor>field == foo) {
\\if (bar.<cursor>field == foo) {
,
.field_access,
"foo.",
"bar.f",
);
try testContext(
\\if (foo.member.<cursor>field == foo) {
\\if (bar.fie<cursor>ld == foo) {
,
.field_access,
"foo.member.",
"bar.fiel",
);
try testContext(
\\if (foo.*.?.<cursor>field == foo) {
\\if (bar.field<cursor> == foo) {
,
.field_access,
"foo.*.?.",
"bar.field",
);
try testContext(
\\if (bar.member<cursor>.field == foo) {
,
.field_access,
"bar.member",
); );
try testContext(
\\if (foo[0].<cursor>field == foo) {
\\if (bar.member.<cursor>field == foo) {
,
.field_access,
"foo[0].",
"bar.member.f",
); );
try testContext(
\\if (foo.<cursor>@"field" == foo) {
\\if (bar.member.fie<cursor>ld == foo) {
,
.field_access,
"foo.",
"bar.member.fiel",
);
try testContext(
\\if (bar.member.field<cursor> == foo) {
,
.field_access,
"bar.member.field",
);
try testContext(
\\if (bar.*.?<cursor>.field == foo) {
,
.field_access,
"bar.*.?",
);
try testContext(
\\if (bar.*.?.<cursor>field == foo) {
,
.field_access,
"bar.*.?.f",
);
try testContext(
\\if (bar[0].<cursor>field == foo) {
,
.field_access,
"bar[0].f",
);
try testContext(
\\if (bar.<cursor>@"field" == foo) {
,
.field_access,
"bar.@\"",
);
try testContext(
\\if (bar.@"fie<cursor>ld" == foo) {
,
.field_access,
"bar.@\"fiel",
);
try testContext(
\\if (bar.@"field"<cursor> == foo) {
,
.field_access,
"bar.@\"field\"",
);
try testContext(
\\const arr = std.ArrayList(SomeStruct(a, b, c, d)).<cursor>init(allocator);
,
.field_access,
"std.ArrayList(SomeStruct(a, b, c, d)).i",
); );
try testContext(
\\const arr = std.ArrayList(SomeStruct(a, b, c, d)).in<cursor>it(allocator);
,
.field_access,
"std.ArrayList(SomeStruct(a, b, c, d)).in",
"std.ArrayList(SomeStruct(a, b, c, d)).ini",
);
try testContext(
\\fn foo() !Foo.b<cursor> {
\\const arr = std.ArrayList(SomeStruct(a, b, c, d)).init<cursor>(allocator);
,
.field_access,
"std.ArrayList(SomeStruct(a, b, c, d)).init",
);
try testContext(
\\fn foo() !Foo.<cursor>bar {
,
.field_access,
"Foo.b",
);
// TODO fix failing test!
try testContext(
\\fn foo() !Foo.ba<cursor>r {
,
.field_access,
"Foo.bar",
);
try testContext(
\\fn foo() !Foo.bar<cursor> {
,
.field_access,
"Foo.bar",
);
// TODO fix failing tests
// try testContext( // try testContext(
// \\fn foo() Foo.b<cursor>!void {
// \\fn foo() Foo.<cursor>bar!void {
// ,
// .field_access,
// "Foo.b",
// );
// try testContext(
// \\fn foo() Foo.ba<cursor>r!void {
// ,
// .field_access,
// "Foo.bar",
// );
// try testContext(
// \\fn foo() Foo.bar<cursor>!void {
// ,
// .field_access,
// "Foo.bar",
// );
}
test "position context - builtin" {
try testContext(
\\var foo = <cursor>@
,
.empty,
null,
);
try testContext(
\\var foo = <cursor>@intC(u32, 5);
,
.builtin,
"@i",
);
try testContext(
\\var foo = @<cursor>intC(u32, 5);
,
.builtin,
"@i",
);
try testContext(
\\var foo = @int<cursor>C(u32, 5);
,
.builtin,
"@intC",
);
try testContext(
\\var foo = @intC<cursor>(u32, 5);
,
.builtin,
"@intC",
);
try testContext(
\\fn foo() void { <cursor>@setRuntime(false); };
,
.builtin,
"@s",
);
try testContext(
\\fn foo() void { @<cursor>setRuntime(false); };
,
.builtin,
"@s",
);
try testContext(
\\fn foo() void { @set<cursor>Runtime(false); };
,
.builtin,
"@setR",
);
try testContext(
\\fn foo() void { @setRuntime<cursor>(false); };
,
@@ -118,6 +334,12 @@ test "position context - comment" {
}
test "position context - import/embedfile string literal" {
try testContext(
\\const std = @import("s<cursor>t");
,
.import_string_literal,
"\"st", // maybe report just "st"
);
try testContext(
\\const std = @import("st<cursor>");
,
@@ -130,6 +352,12 @@ test "position context - import/embedfile string literal" {
.embedfile_string_literal,
"\"file.", // maybe report just "file."
);
try testContext(
\\const std = @embedFile("file<cursor>.");
,
.embedfile_string_literal,
"\"file", // maybe report just "file."
);
}
test "position context - string literal" {
@@ -137,29 +365,49 @@
\\var foo = "he<cursor>llo world!";
,
.string_literal,
"\"he", // maybe report just "he"
"\"hel", // maybe report just "he"
);
try testContext(
\\var foo = \\hello<cursor>;
\\var foo = \\hell<cursor>o;
,
.string_literal,
"\\\\hello", // maybe report just "hello"
"\\\\hello", // maybe report just "hello;"
);
}
test "position context - global error set" { test "position context - global error set" {
// TODO why is this a .var_access instead of a .global_error_set?
// try testContext(
// \\fn foo() <cursor>error!void {
// ,
// .global_error_set,
// null,
// );
try testContext(
\\fn foo() erro<cursor>r!void {
,
.global_error_set,
null,
);
try testContext(
\\fn foo() error<cursor>!void {
,
.global_error_set,
null,
);
try testContext(
\\fn foo() error<cursor>.!void {
,
.global_error_set,
null,
);
try testContext(
\\fn foo() error.<cursor>!void {
,
.global_error_set,
null,
);
// TODO this should probably also be .global_error_set
// try testContext(
// \\fn foo() error{<cursor>}!void {
@@ -176,12 +424,30 @@
}
test "position context - enum literal" {
try testContext(
\\var foo = .<cursor>tag;
,
.enum_literal,
null,
);
try testContext(
\\var foo = .ta<cursor>g;
,
.enum_literal,
null,
);
try testContext(
\\var foo = .tag<cursor>;
,
.enum_literal,
null,
);
try testContext(
\\var foo = <cursor>.;
,
.empty,
null,
);
try testContext(
\\var foo = .<cursor>;
,
@@ -191,6 +457,24 @@
}
test "position context - label" {
try testContext(
\\var foo = blk: { break <cursor>:blk null };
,
.pre_label,
null,
);
try testContext(
\\var foo = blk: { break :<cursor>blk null };
,
.label,
null,
);
try testContext(
\\var foo = blk: { break :bl<cursor>k null };
,
.label,
null,
);
try testContext(
\\var foo = blk: { break :blk<cursor> null };
,
@@ -206,12 +490,6 @@
.empty,
null,
);
try testContext(
\\<cursor>const foo = struct {};
,
.empty,
null,
);
try testContext(
\\try foo(arg, slice[<cursor>]);
,
@@ -237,7 +515,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
const final_line = try std.mem.concat(allocator, u8, &.{ line[0..cursor_idx], line[cursor_idx + "<cursor>".len ..] });
defer allocator.free(final_line);
const ctx = try analysis.getPositionContext(allocator, line, cursor_idx);
const ctx = try analysis.getPositionContext(allocator, final_line, cursor_idx, true);
if (std.meta.activeTag(ctx) != tag) {
std.debug.print("Expected tag `{s}`, got `{s}`\n", .{ @tagName(tag), @tagName(std.meta.activeTag(ctx)) });
@@ -253,7 +531,7 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
const expected_range = maybe_range orelse {
std.debug.print("Expected null range, got `{s}`\n", .{
line[actual_loc.start..actual_loc.end],
final_line[actual_loc.start..actual_loc.end],
});
return error.DifferentRange;
};
@@ -263,8 +541,8 @@ fn testContext(line: []const u8, tag: std.meta.Tag(analysis.PositionContext), ma
if (expected_range_start != actual_loc.start or expected_range_end != actual_loc.end) {
std.debug.print("Expected range `{s}` ({}..{}), got `{s}` ({}..{})\n", .{
line[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
final_line[expected_range_start..expected_range_end], expected_range_start, expected_range_end,
line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
final_line[actual_loc.start..actual_loc.end], actual_loc.start, actual_loc.end,
});
return error.DifferentRange;
}
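For reference, each test case above reduces to a call like the one below once the <cursor> marker has been cut out of the line. This is only a sketch: the import path is an assumption, and the trailing `true` mirrors the extra argument that testContext now passes to analysis.getPositionContext (taken here to be a lookahead flag).

const std = @import("std");
const analysis = @import("analysis.zig"); // zls module; path is an assumption

test "field access context for bar.fie|ld" {
    const line = "if (bar.field == foo) {";
    const cursor_idx: usize = 11; // byte offset of the cursor, right after "fie"
    const ctx = try analysis.getPositionContext(std.testing.allocator, line, cursor_idx, true);
    try std.testing.expect(std.meta.activeTag(ctx) == .field_access);
}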